Lines matching full:t1 (every use of the temporary xmm-register macro parameter T1 in the AVX/AVX2 AES-GCM assembly macros)
571 .macro CALC_AAD_HASH GHASH_MUL AAD AADLEN T1 T2 T3 T4 T5 T6 T7 T8
587 \GHASH_MUL \T8, \T2, \T1, \T3, \T4, \T5, \T6
605 movq (%r10), \T1
608 vpslldq $8, \T1, \T1
610 vpxor \T1, \T7, \T7
616 movq %rax, \T1
619 vpslldq $12, \T1, \T1
621 vpxor \T1, \T7, \T7
628 vmovdqu 16(%r11), \T1
631 vpand \T1, \T7, \T7
635 \GHASH_MUL \T7, \T2, \T1, \T3, \T4, \T5, \T6
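
The CALC_AAD_HASH matches come from the AAD hashing loop and its tail handling: full 16-byte blocks go through \GHASH_MUL, leftover 8- and 4-byte chunks are loaded with movq, shifted into place with vpslldq ($8 for an 8-byte chunk, $12 for a 4-byte chunk), folded into \T7, masked with vpand against a table entry read at 16(%r11), and hashed with one final \GHASH_MUL. A minimal C sketch of the same idea, assuming only that the tail has to become one zero-padded 16-byte block before the last multiply; byte-order and the mask-table trick of the real macro are omitted, and load_partial_block() is a hypothetical helper, not a kernel function:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* gather a short AAD tail into one zero-padded 16-byte GHASH block */
    static void load_partial_block(const uint8_t *aad, size_t rest,
                                   uint8_t block[16])
    {
            memset(block, 0, 16);      /* zero padding plays the vpand role */
            memcpy(block, aad, rest);  /* rest < 16 remaining AAD bytes     */
    }

    int main(void)
    {
            const uint8_t aad[5] = { 0xde, 0xad, 0xbe, 0xef, 0x42 };
            uint8_t block[16];
            int i;

            load_partial_block(aad, sizeof(aad), block);
            for (i = 0; i < 16; i++)
                    printf("%02x%c", block[i], i == 15 ? '\n' : ' ');
            return 0;
    }
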
863 .macro GHASH_MUL_AVX GH HK T1 T2 T3 T4 T5
870 vpclmulqdq $0x11, \HK, \GH, \T1 # T1 = a1*b1
874 vpxor \T1, \T2,\T2 # T2 = a0*b1+a1*b0
879 vpxor \T2, \T1, \T1 # <T1:GH> = GH x HK
904 vpxor \T1, \GH, \GH # the result is in GH
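
GHASH_MUL_AVX computes the 128x128-bit carry-less product with the Karatsuba split its comments spell out: vpclmulqdq $0x11 gives a1*b1, $0x00 gives a0*b0, and the middle term is recovered from one extra multiply of the xored halves,

    GH * HK         = a1*b1 * x^128  xor  (a1*b0 xor a0*b1) * x^64  xor  a0*b0
    a1*b0 xor a0*b1 = (a1 xor a0)*(b1 xor b0)  xor  a1*b1  xor  a0*b0

so three vpclmulqdq instructions replace four. A small self-contained C model of that split, with a bit-by-bit carry-less multiply standing in for vpclmulqdq (the reduction step that follows in the macro is not shown):

    #include <stdint.h>
    #include <stdio.h>

    /* carry-less multiply of two 64-bit GF(2) polynomials, 128-bit result */
    static void clmul64(uint64_t a, uint64_t b, uint64_t *hi, uint64_t *lo)
    {
            uint64_t h = 0, l = 0;
            int i;

            for (i = 0; i < 64; i++) {
                    if (b & (1ULL << i)) {
                            l ^= a << i;
                            if (i)
                                    h ^= a >> (64 - i);
                    }
            }
            *hi = h;
            *lo = l;
    }

    int main(void)
    {
            uint64_t a1 = 0x0123456789abcdefULL, a0 = 0xfedcba9876543210ULL;
            uint64_t b1 = 0x0f1e2d3c4b5a6978ULL, b0 = 0x1122334455667788ULL;
            uint64_t hh_h, hh_l, ll_h, ll_l, mm_h, mm_l, mid_h, mid_l;
            uint64_t r3, r2, r1, r0;

            clmul64(a1, b1, &hh_h, &hh_l);           /* vpclmulqdq $0x11 */
            clmul64(a0, b0, &ll_h, &ll_l);           /* vpclmulqdq $0x00 */
            clmul64(a1 ^ a0, b1 ^ b0, &mm_h, &mm_l); /* (a1+a0)*(b1+b0)  */

            mid_h = mm_h ^ hh_h ^ ll_h;              /* T2 = a0*b1 + a1*b0 */
            mid_l = mm_l ^ hh_l ^ ll_l;

            r0 = ll_l;                 /* 256-bit product r3:r2:r1:r0, the  */
            r1 = ll_h ^ mid_l;         /* middle term lands at bit 64, done */
            r2 = hh_l ^ mid_h;         /* in the macro with the vpslldq $8  */
            r3 = hh_h;                 /* and vpsrldq $8 pair               */

            printf("%016llx%016llx%016llx%016llx\n",
                   (unsigned long long)r3, (unsigned long long)r2,
                   (unsigned long long)r1, (unsigned long long)r0);
            return 0;
    }
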
909 .macro PRECOMPUTE_AVX HK T1 T2 T3 T4 T5 T6
914 vpshufd $0b01001110, \T5, \T1
915 vpxor \T5, \T1, \T1
916 vmovdqu \T1, HashKey_k(arg2)
918 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^2<<1 mod poly
920 vpshufd $0b01001110, \T5, \T1
921 vpxor \T5, \T1, \T1
922 vmovdqu \T1, HashKey_2_k(arg2)
924 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^3<<1 mod poly
926 vpshufd $0b01001110, \T5, \T1
927 vpxor \T5, \T1, \T1
928 vmovdqu \T1, HashKey_3_k(arg2)
930 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^4<<1 mod poly
932 vpshufd $0b01001110, \T5, \T1
933 vpxor \T5, \T1, \T1
934 vmovdqu \T1, HashKey_4_k(arg2)
936 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^5<<1 mod poly
938 vpshufd $0b01001110, \T5, \T1
939 vpxor \T5, \T1, \T1
940 vmovdqu \T1, HashKey_5_k(arg2)
942 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^6<<1 mod poly
944 vpshufd $0b01001110, \T5, \T1
945 vpxor \T5, \T1, \T1
946 vmovdqu \T1, HashKey_6_k(arg2)
948 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^7<<1 mod poly
950 vpshufd $0b01001110, \T5, \T1
951 vpxor \T5, \T1, \T1
952 vmovdqu \T1, HashKey_7_k(arg2)
954 GHASH_MUL_AVX \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^8<<1 mod poly
956 vpshufd $0b01001110, \T5, \T1
957 vpxor \T5, \T1, \T1
958 vmovdqu \T1, HashKey_8_k(arg2)
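
PRECOMPUTE_AVX builds the higher hash-key powers by calling GHASH_MUL_AVX repeatedly (the "# T5 = HashKey^n<<1 mod poly" comments), and for each power it also stores a Karatsuba helper at HashKey_n_k(arg2): vpshufd $0b01001110 swaps the two quadwords of the key and the vpxor leaves (high xor low) in both lanes, which is the precomputed (b1 xor b0) operand of the middle product above. A minimal sketch of just that swap-and-xor step, assuming nothing beyond what the two instructions do (struct layout and function name are illustrative only):

    #include <stdint.h>
    #include <stdio.h>

    struct u128 {                   /* one xmm register as two 64-bit lanes */
            uint64_t lo;
            uint64_t hi;
    };

    /* what vpshufd $0b01001110 followed by vpxor leaves for HashKey_n_k */
    static struct u128 hashkey_k(struct u128 h)
    {
            struct u128 swapped = { h.hi, h.lo };   /* quadwords exchanged   */
            struct u128 out = {
                    swapped.lo ^ h.lo,              /* = hi xor lo           */
                    swapped.hi ^ h.hi,              /* same value, high lane */
            };

            return out;
    }

    int main(void)
    {
            struct u128 h = { 0x1111222233334444ULL, 0xaaaabbbbccccddddULL };
            struct u128 k = hashkey_k(h);

            printf("%016llx %016llx\n",
                   (unsigned long long)k.hi, (unsigned long long)k.lo);
            return 0;
    }
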
969 .macro INITIAL_BLOCKS_AVX REP num_initial_blocks T1 T2 T3 T4 T5 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 X…
1024 vmovdqu (arg4, %r11), \T1
1025 vpxor \T1, reg_i, reg_i
1029 vmovdqa \T1, reg_i
1043 … GHASH_MUL_AVX reg_j, \T2, \T1, \T3, \T4, \T5, \T6 # apply GHASH on num_initial_blocks blocks
1126 vmovdqu (arg4, %r11), \T1
1127 vpxor \T1, \XMM1, \XMM1
1130 vmovdqa \T1, \XMM1
1133 vmovdqu 16*1(arg4, %r11), \T1
1134 vpxor \T1, \XMM2, \XMM2
1137 vmovdqa \T1, \XMM2
1140 vmovdqu 16*2(arg4, %r11), \T1
1141 vpxor \T1, \XMM3, \XMM3
1144 vmovdqa \T1, \XMM3
1147 vmovdqu 16*3(arg4, %r11), \T1
1148 vpxor \T1, \XMM4, \XMM4
1151 vmovdqa \T1, \XMM4
1154 vmovdqu 16*4(arg4, %r11), \T1
1155 vpxor \T1, \XMM5, \XMM5
1158 vmovdqa \T1, \XMM5
1161 vmovdqu 16*5(arg4, %r11), \T1
1162 vpxor \T1, \XMM6, \XMM6
1165 vmovdqa \T1, \XMM6
1168 vmovdqu 16*6(arg4, %r11), \T1
1169 vpxor \T1, \XMM7, \XMM7
1172 vmovdqa \T1, \XMM7
1175 vmovdqu 16*7(arg4, %r11), \T1
1176 vpxor \T1, \XMM8, \XMM8
1179 vmovdqa \T1, \XMM8
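
The INITIAL_BLOCKS_AVX matches show the per-block pattern for the first eight blocks of text: 16 bytes of input are loaded through \T1, xored into the encrypted counter block \XMMn to form the output, and on the decrypt path the original ciphertext is copied back into \XMMn (the vmovdqa \T1, \XMMn lines) so that GHASH always runs over ciphertext. A short C sketch of that ENC/DEC distinction; process_block() is a hypothetical helper, not a kernel function:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    enum op { ENC, DEC };

    /* out = in xor keystream; the block fed to GHASH is always ciphertext */
    static void process_block(const uint8_t in[16], const uint8_t ks[16],
                              uint8_t out[16], uint8_t ghash_in[16],
                              enum op dir)
    {
            int i;

            for (i = 0; i < 16; i++)
                    out[i] = in[i] ^ ks[i];     /* vpxor \T1, \XMMn, \XMMn  */

            if (dir == ENC)
                    memcpy(ghash_in, out, 16);  /* ciphertext just produced */
            else
                    memcpy(ghash_in, in, 16);   /* vmovdqa \T1, \XMMn: keep */
                                                /* the ciphertext we read   */
    }

    int main(void)
    {
            uint8_t in[16] = { 0 }, ks[16] = { 0x55 }, out[16], gh[16];

            process_block(in, ks, out, gh, DEC);
            printf("%02x %02x\n", out[0], gh[0]);
            return 0;
    }
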
1204 .macro GHASH_8_ENCRYPT_8_PARALLEL_AVX REP T1 T2 T3 T4 T5 T6 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 …
1249 vmovdqu (arg1), \T1
1250 vpxor \T1, \XMM1, \XMM1
1251 vpxor \T1, \XMM2, \XMM2
1252 vpxor \T1, \XMM3, \XMM3
1253 vpxor \T1, \XMM4, \XMM4
1254 vpxor \T1, \XMM5, \XMM5
1255 vpxor \T1, \XMM6, \XMM6
1256 vpxor \T1, \XMM7, \XMM7
1257 vpxor \T1, \XMM8, \XMM8
1265 vmovdqu 16*1(arg1), \T1
1266 vaesenc \T1, \XMM1, \XMM1
1267 vaesenc \T1, \XMM2, \XMM2
1268 vaesenc \T1, \XMM3, \XMM3
1269 vaesenc \T1, \XMM4, \XMM4
1270 vaesenc \T1, \XMM5, \XMM5
1271 vaesenc \T1, \XMM6, \XMM6
1272 vaesenc \T1, \XMM7, \XMM7
1273 vaesenc \T1, \XMM8, \XMM8
1275 vmovdqu 16*2(arg1), \T1
1276 vaesenc \T1, \XMM1, \XMM1
1277 vaesenc \T1, \XMM2, \XMM2
1278 vaesenc \T1, \XMM3, \XMM3
1279 vaesenc \T1, \XMM4, \XMM4
1280 vaesenc \T1, \XMM5, \XMM5
1281 vaesenc \T1, \XMM6, \XMM6
1282 vaesenc \T1, \XMM7, \XMM7
1283 vaesenc \T1, \XMM8, \XMM8
1298 vmovdqu 16*3(arg1), \T1
1299 vaesenc \T1, \XMM1, \XMM1
1300 vaesenc \T1, \XMM2, \XMM2
1301 vaesenc \T1, \XMM3, \XMM3
1302 vaesenc \T1, \XMM4, \XMM4
1303 vaesenc \T1, \XMM5, \XMM5
1304 vaesenc \T1, \XMM6, \XMM6
1305 vaesenc \T1, \XMM7, \XMM7
1306 vaesenc \T1, \XMM8, \XMM8
1308 vmovdqa TMP2(%rsp), \T1
1310 vpclmulqdq $0x11, \T5, \T1, \T3
1312 vpclmulqdq $0x00, \T5, \T1, \T3
1315 vpshufd $0b01001110, \T1, \T3
1316 vpxor \T1, \T3, \T3
1321 vmovdqu 16*4(arg1), \T1
1322 vaesenc \T1, \XMM1, \XMM1
1323 vaesenc \T1, \XMM2, \XMM2
1324 vaesenc \T1, \XMM3, \XMM3
1325 vaesenc \T1, \XMM4, \XMM4
1326 vaesenc \T1, \XMM5, \XMM5
1327 vaesenc \T1, \XMM6, \XMM6
1328 vaesenc \T1, \XMM7, \XMM7
1329 vaesenc \T1, \XMM8, \XMM8
1333 vmovdqa TMP3(%rsp), \T1
1335 vpclmulqdq $0x11, \T5, \T1, \T3
1337 vpclmulqdq $0x00, \T5, \T1, \T3
1340 vpshufd $0b01001110, \T1, \T3
1341 vpxor \T1, \T3, \T3
1346 vmovdqu 16*5(arg1), \T1
1347 vaesenc \T1, \XMM1, \XMM1
1348 vaesenc \T1, \XMM2, \XMM2
1349 vaesenc \T1, \XMM3, \XMM3
1350 vaesenc \T1, \XMM4, \XMM4
1351 vaesenc \T1, \XMM5, \XMM5
1352 vaesenc \T1, \XMM6, \XMM6
1353 vaesenc \T1, \XMM7, \XMM7
1354 vaesenc \T1, \XMM8, \XMM8
1356 vmovdqa TMP4(%rsp), \T1
1358 vpclmulqdq $0x11, \T5, \T1, \T3
1360 vpclmulqdq $0x00, \T5, \T1, \T3
1363 vpshufd $0b01001110, \T1, \T3
1364 vpxor \T1, \T3, \T3
1369 vmovdqu 16*6(arg1), \T1
1370 vaesenc \T1, \XMM1, \XMM1
1371 vaesenc \T1, \XMM2, \XMM2
1372 vaesenc \T1, \XMM3, \XMM3
1373 vaesenc \T1, \XMM4, \XMM4
1374 vaesenc \T1, \XMM5, \XMM5
1375 vaesenc \T1, \XMM6, \XMM6
1376 vaesenc \T1, \XMM7, \XMM7
1377 vaesenc \T1, \XMM8, \XMM8
1380 vmovdqa TMP5(%rsp), \T1
1382 vpclmulqdq $0x11, \T5, \T1, \T3
1384 vpclmulqdq $0x00, \T5, \T1, \T3
1387 vpshufd $0b01001110, \T1, \T3
1388 vpxor \T1, \T3, \T3
1393 vmovdqu 16*7(arg1), \T1
1394 vaesenc \T1, \XMM1, \XMM1
1395 vaesenc \T1, \XMM2, \XMM2
1396 vaesenc \T1, \XMM3, \XMM3
1397 vaesenc \T1, \XMM4, \XMM4
1398 vaesenc \T1, \XMM5, \XMM5
1399 vaesenc \T1, \XMM6, \XMM6
1400 vaesenc \T1, \XMM7, \XMM7
1401 vaesenc \T1, \XMM8, \XMM8
1403 vmovdqa TMP6(%rsp), \T1
1405 vpclmulqdq $0x11, \T5, \T1, \T3
1407 vpclmulqdq $0x00, \T5, \T1, \T3
1410 vpshufd $0b01001110, \T1, \T3
1411 vpxor \T1, \T3, \T3
1417 vmovdqu 16*8(arg1), \T1
1418 vaesenc \T1, \XMM1, \XMM1
1419 vaesenc \T1, \XMM2, \XMM2
1420 vaesenc \T1, \XMM3, \XMM3
1421 vaesenc \T1, \XMM4, \XMM4
1422 vaesenc \T1, \XMM5, \XMM5
1423 vaesenc \T1, \XMM6, \XMM6
1424 vaesenc \T1, \XMM7, \XMM7
1425 vaesenc \T1, \XMM8, \XMM8
1427 vmovdqa TMP7(%rsp), \T1
1429 vpclmulqdq $0x11, \T5, \T1, \T3
1431 vpclmulqdq $0x00, \T5, \T1, \T3
1434 vpshufd $0b01001110, \T1, \T3
1435 vpxor \T1, \T3, \T3
1452 vmovdqa TMP8(%rsp), \T1
1454 vpclmulqdq $0x11, \T5, \T1, \T3
1456 vpclmulqdq $0x00, \T5, \T1, \T3
1459 vpshufd $0b01001110, \T1, \T3
1460 vpxor \T1, \T3, \T3
1524 vpsrldq $4, \T2, \T1 # shift-R T1 1 DW
1548 vpxor \T1, \T2, \T2
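
In GHASH_8_ENCRYPT_8_PARALLEL_AVX each AES round key (16*n(arg1)) is loaded into \T1 once and applied to all eight counter blocks with vaesenc, and between rounds one block saved on the stack (TMP2(%rsp)..TMP8(%rsp)) is multiplied by the hash-key power held in \T5, so the GHASH work hides inside the AES round latency. The per-block vpclmulqdq/vpxor pairs implement the unrolled Horner form of the GHASH update over eight ciphertext blocks C1..C8 with running hash Y:

    Y' = (((((Y xor C1)*H xor C2)*H xor C3)*H ... xor C8)*H
       =  (Y xor C1)*H^8  xor  C2*H^7  xor  C3*H^6  xor ... xor  C8*H

Each saved block is multiplied by its own precomputed power of H, the partial products are accumulated, and a single reduction (the vpsrldq $4 / vpxor matches at the end of this group) folds the 256-bit sum back to 128 bits.
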
1571 .macro GHASH_LAST_8_AVX T1 T2 T3 T4 T5 T6 T7 XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8
1712 vpsrldq $4, \T2, \T1 # shift-R T1 1 DW
1726 vpxor \T1, \T2, \T2
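
GHASH_LAST_8_AVX repeats the same eight-block aggregation for the final hash value, and its two matches are the same reduction detail seen above: vpsrldq $4 catches the doubleword that the first folding phase would otherwise shift out, and the later vpxor feeds it into the second phase. The reduction is against GHASH's field polynomial, g(x) = x^128 + x^7 + x^2 + x + 1, handled in the AVX path in its bit-reflected form with shift-and-xor sequences rather than another vpclmulqdq.
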
1845 .macro GHASH_MUL_AVX2 GH HK T1 T2 T3 T4 T5
1847 vpclmulqdq $0x11,\HK,\GH,\T1 # T1 = a1*b1
1857 vpxor \T3, \T1, \T1
1878 vpxor \T1, \GH, \GH # the result is in GH
1883 .macro PRECOMPUTE_AVX2 HK T1 T2 T3 T4 T5 T6
1887 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^2<<1 mod poly
1890 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^3<<1 mod poly
1893 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^4<<1 mod poly
1896 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^5<<1 mod poly
1899 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^6<<1 mod poly
1902 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^7<<1 mod poly
1905 GHASH_MUL_AVX2 \T5, \HK, \T1, \T3, \T4, \T6, \T2 # T5 = HashKey^8<<1 mod poly
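
The AVX2 variants drop the Karatsuba trick: GHASH_MUL_AVX2 and the parallel AVX2 macro compute all four 64x64 partial products directly with vpclmulqdq $0x11, $0x00, $0x01 and $0x10 (visible in the GHASH_8_ENCRYPT_8_PARALLEL_AVX2 matches below) and xor the two cross products for the middle term of

    GH * HK = a1*b1 * x^128  xor  (a1*b0 xor a0*b1) * x^64  xor  a0*b0

Since no (a1 xor a0)/(b1 xor b0) operands are needed, PRECOMPUTE_AVX2 only generates the HashKey^2..HashKey^8 powers; as the matches in this group suggest, it stores no HashKey_n_k entries.
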
1917 .macro INITIAL_BLOCKS_AVX2 REP num_initial_blocks T1 T2 T3 T4 T5 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 …
1973 vmovdqu (arg4, %r11), \T1
1974 vpxor \T1, reg_i, reg_i
1979 vmovdqa \T1, reg_i
1993 …GHASH_MUL_AVX2 reg_j, \T2, \T1, \T3, \T4, \T5, \T6 # apply GHASH on num_initial_blocks bloc…
2077 vmovdqu (arg4, %r11), \T1
2078 vpxor \T1, \XMM1, \XMM1
2081 vmovdqa \T1, \XMM1
2084 vmovdqu 16*1(arg4, %r11), \T1
2085 vpxor \T1, \XMM2, \XMM2
2088 vmovdqa \T1, \XMM2
2091 vmovdqu 16*2(arg4, %r11), \T1
2092 vpxor \T1, \XMM3, \XMM3
2095 vmovdqa \T1, \XMM3
2098 vmovdqu 16*3(arg4, %r11), \T1
2099 vpxor \T1, \XMM4, \XMM4
2102 vmovdqa \T1, \XMM4
2105 vmovdqu 16*4(arg4, %r11), \T1
2106 vpxor \T1, \XMM5, \XMM5
2109 vmovdqa \T1, \XMM5
2112 vmovdqu 16*5(arg4, %r11), \T1
2113 vpxor \T1, \XMM6, \XMM6
2116 vmovdqa \T1, \XMM6
2119 vmovdqu 16*6(arg4, %r11), \T1
2120 vpxor \T1, \XMM7, \XMM7
2123 vmovdqa \T1, \XMM7
2126 vmovdqu 16*7(arg4, %r11), \T1
2127 vpxor \T1, \XMM8, \XMM8
2130 vmovdqa \T1, \XMM8
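
INITIAL_BLOCKS_AVX2 follows the same load/xor/keep-ciphertext pattern as the AVX variant: the input block in \T1 is xored into the encrypted counter, and on the decrypt path the original ciphertext is copied back (vmovdqa \T1, \XMMn) so GHASH still hashes ciphertext; the C sketch after the INITIAL_BLOCKS_AVX group applies unchanged here.
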
2159 .macro GHASH_8_ENCRYPT_8_PARALLEL_AVX2 REP T1 T2 T3 T4 T5 T6 CTR XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7…
2204 vmovdqu (arg1), \T1
2205 vpxor \T1, \XMM1, \XMM1
2206 vpxor \T1, \XMM2, \XMM2
2207 vpxor \T1, \XMM3, \XMM3
2208 vpxor \T1, \XMM4, \XMM4
2209 vpxor \T1, \XMM5, \XMM5
2210 vpxor \T1, \XMM6, \XMM6
2211 vpxor \T1, \XMM7, \XMM7
2212 vpxor \T1, \XMM8, \XMM8
2220 vmovdqu 16*1(arg1), \T1
2221 vaesenc \T1, \XMM1, \XMM1
2222 vaesenc \T1, \XMM2, \XMM2
2223 vaesenc \T1, \XMM3, \XMM3
2224 vaesenc \T1, \XMM4, \XMM4
2225 vaesenc \T1, \XMM5, \XMM5
2226 vaesenc \T1, \XMM6, \XMM6
2227 vaesenc \T1, \XMM7, \XMM7
2228 vaesenc \T1, \XMM8, \XMM8
2230 vmovdqu 16*2(arg1), \T1
2231 vaesenc \T1, \XMM1, \XMM1
2232 vaesenc \T1, \XMM2, \XMM2
2233 vaesenc \T1, \XMM3, \XMM3
2234 vaesenc \T1, \XMM4, \XMM4
2235 vaesenc \T1, \XMM5, \XMM5
2236 vaesenc \T1, \XMM6, \XMM6
2237 vaesenc \T1, \XMM7, \XMM7
2238 vaesenc \T1, \XMM8, \XMM8
2250 vmovdqu 16*3(arg1), \T1
2251 vaesenc \T1, \XMM1, \XMM1
2252 vaesenc \T1, \XMM2, \XMM2
2253 vaesenc \T1, \XMM3, \XMM3
2254 vaesenc \T1, \XMM4, \XMM4
2255 vaesenc \T1, \XMM5, \XMM5
2256 vaesenc \T1, \XMM6, \XMM6
2257 vaesenc \T1, \XMM7, \XMM7
2258 vaesenc \T1, \XMM8, \XMM8
2260 vmovdqa TMP2(%rsp), \T1
2262 vpclmulqdq $0x11, \T5, \T1, \T3
2265 vpclmulqdq $0x00, \T5, \T1, \T3
2268 vpclmulqdq $0x01, \T5, \T1, \T3
2271 vpclmulqdq $0x10, \T5, \T1, \T3
2274 vmovdqu 16*4(arg1), \T1
2275 vaesenc \T1, \XMM1, \XMM1
2276 vaesenc \T1, \XMM2, \XMM2
2277 vaesenc \T1, \XMM3, \XMM3
2278 vaesenc \T1, \XMM4, \XMM4
2279 vaesenc \T1, \XMM5, \XMM5
2280 vaesenc \T1, \XMM6, \XMM6
2281 vaesenc \T1, \XMM7, \XMM7
2282 vaesenc \T1, \XMM8, \XMM8
2286 vmovdqa TMP3(%rsp), \T1
2288 vpclmulqdq $0x11, \T5, \T1, \T3
2291 vpclmulqdq $0x00, \T5, \T1, \T3
2294 vpclmulqdq $0x01, \T5, \T1, \T3
2297 vpclmulqdq $0x10, \T5, \T1, \T3
2300 vmovdqu 16*5(arg1), \T1
2301 vaesenc \T1, \XMM1, \XMM1
2302 vaesenc \T1, \XMM2, \XMM2
2303 vaesenc \T1, \XMM3, \XMM3
2304 vaesenc \T1, \XMM4, \XMM4
2305 vaesenc \T1, \XMM5, \XMM5
2306 vaesenc \T1, \XMM6, \XMM6
2307 vaesenc \T1, \XMM7, \XMM7
2308 vaesenc \T1, \XMM8, \XMM8
2310 vmovdqa TMP4(%rsp), \T1
2312 vpclmulqdq $0x11, \T5, \T1, \T3
2315 vpclmulqdq $0x00, \T5, \T1, \T3
2318 vpclmulqdq $0x01, \T5, \T1, \T3
2321 vpclmulqdq $0x10, \T5, \T1, \T3
2324 vmovdqu 16*6(arg1), \T1
2325 vaesenc \T1, \XMM1, \XMM1
2326 vaesenc \T1, \XMM2, \XMM2
2327 vaesenc \T1, \XMM3, \XMM3
2328 vaesenc \T1, \XMM4, \XMM4
2329 vaesenc \T1, \XMM5, \XMM5
2330 vaesenc \T1, \XMM6, \XMM6
2331 vaesenc \T1, \XMM7, \XMM7
2332 vaesenc \T1, \XMM8, \XMM8
2335 vmovdqa TMP5(%rsp), \T1
2337 vpclmulqdq $0x11, \T5, \T1, \T3
2340 vpclmulqdq $0x00, \T5, \T1, \T3
2343 vpclmulqdq $0x01, \T5, \T1, \T3
2346 vpclmulqdq $0x10, \T5, \T1, \T3
2349 vmovdqu 16*7(arg1), \T1
2350 vaesenc \T1, \XMM1, \XMM1
2351 vaesenc \T1, \XMM2, \XMM2
2352 vaesenc \T1, \XMM3, \XMM3
2353 vaesenc \T1, \XMM4, \XMM4
2354 vaesenc \T1, \XMM5, \XMM5
2355 vaesenc \T1, \XMM6, \XMM6
2356 vaesenc \T1, \XMM7, \XMM7
2357 vaesenc \T1, \XMM8, \XMM8
2359 vmovdqa TMP6(%rsp), \T1
2361 vpclmulqdq $0x11, \T5, \T1, \T3
2364 vpclmulqdq $0x00, \T5, \T1, \T3
2367 vpclmulqdq $0x01, \T5, \T1, \T3
2370 vpclmulqdq $0x10, \T5, \T1, \T3
2373 vmovdqu 16*8(arg1), \T1
2374 vaesenc \T1, \XMM1, \XMM1
2375 vaesenc \T1, \XMM2, \XMM2
2376 vaesenc \T1, \XMM3, \XMM3
2377 vaesenc \T1, \XMM4, \XMM4
2378 vaesenc \T1, \XMM5, \XMM5
2379 vaesenc \T1, \XMM6, \XMM6
2380 vaesenc \T1, \XMM7, \XMM7
2381 vaesenc \T1, \XMM8, \XMM8
2383 vmovdqa TMP7(%rsp), \T1
2385 vpclmulqdq $0x11, \T5, \T1, \T3
2388 vpclmulqdq $0x00, \T5, \T1, \T3
2391 vpclmulqdq $0x01, \T5, \T1, \T3
2394 vpclmulqdq $0x10, \T5, \T1, \T3
2410 vmovdqa TMP8(%rsp), \T1
2413 vpclmulqdq $0x00, \T5, \T1, \T3
2416 vpclmulqdq $0x01, \T5, \T1, \T3
2419 vpclmulqdq $0x10, \T5, \T1, \T3
2422 vpclmulqdq $0x11, \T5, \T1, \T3
2423 vpxor \T3, \T4, \T1
2467 vpxor \T6, \T1, \T1 # accumulate the results in T1:T7
2501 vpxor \T4, \T1, \T1 # the result is in T1
2513 vpxor \T1, \XMM1, \XMM1
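
The last matches of GHASH_8_ENCRYPT_8_PARALLEL_AVX2 show the other end of the pipeline: the partial products are accumulated into \T1:\T7, reduced to a single 128-bit value in \T1, and xored into \XMM1 (vpxor \T1, \XMM1, \XMM1). Folding the running hash into the first block of the next batch is what lets the (Y xor C1)*H^8 term of the Horner form above carry the accumulator forward.
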
2521 .macro GHASH_LAST_8_AVX2 T1 T2 T3 T4 T5 T6 T7 XMM1 XMM2 XMM3 XMM4 XMM5 XMM6 XMM7 XMM8