Lines matching whole word "20":
53 xxlor 20+32, 2, 2
62 vcipher 15, 15, 20
63 vcipher 16, 16, 20
64 vcipher 17, 17, 20
65 vcipher 18, 18, 20
78 xxlor 20+32, 6, 6
87 vcipher 15, 15, 20
88 vcipher 16, 16, 20
89 vcipher 17, 17, 20
90 vcipher 18, 18, 20
124 vcipher 20, 20, 23
133 vcipher 20, 20, 24
142 vcipher 20, 20, 25
151 vcipher 20, 20, 26
165 vcipher 20, 20, 23
174 vcipher 20, 20, 24
183 vcipher 20, 20, 25
192 vcipher 20, 20, 26
202 vcipher 20, 20, 23
209 xxlor 20+32, 2, 2
214 vcipher 15, 15, 20
219 xxlor 20+32, 6, 6
224 vcipher 15, 15, 20
344 vpmsumd 24, 9, 20
355 vpmsumd 25, 10, 20 # H3.L * X1.H + H3.H * X1.L
376 vpmsumd 25, 11, 20
418 vsldoi 20, 22, 22, 8 # swap
420 vxor 20, 20, 24
421 vxor 22, 22, 20
437 std 20,160(1)
440 stvx 20, 9, 1
469 stxv 20, 560(1)
482 lxv 20, 560(1)
486 lvx 20, 9, 1
517 ld 20,160(1)
663 vxor 20, 30, 29
676 li 20, 96
688 lxvb16x 20, 19, 14 # load block
689 lxvb16x 21, 20, 14 # load block
708 vcipher 20, 20, 23
717 vcipher 20, 20, 24
734 vcipher 20, 20, 23
743 vcipher 20, 20, 24
774 vcipherlast 20, 20, 23
778 xxlxor 52, 52, 20
785 stxvb16x 53, 20, 9 # store output
807 vxor 20, 30, 27
1204 vxor 20, 30, 29
1217 li 20, 96
1229 lxvb16x 20, 19, 14 # load block
1230 lxvb16x 21, 20, 14 # load block
1249 vcipher 20, 20, 23
1258 vcipher 20, 20, 24
1275 vcipher 20, 20, 23
1284 vcipher 20, 20, 24
1315 vcipherlast 20, 20, 23
1319 xxlxor 52, 52, 20
1326 stxvb16x 53, 20, 9 # store output
1337 xxlor 20+32, 20, 20
1357 vxor 20, 30, 27
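A note on the recurring `20+32` operand in this listing: on POWER, the vector registers v0-v31 overlap the upper half of the VSX register file (vs32-vs63), so v20 and vs52 name the same register. An instruction such as `xxlor 20+32, 2, 2` is the usual VSX move idiom (logical OR of a register with itself) that copies vs2 into vs52, making that value visible to the VMX AES instructions as round key v20. The sketch below illustrates the idiom only; the register choices and setup are hypothetical, not lines taken from the file above.

    # hypothetical setup: vs2 holds one expanded AES round key,
    # v15 holds the current cipher state
    xxlor   20+32, 2, 2     # copy vs2 into vs52, i.e. expose it as v20
    vcipher 15, 15, 20      # one AES round: state v15, round key v20, result v15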