
Searched refs:t2 (Results 1 – 25 of 84) sorted by relevance

/openssl/crypto/modes/asm/
ghashv8-armx.pl
124 vand $t2,$t2,$t0
126 vext.8 $t2,$t2,$t2,#8
151 veor $t2,$t2,$Xh
194 veor $t2,$t2,$Xh
204 veor $t2,$t2,$H2
238 veor $t2,$t2,$Xh
248 veor $t2,$t2,$H2
282 veor $t2,$t2,$Xh
342 veor $t2,$t2,$Xh
503 veor $t2,$t2,$Xh
[all …]
ghash-armv4.pl
398 vmull.p8 $t2, $t2#lo, $b @ J = A3*B
408 veor $t2, $t2, $r @ N = I + J
411 veor $t2#lo, $t2#lo, $t2#hi @ t2 = (N) (P4 + P5) << 24
412 vand $t2#hi, $t2#hi, $k16
417 veor $t2#lo, $t2#lo, $t2#hi
420 vext.8 $t2, $t2, $t2, #13
422 veor $t2, $t2, $t3
424 veor $r, $r, $t2
511 veor $t2,$t2,$t1 @
513 veor $t2, $t2, $t1 @
[all …]
ghash-alpha.pl
34 $t2="t2";
286 sll $Zlo,8,$t2
293 zapnot $t2,0x44,$t2
299 or $Zlo,$t2,$Zlo
300 sll $Zhi,8,$t2
312 zapnot $t2,0x44,$t2
375 sll $Zlo,8,$t2
382 zapnot $t2,0x44,$t2
389 sll $Zhi,8,$t2
403 zapnot $t2,0x44,$t2
[all …]
/openssl/crypto/aes/asm/
aes-armv4.pl
344 eor $t2,$t2,$i2,ror#8
406 eor $t2,$i2,$t2,lsl#8
553 ldrb $t2,[$tbl,$t2]
562 orr $t2,$t2,$i3,lsl#8
563 eor $t2,$t2,$t1
617 ldrb $t2,[$tbl,$t2]
626 orr $t2,$t2,$i3,lsl#8
689 ldrb $t2,[$tbl,$t2]
698 orr $t2,$t2,$i3,lsl#8
717 ldrb $t2,[$tbl,$t2]
[all …]
aes-parisc.pl
269 xor $acc8,$t2,$t2
271 xor $acc9,$t2,$t2
273 xor $acc10,$t2,$t2
275 xor $acc11,$t2,$t2
382 xor $acc8,$t2,$t2
384 xor $acc9,$t2,$t2
386 xor $acc10,$t2,$t2
388 xor $acc11,$t2,$t2
738 xor $acc8,$t2,$t2
740 xor $acc9,$t2,$t2
[all …]
aes-sparcv9.pl
300 xor $acc8,$t2,$t2
302 xor $acc9,$t2,$t2
304 xor $acc10,$t2,$t2
306 xor $acc11,$t2,$t2
433 xor $acc8,$t2,$t2
435 xor $acc9,$t2,$t2
437 xor $acc10,$t2,$t2
439 xor $acc11,$t2,$t2
846 xor $acc8,$t2,$t2
848 xor $acc9,$t2,$t2
[all …]
/openssl/crypto/ec/asm/
ecp_nistp521-ppc64.pl
179 vmsumudm $out[1],$t1,$t2,$vzero
182 vmsumudm $out[2],$t1,$t2,$vzero
186 vmsumudm $out[3],$t1,$t2,$vzero
192 vmsumudm $out[4],$t1,$t2,$vzero
198 vmsumudm $out[5],$t1,$t2,$vzero
203 vmsumudm $out[6],$t1,$t2,$vzero
208 vmsumudm $out[7],$t1,$t2,$vzero
213 vmsumudm $out[8],$t1,$t2,$vzero
219 vmsumudm $out[5],$t1,$t2,$out[5]
349 vmsumudm $out[2],$t1,$t2,$vzero
[all …]
ecp_nistz256-armv8.pl
278 mov $t2,$acc2
314 mov $t2,$a2
817 mov $t2,$a2
1082 csel $t2,$a2,$t2,ne
1102 csel $t2,$a2,$t2,ne
1172 orr $t2,$t2,$t3
1292 csel $t2,$a2,$t2,ne
1315 csel $t2,$a2,$t2,ne
1413 adcs $t2,$t2,$t1
1456 adcs $t2,$t2,$t1
[all …]
ecp_nistz256-ppc64.pl
1328 or $t2,$t2,$a2
1336 and $t2,$t2,$in2infty
1372 or $t2,$t2,$a2
1380 and $t2,$t2,$in2infty
1485 or $t2,$t2,$t3
1635 or $t2,$t2,$a2
1643 and $t2,$t2,$in2infty
1688 or $t2,$t2,$a2
1831 adde $t2,$t2,$t1
1874 adde $t2,$t2,$t1
[all …]
ecp_nistz256-sparcv9.pl
199 mulx $t2,$bi,$t2
284 mulx $t2,$bi,$t2
293 add @acc[2],$t2,$t2
522 subccc $t2,-1,$t2
913 or $t3,$t2,$t2
931 or $t3,$t2,$t2
1183 or $t3,$t2,$t2
1216 or $t3,$t2,$t2
2062 sllx $t2,32,$t2
2076 sllx $t2,32,$t2
[all …]
/openssl/crypto/sha/asm/
sha1-thumb.pl
36 $t2="r2";
53 lsl $t2,$a,#5
54 add $t2,$e
140 mov $t2,sp
155 cmp sp,$t2
159 sub $t2,#32*4
160 sub $t2,#32*4
172 cmp sp,$t2
218 mov $K,$t2
226 add $b,$t2
[all …]
sha512-armv4.pl
89 $t2="r11";
136 eor $t0,$t0,$t2
144 eor $t0,$t0,$t2
154 and $t0,$t2,#0xff
182 and $t0,$Alo,$t2
186 orr $Alo,$Alo,$t2
192 and $Ahi,$Ahi,$t2
444 adds $t2,$Tlo,$t2
461 adds $t2,$Tlo,$t2
476 adds $t2,$Tlo,$t2
[all …]
sha1-alpha.pl
36 $t2="ra";
63 zapnot $t2,0x22,$t2
72 and $b,$c,$t2
81 or $t2,$t3,$t2
95 and $b,$c,$t2
103 or $t2,$t3,$t2
125 or $t2,$t3,$t2
158 xor $d,$t2,$t2
186 xor $d,$t2,$t2
219 or $t2,$t3,$t2
[all …]
sha256-armv4.pl
77 $t2="r12";
128 and $t2,$t2,#0xff
150 ($t2,$t3)=($t3,$t2);
163 eor $t2,$t2,$t4,ror#$sigma1[1]
166 eor $t2,$t2,$t4,lsr#$sigma1[2] @ sigma1(X[i+14])
169 add $t2,$t2,$t0
260 eor $t2,$t2,$t2
292 cmp $inp,$t2
496 mov $t2,sp
529 eor $t2,$t2,$t2
[all …]
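
Note: the sha256-armv4.pl hits above come from the message-schedule update, where the perlasm comment labels $t2 as sigma1(X[i+14]); in the rolling 16-word window the code maintains, X[i+14] effectively plays the role of W[t-2] from FIPS 180-4. As a rough reference only (not OpenSSL code), the same expansion step in plain C looks like this:

#include <stdint.h>

/* FIPS 180-4 small sigma functions used by the SHA-256 message schedule. */
static uint32_t ror32(uint32_t x, int n) { return (x >> n) | (x << (32 - n)); }
static uint32_t sigma0(uint32_t x) { return ror32(x, 7) ^ ror32(x, 18) ^ (x >> 3); }
static uint32_t sigma1(uint32_t x) { return ror32(x, 17) ^ ror32(x, 19) ^ (x >> 10); }

/* Expand a 16-word input block W[0..15] into the full 64-word schedule. */
static void sha256_expand(uint32_t W[64])
{
    for (int i = 16; i < 64; i++)
        W[i] = sigma1(W[i - 2]) + W[i - 7] + sigma0(W[i - 15]) + W[i - 16];
}
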
sha1-mb-x86_64.pl
459 pxor $t2,$t2
838 vpinsrd \$1,(@ptr[3]),$t2,$t2
854 vpinsrd \$1,`4*$j-16*4`(@ptr[3]),$t2,$t2
873 vpor $t3,$t2,$t2 # rol(a,5)
897 vpor $t3,$t2,$t2 # rol(a,5)
929 vpor $t3,$t2,$t2 # rol(a,5)
967 vpor $t3,$t2,$t2 # rol(a,5)
987 vpor $t3,$t2,$t2 # rol(a,5)
1017 vpor $t3,$t2,$t2 # rol(a,5)
1134 vpxor $t2,$t2,$t2
[all …]
sha256-mb-x86_64.pl
146 movdqa $e,$t2
168 movdqa $a,$t2
171 psrld \$2,$t2
181 pxor $t3,$t2
189 pxor $t3,$t2
218 psrld \$7,$t2
236 pxor $t2,$t1
375 paddd $t2,$B
385 paddd $t2,$F
831 vpinsrd \$1,`4*$i`(@ptr[3]),$t2,$t2
[all …]
sha512-x86_64.pl
854 paddd @X[2],$t2
946 &movdqa ($t2,$t0);
965 &pxor ($t0,$t2);
978 &pxor ($t0,$t2);
981 &movdqa ($t2,$t3);
998 &pxor ($t3,$t2);
1006 &pxor ($t3,$t2);
1029 &movdqa ($t2,$t3);
1038 &pxor ($t3,$t2);
1048 &pxor ($t3,$t2);
[all …]
/openssl/crypto/md5/asm/
md5-sparcv9.pl
82 or $t2,%lo(@K[$i+1]),$t2
98 or $t2,%lo(@K[$i+1]),$t2
120 or $t2,%lo(@K[$i+1]),$t2
122 add $tx,$t2,$t2 ! X[1]+K[`$i+1`]
145 or $t2,%lo(@K[$i+1]),$t2
147 add $xi,$t2,$t2 ! X[$j]+K[`$i+1`]
169 or $t2,%lo(@K[$i+1]),$t2
171 add $xi,$t2,$t2 ! X[$j]+K[`$i+1`]
195 or $t2,%lo(@K[$i+1]),$t2
346 or $t2,%lo(@K[0]),$t2
[all …]
/openssl/crypto/bn/asm/
armv8-mont.pl
763 mul $t2,$a3,$a0
770 mul $t2,$a7,$a0
797 mul $t2,$a6,$a1
824 mul $t2,$a7,$a2
944 mul $t2,$a2,$n0
1052 extr $t2,$t3,$t2,#63
1070 extr $t2,$t3,$t2,#63
1094 extr $t2,$t3,$t2,#63
1111 extr $t2,xzr,$t2,#63
1273 ldp $a0,$a1,[$t2,#8*0] // recall that $t2 is &n[0]
[all …]
armv4-gf2m.pl
253 my ($r,$t0,$t1,$t2,$t3)=map("q$_",(0..3,8..12));
278 vext.8 $t2#lo, $a, $a, #3 @ A3
280 vmull.p8 $t2, $t2#lo, $b @ J = A3*B
290 veor $t2, $t2, $r @ N = I + J
293 veor $t2#lo, $t2#lo, $t2#hi @ t2 = (N) (P4 + P5) << 24
294 vand $t2#hi, $t2#hi, $k16
299 veor $t2#lo, $t2#lo, $t2#hi
302 vext.8 $t2, $t2, $t2, #13
304 veor $t2, $t2, $t3
306 veor $r, $r, $t2
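
Note: the armv4-gf2m.pl and ghash-armv4.pl hits above are from the NEON path, which assembles a 64x64-bit carry-less product out of vmull.p8 partial products (the comments name them J = A3*B, N = I + J, and so on), with $t2 holding one such partial product. For orientation only, a carry-less (GF(2)[x]) multiply is an ordinary shift-and-add multiply with XOR in place of addition; a scalar sketch of what each vmull.p8 lane computes:

#include <stdint.h>

/* Carry-less 8x8 -> 16-bit product, the per-lane operation of vmull.p8.
 * Partial products are combined with XOR, so no carries propagate. */
static uint16_t clmul8(uint8_t a, uint8_t b)
{
    uint16_t r = 0;
    for (int i = 0; i < 8; i++)
        if ((b >> i) & 1)
            r ^= (uint16_t)a << i;
    return r;
}
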
ppc64-mont.pl
588 add $t2,$t2,$carry
654 addc $t2,$t2,$carry
771 add $t2,$t2,$carry
819 addc $t2,$t2,$carry
848 addc $t2,$t2,$carry
1281 addc $t2,$t2,$t6
1317 add $t2,$t2,$carry
1447 addc $t2,$t2,$t6
1526 andc $t2,$t2,$ovf
1597 and $t2,$t2,$ovf
[all …]
ppc-mont.pl
848 and $t2,$t2,$carry
880 and $t2,$t2,$carry
1433 add $acc2,$t2,$t2
1434 $SHRI $t2,$t2,$BITS-1
1459 add $acc6,$t2,$t2
1460 $SHRI $t2,$t2,$BITS-1
1488 add $acc2,$t2,$t2
1489 $SHRI $t2,$t2,$BITS-1
1525 add $acc6,$t2,$t2
1526 $SHRI $t2,$t2,$BITS-1
[all …]
vis3-mont.pl
109 or $t2, $m0, $m0
111 ld [$ap+8], $t2 ! ap[1]
124 or $t2, $aj, $aj
126 ld [$np+8], $t2 ! np[1]
143 or $t2, $nj, $nj
168 ld [$np+0], $t2 ! np[j]
176 or $t2, $nj, $nj
221 or $t2, $m0, $m0
323 ld [$rp+0], $t2
330 movcs %icc, $t0, $t2
[all …]
/openssl/include/openssl/
safestack.h.in
37 # define SKM_DEFINE_STACK_OF_INTERNAL(t1, t2, t3) \ argument
42 static ossl_unused ossl_inline t2 *ossl_check_##t1##_type(t2 *ptr) \
67 # define SKM_DEFINE_STACK_OF(t1, t2, t3) \ argument
106 return (t2 *)OPENSSL_sk_delete((OPENSSL_STACK *)sk, i); \
108 static ossl_unused ossl_inline t2 *sk_##t1##_delete_ptr(STACK_OF(t1) *sk, t2 *ptr) \
123 return (t2 *)OPENSSL_sk_pop((OPENSSL_STACK *)sk); \
127 return (t2 *)OPENSSL_sk_shift((OPENSSL_STACK *)sk); \
137 static ossl_unused ossl_inline t2 *sk_##t1##_set(STACK_OF(t1) *sk, int idx, t2 *ptr) \
180 # define DEFINE_SPECIAL_STACK_OF(t1, t2) SKM_DEFINE_STACK_OF(t1, t2, t2) argument
181 # define DEFINE_SPECIAL_STACK_OF_CONST(t1, t2) \ argument
[all …]
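
Note: in safestack.h.in, t2 is a macro parameter rather than a register: SKM_DEFINE_STACK_OF(t1, t2, t3) generates the type-safe sk_<t1>_* wrappers, with t1 naming the stack, t2 the element type, and t3 the element type as seen by comparison callbacks. A minimal usage sketch, assuming the STACK_OF(OPENSSL_STRING) stack that safestack.h itself predefines:

#include <stdio.h>
#include <openssl/safestack.h>

int main(void)
{
    /* sk_OPENSSL_STRING_* are generated by DEFINE_SPECIAL_STACK_OF in
     * safestack.h (t1 = OPENSSL_STRING, t2 = char). */
    STACK_OF(OPENSSL_STRING) *sk = sk_OPENSSL_STRING_new_null();
    if (sk == NULL)
        return 1;

    sk_OPENSSL_STRING_push(sk, "t1");
    sk_OPENSSL_STRING_push(sk, "t2");

    for (int i = 0; i < sk_OPENSSL_STRING_num(sk); i++)
        printf("%s\n", sk_OPENSSL_STRING_value(sk, i));

    sk_OPENSSL_STRING_free(sk);  /* frees the stack, not the elements */
    return 0;
}
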
/openssl/crypto/aes/
aes_core.c
1437 u32 s0, s1, s2, s3, t0, t1, t2, t3; in AES_encrypt() local
1542 t2 = in AES_encrypt()
1563 Te2[(t2 >> 8) & 0xff] ^ in AES_encrypt()
1568 Te1[(t2 >> 16) & 0xff] ^ in AES_encrypt()
1573 Te0[(t2 >> 24) ] ^ in AES_encrypt()
1582 Te3[(t2 ) & 0xff] ^ in AES_encrypt()
1734 t2 = in AES_decrypt()
1755 Td2[(t2 >> 8) & 0xff] ^ in AES_decrypt()
1762 Td3[(t2 ) & 0xff] ^ in AES_decrypt()
1765 Td0[(t2 >> 24) ] ^ in AES_decrypt()
[all …]
