/openssl/crypto/aes/asm/

aes-riscv32-zkn.pl
    261  lw $T2,8($KEYP)
    270  xor $Q2,$Q2,$T2
    385  xor $Q2,$Q2,$T2
    543  xor $T2,$T2,$T1
    544  xor $T3,$T3,$T2
    597  xor $T2,$T2,$T1
    598  xor $T3,$T3,$T2
    670  xor $T2,$T2,$T1
    831  xor $T2,$T2,$T1
    910  xor $T2,$T2,$T1
    [all …]

aes-riscv64.pl
    322  xor $T2,$T2,$T6
    409  slli $T2,$T2,24
    432  xor $T2,$T2,$T3
    434  xor $T2,$T2,$K1 # XOR in key
    552  xor $T2,$T2,$T6
    629  slli $T2,$T2,16+32
    640  xor $T2,$T2,$T3
    852  xor $T2,$T2,$T1
    884  xor $T2,$T2,$T1
    920  xor $T2,$T2,$T1
    [all …]

aes-riscv64-zkn.pl
    316  ld $T2,16($UKEY)
    319  sd $T2,16($KEYP)
    331  @{[aes64ks2 $T2,$T1,$T2]}
    355  ld $T2,16($UKEY)
    359  sd $T2,16($KEYP)
    376  @{[aes64ks2 $T2,$T4,$T2]}
    492  ld $T2,16($UKEY)
    495  @{[aes64im $T3,$T2]}
    512  @{[aes64ks2 $T2,$T1,$T2]}
    537  @{[aes64im $T4,$T2]}
    [all …]

aes-riscv64-zvkb-zvkned.pl
    69   my ($T0, $T1, $T2, $T3) = ("t0", "t1", "t2", "t3");
    90   @{[vsetvli $T2, "zero", "e8", "m1", "ta", "ma"]}
    124  li $T2, 12
    130  beq $T0, $T2, ctr32_encrypt_blocks_192

aes-riscv64-zvbb-zvkg-zvkned.pl
    73   my ($T0, $T1, $T2) = ("t0", "t1", "t2");
    444  li $T2, 10
    446  beq $T0, $T2, aes_xts_enc_128
    573  li $T2, 10
    575  beq $T0, $T2, aes_xts_dec_128
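
In the vector AES modules above, $T2 holds the expected AES round count when the code dispatches on key size: 10, 12 or 14 rounds for AES-128, AES-192 and AES-256 (the "li $T2, 10 / beq $T0, $T2, aes_xts_enc_128" pattern). A minimal C sketch of that dispatch; the branch-target labels come from the listing, while the C signatures here are assumed purely for illustration:

    #include <stddef.h>

    /* Per-key-size routines; the C prototypes are illustrative assumptions. */
    void aes_xts_enc_128(const unsigned char *in, unsigned char *out, size_t len);
    void aes_xts_enc_192(const unsigned char *in, unsigned char *out, size_t len);
    void aes_xts_enc_256(const unsigned char *in, unsigned char *out, size_t len);

    /*
     * Dispatch on the round count of the expanded key: AES-128 uses 10
     * rounds, AES-192 uses 12 and AES-256 uses 14.
     */
    void aes_xts_enc(int rounds, const unsigned char *in,
                     unsigned char *out, size_t len)
    {
        if (rounds == 10)
            aes_xts_enc_128(in, out, len);
        else if (rounds == 12)
            aes_xts_enc_192(in, out, len);
        else
            aes_xts_enc_256(in, out, len);
    }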

/openssl/crypto/modes/asm/

ghash-x86_64.pl
    1077  vpxor $Hkey,$T2,$T2
    1109  vpsrldq \$8,$T2,$T2
    1116  vpsrlq \$5,$T2,$T2
    1234  vpxor $Ii,$T2,$T2
    1248  vpxor $Ii,$T2,$T2
    1275  vpxor $Ii,$T2,$T2
    1300  vpxor $Ii,$T2,$T2
    1335  vpxor $Ii,$T2,$T2
    1365  vpxor $Ii,$T2,$T2
    1392  vpxor $Ii,$T2,$T2
    [all …]

aesni-gcm-x86_64.pl
    219  vpclmulqdq \$0x11,$T2,$Ii,$T2
    492  vpshufb $Ii,$T2,$T2
    754  vpxor $Z2,$T2,$T2
    768  vpxor $T1,$T2,$T2
    785  vpunpckhqdq $T2,$T2,$Z3
    800  vpxor $T1,$T2,$T2
    821  vpxor $inout4,$T2,$T2
    838  vpxor $T1,$T2,$T2
    859  vpxor $inout1,$T2,$T2
    878  vpxor $T1,$T2,$T2
    [all …]

ghash-x86.pl
    888   &psrldq ($T2,8);
    890   &pxor ($Xhi,$T2);
    917   &pxor ($Xi,$T2);
    924   &pxor ($T2,$Xi);
    1067  &psrldq ($T2,8);
    1069  &pxor ($Xhi,$T2);
    1125  &psrldq ($T2,8);
    1127  &pxor ($Xhi,$T2);
    1164  &pslldq ($T2,4);
    1174  &pslld ($T2,30);
    [all …]

aes-gcm-riscv64-zvkb-zvkg-zvkned.pl
    80   my ($T0, $T1, $T2, $T3) = ("t0", "t1", "t2", "t3");
    478  slli $T2, $PADDING_LEN32, 2
    492  sub $INP, $INP, $T2
    494  sub $OUTP, $OUTP, $T2
    510  sub $T0, $XIP, $T2
    625  li $T2, 12
    629  beq $T0, $T2, aes_gcm_enc_blocks_192
    810  li $T2, 12
    814  beq $T0, $T2, aes_gcm_dec_blocks_192

aes-gcm-avx512.pl
    1243  vpclmulqdq \$0x00,$HK,$GH,$T2 # ; $T2 = a0*b0
    1251  vpxorq $T2,$GH,$GH
    1258  vpslldq \$8,$T2,$T2 # ; shift-L $T2 2 DWs
    1264  vpsrldq \$4,$T2,$T2 # ; shift-R only 1-DW to obtain 2-DWs shift-R
    1286  my $ZT2 = &ZWORD($T2);
    1293  my $YT2 = &YWORD($T2);
    1420  or $T2,$T2
    1427  cmp \$`(48*16)`,$T2
    1495  sub \$`(48*16)`,$T2
    1503  cmp \$`(32*16)`,$T2
    [all …]
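
In the GHASH and AES-GCM modules above, $T2 holds partial products of the carry-less (polynomial) multiplication done with pclmulqdq/vpclmulqdq and the shifted terms of the subsequent reduction. As a reference point, a portable C sketch of the 64x64 carry-less multiply primitive those instructions provide in hardware; the helper name clmul64 is illustrative:

    #include <stdint.h>

    /*
     * Carry-less multiplication of two 64-bit polynomials over GF(2),
     * producing a 128-bit product in hi:lo.  GHASH builds its GF(2^128)
     * multiplication out of several such partial products.
     */
    static void clmul64(uint64_t a, uint64_t b, uint64_t *hi, uint64_t *lo)
    {
        uint64_t h = 0, l = 0;
        int i;

        for (i = 0; i < 64; i++) {
            if ((b >> i) & 1) {
                l ^= a << i;
                if (i != 0)
                    h ^= a >> (64 - i);
            }
        }
        *hi = h;
        *lo = l;
    }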

/openssl/crypto/poly1305/asm/

poly1305-x86.pl
    492   &paddq ($D2,$T2);
    502   &paddq ($D4,$T2);
    511   &paddq ($D1,$T2);
    521   &paddq ($D3,$T2);
    540   &paddq ($D0,$T2);
    924   &paddq ($D2,$T2);
    934   &paddq ($D2,$T2);
    1314  &vpmuludq ($T2,$T2,&QWP(16*4,"edx")); # s1*h4
    1344  &vpmuludq ($T2,$T2,&QWP(16*0,"edx")); # r4*h0
    1438  my $MASK=$T2;
    [all …]

poly1305-x86_64.pl
    830   vpand $MASK,$T2,$T2 # 2
    1136  vpaddq $H2,$T2,$T2
    1803  vpsrlq \$4,$T2,$T2
    2359  vporq $T3,$T2,$T2
    2478  vporq $T3,$T2,$T2
    2671  vpsrlq \$4,$T2,$T2
    2991  vporq $PAD,$T2,$T2
    3161  vporq $PAD,$T2,$T2
    3218  vporq $PAD,$T2,$T2
    3527  vporq $PAD,$T2,$T2
    [all …]

poly1305-ppc.pl
    1347  be?vperm $T2,$T2,$T2,$_4
    1371  vsrd $T2,$T2,$_4
    1376  vand $T2,$T2,$mask26
    1383  vmrgow $I2,$T2,$I2
    1483  be?vperm $T2,$T2,$T2,$_4
    1566  vsrd $T2,$T2,$_4
    1584  vand $T2,$T2,$mask26
    1815  be?vperm $T2,$T2,$T2,$_4
    1842  vsrd $T4,$T2,$_40
    1843  vsrd $T2,$T2,$_14
    [all …]

poly1305-s390x.pl
    531  vperm ($T3,$T1,$T2,$bswaplo);
    532  vperm ($T4,$T1,$T2,$bswaphi);
    533  vperm ($T2,$T1,$T2,$bswapmi);
    537  verimg ($I2,$T2,$mask26,60); # >>4
    615  vperm ($T3,$T1,$T2,$bswaplo);
    616  vperm ($T4,$T1,$T2,$bswaphi);
    617  vperm ($T2,$T1,$T2,$bswapmi);
    816  vperm ($T3,$T1,$T2,$bswaplo);
    817  vperm ($T4,$T1,$T2,$bswaphi);
    818  vperm ($T2,$T1,$T2,$bswapmi);
    [all …]
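
In the vector Poly1305 code, $T2 carries message words while they are split into the radix-2^26 limb form (the mask26 ANDs, the shifts by 4, 14 and 40, and the OR-in of the $PAD bit above). A C sketch of that split for one 16-byte block; load_le64 is an illustrative helper and the sketch assumes a little-endian host:

    #include <stdint.h>
    #include <string.h>

    /* Illustrative little-endian 64-bit load (assumes a little-endian host). */
    static uint64_t load_le64(const unsigned char *p)
    {
        uint64_t v;

        memcpy(&v, p, sizeof(v));
        return v;
    }

    /*
     * Split a 16-byte message block, plus the 2^128 padding bit, into five
     * 26-bit limbs h[0..4], the radix-2^26 representation used by the
     * vectorized Poly1305 implementations.
     */
    static void poly1305_split26(uint32_t h[5], const unsigned char block[16])
    {
        uint64_t lo = load_le64(block);
        uint64_t hi = load_le64(block + 8);

        h[0] = (uint32_t)(lo & 0x3ffffff);
        h[1] = (uint32_t)((lo >> 26) & 0x3ffffff);
        h[2] = (uint32_t)(((lo >> 52) | (hi << 12)) & 0x3ffffff);
        h[3] = (uint32_t)((hi >> 14) & 0x3ffffff);
        h[4] = (uint32_t)(hi >> 40) | (1u << 24);   /* 2^128 padding bit */
    }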

/openssl/crypto/chacha/asm/

chacha-riscv64-v-zbb.pl
    89   my ( $T2 ) = ( "t5" );
    377  li $T2, 0x79622d32
    378  @{[vadd_vx $V2, $V2, $T2]}
    379  add $STATE2, $STATE2, $T2
    385  lw $T2, 8($KEY)
    392  add $STATE6, $STATE6, $T2
    414  lw $T2, 24($KEY)
    433  lw $T2, 12($COUNTER)
    481  mv $T2, sp
    486  @{[vle8_v $V16, $T2]}
    [all …]

chacha-armv8.pl
    844   my ($T0,$T1,$T2,$T3,$T4,$T5)=@K;
    984   my @thread2=&NEONROUND($A2,$B2,$C2,$D2,$T2,0);
    1003  @thread2=&NEONROUND($A2,$B2,$C2,$D2,$T2,1);
    1098  @thread2=&NEONROUND($A2,$B2,$C2,$D2,$T2,0);
    1115  @thread2=&NEONROUND($A2,$B2,$C2,$D2,$T2,1);
    1212  eor $C0,$C0,$T2
    1228  eor $C1,$C1,$T2
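
Here $T2 holds ChaCha state words during setup (0x79622d32 is the third word of the "expand 32-byte k" constant) and key/counter words, or acts as a per-lane temporary inside the NEONROUND macro. A C sketch of the quarter-round that every such round macro implements; the function names are illustrative:

    #include <stdint.h>

    static uint32_t rotl32(uint32_t x, int n)
    {
        return (x << n) | (x >> (32 - n));
    }

    /* One ChaCha quarter-round applied to four words of the 16-word state. */
    static void chacha_quarter_round(uint32_t *a, uint32_t *b,
                                     uint32_t *c, uint32_t *d)
    {
        *a += *b; *d ^= *a; *d = rotl32(*d, 16);
        *c += *d; *b ^= *c; *b = rotl32(*b, 12);
        *a += *b; *d ^= *a; *d = rotl32(*d, 8);
        *c += *d; *b ^= *c; *b = rotl32(*b, 7);
    }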

/openssl/crypto/aria/

aria.c
    345  #define ARIA_ADD_ROUND_KEY(RK, T0, T1, T2, T3) \ argument
    349  (T2) ^= (RK)->u[2]; \
    354  #define ARIA_SBOX_LAYER1_WITH_PRE_DIFF(T0, T1, T2, T3) \ argument
    366  (T2) = \
    367  S1[GET_U8_BE(T2, 0)] ^ \
    404  #define ARIA_DIFF_WORD(T0,T1,T2,T3) \ argument
    406  (T1) ^= (T2); \
    407  (T2) ^= (T3); \
    411  (T2) ^= (T0); \
    412  (T1) ^= (T2); \
    [all …]
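
The aria.c hits show T2 as one of the four 32-bit working words the round macros transform. A C sketch of what the ARIA_ADD_ROUND_KEY step does, based on the shown (T2) ^= (RK)->u[2] line; the round-key type and function name here are illustrative assumptions, the real code uses macros on the round-key's .u[] word array:

    #include <stdint.h>

    /* Illustrative 128-bit round-key type exposing four 32-bit words. */
    typedef struct {
        uint32_t u[4];
    } aria_rk_t;

    /* XOR the 128-bit round key into the four 32-bit state words. */
    static void aria_add_round_key(const aria_rk_t *rk,
                                   uint32_t *t0, uint32_t *t1,
                                   uint32_t *t2, uint32_t *t3)
    {
        *t0 ^= rk->u[0];
        *t1 ^= rk->u[1];
        *t2 ^= rk->u[2];
        *t3 ^= rk->u[3];
    }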

/openssl/crypto/sha/asm/

sha256-c64xplus.pl
    34   ($A,$Actx,$B,$Bctx,$C,$Cctx,$D,$Dctx,$T2,$S0,$s1,$t0a,$t1a,$t2a,$X9,$X14)
    43   ($Maj,$Ch)=($T2,"B6");
    147  || ADD $S0,$Maj,$T2 ; T2 = Sigma0(a) + Maj(a,b,c)
    158  || ADD $T1,$T2,$A ; a = T1 + T2
    186  || ADD $S0,$Maj,$T2 ; T2 = Sigma0(a) + Maj(a,b,c)
    201  || ADD $T1,$T2,$A ; a = T1 + T2
    243  || ADD $S0,$Maj,$T2 ; T2 = Sigma0(a) + Maj(a,b,c)
    256  || ADD $T1,$T2,$A ; a = T1 + T2

sha512-armv8.pl
    106  my ($T0,$T1,$T2)=(@X[($i-8)&15],@X[($i-9)&15],@X[($i-10)&15]);
    163  ror $T2,@X[($j+14)&15],#$sigma1[0]
    175  eor $T2,$T2,@X[($j+14)&15],ror#$sigma1[1]
    180  eor $T2,$T2,@X[($j+14)&15],lsr#$sigma1[2] // sigma1(X[i+14])
    187  add @X[$j],@X[$j],$T2
    484  &ushr_32 ($T2,$T0,$sigma0[0]);
    490  &sli_32 ($T2,$T0,32-$sigma0[0]);
    496  &eor_8 ($T1,$T1,$T2);
    653  ld1.32 {$T2},[$Ktbl], #16
    662  add.32 $T2,$T2,@X[2]
    [all …]

sha256-armv4.pl
    312  my ($T0,$T1,$T2,$T3,$T4,$T5)=("q8","q9","q10","q11","d24","d25");
    340  &vshr_u32 ($T2,$T0,$sigma0[0]);
    349  &vsli_32 ($T2,$T0,32-$sigma0[0]);
    355  &veor ($T1,$T1,$T2);
    506  vld1.32 {$T2},[$Ktbl,:128]!
    520  vadd.i32 $T2,$T2,@X[2]
    523  vst1.32 {$T2},[$Xfer,:128]!
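
In these SHA modules, $T2 is used both for the round term T2 = Sigma0(a) + Maj(a,b,c) and, in the NEON/ASIMD paths, for the sigma0/sigma1 terms of the message schedule (the shift-right, shift-left-insert and XOR sequences). A C sketch of the SHA-256 message-schedule expansion those sequences compute, following FIPS 180-4:

    #include <stdint.h>

    static uint32_t rotr32(uint32_t x, int n)
    {
        return (x >> n) | (x << (32 - n));
    }

    /* SHA-256 small sigma functions (FIPS 180-4). */
    static uint32_t sigma0(uint32_t x) { return rotr32(x, 7) ^ rotr32(x, 18) ^ (x >> 3); }
    static uint32_t sigma1(uint32_t x) { return rotr32(x, 17) ^ rotr32(x, 19) ^ (x >> 10); }

    /* Expand the first 16 message words into the full 64-word schedule. */
    static void sha256_message_schedule(uint32_t W[64])
    {
        int t;

        for (t = 16; t < 64; t++)
            W[t] = sigma1(W[t - 2]) + W[t - 7] + sigma0(W[t - 15]) + W[t - 16];
    }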

/openssl/crypto/bn/asm/

rsaz-2k-avx512.pl
    134  my ($T0,$T0h,$T1,$T1h,$T2) = ("$zero", "$Bi", "$Yi", map("%ymm$_", (25..26)));
    220  vpsrlq \$52, $_R2, $T2
    223  valignq \$3, $T1h, $T2, $T2
    241  vpaddq $T2, $_R2, $_R2

rsaz-3k-avx512.pl
    121  my ($T0,$T0h,$T1,$T1h,$T2,$T2h,$T3,$T3h) = ("$zero", "$Bi", "$Yi", map("%ymm$_", (19..23)));
    222  vpsrlq \$52, $_R2, $T2
    230  valignq \$3, $T2, $T2h, $T2h
    231  valignq \$3, $T1h, $T2, $T2
    252  vpaddq $T2, $_R2, $_R2

rsaz-4k-avx512.pl
    118  my ($T0,$T0h,$T1,$T1h,$T2,$T2h,$T3,$T3h,$T4,$T4h) = ("$zero", "$Bi", "$Yi", map("%ymm$_", (23..29))…
    229  vpsrlq \$52, $_R2, $T2
    241  valignq \$3, $T2, $T2h, $T2h
    242  valignq \$3, $T1h, $T2, $T2
    265  vpaddq $T2, $_R2, $_R2
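
In the AVX-512 RSAZ modules, $T2 holds the carries extracted from a vector of radix-2^52 limbs (vpsrlq \$52) before they are realigned (valignq) and added into the next higher limbs (vpaddq). A scalar C sketch of the same normalization over an array of 52-bit limbs; the function name is illustrative:

    #include <stdint.h>

    #define DIGIT_BITS 52
    #define DIGIT_MASK ((UINT64_C(1) << DIGIT_BITS) - 1)

    /*
     * Propagate carries so every limb fits in 52 bits again: the bits above
     * 2^52 of each limb are added into the next limb, the scalar equivalent
     * of the vpsrlq/valignq/vpaddq sequence above.  The carry out of the top
     * limb is dropped in this sketch.
     */
    static void normalize_radix52(uint64_t *limbs, int n)
    {
        uint64_t carry = 0;
        int i;

        for (i = 0; i < n; i++) {
            uint64_t t = limbs[i] + carry;

            limbs[i] = t & DIGIT_MASK;
            carry = t >> DIGIT_BITS;
        }
    }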

/openssl/crypto/sha/

sha256.c
    215  unsigned MD32_REG_T a, b, c, d, e, f, g, h, s0, s1, T1, T2;  in sha256_block_data_order() local
    235  T2 = Sigma0(a) + Maj(a, b, c);  in sha256_block_data_order()
    243  a = T1 + T2;  in sha256_block_data_order()
    254  T2 = Sigma0(a) + Maj(a, b, c);  in sha256_block_data_order()
    262  a = T1 + T2;  in sha256_block_data_order()

sha512.c
    667  SHA_LONG64 a, b, c, d, e, f, g, h, s0, s1, T1, T2;  in sha512_block_data_order() local
    689  T2 = Sigma0(a) + Maj(a, b, c);  in sha512_block_data_order()
    697  a = T1 + T2;  in sha512_block_data_order()
    708  T2 = Sigma0(a) + Maj(a, b, c);  in sha512_block_data_order()
    716  a = T1 + T2;  in sha512_block_data_order()
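
In the reference C implementations, T1 and T2 are the two working sums of one compression round: T1 collects h, Sigma1(e), Ch(e,f,g), the round constant and the message word, T2 = Sigma0(a) + Maj(a,b,c), and the new a is T1 + T2. A self-contained C sketch of a single SHA-256 round; the helper names are illustrative and the definitions follow FIPS 180-4:

    #include <stdint.h>

    static uint32_t rotr32(uint32_t x, int n)
    {
        return (x >> n) | (x << (32 - n));
    }

    static uint32_t Sigma0(uint32_t x) { return rotr32(x, 2) ^ rotr32(x, 13) ^ rotr32(x, 22); }
    static uint32_t Sigma1(uint32_t x) { return rotr32(x, 6) ^ rotr32(x, 11) ^ rotr32(x, 25); }
    static uint32_t Ch(uint32_t x, uint32_t y, uint32_t z)  { return (x & y) ^ (~x & z); }
    static uint32_t Maj(uint32_t x, uint32_t y, uint32_t z) { return (x & y) ^ (x & z) ^ (y & z); }

    /* One SHA-256 compression round; s[0..7] is the working state a..h. */
    static void sha256_round(uint32_t s[8], uint32_t Kt, uint32_t Wt)
    {
        uint32_t T1 = s[7] + Sigma1(s[4]) + Ch(s[4], s[5], s[6]) + Kt + Wt;
        uint32_t T2 = Sigma0(s[0]) + Maj(s[0], s[1], s[2]);

        s[7] = s[6]; s[6] = s[5]; s[5] = s[4];
        s[4] = s[3] + T1;
        s[3] = s[2]; s[2] = s[1]; s[1] = s[0];
        s[0] = T1 + T2;
    }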