/openssl/crypto/aes/asm/

aes-riscv64-zkn.pl
  153  xor $Q0,$Q0,$T0
  170  xor $Q0,$Q2,$T0
  299  @{[aes64ks2 $T0,$T2,$T0]}
  324  @{[aes64ks2 $T0,$T3,$T0]}
  365  @{[aes64ks2 $T0,$T4,$T0]}
  401  li $T0,128
  408  li $T0,192
  416  li $T0,256
  458  @{[aes64ks2 $T0,$T2,$T0]}
  501  @{[aes64ks2 $T0,$T3,$T0]}
  [all …]

aes-riscv32-zkn.pl
  268  xor $Q0,$Q0,$T0
  383  xor $Q0,$Q0,$T0
  522  xor $T0,$T0,$T4
  580  xor $T0,$T0,$T6
  653  xor $T0,$T0,$T8
  713  li $T0,128
  720  li $T0,192
  728  li $T0,256
  814  xor $T0,$T0,$T4
  893  xor $T0,$T0,$T6
  [all …]

aes-riscv64.pl
  320  xor $T0,$T0,$T4
  407  slli $T0,$T0,24
  428  xor $T0,$T0,$T1
  550  xor $T0,$T0,$T4
  627  slli $T0,$T0,32
  639  xor $T0,$T0,$T1
  645  xor $T0,$T0,$T2
  849  xor $T0,$T0,$T4
  850  xor $T0,$T0,$T5
  881  xor $T0,$T0,$T6
  [all …]

aes-riscv64-zvbb-zvkg-zvkned.pl
   90  addi $T0, $T0, -1
   95  addi $T0, $T0, -1
   97  bnez $T0, 1b
  112  li $T0, 5
  159  sll $T0, $T1, $T0
  205  li $T0, 0x40
  238  li $T0, 0x40
  469  slli $T0, $VL, 2
  518  slli $T0, $VL, 2
  600  slli $T0, $VL, 2
  [all …]

aes-riscv64-zvkned.pl
  324  li $T0, 10
  327  li $T0, 12
  330  li $T0, 14
  462  li $T0, 10
  465  li $T0, 12
  468  li $T0, 14
  610  li $T0, 10
  613  li $T0, 12
  616  li $T0, 14
  721  li $T0, 10
  [all …]

aes-riscv64-zvkb-zvkned.pl
   89  li $T0, 0b10001000
  122  lwu $T0, 240($KEYP)
  177  slli $T0, $VL, 2
  180  add $INP, $INP, $T0
  203  add $OUTP, $OUTP, $T0
  253  slli $T0, $VL, 2
  256  add $INP, $INP, $T0
  281  add $OUTP, $OUTP, $T0
  335  slli $T0, $VL, 2
  338  add $INP, $INP, $T0
  [all …]

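Note: the "li $T0,128/192/256" and "li $T0, 10/12/14" loads above are the AES key-length dispatch; AES-128, AES-192 and AES-256 use 10, 12 and 14 rounds respectively. A minimal C sketch of that mapping (the helper name is hypothetical, not an OpenSSL API):

  /* Hypothetical helper mirroring the "li $T0, 10/12/14" dispatch:
   * AES-128/192/256 use 10/12/14 rounds (Nr = Nk + 6). */
  static int aes_rounds_for_key_bits(int key_bits)
  {
      switch (key_bits) {
      case 128: return 10;
      case 192: return 12;
      case 256: return 14;
      default:  return -1;   /* unsupported key length */
      }
  }
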
/openssl/crypto/poly1305/asm/

poly1305-x86.pl
   807  &pshufd ($T0,$T0,0b11101110);
   817  &pshufd ($T0,$T0,0b11101110);
   827  &pshufd ($T0,$T0,0b11101110);
   837  &pshufd ($T0,$T0,0b11101110);
  1269  &vpaddd ($T0,$T0,$D2);  # *5
  1275  &vpaddd ($T0,$T0,$D4);  # *5
  1370  &vpsllq ($T0,$T0,2);
  1419  &vpaddd ($T0,$T0,$D2);  # *5
  1425  &vpaddd ($T0,$T0,$D4);  # *5
  1623  &vpand ($T0,$T0,$MASK); # 0
  [all …]

poly1305-ppc.pl
  1110  vspltisb $T0,2
  1249  vsld $S2,$R2,$T0
  1250  vsld $S3,$R3,$T0
  1251  vsld $S4,$R4,$T0
  1272  vslw $S2,$R2,$T0
  1273  vslw $S3,$R3,$T0
  1334  vxor $T0,$T0,$T0   # ensure second half is zero
  1375  vand $T0,$T0,$mask26
  1591  vspltisb $T0,2
  1777  vspltisb $T0,2
  [all …]

poly1305-x86_64.pl
   826  vpand $MASK,$T0,$T0   # 0
  1082  vpand $MASK,$T0,$T0   # 0
  1137  vpaddq $H0,$T0,$T0
  1807  vpand $MASK,$T0,$T0   # 0
  1960  vpand $MASK,$T0,$T0   # 0
  2364  vpandq $MASK,$T0,$T0  # 0
  2516  vpandq $MASK,$T0,$T0  # 0
  2683  vpand $MASK,$T0,$T0   # 0
  2868  vpsrlvq $inp_shift,$T0,$T0
  2870  vporq $PAD,$T0,$T0
  [all …]

poly1305-armv4.pl
   576  vshr.u64 $T0,$D3,#26
   585  vshrn.u64 $T0#lo,$D4,#26
   594  vshl.u32 $T0#lo,$T0#lo,#2
   943  vshr.u64 $T0,$D3,#26
   954  vshrn.u64 $T0#lo,$D4,#26
   965  vshl.u32 $T0#lo,$T0#lo,#2
  1112  vshr.u64 $T0,$D3,#26
  1119  vshr.u64 $T0,$D4,#26
  1125  vadd.i64 $D0,$D0,$T0
  1126  vshl.u64 $T0,$T0,#2
  [all …]

poly1305-armv8.pl
  703  ushr $T0.2d,$ACC3,#26
  711  ushr $T0.2d,$ACC4,#26
  718  add $ACC0,$ACC0,$T0.2d
  719  shl $T0.2d,$T0.2d,#2
  727  shrn $T0.2s,$ACC0,#26
  732  add $H1,$H1,$T0.2s   // h0 -> h1
  844  ushr $T0.2d,$ACC3,#26
  852  ushr $T0.2d,$ACC4,#26
  858  add $ACC0,$ACC0,$T0.2d
  859  shl $T0.2d,$T0.2d,#2
  [all …]

poly1305-sparcv9.pl
  335  mulx $S1,$H1,$T0   ! s1*h1
  337  addcc $T0,$D0,$D0
  338  mulx $R1,$H0,$T0   ! r1*h0
  341  addcc $T0,$D1,$D1
  342  mulx $R0,$H1,$T0   ! r0*h1
  345  addcc $T0,$D1,$D1
  346  mulx $S1,$H2,$T0   ! s1*h2
  349  addcc $T0,$D1,$D1
  352  srlx $D2,2,$T0     ! final reduction step
  355  add $T1,$T0,$T0
  [all …]

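Note: the recurring pattern above, a shift right by 26 followed by a shift left by 2 and an add (the "# *5" comments, and "srlx $D2,2,$T0 ! final reduction step"), is Poly1305's lazy reduction over five 26-bit limbs: a carry out of the top limb wraps around multiplied by 5, because 2^130 = 5 (mod 2^130 - 5), and c*5 is computed as c*4 + c. A scalar C sketch of one carry pass (a simplified illustration, not the vectorized code):

  #include <stdint.h>

  /* One lazy-reduction pass over five 26-bit Poly1305 limbs held in
   * uint64_t with slack bits. The carry out of h[4] re-enters at h[0]
   * multiplied by 5, since 2^130 = 5 (mod 2^130 - 5). */
  static void poly1305_carry_pass(uint64_t h[5])
  {
      uint64_t c;
      c = h[0] >> 26; h[0] &= 0x3ffffff; h[1] += c;
      c = h[1] >> 26; h[1] &= 0x3ffffff; h[2] += c;
      c = h[2] >> 26; h[2] &= 0x3ffffff; h[3] += c;
      c = h[3] >> 26; h[3] &= 0x3ffffff; h[4] += c;
      c = h[4] >> 26; h[4] &= 0x3ffffff; h[0] += c * 5; /* wrap: *4 + *1 */
      c = h[0] >> 26; h[0] &= 0x3ffffff; h[1] += c;
  }
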
/openssl/crypto/modes/asm/

aes-gcm-riscv64-zvkb-zvkg-zvkned.pl
  428  bltu $T1, $T0, 1b
  444  li $T0, 0b10001000
  469  and $T0, $T0, $LEN32
  470  beqz $T0, 1f
  510  sub $T0, $XIP, $T2
  614  srli $T0, $LEN, 4
  615  beqz $T0, .Lenc_end
  616  slli $LEN32, $T0, 2
  623  lwu $T0, 240($KEYP)
  799  srli $T0, $LEN, 4
  [all …]

aes-gcm-avx512.pl
  4132  my $T0 = $_[9];   # [clobered] temporary ZMM register
  4181  vmovdqu8 `$DATA_DISPL + (64*0)`($IN,$DATA_OFFSET,1),$T0
  4210  vpxorq $T0,$B00_03,$B00_03
  4225  vpshufb $SHUF_MASK,$T0,$B00_03

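Note: two scalar idioms recur in these files. "srli $T0, $LEN, 4" computes the number of whole 16-byte AES blocks, and "lwu $T0, 240($KEYP)" reads the round count, which OpenSSL's AES_KEY stores directly after its 60-word (240-byte) rd_key array. A C sketch of both (the struct mirrors AES_KEY's shape for illustration only, and the function name is hypothetical):

  #include <stddef.h>
  #include <stdint.h>

  /* Shape of OpenSSL's AES_KEY: 4 * (AES_MAXNR + 1) = 60 round-key
   * words, so the rounds field lands at byte offset 240, the offset
   * loaded by "lwu $T0, 240($KEYP)" above. */
  struct aes_key_shape {
      uint32_t rd_key[60];
      int rounds;
  };

  /* "srli $T0, $LEN, 4": number of whole 16-byte blocks in the input. */
  static size_t gcm_full_blocks(size_t len)
  {
      return len >> 4;
  }
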
/openssl/crypto/seed/

seed_local.h
   58  (T0) = (X3); \
   71  # define KEYUPDATE_TEMP(T0, T1, K) \
   72  (K)[0] = G_FUNC((T0)); \
   99  # define E_SEED(T0, T1, X1, X2, X3, X4, rbase) \
  100  (T0) = (X3) ^ (ks->data)[(rbase)]; \
  102  (T1) ^= (T0); \
  104  (T0) = ((T0) + (T1)) & 0xffffffff; \
  105  (T0) = G_FUNC((T0)); \
  106  (T1) = ((T1) + (T0)) & 0xffffffff; \
  108  (T0) = ((T0) + (T1)) & 0xffffffff; \
  [all …]

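Note: the E_SEED lines above spell out SEED's F-function: XOR two subkey words into the right half, then alternate the G-function with 32-bit modular additions. Lines 101, 103 and 107 are not shown in this listing; the sketch below fills them per the published SEED specification, with G standing in for G_FUNC, so treat it as an illustration rather than the file's exact text:

  #include <stdint.h>

  extern uint32_t G(uint32_t x);   /* stand-in for seed_local.h's G_FUNC */

  /* SEED F-function skeleton as the E_SEED macro lays it out; uint32_t
   * arithmetic supplies the & 0xffffffff truncation for free. */
  static void seed_f(uint32_t k0, uint32_t k1, uint32_t x3, uint32_t x4,
                     uint32_t *t0_out, uint32_t *t1_out)
  {
      uint32_t t0 = x3 ^ k0;
      uint32_t t1 = x4 ^ k1;   /* elided line 101, per the SEED spec */
      t1 ^= t0;
      t1 = G(t1);              /* elided line 103, per the SEED spec */
      t0 = G(t0 + t1);
      t1 = G(t1 + t0);         /* elided line 107, per the SEED spec */
      t0 += t1;
      *t0_out = t0;            /* the macro then XORs these into X1/X2 */
      *t1_out = t1;
  }
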
/openssl/crypto/chacha/asm/

chacha-riscv64-v-zbb.pl
   87  my ( $T0 ) = ( "t3" );
  267  srli $T0, $LEN, 6
  323  li $T0, 64
  333  li $T0, 10
  336  addi $T0, $T0, -1
  359  li $T0, 64
  375  lw $T0, 0($KEY)
  410  lw $T0, 16($KEY)
  464  li $T0, 64
  469  slli $T0, $VL, 6
  [all …]

chacha-ppc.pl
  420  my ($twelve,$seven,$T0,$T1) = @D;
  512  ?lvsr $T0,0,$key                        # prepare unaligned load
  526  ?vperm @K[1],@K[2],@K[1],$T0            # align key
  527  ?vperm @K[2],@D[0],@K[2],$T0
  538  vxor $T0,$T0,$T0                        # 0x00..00
  542  ?vperm $outmask,$outmask,$T0,$outperm
  544  be?lvsl $T0,0,@x[0]                     # 0x00..0f
  546  be?vxor $T0,$T0,$T1                     # swap bytes within words
  548  be?vperm $inpperm,$inpperm,$inpperm,$T0

chacha-armv8.pl
   844  my ($T0,$T1,$T2,$T3,$T4,$T5)=@K;
   982  my @thread0=&NEONROUND($A0,$B0,$C0,$D0,$T0,0);
  1001  @thread0=&NEONROUND($A0,$B0,$C0,$D0,$T0,1);
  1096  @thread0=&NEONROUND($A0,$B0,$C0,$D0,$T0,0);
  1113  @thread0=&NEONROUND($A0,$B0,$C0,$D0,$T0,1);
  1202  ld1.8 {$T0-$T3},[$inp],#64
  1208  eor $A0,$A0,$T0
  1215  ld1.8 {$T0-$T3},[$inp],#64
  1226  eor $A1,$A1,$T0

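Note: "li $T0, 10" followed by "addi $T0, $T0, -1" in the RISC-V file is the standard ChaCha20 loop counter, 20 rounds run as 10 double rounds (a column round plus a diagonal round), while "srli $T0, $LEN, 6" counts 64-byte blocks. A plain C sketch of the round loop, assuming the usual 16-word state layout:

  #include <stdint.h>

  #define ROTL32(v, n) (((v) << (n)) | ((v) >> (32 - (n))))

  /* The ChaCha quarter-round on four state words. */
  static void qr(uint32_t *a, uint32_t *b, uint32_t *c, uint32_t *d)
  {
      *a += *b; *d ^= *a; *d = ROTL32(*d, 16);
      *c += *d; *b ^= *c; *b = ROTL32(*b, 12);
      *a += *b; *d ^= *a; *d = ROTL32(*d, 8);
      *c += *d; *b ^= *c; *b = ROTL32(*b, 7);
  }

  /* 20 ChaCha rounds as 10 double rounds, the count loaded by
   * "li $T0, 10" above. */
  static void chacha20_rounds(uint32_t x[16])
  {
      for (int i = 10; i > 0; i--) {
          qr(&x[0], &x[4], &x[8],  &x[12]);  /* column rounds */
          qr(&x[1], &x[5], &x[9],  &x[13]);
          qr(&x[2], &x[6], &x[10], &x[14]);
          qr(&x[3], &x[7], &x[11], &x[15]);
          qr(&x[0], &x[5], &x[10], &x[15]);  /* diagonal rounds */
          qr(&x[1], &x[6], &x[11], &x[12]);
          qr(&x[2], &x[7], &x[8],  &x[13]);
          qr(&x[3], &x[4], &x[9],  &x[14]);
      }
  }
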
/openssl/crypto/sha/asm/

sha512-armv8.pl
  107  $T0=@X[$i+3] if ($i<11);
  146  ror $T0,$a,#$Sigma0[0]
  165  ror $T0,$a,#$Sigma0[0]
  172  eor $T0,$T0,$a,ror#$Sigma0[1]
  484  &ushr_32 ($T2,$T0,$sigma0[0]);
  486  &ushr_32 ($T1,$T0,$sigma0[2]);
  567  &add_32 ($T0,$T0,@X[0]);
  596  &add_32 ($T0,$T0,@X[0]);
  651  ld1.32 {$T0},[$Ktbl], #16
  660  add.32 $T0,$T0,@X[0]
  [all …]

sha256-armv4.pl
  340  &vshr_u32 ($T2,$T0,$sigma0[0]);
  346  &vshr_u32 ($T1,$T0,$sigma0[2]);
  349  &vsli_32 ($T2,$T0,32-$sigma0[0]);
  352  &vshr_u32 ($T3,$T0,$sigma0[1]);
  358  &vsli_32 ($T3,$T0,32-$sigma0[1]);
  406  &vld1_32 ("{$T0}","[$Ktbl,:128]!");
  418  &vadd_i32 ($T0,$T0,@X[0]);
  447  &vadd_i32 ($T0,$T0,@X[0]);
  504  vld1.32 {$T0},[$Ktbl,:128]!
  517  vadd.i32 $T0,$T0,@X[0]
  [all …]

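Note: NEON has no vector rotate, so the vshr/vsli pairs above (shift right by $sigma0[n], then shift-left-and-insert by 32-$sigma0[n]) assemble the rotations of SHA-256's small sigma0, while the lone vshr by $sigma0[2] is the plain shift term. A scalar equivalent, using the standard SHA-256 constants (7, 18, 3):

  #include <stdint.h>

  static uint32_t rotr32(uint32_t x, unsigned n)
  {
      return (x >> n) | (x << (32 - n));
  }

  /* sigma0 on a message-schedule word: each rotr32 corresponds to one
   * vshr + vsli pair in the NEON code above. */
  static uint32_t sha256_sigma0(uint32_t x)
  {
      return rotr32(x, 7) ^ rotr32(x, 18) ^ (x >> 3);
  }
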
/openssl/crypto/aria/

aria.c
  345  #define ARIA_ADD_ROUND_KEY(RK, T0, T1, T2, T3) \
  347  (T0) ^= (RK)->u[0]; \
  354  #define ARIA_SBOX_LAYER1_WITH_PRE_DIFF(T0, T1, T2, T3) \
  356  (T0) = \
  357  S1[GET_U8_BE(T0, 0)] ^ \
  358  S2[GET_U8_BE(T0, 1)] ^ \
  359  X1[GET_U8_BE(T0, 2)] ^ \
  360  X2[GET_U8_BE(T0, 3)]; \
  404  #define ARIA_DIFF_WORD(T0,T1,T2,T3) \
  408  (T0) ^= (T1); \
  [all …]

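Note: ARIA_SBOX_LAYER1_WITH_PRE_DIFF above folds the S-box substitution and part of the diffusion layer into four 8-bit-to-32-bit table lookups, one per byte of the word, XORed together. A C rendering of that one macro body (S1/S2/X1/X2 are aria.c's tables; the GET_U8_BE definition is inferred from its big-endian usage, so treat it as an assumption):

  #include <stdint.h>

  extern const uint32_t S1[256], S2[256], X1[256], X2[256];

  /* Byte n of a 32-bit word, big-endian; assumed to match aria.c's
   * GET_U8_BE. */
  #define GET_U8_BE(w, n) ((uint8_t)((w) >> (24 - 8 * (n))))

  /* One word of the combined S-box + pre-diffusion layer. */
  static uint32_t aria_sbox_word(uint32_t t0)
  {
      return S1[GET_U8_BE(t0, 0)] ^ S2[GET_U8_BE(t0, 1)] ^
             X1[GET_U8_BE(t0, 2)] ^ X2[GET_U8_BE(t0, 3)];
  }
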
/openssl/crypto/bn/asm/

rsaz-2k-avx512.pl
  134  my ($T0,$T0h,$T1,$T1h,$T2) = ("$zero", "$Bi", "$Yi", map("%ymm$_", (25..26)));
  211  vpbroadcastq $_acc, $T0
  212  vpblendd \$3, $T0, $_R0, $_R0
  216  vpsrlq \$52, $_R0, $T0
  226  valignq \$3, $T0, $T0h, $T0h
  227  valignq \$3, .Lzeros(%rip), $T0, $T0
  237  vpaddq $T0, $_R0, $_R0

rsaz-3k-avx512.pl
  121  my ($T0,$T0h,$T1,$T1h,$T2,$T2h,$T3,$T3h) = ("$zero", "$Bi", "$Yi", map("%ymm$_", (19..23)));
  213  vpbroadcastq $_acc, $T0
  214  vpblendd \$3, $T0, $_R0, $_R0
  218  vpsrlq \$52, $_R0, $T0
  234  valignq \$3, $T0, $T0h, $T0h
  235  valignq \$3, .Lzeros(%rip), $T0, $T0
  248  vpaddq $T0, $_R0, $_R0

rsaz-4k-avx512.pl
  118  my ($T0,$T0h,$T1,$T1h,$T2,$T2h,$T3,$T3h,$T4,$T4h) = ("$zero", "$Bi", "$Yi", map("%ymm$_", (23..29))…
  220  vpbroadcastq $_acc, $T0
  221  vpblendd \$3, $T0, $_R0, $_R0
  225  vpsrlq \$52, $_R0, $T0
  245  valignq \$3, $T0, $T0h, $T0h
  246  valignq \$3, .Lzeros(%rip), $T0, $T0
  261  vpaddq $T0, $_R0, $_R0

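Note: the vpsrlq $52 / valignq $3 / vpaddq sequence repeated in all three files is carry normalization for the radix-2^52 representation used by the AVX-512 IFMA multipliers: each 64-bit lane holds a 52-bit limb, vpsrlq extracts the carries, valignq shifts them up one lane so each carry faces the next limb, and vpaddq folds them in. A scalar sketch of the same idea (illustrative, not the vector code):

  #include <stdint.h>

  /* Propagate carries across n limbs of 52 bits stored in 64-bit words,
   * the scalar analogue of vpsrlq $52 + valignq $3 + vpaddq. The carry
   * out of the top limb is returned for the caller to handle. */
  static uint64_t normalize_radix52(uint64_t *limbs, int n)
  {
      uint64_t carry = 0;
      for (int i = 0; i < n; i++) {
          limbs[i] += carry;
          carry = limbs[i] >> 52;          /* vpsrlq $52 */
          limbs[i] &= (1ULL << 52) - 1;    /* keep the low 52 bits */
      }
      return carry;
  }
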