/openssl/crypto/des/
H A D | cfb_enc.c |
    63  c2ln(in, d0, d1, n);       in DES_cfb_encrypt()
    65  d0 ^= ti[0];               in DES_cfb_encrypt()
    67  l2cn(d0, d1, out, n);      in DES_cfb_encrypt()
    75  v1 = d0;                   in DES_cfb_encrypt()
    77  v0 = d0;                   in DES_cfb_encrypt()
    84  l2c(d0, iv);               in DES_cfb_encrypt()
   110  c2ln(in, d0, d1, n);       in DES_cfb_encrypt()
   118  v1 = d0;                   in DES_cfb_encrypt()
   120  v0 = d0;                   in DES_cfb_encrypt()
   127  l2c(d0, iv);               in DES_cfb_encrypt()
   [all …]

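All of these hits are one pattern: c2ln()/l2cn() pack up to n message bytes into the DES_LONG halves d0/d1, the keystream ti[] is XORed in, and the shift-register halves v0/v1 take the feedback. A minimal sketch of the full-block case in portable C; toy_enc() is a placeholder stand-in, NOT DES, and cfb64_encrypt() is not the OpenSSL API:

    #include <stdint.h>
    #include <stddef.h>
    #include <string.h>

    /* Toy 64-bit block "cipher"; a placeholder for DES_encrypt1(). */
    static void toy_enc(uint8_t b[8], const uint8_t k[8])
    {
        for (int i = 0; i < 8; i++)
            b[i] = (uint8_t)((b[i] ^ k[i]) + 0x9e);
    }

    /*
     * Full-block (numbits == 64) CFB encryption in the shape of
     * DES_cfb_encrypt(): encrypt the shift register, XOR the keystream
     * into the plaintext, feed the ciphertext back. The c2ln()/l2cn()
     * hits above are the sub-block case, packing n < 8 bytes into the
     * d0/d1 register halves.
     */
    static void cfb64_encrypt(const uint8_t *in, uint8_t *out, size_t len,
                              const uint8_t key[8], uint8_t ivec[8])
    {
        uint8_t ks[8];

        while (len >= 8) {
            memcpy(ks, ivec, 8);
            toy_enc(ks, key);              /* keystream = E_k(shift register) */
            for (int i = 0; i < 8; i++)
                out[i] = in[i] ^ ks[i];    /* C = P ^ keystream */
            memcpy(ivec, out, 8);          /* feedback: register <- ciphertext */
            in += 8; out += 8; len -= 8;
        }
    }
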
H A D | cfb64ede.c |
   114  c2ln(in, d0, d1, n);       in DES_ede3_cfb_encrypt()
   116  d0 ^= ti[0];               in DES_ede3_cfb_encrypt()
   118  l2cn(d0, d1, out, n);      in DES_ede3_cfb_encrypt()
   126  v1 = d0;                   in DES_ede3_cfb_encrypt()
   128  v0 = d0;                   in DES_ede3_cfb_encrypt()
   134  l2c(d0, iv);               in DES_ede3_cfb_encrypt()
   155  c2ln(in, d0, d1, n);       in DES_ede3_cfb_encrypt()
   163  v1 = d0;                   in DES_ede3_cfb_encrypt()
   165  v0 = d0;                   in DES_ede3_cfb_encrypt()
   171  l2c(d0, iv);               in DES_ede3_cfb_encrypt()
   [all …]

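cfb64ede.c is the same feedback loop; only the keystream pass differs, running the register through triple-DES in encrypt-decrypt-encrypt order (the real code packs d0/d1 and calls DES_encrypt3()). A sketch of just that step, reusing toy_enc() from the sketch above plus its inverse:

    /* Inverse of the toy_enc() stand-in above; placeholder, NOT DES. */
    static void toy_dec(uint8_t b[8], const uint8_t k[8])
    {
        for (int i = 0; i < 8; i++)
            b[i] = (uint8_t)((uint8_t)(b[i] - 0x9e) ^ k[i]);
    }

    /* Keystream step for EDE triple-DES CFB: ks = E(k3, D(k2, E(k1, iv))).
     * With k1 == k2 the first two passes cancel, leaving single-key
     * encryption, which is why two-key and three-key 3DES share a path. */
    static void ede3_keystream(uint8_t ks[8], const uint8_t iv[8],
                               const uint8_t k1[8], const uint8_t k2[8],
                               const uint8_t k3[8])
    {
        memcpy(ks, iv, 8);
        toy_enc(ks, k1);
        toy_dec(ks, k2);
        toy_enc(ks, k3);
    }
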
H A D | ofb_enc.c |
    28  register DES_LONG d0, d1, vv0, vv1, v0, v1, n = (numbits + 7) / 8;   in DES_ofb_encrypt() local
    62  c2ln(in, d0, d1, n);                                 in DES_ofb_encrypt()
    64  d0 = (d0 ^ vv0) & mask0;                             in DES_ofb_encrypt()
    66  l2cn(d0, d1, out, n);                                in DES_ofb_encrypt()
    87  v0 = v1 = d0 = d1 = ti[0] = ti[1] = vv0 = vv1 = 0;   in DES_ofb_encrypt()

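OFB differs from CFB in one place: the feedback is the keystream itself, never the ciphertext, which is why line 64 above masks the XOR result (d0 ^ vv0) & mask0 for the sub-block width but feeds back only vv0/vv1. A full-block sketch, assuming the toy_enc() stand-in from the CFB sketch:

    /* Full-block OFB in the shape of DES_ofb_encrypt(): the cipher
     * output is both the keystream and the next register value, so
     * encrypting ivec in place advances the state. */
    static void ofb64_encrypt(const uint8_t *in, uint8_t *out, size_t len,
                              const uint8_t key[8], uint8_t ivec[8])
    {
        while (len >= 8) {
            toy_enc(ivec, key);            /* vv = E_k(v); also the next v */
            for (int i = 0; i < 8; i++)
                out[i] = in[i] ^ ivec[i];  /* ciphertext never enters state */
            in += 8; out += 8; len -= 8;
        }
    }
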
/openssl/crypto/chacha/asm/
H A D | chacha-c64xplus.pl |
   141  || SWAP2 @X[$d0],@X[$d0]
   171  XOR @Y[$a0],@Y[$d0],@Y[$d0]
   177  XOR @X[$a0],@X[$d0],@X[$d0]
   183  || SWAP2 @Y[$d0],@Y[$d0]
   185  ROTL @X[$d0],8,@X[$d0]
   242  ROTL @Y[$d0],8,@Y[$d0]
   293  || ROTL @X[$d0],8,@X[$d0]
   351  || ROTL @Y[$d0],8,@Y[$d0]
   606  SWAP2 @X[$d0],@X[$d0]
   632  ROTL @X[$d0],8,@X[$d0]
   [all …]

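This file and the nine ports below are all the same ChaCha quarter-round rendered for different ISAs, which is why the d0 hits are dominated by XORs and rotates by 16 and 8 (SWAP2, extr.u/dep, rll, rotri.w, ror_32, rotlwi, vrlw, vpshufb). The reference operation, from RFC 7539, in plain C:

    #include <stdint.h>

    #define ROTL32(v, n) (((v) << (n)) | ((v) >> (32 - (n))))

    /* ChaCha quarter-round (RFC 7539, section 2.1). The a/b/c/d indices
     * play the $a0/$b0/$c0/$d0 roles above: d takes the 16- and 8-bit
     * rotations, b takes 12 and 7. */
    static void quarter_round(uint32_t x[16], int a, int b, int c, int d)
    {
        x[a] += x[b]; x[d] ^= x[a]; x[d] = ROTL32(x[d], 16);
        x[c] += x[d]; x[b] ^= x[c]; x[b] = ROTL32(x[b], 12);
        x[a] += x[b]; x[d] ^= x[a]; x[d] = ROTL32(x[d], 8);
        x[c] += x[d]; x[b] ^= x[c]; x[b] = ROTL32(x[b], 7);
    }

The d-word rotations (16 and 8) are byte-aligned, so the ports can use byte swaps and shuffles for them; only the 12 and 7 rotations on b need genuine bit shifts.
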
H A D | chacha-ia64.pl |
   101  my ($a0,$b0,$c0,$d0)=@_;
   111  xor @x[$d0]=@x[$d0],@x[$a0]
   115  extr.u @y[0]=@x[$d0],16,16 };;
   117  dep @x[$d0]=@x[$d0],@y[0],16,16 };;
   118  { .mii; add @x[$c0]=@x[$c0],@x[$d0]
   147  xor @x[$d0]=@x[$d0],@x[$a0] };;
   149  extr.u @y[0]=@x[$d0],24,8
   150  dep.z @x[$d0]=@x[$d0],8,24 };;
   151  { .mii; or @x[$d0]=@x[$d0],@y[0]
   182  $code.=<<___ if ($d0 == 12);
   [all …]

H A D | chacha-s390x.pl |
    97  rll (@x[$d0],@x[$d0],16);
   111  rll (@x[$d0],@x[$d0],8);
   188  verllf (@x[$d0],@x[$d0],8);
   822  vaf ($d0,$d0,@K[3]);
   829  vperm ($d0,$d0,$d0,$beperm);
   841  vx ($d0,$d0,$t3);
   868  vx ($d0,$d0,$d1);
   894  vx ($d0,$d0,$d1);
   922  vx ($d0,$d0,$d1);
   951  vx ($d0,$d0,$d1);
   [all …]

H A D | chacha-armv4.pl |
    65  my ($a0,$b0,$c0,$d0)=@_;
    69  my $odd = $d0&1;
   799  vadd.i32 $d0,$d0,$t3
   813  veor $d0,$d0,$t3
   821  vst1.8 {$c0-$d0},[r14]!
   836  vadd.i32 $d0#lo,$d0#lo,$t0#lo @ next counter value
  1003  vst1.8 {$c0-$d0},[@t[0]]
  1013  veor $d0,$d0,$t3
  1015  vst1.8 {$c0-$d0},[r14]!
  1034  veor $d0,$d0,$t3
   [all …]

H A D | chacha-loongarch64.pl |
   115  xor @x[$d0],@x[$d0],@x[$a0]
   116  rotri.w @x[$d0],@x[$d0],16 # rotate left 16 bits
   129  xor @x[$d0],@x[$d0],@x[$a0]
   130  rotri.w @x[$d0],@x[$d0],24 # rotate left 8 bits
   404  vxor.v @x[$d0],@x[$d0],@x[$a0]
   405  vrotri.w @x[$d0],@x[$d0],16 # rotate left 16 bits
   418  vxor.v @x[$d0],@x[$d0],@x[$a0]
   419  vrotri.w @x[$d0],@x[$d0],24 # rotate left 8 bits
   828  xvxor.v @x[$d0],@x[$d0],@x[$a0]
   842  xvxor.v @x[$d0],@x[$d0],@x[$a0]
   [all …]

H A D | chacha-armv8.pl |
    74  my ($a0,$b0,$c0,$d0)=@_;
    84  "&eor_32 (@x[$d0],@x[$d0],@x[$a0])",
    88  "&ror_32 (@x[$d0],@x[$d0],16)",
    93  "&add_32 (@x[$c0],@x[$c0],@x[$d0])",
   110  "&eor_32 (@x[$d0],@x[$d0],@x[$a0])",
   114  "&ror_32 (@x[$d0],@x[$d0],24)",
   394  my ($a0,$b0,$c0,$d0)=@_;
   405  "&eor (@x[$d0],@x[$d0],@x[$a0])",
   409  "&rev32_16 (@x[$d0],@x[$d0])",
   414  "&add (@x[$c0],@x[$c0],@x[$d0])",
   [all …]

H A D | chacha-ppc.pl |
    95  my ($a0,$b0,$c0,$d0)=@_;
   105  "&xor (@x[$d0],@x[$d0],@x[$a0])",
   109  "&rotlwi (@x[$d0],@x[$d0],16)",
   114  "&add (@x[$c0],@x[$c0],@x[$d0])",
   131  "&xor (@x[$d0],@x[$d0],@x[$a0])",
   135  "&rotlwi (@x[$d0],@x[$d0],8)",
   928  my ($a0,$b0,$c0,$d0)=@_;
   939  "&vxor (@x[$d0],@x[$d0],@x[$a0])",
   943  "&vrlw (@x[$d0],@x[$d0],'$sixteen')",
   965  "&vxor (@x[$d0],@x[$d0],@x[$a0])",
   [all …]

H A D | chachap10-ppc.pl |
   106  my ($a0,$b0,$c0,$d0)=@_;
   117  "&vxor (@x[$d0],@x[$d0],@x[$a0])",
   121  "&vrlw (@x[$d0],@x[$d0],'$sixteen')",
   126  "&vadduwm (@x[$c0],@x[$c0],@x[$d0])",
   143  "&vxor (@x[$d0],@x[$d0],@x[$a0])",
   147  "&vrlw (@x[$d0],@x[$d0],'$eight')",
   152  "&vadduwm (@x[$c0],@x[$c0],@x[$d0])",
   530  "&vxor (@x[$d0],@x[$d0],@x[$a0])",
   539  "&vrlw (@x[$d0],@x[$d0],@x[$c7])",
   593  "&vxor (@x[$d0],@x[$d0],@x[$a0])",
   [all …]

H A D | chacha-x86_64.pl |
  1392  "&vpxor (@x[$d0],@x[$a0],@x[$d0])",
  1396  "&vprotd (@x[$d0],@x[$d0],16)",
  1418  "&vpxor (@x[$d0],@x[$a0],@x[$d0])",
  1422  "&vprotd (@x[$d0],@x[$d0],8)",
  1862  "&vpxor (@x[$d0],@x[$a0],@x[$d0])",
  1863  "&vpshufb (@x[$d0],@x[$d0],$t1)",
  1881  "&vpxor (@x[$d0],@x[$a0],@x[$d0])",
  1882  "&vpshufb (@x[$d0],@x[$d0],$t0)",
  2909  "&vpxord (@x[$d0],@x[$d0],@x[$a0])",
  2913  "&vprold (@x[$d0],@x[$d0],16)",
  [all …]

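The x86_64 hits show three generations of the d-row rotate: XOP vprotd and AVX-512 vprold are true rotates with an immediate, while the SSSE3/AVX2 path synthesizes the byte-aligned rotations with vpshufb and a shuffle mask ($t0/$t1). A sketch of that shuffle trick with SSE intrinsics; only whole-byte rotations (8, 16, 24) can be expressed this way:

    #include <tmmintrin.h>   /* SSSE3; compile with -mssse3 */

    /* Rotating a 32-bit lane by a multiple of 8 just permutes bytes
     * within the lane, so one shuffle replaces shift/shift/or. Mask
     * byte i names the source byte for destination byte i. */
    static __m128i rotl32x4_by16(__m128i v)
    {
        const __m128i m = _mm_setr_epi8(2, 3, 0, 1, 6, 7, 4, 5,
                                        10, 11, 8, 9, 14, 15, 12, 13);
        return _mm_shuffle_epi8(v, m);
    }

    static __m128i rotl32x4_by8(__m128i v)
    {
        const __m128i m = _mm_setr_epi8(3, 0, 1, 2, 7, 4, 5, 6,
                                        11, 8, 9, 10, 15, 12, 13, 14);
        return _mm_shuffle_epi8(v, m);
    }

The 12- and 7-bit rotations have no byte alignment, so those stay as two shifts and an OR on hardware without a native vector rotate.
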
H A D | chacha-armv8-sve.pl |
   177  my ($a0,$b0,$c0,$d0,$a1,$b1,$c1,$d1,$a2,$b2,$c2,$d2,$a3,$b3,$c3,$d3) = @_;
   180  &SVE2_XAR(16,$d0,$a0,$d1,$a1,$d2,$a2,$d3,$a3);
   182  &SVE_ADD($c0,$d0,$c1,$d1,$c2,$d2,$c3,$d3);
   186  &SVE2_XAR(8,$d0,$a0,$d1,$a1,$d2,$a2,$d3,$a3);
   188  &SVE_ADD($c0,$d0,$c1,$d1,$c2,$d2,$c3,$d3);
   196  &SVE_EOR($d0,$a0,$d1,$a1,$d2,$a2,$d3,$a3);
   197  &SVE_REV16($d0,$d1,$d2,$d3);
   199  &SVE_ADD($c0,$d0,$c1,$d1,$c2,$d2,$c3,$d3);
   206  &SVE_EOR($d0,$a0,$d1,$a1,$d2,$a2,$d3,$a3);
   207  &SVE_ROT8($d0,$d1,$d2,$d3);
   [all …]

/openssl/crypto/bn/
H A D | bn_div.c |
   270  BN_ULONG d0, d1;                  in bn_div_fixed_top() local
   323  d0 = sdiv->d[div_n - 1];          in bn_div_fixed_top()
   352  if (n0 == d0)                     in bn_div_fixed_top()
   362  q = bn_div_words(n0, n1, d0);     in bn_div_fixed_top()
   370  rem = (n1 - q * d0) & BN_MASK2;   in bn_div_fixed_top()
   378  rem += d0;                        in bn_div_fixed_top()
   379  if (rem < d0)                     in bn_div_fixed_top()
   386  q = bn_div_words(n0, n1, d0);     in bn_div_fixed_top()
   388  rem = (n1 - q * d0) & BN_MASK2;   in bn_div_fixed_top()
   411  rem += d0;                        in bn_div_fixed_top()
   [all …]

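These hits are the quotient-digit estimate of schoolbook long division (Knuth vol. 2, Algorithm D): guess q from the top divisor word d0 via bn_div_words(), then correct the guess against the second word d1, giving d0 back to the remainder on each step down. A simplified sketch with 32-bit limbs so the double-wide arithmetic is plain uint64_t; it assumes the divisor was normalized (top bit of d0 set), as BN_div arranges by shifting, and estimate_q() is illustrative, not the OpenSSL code:

    #include <stdint.h>

    static uint32_t estimate_q(uint32_t n0, uint32_t n1, uint32_t n2,
                               uint32_t d0, uint32_t d1)
    {
        uint64_t n01 = ((uint64_t)n0 << 32) | n1;
        uint64_t q, rem;

        if (n0 == d0)               /* quotient digit would overflow; */
            q = 0xffffffffu;        /* clamp, like the n0 == d0 path  */
        else
            q = n01 / d0;           /* bn_div_words(n0, n1, d0)       */
        rem = n01 - q * d0;

        /* q is at most two too large: while q*d1 overshoots what the
         * next dividend word n2 can absorb, step q down and return d0
         * to the remainder (the rem += d0 hits above). */
        while (rem <= 0xffffffffu && q * d1 > ((rem << 32) | n2)) {
            q--;
            rem += d0;
        }
        return (uint32_t)q;
    }
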
H A D | bn_gf2m.c |
   296  int n, dN, d0, d1;      in BN_GF2m_mod_arr() local
   334  d0 = n % BN_BITS2;      in BN_GF2m_mod_arr()
   335  d1 = BN_BITS2 - d0;     in BN_GF2m_mod_arr()
   338  if (d0)                 in BN_GF2m_mod_arr()
   344  d0 = p[0] % BN_BITS2;   in BN_GF2m_mod_arr()
   345  d1 = BN_BITS2 - d0;     in BN_GF2m_mod_arr()
   347  if (d0)                 in BN_GF2m_mod_arr()
   354  d0 = p[0] % BN_BITS2;   in BN_GF2m_mod_arr()
   355  zz = z[dN] >> d0;       in BN_GF2m_mod_arr()
   358  d1 = BN_BITS2 - d0;     in BN_GF2m_mod_arr()
   [all …]

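In BN_GF2m_mod_arr() the pair d0 = shift % BN_BITS2, d1 = BN_BITS2 - d0 splits every reduction XOR across two words, since the terms of the reduction polynomial rarely land on a word boundary. A sketch of that split; xor_shifted() is illustrative, with 64-bit words assumed:

    #include <stdint.h>

    /* XOR a word w into dst at an arbitrary bit offset. GF(2)
     * arithmetic, so "add" is XOR and there are no carries: the low
     * d1 bits of w go into dst[idx] (shifted up by d0) and the high
     * d0 bits spill into dst[idx + 1]. */
    static void xor_shifted(uint64_t *dst, uint64_t w, unsigned shift)
    {
        unsigned idx = shift / 64, d0 = shift % 64, d1 = 64 - d0;

        if (d0 == 0) {                /* aligned: one word touched */
            dst[idx] ^= w;
        } else {                      /* unaligned: split at d0/d1 */
            dst[idx]     ^= w << d0;
            dst[idx + 1] ^= w >> d1;
        }
    }

The d0 == 0 branch mirrors the if (d0) tests above: when the shift is word-aligned, the w >> d1 half would be a shift by 64, which is undefined in C, so the aligned case is handled separately.
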
/openssl/crypto/poly1305/asm/
H A D | poly1305-armv8.pl |
   105  csel $d0,$d0,$r0,eq
   111  stp $d0,$d1,[$len]
   153  adds $d0,$d0,$t0
   174  adds $h0,$d0,$t0
   204  csel $h0,$h0,$d0,eq
   242  adds $d0,$d0,$t0
   263  adds $h0,$d0,$t0
   349  rev $d0,$d0
   395  rev $d0,$d0
   910  add $d0,$d0,$h2,lsr#2
   [all …]

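The csel hits are the constant-time tail of the MAC: h is compared against p = 2^130 - 5 by adding 5 and checking the carry out of bit 129, and a select rather than a branch picks the reduced value before the nonce is added. A sketch in C, assuming the block loop left h below about 2^131 (so g2 >> 2 is 0 or 1) and a little-endian host; emit_sketch() is illustrative, not OpenSSL's poly1305_emit():

    #include <stdint.h>
    #include <string.h>
    typedef unsigned __int128 u128;

    static void emit_sketch(uint8_t mac[16], uint64_t h0, uint64_t h1,
                            uint64_t h2, const uint64_t nonce[2])
    {
        uint64_t g0, g1, g2, mask;
        u128 t;

        /* g = h + 5; a carry out of bit 129 means h >= p */
        t  = (u128)h0 + 5;                   g0 = (uint64_t)t;
        t  = (u128)h1 + (uint64_t)(t >> 64); g1 = (uint64_t)t;
        g2 = h2 + (uint64_t)(t >> 64);

        mask = 0 - (g2 >> 2);                /* all-ones iff h >= p */
        h0 = (h0 & ~mask) | (g0 & mask);     /* branchless select (csel) */
        h1 = (h1 & ~mask) | (g1 & mask);

        /* tag = (h + nonce) mod 2^128 */
        t  = (u128)h0 + nonce[0];
        h0 = (uint64_t)t;
        h1 = h1 + nonce[1] + (uint64_t)(t >> 64);

        memcpy(mac, &h0, 8);
        memcpy(mac + 8, &h1, 8);
    }
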
H A D | poly1305-ppc.pl |
   139  and $d0,$d0,$h0
   192  lwbrx $d0,$d0,$inp
   211  addc $d0,$d0,$t0
   522  addc $d0,$d0,$t0
   542  addc $d0,$d0,$t0
   562  addc $d0,$d0,$t0
   842  addc $d0,$d0,$t0
   882  add $d0,$d0,$d1 # * 5
   888  add $d0,$d0,$d2 # * 5
   895  add $d0,$d0,$d1 # * 5
   [all …]

H A D | poly1305-sparcv9.pl |
   187  srlx $d0,$shr,$d0
   190  or $t1,$d0,$d0
   195  srlx $d0,32,$t0
   203  umul $r0,$h0,$d0
   213  add $t0,$d0,$d0
   222  add $t1,$d0,$d0
   231  add $t2,$d0,$d0
   243  srlx $d0,32,$h1
   939  andn $d0,$mask,$d0 ! mask exponent
   967  and $d0,$mask,$d0
   [all …]

H A D | poly1305-ppcfp.pl |
   579  andc $d0,$d0,$mask # mask exponent
   593  adde $h1,$h1,$d0
   609  and $d0,$d0,$mask
   612  or $h0,$h0,$d0
   613  lwz $d0,0($nonce) # load nonce
   625  addc $h0,$h0,$d0 # accumulate nonce
   633  add $h1,$h1,$d0
   637  srdi $d0,$h0,32
   638  add $h1,$h1,$d0
   660  and $d0,$d0,$mask
   [all …]

H A D | poly1305-s390x.pl |
   244  lg ($d0,"0($ctx)");
   249  srlg ($h0,$d0,32);
   276  xgr ($h0,$d0);
   391  vlvgg ($R1,$d0,0);
   395  vlvgg ($R3,$d0,0);
   465  vlvgg ($H1,$d0,0);
   469  vlvgg ($H3,$d0,0);
   928  xgr ($h0,$d0);
   933  lgr ($d0,$h0);
   945  xgr ($h0,$d0);
   [all …]

H A D | poly1305-mips.pl |
   174  my ($h0,$h1,$h2,$r0,$r1,$s1,$d0,$d1,$d2) =
   280  mflo ($d0,$r0,$h0)
   290  daddu $d0,$tmp0
   294  sltu $tmp0,$d0,$tmp0
   324  daddu $h0,$d0,$tmp0

/openssl/crypto/poly1305/
H A D | poly1305.c |
   154  u128 d0, d1;                                                 in poly1305_blocks() local
   167  h0 = (u64)(d0 = (u128)h0 + U8TOU64(inp + 0));                in poly1305_blocks()
   168  h1 = (u64)(d1 = (u128)h1 + (d0 >> 64) + U8TOU64(inp + 8));   in poly1305_blocks()
   176  d0 = ((u128)h0 * r0) +                                       in poly1305_blocks()
   185  h0 = (u64)d0;                                                in poly1305_blocks()
   186  h1 = (u64)(d1 += d0 >> 64);                                  in poly1305_blocks()
   296  u64 d0, d1, d2, d3;                                          in poly1305_blocks() local
   315  h0 = (u32)(d0 = (u64)h0 + U8TOU32(inp + 0));                 in poly1305_blocks()
   322  d0 = ((u64)h0 * r0) +                                        in poly1305_blocks()
   345  h0 = (u32)d0;                                                in poly1305_blocks()
   [all …]

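poly1305.c carries the same algorithm in C for two word sizes; d0..d3 are the double-wide column accumulators (u128 over u64 limbs, or u64 over u32 limbs). A single-block sketch of the 64-bit path, assuming a little-endian host, a fixed pad bit, and the standard clamping of r so the h2 products fit; the real code loops over blocks and handles the trailing partial one:

    #include <stdint.h>
    #include <string.h>
    typedef unsigned __int128 u128;

    /* One Poly1305 block in base 2^64: h = (h + m + 2^128) * r,
     * partially reduced mod 2^130 - 5. h2 holds the bits above 2^128. */
    static void poly1305_block_sketch(uint64_t h[3], const uint8_t inp[16],
                                      uint64_t r0, uint64_t r1)
    {
        uint64_t s1 = r1 + (r1 >> 2);   /* r1 * 5/4: folds 2^130 = 5 */
        uint64_t h0 = h[0], h1 = h[1], h2 = h[2], m0, m1, c;
        u128 d0, d1;

        memcpy(&m0, inp, 8);
        memcpy(&m1, inp + 8, 8);

        /* accumulate the block; d0/d1 catch the carries (lines 167-168) */
        h0 = (uint64_t)(d0 = (u128)h0 + m0);
        h1 = (uint64_t)(d1 = (u128)h1 + (uint64_t)(d0 >> 64) + m1);
        h2 += (uint64_t)(d1 >> 64) + 1;     /* +1 = pad bit at 2^128 */

        /* h *= r, column products as on line 176 */
        d0 = (u128)h0 * r0 + (u128)h1 * s1;
        d1 = (u128)h0 * r1 + (u128)h1 * r0 + (u128)h2 * s1;
        h2 = h2 * r0;                       /* fits: r0 is clamped */

        h0 = (uint64_t)d0;
        h1 = (uint64_t)(d1 += d0 >> 64);
        h2 += (uint64_t)(d1 >> 64);

        /* partial reduction: fold bits >= 2^130 back in, times 5 */
        c  = 5 * (h2 >> 2);
        h2 &= 3;
        h0 = (uint64_t)(d0 = (u128)h0 + c);
        h1 = (uint64_t)(d1 = (u128)h1 + (uint64_t)(d0 >> 64));
        h2 += (uint64_t)(d1 >> 64);

        h[0] = h0; h[1] = h1; h[2] = h2;
    }
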
H A D | poly1305_base2_44.c |
    88  u128 d0, d1, d2;                                            in poly1305_blocks() local
   114  d0 = ((u128)h0 * r0) + ((u128)h1 * s2) + ((u128)h2 * s1);   in poly1305_blocks()
   119  h0 = (u64)d0 & 0x0fffffffffff;                              in poly1305_blocks()
   120  h1 = (u64)(d1 += (u64)(d0 >> 44)) & 0x0fffffffffff;         in poly1305_blocks()

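The base 2^44 variant trades 64-bit limbs for 44/44/42-bit ones so every carry between the u128 columns d0..d2 is a plain shift, exactly the lines 119-120 quoted above. A sketch of that carry chain; carry_2_44() is illustrative:

    #include <stdint.h>
    typedef unsigned __int128 u128;

    /* Carry propagation in base 2^44: h0/h1 hold 44 bits, h2 holds 42,
     * so three limbs cover 2^130. The top carry folds back multiplied
     * by 5 because 2^130 = 5 (mod 2^130 - 5). */
    static void carry_2_44(u128 d0, u128 d1, u128 d2,
                           uint64_t *h0, uint64_t *h1, uint64_t *h2)
    {
        const uint64_t M44 = 0x0fffffffffff;   /* 2^44 - 1 */
        const uint64_t M42 = 0x03ffffffffff;   /* 2^42 - 1 */
        uint64_t c;

        *h0 = (uint64_t)d0 & M44;
        d1 += (uint64_t)(d0 >> 44);
        *h1 = (uint64_t)d1 & M44;
        d2 += (uint64_t)(d1 >> 44);
        *h2 = (uint64_t)d2 & M42;
        c   = (uint64_t)(d2 >> 42);            /* bits >= 2^130 */
        *h0 += c * 5;
        /* one lazy carry keeps the limbs small enough for the next block */
        *h1 += *h0 >> 44;
        *h0 &= M44;
    }

Keeping the carries this short is the point of the representation: on a 64x64-to-128 multiplier the three column products can be issued back to back without waiting on a long add-with-carry chain.
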
/openssl/test/certs/
H A D | cyrillic.utf8 |
    44  c5:c9:e1:d0:70:c0:bf:5d:21:58:da:df:4e:cf:27:6a:1b:bc:
    47  1e:6f:8c:4f:78:d0:ec:06:89:6f:2b:a8:92:05:d9:39:08:55:
    48  95:5f:2a:d0:5c:ac:b8:5c:08:38:ea:e0:22:7a:aa:b7:af:36:
    55  93:d0:05:c6:f6:36:2e:12:f8:11:91:d3:a1:fd:08:bf:5f:62:

H A D | cyrillic.msb |
    44  c5:c9:e1:d0:70:c0:bf:5d:21:58:da:df:4e:cf:27:6a:1b:bc:
    47  1e:6f:8c:4f:78:d0:ec:06:89:6f:2b:a8:92:05:d9:39:08:55:
    48  95:5f:2a:d0:5c:ac:b8:5c:08:38:ea:e0:22:7a:aa:b7:af:36:
    55  93:d0:05:c6:f6:36:2e:12:f8:11:91:d3:a1:fd:08:bf:5f:62:
