/openssl/crypto/modes/
xts128.c
    33   } tweak, scratch;                               in CRYPTO_xts128_encrypt() local
    39   memcpy(tweak.c, iv, 16);                        in CRYPTO_xts128_encrypt()
    41   (*ctx->block2) (tweak.c, tweak.c, ctx->key2);   in CRYPTO_xts128_encrypt()
    49   scratch.u[0] ^= tweak.u[0];                     in CRYPTO_xts128_encrypt()
    50   scratch.u[1] ^= tweak.u[1];                     in CRYPTO_xts128_encrypt()
    57   scratch.u[0] ^= tweak.u[0];                     in CRYPTO_xts128_encrypt()
    58   scratch.u[1] ^= tweak.u[1];                     in CRYPTO_xts128_encrypt()
    76   tweak.u[0] = (tweak.u[0] << 1) ^ res;           in CRYPTO_xts128_encrypt()
    77   tweak.u[1] = (tweak.u[1] << 1) | carry;         in CRYPTO_xts128_encrypt()
    86   tweak.c[i] = (u8)c;                             in CRYPTO_xts128_encrypt()
    [all …]

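Lines 76-86 of the excerpt are the per-block tweak update: the 128-bit tweak is multiplied by the primitive element alpha of GF(2^128) in the IEEE P1619 convention, i.e. the 16 bytes are treated as a little-endian integer, shifted left by one bit, and the bit that drops out of position 127 is folded back in as an XOR of 0x87 into byte 0. A minimal byte-wise sketch of that update follows; the function name is hypothetical and this illustrates the technique, it is not the OpenSSL source verbatim.

    /* Hypothetical helper: multiply an XTS tweak by alpha in GF(2^128),
     * IEEE P1619 convention (little-endian bit order, feedback 0x87). */
    static void xts_tweak_times_alpha(unsigned char tweak[16])
    {
        unsigned int carry = 0;

        for (int i = 0; i < 16; i++) {
            unsigned int c = ((unsigned int)tweak[i] << 1) | carry;

            tweak[i] = (unsigned char)c;  /* keep the low 8 bits            */
            carry = c >> 8;               /* bit shifted out of this byte   */
        }
        if (carry)                        /* bit 127 fell off: reduce       */
            tweak[0] ^= 0x87;
    }
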
xts128gb.c
    33   } tweak, scratch;                                    in ossl_crypto_xts128gb_encrypt() local
    39   memcpy(tweak.c, iv, 16);                             in ossl_crypto_xts128gb_encrypt()
    41   (*ctx->block2) (tweak.c, tweak.c, ctx->key2);        in ossl_crypto_xts128gb_encrypt()
    49   scratch.u[0] ^= tweak.u[0];                          in ossl_crypto_xts128gb_encrypt()
    78   u8 *p = tweak.c;                                     in ossl_crypto_xts128gb_encrypt()
    85   tweak.u[1] = hi >> 1;                                in ossl_crypto_xts128gb_encrypt()
    92   p = tweak.c;                                         in ossl_crypto_xts128gb_encrypt()
    97   tweak.u[0] = lo;                                     in ossl_crypto_xts128gb_encrypt()
    98   tweak.u[1] = hi;                                     in ossl_crypto_xts128gb_encrypt()
    104  tweak.c[i] = ((tweak.c[i] >> 1) + carry) & 0xff;     in ossl_crypto_xts128gb_encrypt()
    [all …]

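Both modes/ files implement the same XTS data path that the hits trace: the IV is encrypted under the second key to obtain the initial tweak (line 41), the tweak is XORed in before and after the single-block cipher call (lines 49 and 57 in xts128.c), and it is then advanced by another multiplication by alpha for the next block. In the usual XTS notation, with K1 and K2 the two halves of the key and P_j, C_j the j-th plaintext and ciphertext blocks (ciphertext stealing for a short final block left aside), the per-block relation is:

    \[
      T_0 = E_{K_2}(\mathrm{IV}), \qquad
      T_j = T_{j-1} \otimes \alpha, \qquad
      C_j = E_{K_1}(P_j \oplus T_j) \oplus T_j .
    \]

The two files differ only in the bit-order convention used for the T ⊗ α step: xts128.c shifts the tweak left and folds 0x87 back in (IEEE P1619), whereas xts128gb.c performs the multiplication in the reversed bit order used by the GB/T variant of XTS, which is why its excerpt shows right shifts (lines 85 and 104) instead.
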
/openssl/crypto/sm4/asm/
vpsm4_ex-armv8.pl
    1236  &rev32(@tweak[0],@tweak[0]);
    1258  &rbit(@tweak[0],@tweak[0],$std);
    1292  &rbit(@tweak[0],@tweak[0],$std);
    1293  &rbit(@tweak[1],@tweak[1],$std);
    1294  &rbit(@tweak[2],@tweak[2],$std);
    1295  &rbit(@tweak[3],@tweak[3],$std);
    1376  mov @tweak[0].16b,@tweak[4].16b
    1377  mov @tweak[1].16b,@tweak[5].16b
    1378  mov @tweak[2].16b,@tweak[6].16b
    1469  mov @tweak[1].16b,@tweak[0].16b
    [all …]

vpsm4-armv8.pl
    1244  &rev32(@tweak[0],@tweak[0]);
    1266  &rbit(@tweak[0],@tweak[0],$std);
    1373  &rbit(@tweak[0],@tweak[0],$std);
    1374  &rbit(@tweak[1],@tweak[1],$std);
    1375  &rbit(@tweak[2],@tweak[2],$std);
    1376  &rbit(@tweak[3],@tweak[3],$std);
    1413  &rbit(@tweak[0],@tweak[0],$std);
    1430  &rbit(@tweak[0],@tweak[0],$std);
    1431  &rbit(@tweak[1],@tweak[1],$std);
    1495  mov @tweak[1].16b,@tweak[0].16b
    [all …]

/openssl/crypto/aes/asm/
aesp8-ppc.pl
    2090  vaddubm $tweak,$tweak,$tweak
    2131  vaddubm $tweak,$tweak,$tweak
    2298  vaddubm $tweak,$tweak,$tweak
    2387  vaddubm $tweak,$tweak,$tweak
    2530  vaddubm $tweak,$tweak,$tweak
    2539  vaddubm $tweak,$tweak,$tweak
    2550  vaddubm $tweak,$tweak,$tweak
    2561  vaddubm $tweak,$tweak,$tweak
    2572  vaddubm $tweak,$tweak,$tweak
    2583  vaddubm $tweak,$tweak,$tweak
    [all …]

aesni-x86_64.pl
    1952  pxor @tweak[5],@tweak[0]
    1968  pxor @tweak[5],@tweak[1]
    1985  pxor @tweak[5],@tweak[2]
    2000  pxor @tweak[5],@tweak[3]
    2013  pxor @tweak[5],@tweak[4]
    2439  pxor @tweak[5],@tweak[0]
    2455  pxor @tweak[5],@tweak[1]
    2472  pxor @tweak[5],@tweak[2]
    2487  pxor @tweak[5],@tweak[3]
    2500  pxor @tweak[5],@tweak[4]
    [all …]

aes-s390x.pl
    1836  la $s2,$tweak($sp)
    1932  x $s1,$tweak+4($sp)
    1933  x $s2,$tweak+8($sp)
    1934  x $s3,$tweak+12($sp)
    2039  la $s2,$tweak($sp)
    2174  x $s1,$tweak+4($sp)
    2175  x $s2,$tweak+8($sp)
    2176  x $s3,$tweak+12($sp)
    2261  x $s1,$tweak+4($sp)
    2262  x $s2,$tweak+8($sp)
    [all …]

aesni-x86.pl
    1182  &paddq ($tweak,$tweak);   # &psllq($tweak,1);
    1240  &paddq ($tweak,$tweak);   # &psllq($tweak,1);
    1262  &paddq ($tweak,$tweak);   # &psllq($tweak,1);
    1271  &paddq ($tweak,$tweak);   # &psllq($tweak,1);
    1281  &paddq ($tweak,$tweak);   # &psllq($tweak,1);
    1428  &paddq ($tweak,$tweak);   # &psllq($tweak,1);
    1537  &paddq ($tweak,$tweak);   # &psllq($tweak,1);
    1595  &paddq ($tweak,$tweak);   # &psllq($tweak,1);
    1617  &paddq ($tweak,$tweak);   # &psllq($tweak,1);
    1626  &paddq ($tweak,$tweak);   # &psllq($tweak,1);
    [all …]

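The `&paddq ($tweak,$tweak); # &psllq($tweak,1);` lines here (and the matching `paddq @XMM[7], @XMM[7]` lines in bsaes-x86_64.pl below) rely on the fact that adding an XMM register to itself is a per-64-bit-lane left shift by one; the bit that crosses the lane boundary and the 0x87 reduction are then patched in from the lanes' sign bits with a shuffle, mask and XOR. A standalone SSE2 intrinsics sketch of that trick is below; the function name is hypothetical and the register choreography of the actual .pl code differs.

    #include <emmintrin.h>        /* SSE2 intrinsics */

    /* Hypothetical helper: multiply an XTS tweak held in an XMM register by
     * alpha in GF(2^128), using the add-to-itself trick from the excerpts. */
    static __m128i xts_tweak_times_alpha_sse2(__m128i tweak)
    {
        /* 0x87 goes to lane 0 (reduction), 1 to lane 2 (cross-lane carry) */
        const __m128i gf_mask = _mm_set_epi32(0, 1, 0, 0x87);

        /* 0xFFFFFFFF in every 32-bit lane whose top bit is set */
        __m128i sign = _mm_srai_epi32(tweak, 31);

        /* move the sign of lane 3 (bit 127) to lane 0 and the sign of
         * lane 1 (bit 63) to lane 2, the two lanes that need a correction */
        sign = _mm_shuffle_epi32(sign, 0x13);

        /* per-lane shift left by one: the paddq/psllq equivalence */
        tweak = _mm_add_epi64(tweak, tweak);

        /* re-insert the bit that crossed from position 63 to 64 and XOR
         * 0x87 into byte 0 if bit 127 dropped out */
        return _mm_xor_si128(tweak, _mm_and_si128(sign, gf_mask));
    }
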
bsaes-armv8.pl
    1504  // generate initial tweak
    1596  str q8, [x2]            // next round tweak
    1610  ldr q11, [x0]           // next round tweak
    1698  str q18, [x0]           // next round tweak
    1714  ldr q11, [x0]           // next round tweak
    1741  ldr q11, [x0]           // next round tweak
    1764  ldr q11, [x0]           // next round tweak
    1789  mov v11.16b, v15.16b    // next round tweak
    1809  mov v11.16b, v14.16b    // next round tweak
    1827  mov v11.16b, v13.16b    // next round tweak
    [all …]

bsaes-x86_64.pl
    2268  paddq @XMM[7], @XMM[7]   # psllq 1,$tweak
    2317  paddq @XMM[7], @XMM[7]   # psllq 1,$tweak
    2335  paddq @XMM[7], @XMM[7]   # psllq 1,$tweak
    2674  paddq @XMM[7], @XMM[7]   # psllq 1,$tweak
    2723  paddq @XMM[7], @XMM[7]   # psllq 1,$tweak
    2741  paddq @XMM[7], @XMM[7]   # psllq 1,$tweak
    2923  paddq @XMM[7], @XMM[7]   # psllq 1,$tweak

/openssl/providers/implementations/ciphers/
cipher_aes_xts_s390x.inc
    44    || ivlen > sizeof(km->tweak)) {
    48    memcpy(km->tweak, iv, ivlen);
    163   memcpy(xctx->base.iv, km->tweak, AES_BLOCK_SIZE);

cipher_aes_xts.h
    28    unsigned char tweak[16];   member

/openssl/util/
check-format.pl
    378   my $tweak = $in_comment == -2 ? 1 : 0;
    379   … report("indent = ".($count + $tweak)." != $comment_indent at end of multi-line comment")
    380   if $count + $tweak != $comment_indent;

/openssl/
NOTES-DJGPP.md
    7     have to tweak the installation yourself, including renaming files

/openssl/doc/man3/
EVP_aes_128_gcm.pod
    162   the XTS "tweak" value.

ENGINE_add.pod
    430   operations that tweak certain behaviour or cause certain operations to take

EVP_EncryptInit.pod
    1053  primitive element E<alpha> to calculate the tweak values. The IEEE