/openssl/crypto/evp/ |
e_aes_cbc_hmac_sha1.c
    147  int blocks;  member
    155  int blocks;  member
    173  } blocks[8];  in tls1_1_multi_block_encrypt() local
    252  edges[i].blocks = 1;  in tls1_1_multi_block_encrypt()
    294  memset(blocks, 0, sizeof(blocks));  in tls1_1_multi_block_encrypt()
    326  memset(blocks, 0, sizeof(blocks));  in tls1_1_multi_block_encrypt()
    339  blocks[i].c[20] = 0x80;  in tls1_1_multi_block_encrypt()
    356  edges[i].blocks = 1;  in tls1_1_multi_block_encrypt()
    402  OPENSSL_cleanse(blocks, sizeof(blocks));  in tls1_1_multi_block_encrypt()
    446  aes_off += blocks;  in aesni_cbc_hmac_sha1_cipher()
    [all …]
|
e_aes_cbc_hmac_sha256.c
    141  int blocks;  member
    149  int blocks;  member
    167  } blocks[8];  in tls1_1_multi_block_encrypt() local
    250  edges[i].blocks = 1;  in tls1_1_multi_block_encrypt()
    292  memset(blocks, 0, sizeof(blocks));  in tls1_1_multi_block_encrypt()
    324  memset(blocks, 0, sizeof(blocks));  in tls1_1_multi_block_encrypt()
    343  blocks[i].c[32] = 0x80;  in tls1_1_multi_block_encrypt()
    366  edges[i].blocks = 1;  in tls1_1_multi_block_encrypt()
    415  OPENSSL_cleanse(blocks, sizeof(blocks));  in tls1_1_multi_block_encrypt()
    474  aes_off += blocks;  in aesni_cbc_hmac_sha256_cipher()
    [all …]
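The blocks[i].c[20] = 0x80 and blocks[i].c[32] = 0x80 hits above are the hand-written SHA-1/SHA-256 padding terminator: the inner HMAC digest (20 or 32 bytes) is copied to the start of a final compression block, followed by 0x80, zero fill and the 64-bit big-endian bit count. A minimal sketch of that layout, assuming the digest plus padding fits in one 64-byte block (true for both lengths); the function name and layout helpers are illustrative, not OpenSSL's:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    /*
     * Lay out one final SHA-style compression block: an already-computed
     * inner digest, the 0x80 terminator, zero padding and the 64-bit
     * big-endian bit count of the whole hashed message.
     * digest_len would be 20 for SHA-1 or 32 for SHA-256 (both <= 55,
     * so everything fits in a single 64-byte block).
     */
    static void pad_final_block(uint8_t block[64],
                                const uint8_t *digest, size_t digest_len,
                                uint64_t total_bytes_hashed)
    {
        uint64_t bits = total_bytes_hashed << 3;   /* message length in bits */
        int i;

        memset(block, 0, 64);
        memcpy(block, digest, digest_len);         /* inner digest at offset 0 */
        block[digest_len] = 0x80;                  /* padding terminator */
        for (i = 0; i < 8; i++)                    /* big-endian bit count */
            block[63 - i] = (uint8_t)(bits >> (8 * i));
    }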
|
e_rc4_hmac_md5.c
    83   md5_off = MD5_CBLOCK - key->md.num, blocks;  in rc4_hmac_md5_cipher() local
    106  blocks *= MD5_CBLOCK;  in rc4_hmac_md5_cipher()
    107  rc4_off += blocks;  in rc4_hmac_md5_cipher()
    108  md5_off += blocks;  in rc4_hmac_md5_cipher()
    109  key->md.Nh += blocks >> 29;  in rc4_hmac_md5_cipher()
    110  key->md.Nl += blocks <<= 3;  in rc4_hmac_md5_cipher()
    111  if (key->md.Nl < (unsigned int)blocks)  in rc4_hmac_md5_cipher()
    150  blocks *= MD5_CBLOCK;  in rc4_hmac_md5_cipher()
    151  rc4_off += blocks;  in rc4_hmac_md5_cipher()
    152  md5_off += blocks;  in rc4_hmac_md5_cipher()
    [all …]
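The Nh/Nl hits above keep MD5's 64-bit message bit count in two 32-bit words: Nh gains the high bits of bytes*8, Nl gains the low bits, and an unsigned wrap of Nl signals a carry. A standalone sketch of the same update, assuming an illustrative context struct rather than OpenSSL's MD5_CTX:

    #include <stdint.h>

    /* Nl holds the low 32 bits of the running bit count, Nh the high bits. */
    struct bitcount {
        uint32_t Nl, Nh;
    };

    static void bitcount_add_bytes(struct bitcount *c, uint32_t bytes)
    {
        uint32_t low_bits = bytes << 3;        /* bytes * 8, low 32 bits */

        c->Nh += bytes >> 29;                  /* high bits of bytes * 8 */
        c->Nl += low_bits;
        if (c->Nl < low_bits)                  /* unsigned wrap => carry */
            c->Nh++;
    }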
|
e_chacha20_poly1305.c
    86   size_t blocks = len / CHACHA_BLK_SIZE;  in chacha_cipher() local
    92   if (sizeof(size_t)>sizeof(unsigned int) && blocks>(1U<<28))  in chacha_cipher()
    93   blocks = (1U<<28);  in chacha_cipher()
    101  ctr32 += (unsigned int)blocks;  in chacha_cipher()
    102  if (ctr32 < blocks) {  in chacha_cipher()
    103  blocks -= ctr32;  in chacha_cipher()
    106  blocks *= CHACHA_BLK_SIZE;  in chacha_cipher()
    107  ChaCha20_ctr32(out, inp, blocks, key->key.d, key->counter);  in chacha_cipher()
    108  len -= blocks;  in chacha_cipher()
    109  inp += blocks;  in chacha_cipher()
    [all …]
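chacha_cipher() (like cipher_chacha20_hw.c and CRYPTO_ctr128_encrypt_ctr32() further down) feeds the low-level routine at most as many blocks as remain before its 32-bit block counter wraps, then fixes up the counter and loops. A minimal sketch of that chunking, assuming a caller-supplied ctr32-style block function; the names here are illustrative:

    #include <stddef.h>
    #include <stdint.h>

    #define BLK_SIZE 64   /* e.g. the ChaCha20 block size */

    /* Caller-supplied primitive with a 32-bit block counter, standing in
     * for ChaCha20_ctr32() or an AES ctr32 routine. */
    typedef void (*ctr32_blocks_f)(unsigned char *out, const unsigned char *in,
                                   size_t blocks, const void *key,
                                   uint32_t counter[4]);

    /*
     * Chunk the input so the primitive's 32-bit block counter (counter[0])
     * never wraps inside a single call; a trailing partial block is left
     * to the caller.
     */
    static void ctr32_stream(unsigned char *out, const unsigned char *in,
                             size_t len, const void *key,
                             uint32_t counter[4], ctr32_blocks_f func)
    {
        while (len >= BLK_SIZE) {
            size_t blocks = len / BLK_SIZE;
            uint32_t ctr32 = counter[0];

            /* keep the per-call count comfortably inside 32 bits */
            if (sizeof(size_t) > sizeof(uint32_t) && blocks > (1U << 28))
                blocks = (1U << 28);

            /* if the counter would wrap mid-call, stop exactly at the wrap */
            ctr32 += (uint32_t)blocks;
            if (ctr32 < (uint32_t)blocks)
                blocks -= ctr32;

            func(out, in, blocks, key, counter);
            counter[0] += (uint32_t)blocks;   /* may legitimately wrap to 0 */

            blocks *= BLK_SIZE;
            len -= blocks;
            in  += blocks;
            out += blocks;
        }
    }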
|
/openssl/providers/implementations/ciphers/ |
cipher_aes_cbc_hmac_sha1_hw.c
    108  int blocks;  member
    114  int blocks;  member
    135  } blocks[8];  in tls1_multi_block_encrypt() local
    214  edges[i].blocks = 1;  in tls1_multi_block_encrypt()
    256  memset(blocks, 0, sizeof(blocks));  in tls1_multi_block_encrypt()
    288  memset(blocks, 0, sizeof(blocks));  in tls1_multi_block_encrypt()
    301  blocks[i].c[20] = 0x80;  in tls1_multi_block_encrypt()
    318  edges[i].blocks = 1;  in tls1_multi_block_encrypt()
    364  OPENSSL_cleanse(blocks, sizeof(blocks));  in tls1_multi_block_encrypt()
    406  aes_off += blocks;  in aesni_cbc_hmac_sha1_cipher()
    [all …]
|
cipher_aes_cbc_hmac_sha256_hw.c
    112  int blocks;  member
    118  int blocks;  member
    139  } blocks[8];  in tls1_multi_block_encrypt() local
    222  edges[i].blocks = 1;  in tls1_multi_block_encrypt()
    264  memset(blocks, 0, sizeof(blocks));  in tls1_multi_block_encrypt()
    296  memset(blocks, 0, sizeof(blocks));  in tls1_multi_block_encrypt()
    315  blocks[i].c[32] = 0x80;  in tls1_multi_block_encrypt()
    338  edges[i].blocks = 1;  in tls1_multi_block_encrypt()
    387  OPENSSL_cleanse(blocks, sizeof(blocks));  in tls1_multi_block_encrypt()
    444  aes_off += blocks;  in aesni_cbc_hmac_sha256_cipher()
    [all …]
|
cipher_chacha20_hw.c
    70   size_t blocks = inl / CHACHA_BLK_SIZE;  in chacha20_cipher() local
    77   if (sizeof(size_t) > sizeof(unsigned int) && blocks > (1U << 28))  in chacha20_cipher()
    78   blocks = (1U << 28);  in chacha20_cipher()
    86   ctr32 += (unsigned int)blocks;  in chacha20_cipher()
    87   if (ctr32 < blocks) {  in chacha20_cipher()
    88   blocks -= ctr32;  in chacha20_cipher()
    91   blocks *= CHACHA_BLK_SIZE;  in chacha20_cipher()
    92   ChaCha20_ctr32(out, in, blocks, ctx->key.d, ctx->counter);  in chacha20_cipher()
    93   inl -= blocks;  in chacha20_cipher()
    94   in += blocks;  in chacha20_cipher()
    [all …]
|
cipher_rc4_hmac_md5_hw.c
    58   size_t md5_off = MD5_CBLOCK - ctx->md.num, blocks;  in cipher_hw_rc4_hmac_md5_cipher() local
    82   blocks *= MD5_CBLOCK;  in cipher_hw_rc4_hmac_md5_cipher()
    83   rc4_off += blocks;  in cipher_hw_rc4_hmac_md5_cipher()
    84   md5_off += blocks;  in cipher_hw_rc4_hmac_md5_cipher()
    85   ctx->md.Nh += blocks >> 29;  in cipher_hw_rc4_hmac_md5_cipher()
    86   ctx->md.Nl += blocks <<= 3;  in cipher_hw_rc4_hmac_md5_cipher()
    87   if (ctx->md.Nl < (unsigned int)blocks)  in cipher_hw_rc4_hmac_md5_cipher()
    128  blocks *= MD5_CBLOCK;  in cipher_hw_rc4_hmac_md5_cipher()
    129  rc4_off += blocks;  in cipher_hw_rc4_hmac_md5_cipher()
    130  md5_off += blocks;  in cipher_hw_rc4_hmac_md5_cipher()
    [all …]
|
cipher_aes_ccm_hw_s390x.inc
    28   sctx->ccm.s390x.blocks = 0;
    92   sctx->ccm.s390x.blocks += 2;
    98   sctx->ccm.s390x.blocks += alen >> 4;
    108  sctx->ccm.s390x.blocks++;
    130  sctx->ccm.s390x.blocks++;
    153  sctx->ccm.s390x.blocks += (((len + 15) >> 4) << 1) + 1;
    154  if (sctx->ccm.s390x.blocks > (1ULL << 61))
|
cipher_aes_ccm.h
    32   unsigned long long blocks;  member
|
cipher_rc4_hmac_md5.h
    36   MD5_CTX *ctx, const void *inp, size_t blocks);
|
/openssl/crypto/modes/ |
ctr128.c
    168  size_t blocks = len / 16;  in CRYPTO_ctr128_encrypt_ctr32() local
    174  if (sizeof(size_t) > sizeof(unsigned int) && blocks > (1U << 28))  in CRYPTO_ctr128_encrypt_ctr32()
    175  blocks = (1U << 28);  in CRYPTO_ctr128_encrypt_ctr32()
    182  ctr32 += (u32)blocks;  in CRYPTO_ctr128_encrypt_ctr32()
    183  if (ctr32 < blocks) {  in CRYPTO_ctr128_encrypt_ctr32()
    184  blocks -= ctr32;  in CRYPTO_ctr128_encrypt_ctr32()
    187  (*func) (in, out, blocks, key, ivec);  in CRYPTO_ctr128_encrypt_ctr32()
    193  blocks *= 16;  in CRYPTO_ctr128_encrypt_ctr32()
    194  len -= blocks;  in CRYPTO_ctr128_encrypt_ctr32()
    195  out += blocks;  in CRYPTO_ctr128_encrypt_ctr32()
    [all …]
|
ccm128.c
    32   ctx->blocks = 0;  in CRYPTO_ccm128_init()
    78   (*block) (ctx->nonce.c, ctx->cmac.c, ctx->key), ctx->blocks++;  in CRYPTO_ccm128_aad()
    110  (*block) (ctx->cmac.c, ctx->cmac.c, ctx->key), ctx->blocks++;  in CRYPTO_ccm128_aad()
    152  (*block) (ctx->nonce.c, ctx->cmac.c, key), ctx->blocks++;  in CRYPTO_ccm128_encrypt()
    166  ctx->blocks += ((len + 15) >> 3) | 1;  in CRYPTO_ccm128_encrypt()
    167  if (ctx->blocks > (U64(1) << 61))  in CRYPTO_ccm128_encrypt()
    325  (*block) (ctx->nonce.c, ctx->cmac.c, key), ctx->blocks++;  in CRYPTO_ccm128_encrypt_ccm64()
    339  ctx->blocks += ((len + 15) >> 3) | 1;  in CRYPTO_ccm128_encrypt_ccm64()
    340  if (ctx->blocks > (U64(1) << 61))  in CRYPTO_ccm128_encrypt_ccm64()
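ccm128.c (like the s390x include above) counts every block-cipher invocation, CBC-MAC and CTR alike, in ctx->blocks and bails out once the per-key total would pass 2^61, the SP 800-38C usage limit. A rough sketch of that bookkeeping, with an illustrative context struct rather than OpenSSL's CCM128_CONTEXT:

    #include <stddef.h>
    #include <stdint.h>

    struct ccm_count {
        uint64_t blocks;        /* block cipher calls made under this key */
    };

    /*
     * Charge roughly one CBC-MAC call and one CTR call per (partial)
     * 16-byte payload chunk, plus one extra call, and refuse to continue
     * once the total could exceed 2^61 invocations.
     */
    static int ccm_account(struct ccm_count *c, size_t payload_len)
    {
        uint64_t chunks = ((uint64_t)payload_len + 15) / 16;

        c->blocks += 2 * chunks + 1;
        if (c->blocks > ((uint64_t)1 << 61))
            return 0;           /* too much data processed under this key */
        return 1;
    }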
|
/openssl/crypto/sm4/asm/ |
vpsm4-armv8.pl
    775   subs $blocks,$blocks,#8
    790   sub $blocks,$blocks,#4
    890   subs $blocks,$blocks,#4
    894   subs $blocks,$blocks,#1
    955   subs $blocks,$blocks,#8
    981   subs $blocks,$blocks,#4
    987   subs $blocks,$blocks,#1
    1004  subs $blocks,$blocks,1
    1115  subs $blocks,$blocks,#4
    1143  subs $blocks,$blocks,#8
    [all …]
|
vpsm4_ex-armv8.pl
    765   subs $blocks,$blocks,#8
    780   sub $blocks,$blocks,#4
    880   subs $blocks,$blocks,#4
    884   subs $blocks,$blocks,#1
    945   subs $blocks,$blocks,#8
    971   subs $blocks,$blocks,#4
    977   subs $blocks,$blocks,#1
    994   subs $blocks,$blocks,1
    1105  subs $blocks,$blocks,#4
    1133  subs $blocks,$blocks,#8
    [all …]
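Both SM4 perlasm modules burn the block count down in batches of 8, then 4, then single blocks (the subs $blocks,$blocks,#8/#4/#1 hits). A C rendering of that control flow, assuming hypothetical sm4_enc_*_blocks() helpers in place of the vectorised code paths:

    #include <stddef.h>

    /* Hypothetical stand-ins for the 8-, 4- and 1-block assembly paths. */
    void sm4_enc_8_blocks(unsigned char *out, const unsigned char *in, const void *ks);
    void sm4_enc_4_blocks(unsigned char *out, const unsigned char *in, const void *ks);
    void sm4_enc_1_block(unsigned char *out, const unsigned char *in, const void *ks);

    /* Process wide batches while they last, then a 4-block batch,
     * then the remaining blocks one at a time (16-byte SM4 blocks). */
    static void sm4_enc_blocks(unsigned char *out, const unsigned char *in,
                               size_t blocks, const void *ks)
    {
        while (blocks >= 8) {
            sm4_enc_8_blocks(out, in, ks);
            in += 8 * 16; out += 8 * 16; blocks -= 8;
        }
        if (blocks >= 4) {
            sm4_enc_4_blocks(out, in, ks);
            in += 4 * 16; out += 4 * 16; blocks -= 4;
        }
        while (blocks > 0) {
            sm4_enc_1_block(out, in, ks);
            in += 16; out += 16; blocks -= 1;
        }
    }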
|
/openssl/include/crypto/ |
aes_platform.h
    49   size_t blocks, const AES_KEY *key,
    216  size_t blocks, const void *key,
    222  size_t blocks, const void *key,
    231  size_t blocks,
    248  size_t blocks,
    255  size_t blocks,
    333  size_t blocks, const AES_KEY *key1,
    336  size_t blocks, const AES_KEY *key1,
    339  size_t blocks, const AES_KEY *key1,
    568  size_t blocks, const void *key,
    [all …]
|
cmll_platform.h
    42   size_t blocks, const CAMELLIA_KEY *key,
    45   size_t blocks, const CAMELLIA_KEY *key,
|
poly1305.h
    36   poly1305_blocks_f blocks;  member
|
/openssl/test/recipes/30-test_evp_data/ |
evpciph_aes_cts.txt
    42   # where aligned blocks are the same as CBC mode, and partial lengths
    43   # have the last 2 blocks swapped compared to CS3.
    157  # where aligned blocks are the same as CBC mode, and partial lengths
    160  # 17 bytes Input (For partial blocks the output should match CS3)
    169  # 31 bytes input (For partial blocks the output should match CS3)
    178  # 32 bytes input (Aligned blocks should match normal CBC mode)
    260  # 32 bytes input (CS3 always swaps the last 2 byte blocks - so it is not equivalent to CBC for a fu…
    287  # 64 bytes input (CS3 always swaps the last 2 byte blocks - so it is not equivalent to CBC for a fu…
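The comments above note that CS3 always swaps the last two blocks, so for block-aligned input the CS3 ciphertext is just ordinary CBC output with the final two 16-byte blocks exchanged (partial final blocks need the full ciphertext-stealing logic). A sketch of only that aligned-case swap:

    #include <stddef.h>
    #include <string.h>

    #define AES_BLOCK 16

    /*
     * Turn CBC ciphertext into CBC-CS3 ciphertext for the aligned case:
     * cbc_out already holds the CBC result, len is a multiple of 16 and
     * at least two blocks long; swap the last two blocks in place.
     */
    static void cbc_to_cs3_aligned(unsigned char *cbc_out, size_t len)
    {
        unsigned char tmp[AES_BLOCK];
        unsigned char *last = cbc_out + len - AES_BLOCK;
        unsigned char *second_last = last - AES_BLOCK;

        memcpy(tmp, second_last, AES_BLOCK);
        memcpy(second_last, last, AES_BLOCK);
        memcpy(last, tmp, AES_BLOCK);
    }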
|
/openssl/crypto/ |
params_dup.c
    25   size_t blocks; /* Number of aligned blocks */  member
    37   size_t sz = OSSL_PARAM_ALIGN_SIZE * (extra_blocks + out->blocks);  in ossl_param_buf_alloc()
    90   buf[is_secure].blocks += blks;  in ossl_param_dup()
    124  if (buf[OSSL_PARAM_BUF_SECURE].blocks > 0  in OSSL_PARAM_dup()
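params_dup.c sizes its scratch buffer in OSSL_PARAM_ALIGN_SIZE units (the "Number of aligned blocks" member). A small sketch of that round-up accounting, with an illustrative alignment constant:

    #include <stddef.h>

    #define ALIGN 8   /* illustrative alignment unit */

    /* Round a byte count up to whole alignment blocks. */
    static size_t bytes_to_blocks(size_t n)
    {
        return (n + ALIGN - 1) / ALIGN;
    }

    /* Accumulate blocks per item, then allocate blocks * ALIGN bytes at once. */
    static size_t buffer_size(const size_t *item_sizes, size_t count)
    {
        size_t blocks = 0, i;

        for (i = 0; i < count; i++)
            blocks += bytes_to_blocks(item_sizes[i]);
        return blocks * ALIGN;
    }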
|
/openssl/doc/man7/ |
des_modes.pod
    27   The order of the blocks can be rearranged without detection.
    59   The chaining operation makes the ciphertext blocks dependent on the
    60   current and all preceding plaintext blocks and therefore blocks can not
    70   An error will affect the current and the following ciphertext blocks.
    195  key length 112 bits, but this attack also requires 2^56 blocks of
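The des_modes(7) lines above describe CBC chaining: C[i] = E_K(P[i] XOR C[i-1]) with C[0] = IV, which is why each ciphertext block depends on every preceding plaintext block and why an error touches the current and following blocks. A sketch of the recurrence over whole blocks, with the block cipher passed in as a callback rather than any particular DES routine:

    #include <stddef.h>

    #define BLK 8   /* DES block size, as in des_modes(7) */

    /* Hypothetical single-block encrypt standing in for the DES primitive. */
    typedef void (*block_f)(unsigned char out[BLK], const unsigned char in[BLK],
                            const void *key);

    /* CBC encrypt over a block-aligned buffer: XOR each plaintext block
     * with the previous ciphertext block (the IV for the first one),
     * then encrypt. */
    static void cbc_encrypt(unsigned char *out, const unsigned char *in,
                            size_t len, const void *key,
                            const unsigned char iv[BLK], block_f enc)
    {
        unsigned char chain[BLK], x[BLK];
        size_t i, j;

        for (j = 0; j < BLK; j++)
            chain[j] = iv[j];
        for (i = 0; i < len; i += BLK) {
            for (j = 0; j < BLK; j++)
                x[j] = in[i + j] ^ chain[j];   /* chain in previous ciphertext */
            enc(out + i, x, key);
            for (j = 0; j < BLK; j++)
                chain[j] = out[i + j];
        }
    }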
|
/openssl/include/openssl/ |
modes.h
    37   size_t blocks, const void *key,
    41   size_t blocks, const void *key,
    187  size_t blocks, const void *key,
|
/openssl/providers/implementations/digests/ |
blake2b_prov.c
    156  const uint8_t *blocks,  in blake2b_compress() argument
    191  m[i] = load64(blocks + i * sizeof(m[i]));  in blake2b_compress()
    253  blocks += increment;  in blake2b_compress()
|
blake2s_prov.c
    146  const uint8_t *blocks,  in blake2s_compress() argument
    181  m[i] = load32(blocks + i * sizeof(m[i]));  in blake2s_compress()
    241  blocks += increment;  in blake2s_compress()
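Both BLAKE2 compress functions read message words out of the blocks pointer with load64()/load32(). A portable sketch of such little-endian loads, assembling each word byte by byte so host endianness and alignment don't matter:

    #include <stdint.h>

    /* Little-endian 64-bit load, one byte at a time. */
    static uint64_t load64_le(const uint8_t *p)
    {
        return  (uint64_t)p[0]
              | ((uint64_t)p[1] << 8)
              | ((uint64_t)p[2] << 16)
              | ((uint64_t)p[3] << 24)
              | ((uint64_t)p[4] << 32)
              | ((uint64_t)p[5] << 40)
              | ((uint64_t)p[6] << 48)
              | ((uint64_t)p[7] << 56);
    }

    /* Little-endian 32-bit load. */
    static uint32_t load32_le(const uint8_t *p)
    {
        return  (uint32_t)p[0]
              | ((uint32_t)p[1] << 8)
              | ((uint32_t)p[2] << 16)
              | ((uint32_t)p[3] << 24);
    }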
|
/openssl/providers/implementations/rands/ |
drbg_ctr.c
    393  unsigned int ctr32, blocks;  in drbg_ctr_generate() local
    435  blocks = (buflen + 15) / 16;  in drbg_ctr_generate()
    437  ctr32 = GETU32(ctr->V + 12) + blocks;  in drbg_ctr_generate()
    438  if (ctr32 < blocks) {  in drbg_ctr_generate()
    441  blocks -= ctr32;  in drbg_ctr_generate()
    442  buflen = blocks * 16;  in drbg_ctr_generate()
|