Lines Matching refs:gctx

139 #  define AES_GCM_ASM2(gctx)      (gctx->gcm.block==(block128_f)aesni_encrypt && \  argument
140                                    gctx->gcm.ghash==gcm_ghash_avx)
227 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, ctx); in aesni_gcm_init_key() local
239 aesni_set_encrypt_key(key, keylen, &gctx->ks.ks); in aesni_gcm_init_key()
240 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt); in aesni_gcm_init_key()
241 gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks; in aesni_gcm_init_key()
245 if (iv == NULL && gctx->iv_set) in aesni_gcm_init_key()
246 iv = gctx->iv; in aesni_gcm_init_key()
248 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen); in aesni_gcm_init_key()
249 gctx->iv_set = 1; in aesni_gcm_init_key()
251 gctx->key_set = 1; in aesni_gcm_init_key()
254 if (gctx->key_set) in aesni_gcm_init_key()
255 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen); in aesni_gcm_init_key()
257 memcpy(gctx->iv, iv, gctx->ivlen); in aesni_gcm_init_key()
258 gctx->iv_set = 1; in aesni_gcm_init_key()
259 gctx->iv_gen = 0; in aesni_gcm_init_key()
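
The key_set / iv_set flags above exist because the EVP front end lets the key and the IV arrive in separate init calls. A minimal sketch of that two-step usage against the public API (the helper name and the 128-bit key size are illustrative, not from the listing):

    #include <openssl/evp.h>

    /* Two-step GCM init: key now, IV later.  aesni_gcm_init_key() keeps
     * gctx->key_set and gctx->iv_set so either order (or a previously
     * stored IV) works.  Returns 1 on success, 0 on error; key is 16
     * bytes, iv is the default 12 bytes in this sketch. */
    static int gcm_init_two_step(EVP_CIPHER_CTX *ctx,
                                 const unsigned char *key,
                                 const unsigned char *iv)
    {
        if (!EVP_EncryptInit_ex(ctx, EVP_aes_128_gcm(), NULL, key, NULL))
            return 0;                    /* key only: gctx->key_set = 1 */
        return EVP_EncryptInit_ex(ctx, NULL, NULL, NULL, iv);
                                         /* IV only: gctx->iv_set = 1   */
    }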
579 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx); in aes_t4_gcm_init_key() local
590 aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks); in aes_t4_gcm_init_key()
591 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, in aes_t4_gcm_init_key()
595 gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt; in aes_t4_gcm_init_key()
598 gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt; in aes_t4_gcm_init_key()
601 gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt; in aes_t4_gcm_init_key()
609 if (iv == NULL && gctx->iv_set) in aes_t4_gcm_init_key()
610 iv = gctx->iv; in aes_t4_gcm_init_key()
612 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen); in aes_t4_gcm_init_key()
613 gctx->iv_set = 1; in aes_t4_gcm_init_key()
615 gctx->key_set = 1; in aes_t4_gcm_init_key()
618 if (gctx->key_set) in aes_t4_gcm_init_key()
619 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen); in aes_t4_gcm_init_key()
621 memcpy(gctx->iv, iv, gctx->ivlen); in aes_t4_gcm_init_key()
622 gctx->iv_set = 1; in aes_t4_gcm_init_key()
623 gctx->iv_gen = 0; in aes_t4_gcm_init_key()
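
aes_t4_gcm_init_key() wires up a different ctr32 routine per key size (aes128/192/256_t4_ctr32_encrypt). At the public-API level the same choice is made simply by picking the matching GCM cipher; a small sketch (the helper name is hypothetical):

    #include <openssl/evp.h>

    /* Map a key length in bits to the corresponding AES-GCM EVP cipher.
     * The init_key() routines above then select the matching key-schedule
     * and ctr32 back end for that size. */
    static const EVP_CIPHER *gcm_cipher_for_bits(int bits)
    {
        switch (bits) {
        case 128: return EVP_aes_128_gcm();
        case 192: return EVP_aes_192_gcm();
        case 256: return EVP_aes_256_gcm();
        default:  return NULL;
        }
    }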
1438 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c); in s390x_aes_gcm_ctrl() local
1447 gctx->key_set = 0; in s390x_aes_gcm_ctrl()
1448 gctx->iv_set = 0; in s390x_aes_gcm_ctrl()
1449 gctx->ivlen = ivlen; in s390x_aes_gcm_ctrl()
1450 gctx->iv = c->iv; in s390x_aes_gcm_ctrl()
1451 gctx->taglen = -1; in s390x_aes_gcm_ctrl()
1452 gctx->iv_gen = 0; in s390x_aes_gcm_ctrl()
1453 gctx->tls_aad_len = -1; in s390x_aes_gcm_ctrl()
1457 *(int *)ptr = gctx->ivlen; in s390x_aes_gcm_ctrl()
1468 if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) { in s390x_aes_gcm_ctrl()
1469 if (gctx->iv != c->iv) in s390x_aes_gcm_ctrl()
1470 OPENSSL_free(gctx->iv); in s390x_aes_gcm_ctrl()
1472 if ((gctx->iv = OPENSSL_malloc(len)) == NULL) in s390x_aes_gcm_ctrl()
1476 memset(gctx->iv + arg, 0, len - arg - 8); in s390x_aes_gcm_ctrl()
1477 *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3; in s390x_aes_gcm_ctrl()
1479 gctx->ivlen = arg; in s390x_aes_gcm_ctrl()
1489 gctx->taglen = arg; in s390x_aes_gcm_ctrl()
1494 if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0) in s390x_aes_gcm_ctrl()
1497 memcpy(ptr, gctx->kma.param.t.b, arg); in s390x_aes_gcm_ctrl()
1503 memcpy(gctx->iv, ptr, gctx->ivlen); in s390x_aes_gcm_ctrl()
1504 gctx->iv_gen = 1; in s390x_aes_gcm_ctrl()
1511 if ((arg < 4) || (gctx->ivlen - arg) < 8) in s390x_aes_gcm_ctrl()
1515 memcpy(gctx->iv, ptr, arg); in s390x_aes_gcm_ctrl()
1518 if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0) in s390x_aes_gcm_ctrl()
1521 gctx->iv_gen = 1; in s390x_aes_gcm_ctrl()
1525 if (gctx->iv_gen == 0 || gctx->key_set == 0) in s390x_aes_gcm_ctrl()
1528 s390x_aes_gcm_setiv(gctx, gctx->iv); in s390x_aes_gcm_ctrl()
1530 if (arg <= 0 || arg > gctx->ivlen) in s390x_aes_gcm_ctrl()
1531 arg = gctx->ivlen; in s390x_aes_gcm_ctrl()
1533 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg); in s390x_aes_gcm_ctrl()
1538 ctr64_inc(gctx->iv + gctx->ivlen - 8); in s390x_aes_gcm_ctrl()
1539 gctx->iv_set = 1; in s390x_aes_gcm_ctrl()
1544 if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc) in s390x_aes_gcm_ctrl()
1547 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg); in s390x_aes_gcm_ctrl()
1548 s390x_aes_gcm_setiv(gctx, gctx->iv); in s390x_aes_gcm_ctrl()
1549 gctx->iv_set = 1; in s390x_aes_gcm_ctrl()
1559 gctx->tls_aad_len = arg; in s390x_aes_gcm_ctrl()
1560 gctx->tls_enc_records = 0; in s390x_aes_gcm_ctrl()
1584 if (gctx->iv == c->iv) { in s390x_aes_gcm_ctrl()
1587 len = S390X_gcm_ivpadlen(gctx->ivlen); in s390x_aes_gcm_ctrl()
1592 memcpy(gctx_out->iv, gctx->iv, len); in s390x_aes_gcm_ctrl()
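
s390x_aes_gcm_ctrl() services the generic AEAD controls; from the caller's side the two most common ones look roughly like this (the helper name, the 256-bit cipher and the 16-byte tag size are illustrative choices, not taken from the listing):

    #include <openssl/evp.h>

    /* Use a non-default IV length and read the tag back afterwards.  The
     * two controls land in the EVP_CTRL_AEAD_SET_IVLEN and
     * EVP_CTRL_AEAD_GET_TAG branches of the ctrl handlers above.  The IV
     * length must be set before the IV itself is supplied; the tag is
     * only available after EVP_EncryptFinal_ex(). */
    static int gcm_custom_ivlen(EVP_CIPHER_CTX *ctx,
                                const unsigned char *key,
                                const unsigned char *iv, int ivlen,
                                unsigned char tag[16])
    {
        unsigned char tmp[16];
        int len;

        if (!EVP_EncryptInit_ex(ctx, EVP_aes_256_gcm(), NULL, NULL, NULL)
            || !EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_IVLEN, ivlen, NULL)
            || !EVP_EncryptInit_ex(ctx, NULL, NULL, key, iv)
            || !EVP_EncryptFinal_ex(ctx, tmp, &len))
            return 0;
        return EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, 16, tag);
    }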
1608 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx); in s390x_aes_gcm_init_key() local
1621 memcpy(&gctx->kma.param.k, key, keylen); in s390x_aes_gcm_init_key()
1623 gctx->fc = S390X_AES_FC(keylen); in s390x_aes_gcm_init_key()
1625 gctx->fc |= S390X_DECRYPT; in s390x_aes_gcm_init_key()
1627 if (iv == NULL && gctx->iv_set) in s390x_aes_gcm_init_key()
1628 iv = gctx->iv; in s390x_aes_gcm_init_key()
1631 s390x_aes_gcm_setiv(gctx, iv); in s390x_aes_gcm_init_key()
1632 gctx->iv_set = 1; in s390x_aes_gcm_init_key()
1634 gctx->key_set = 1; in s390x_aes_gcm_init_key()
1636 if (gctx->key_set) in s390x_aes_gcm_init_key()
1637 s390x_aes_gcm_setiv(gctx, iv); in s390x_aes_gcm_init_key()
1639 memcpy(gctx->iv, iv, gctx->ivlen); in s390x_aes_gcm_init_key()
1641 gctx->iv_set = 1; in s390x_aes_gcm_init_key()
1642 gctx->iv_gen = 0; in s390x_aes_gcm_init_key()
1654 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx); in s390x_aes_gcm_tls_cipher() local
1668 if (enc && ++gctx->tls_enc_records == 0) { in s390x_aes_gcm_tls_cipher()
1682 gctx->kma.param.taadl = gctx->tls_aad_len << 3; in s390x_aes_gcm_tls_cipher()
1683 gctx->kma.param.tpcl = len << 3; in s390x_aes_gcm_tls_cipher()
1684 s390x_kma(buf, gctx->tls_aad_len, in, len, out, in s390x_aes_gcm_tls_cipher()
1685 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param); in s390x_aes_gcm_tls_cipher()
1688 memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN); in s390x_aes_gcm_tls_cipher()
1691 if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len, in s390x_aes_gcm_tls_cipher()
1699 gctx->iv_set = 0; in s390x_aes_gcm_tls_cipher()
1700 gctx->tls_aad_len = -1; in s390x_aes_gcm_tls_cipher()
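
Line 1668 refuses to encrypt once the per-key TLS record counter would wrap, since exhausting the counter space would eventually repeat a GCM nonce. A standalone sketch of that guard (the names are stand-ins for the gctx field, not the real structure):

    #include <stdint.h>

    /* Mirror of "if (enc && ++gctx->tls_enc_records == 0)" above:
     * pre-increment the per-key record counter and refuse to proceed
     * once it wraps back to zero. */
    static int tls_record_counter_ok(uint64_t *tls_enc_records)
    {
        if (++*tls_enc_records == 0)
            return 0;   /* too many records encrypted under this key */
        return 1;
    }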
1713 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx); in s390x_aes_gcm_cipher() local
1717 if (!gctx->key_set) in s390x_aes_gcm_cipher()
1720 if (gctx->tls_aad_len >= 0) in s390x_aes_gcm_cipher()
1723 if (!gctx->iv_set) in s390x_aes_gcm_cipher()
1728 if (s390x_aes_gcm_aad(gctx, in, len)) in s390x_aes_gcm_cipher()
1731 if (s390x_aes_gcm(gctx, in, out, len)) in s390x_aes_gcm_cipher()
1736 gctx->kma.param.taadl <<= 3; in s390x_aes_gcm_cipher()
1737 gctx->kma.param.tpcl <<= 3; in s390x_aes_gcm_cipher()
1738 s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp, in s390x_aes_gcm_cipher()
1739 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param); in s390x_aes_gcm_cipher()
1742 OPENSSL_cleanse(tmp, gctx->mreslen); in s390x_aes_gcm_cipher()
1743 gctx->iv_set = 0; in s390x_aes_gcm_cipher()
1747 gctx->taglen = 16; in s390x_aes_gcm_cipher()
1749 if (gctx->taglen < 0) in s390x_aes_gcm_cipher()
1753 if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen)) in s390x_aes_gcm_cipher()
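
The non-TLS branch above is what a plain EVP AAD-then-payload encryption exercises. A hedged end-to-end sketch using only public API calls (the helper name and the 128-bit key size are assumptions; buffer sizing is left to the caller):

    #include <openssl/evp.h>

    /* One-shot GCM encrypt with AAD: the AAD goes through
     * EVP_EncryptUpdate() with a NULL output pointer (the _aad path
     * above), the payload through a normal update, and the 16-byte tag
     * is read back after finalisation. */
    static int gcm_encrypt(const unsigned char *key, const unsigned char *iv,
                           const unsigned char *aad, int aadlen,
                           const unsigned char *in, int inlen,
                           unsigned char *out, unsigned char tag[16])
    {
        EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
        int len, ok = 0;

        if (ctx == NULL)
            return 0;
        if (EVP_EncryptInit_ex(ctx, EVP_aes_128_gcm(), NULL, key, iv)
            && EVP_EncryptUpdate(ctx, NULL, &len, aad, aadlen)
            && EVP_EncryptUpdate(ctx, out, &len, in, inlen)
            && EVP_EncryptFinal_ex(ctx, out + len, &len)
            && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, 16, tag))
            ok = 1;
        EVP_CIPHER_CTX_free(ctx);
        return ok;
    }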
1762 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c); in s390x_aes_gcm_cleanup() local
1764 if (gctx == NULL) in s390x_aes_gcm_cleanup()
1767 if (gctx->iv != c->iv) in s390x_aes_gcm_cleanup()
1768 OPENSSL_free(gctx->iv); in s390x_aes_gcm_cleanup()
1770 OPENSSL_cleanse(gctx, sizeof(*gctx)); in s390x_aes_gcm_cleanup()
2641 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c); in aes_gcm_cleanup() local
2642 if (gctx == NULL) in aes_gcm_cleanup()
2644 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm)); in aes_gcm_cleanup()
2645 if (gctx->iv != c->iv) in aes_gcm_cleanup()
2646 OPENSSL_free(gctx->iv); in aes_gcm_cleanup()
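
Both cleanup routines cleanse the context state before it is released; a caller reaches them through EVP_CIPHER_CTX_free() and can scrub its own key copy the same way (the helper name is illustrative):

    #include <openssl/crypto.h>
    #include <openssl/evp.h>

    /* Dispose of a GCM context and a caller-held key.  For the legacy
     * AES-GCM ciphers, freeing the context ends up in the cleanup
     * routines listed above, which cleanse the internal GCM state;
     * OPENSSL_cleanse() wipes the caller-owned key bytes. */
    static void gcm_dispose(EVP_CIPHER_CTX *ctx,
                            unsigned char *key, size_t keylen)
    {
        EVP_CIPHER_CTX_free(ctx);
        OPENSSL_cleanse(key, keylen);
    }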
2652 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c); in aes_gcm_ctrl() local
2655 gctx->key_set = 0; in aes_gcm_ctrl()
2656 gctx->iv_set = 0; in aes_gcm_ctrl()
2657 gctx->ivlen = EVP_CIPHER_get_iv_length(c->cipher); in aes_gcm_ctrl()
2658 gctx->iv = c->iv; in aes_gcm_ctrl()
2659 gctx->taglen = -1; in aes_gcm_ctrl()
2660 gctx->iv_gen = 0; in aes_gcm_ctrl()
2661 gctx->tls_aad_len = -1; in aes_gcm_ctrl()
2665 *(int *)ptr = gctx->ivlen; in aes_gcm_ctrl()
2672 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) { in aes_gcm_ctrl()
2673 if (gctx->iv != c->iv) in aes_gcm_ctrl()
2674 OPENSSL_free(gctx->iv); in aes_gcm_ctrl()
2675 if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) in aes_gcm_ctrl()
2678 gctx->ivlen = arg; in aes_gcm_ctrl()
2685 gctx->taglen = arg; in aes_gcm_ctrl()
2690 || gctx->taglen < 0) in aes_gcm_ctrl()
2698 memcpy(gctx->iv, ptr, gctx->ivlen); in aes_gcm_ctrl()
2699 gctx->iv_gen = 1; in aes_gcm_ctrl()
2706 if ((arg < 4) || (gctx->ivlen - arg) < 8) in aes_gcm_ctrl()
2709 memcpy(gctx->iv, ptr, arg); in aes_gcm_ctrl()
2710 if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0) in aes_gcm_ctrl()
2712 gctx->iv_gen = 1; in aes_gcm_ctrl()
2716 if (gctx->iv_gen == 0 || gctx->key_set == 0) in aes_gcm_ctrl()
2718 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen); in aes_gcm_ctrl()
2719 if (arg <= 0 || arg > gctx->ivlen) in aes_gcm_ctrl()
2720 arg = gctx->ivlen; in aes_gcm_ctrl()
2721 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg); in aes_gcm_ctrl()
2726 ctr64_inc(gctx->iv + gctx->ivlen - 8); in aes_gcm_ctrl()
2727 gctx->iv_set = 1; in aes_gcm_ctrl()
2731 if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt) in aes_gcm_ctrl()
2733 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg); in aes_gcm_ctrl()
2734 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen); in aes_gcm_ctrl()
2735 gctx->iv_set = 1; in aes_gcm_ctrl()
2743 gctx->tls_aad_len = arg; in aes_gcm_ctrl()
2744 gctx->tls_enc_records = 0; in aes_gcm_ctrl()
2767 if (gctx->gcm.key) { in aes_gcm_ctrl()
2768 if (gctx->gcm.key != &gctx->ks) in aes_gcm_ctrl()
2772 if (gctx->iv == c->iv) in aes_gcm_ctrl()
2775 if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) in aes_gcm_ctrl()
2777 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen); in aes_gcm_ctrl()
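
The SET_IV_FIXED / IV_GEN / SET_IV_INV branches implement the split fixed-prefix / explicit-suffix IV scheme. A sketch of the sender side with a 4+8 byte split over the default 12-byte IV (the helper names are hypothetical; the key must already be set before IV_GEN succeeds):

    #include <openssl/evp.h>

    /* Install the 4-byte fixed IV prefix once per connection.  The ctrl
     * copies it into gctx->iv, randomizes the remainder when encrypting
     * and sets gctx->iv_gen. */
    static int gcm_install_fixed_iv(EVP_CIPHER_CTX *ctx,
                                    unsigned char fixed[4])
    {
        return EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_GCM_SET_IV_FIXED, 4, fixed);
    }

    /* Per record: program the current IV into the GCM state, copy out its
     * last 8 bytes for the wire and bump the counter for the next record
     * (the ctr64_inc() / gctx->iv_set = 1 lines above). */
    static int gcm_next_explicit_iv(EVP_CIPHER_CTX *ctx,
                                    unsigned char explicit_part[8])
    {
        return EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_GCM_IV_GEN, 8, explicit_part);
    }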
2791 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx); in aes_gcm_init_key() local
2806 HWAES_set_encrypt_key(key, keylen, &gctx->ks.ks); in aes_gcm_init_key()
2807 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, in aes_gcm_init_key()
2810 gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks; in aes_gcm_init_key()
2812 gctx->ctr = NULL; in aes_gcm_init_key()
2819 AES_set_encrypt_key(key, keylen, &gctx->ks.ks); in aes_gcm_init_key()
2820 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, in aes_gcm_init_key()
2822 gctx->ctr = (ctr128_f) ossl_bsaes_ctr32_encrypt_blocks; in aes_gcm_init_key()
2828 vpaes_set_encrypt_key(key, keylen, &gctx->ks.ks); in aes_gcm_init_key()
2829 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, in aes_gcm_init_key()
2831 gctx->ctr = NULL; in aes_gcm_init_key()
2837 AES_set_encrypt_key(key, keylen, &gctx->ks.ks); in aes_gcm_init_key()
2838 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, in aes_gcm_init_key()
2841 gctx->ctr = (ctr128_f) AES_ctr32_encrypt; in aes_gcm_init_key()
2843 gctx->ctr = NULL; in aes_gcm_init_key()
2850 if (iv == NULL && gctx->iv_set) in aes_gcm_init_key()
2851 iv = gctx->iv; in aes_gcm_init_key()
2853 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen); in aes_gcm_init_key()
2854 gctx->iv_set = 1; in aes_gcm_init_key()
2856 gctx->key_set = 1; in aes_gcm_init_key()
2859 if (gctx->key_set) in aes_gcm_init_key()
2860 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen); in aes_gcm_init_key()
2862 memcpy(gctx->iv, iv, gctx->ivlen); in aes_gcm_init_key()
2863 gctx->iv_set = 1; in aes_gcm_init_key()
2864 gctx->iv_gen = 0; in aes_gcm_init_key()
2879 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx); in aes_gcm_tls_cipher() local
2892 if (EVP_CIPHER_CTX_is_encrypting(ctx) && ++gctx->tls_enc_records == 0) { in aes_gcm_tls_cipher()
2907 if (CRYPTO_gcm128_aad(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx), in aes_gcm_tls_cipher()
2908 gctx->tls_aad_len)) in aes_gcm_tls_cipher()
2916 if (gctx->ctr) { in aes_gcm_tls_cipher()
2919 if (len >= 32 && AES_GCM_ASM(gctx)) { in aes_gcm_tls_cipher()
2920 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0)) in aes_gcm_tls_cipher()
2924 gctx->gcm.key, in aes_gcm_tls_cipher()
2925 gctx->gcm.Yi.c, gctx->gcm.Xi.u); in aes_gcm_tls_cipher()
2926 gctx->gcm.len.u[1] += bulk; in aes_gcm_tls_cipher()
2929 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, in aes_gcm_tls_cipher()
2932 len - bulk, gctx->ctr)) in aes_gcm_tls_cipher()
2937 if (len >= 32 && AES_GCM_ASM2(gctx)) { in aes_gcm_tls_cipher()
2938 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0)) in aes_gcm_tls_cipher()
2942 gctx->gcm.key, in aes_gcm_tls_cipher()
2943 gctx->gcm.Yi.c, gctx->gcm.Xi.u); in aes_gcm_tls_cipher()
2944 gctx->gcm.len.u[1] += bulk; in aes_gcm_tls_cipher()
2947 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in aes_gcm_tls_cipher()
2953 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN); in aes_gcm_tls_cipher()
2957 if (gctx->ctr) { in aes_gcm_tls_cipher()
2960 if (len >= 16 && AES_GCM_ASM(gctx)) { in aes_gcm_tls_cipher()
2961 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0)) in aes_gcm_tls_cipher()
2965 gctx->gcm.key, in aes_gcm_tls_cipher()
2966 gctx->gcm.Yi.c, gctx->gcm.Xi.u); in aes_gcm_tls_cipher()
2967 gctx->gcm.len.u[1] += bulk; in aes_gcm_tls_cipher()
2970 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, in aes_gcm_tls_cipher()
2973 len - bulk, gctx->ctr)) in aes_gcm_tls_cipher()
2978 if (len >= 16 && AES_GCM_ASM2(gctx)) { in aes_gcm_tls_cipher()
2979 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0)) in aes_gcm_tls_cipher()
2983 gctx->gcm.key, in aes_gcm_tls_cipher()
2984 gctx->gcm.Yi.c, gctx->gcm.Xi.u); in aes_gcm_tls_cipher()
2985 gctx->gcm.len.u[1] += bulk; in aes_gcm_tls_cipher()
2988 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in aes_gcm_tls_cipher()
2993 CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx), in aes_gcm_tls_cipher()
3005 gctx->iv_set = 0; in aes_gcm_tls_cipher()
3006 gctx->tls_aad_len = -1; in aes_gcm_tls_cipher()
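
The TLS record path above is only taken once tls_aad_len has been set through EVP_CTRL_AEAD_TLS1_AAD. A sketch of how a TLS stack hands over the 13-byte pseudo-header (the helper name is illustrative; the control rewrites the length bytes in the supplied buffer and returns the per-record tag overhead):

    #include <openssl/evp.h>

    /* Feed the 13-byte TLS AAD (sequence number, type, version, length)
     * to the cipher.  This sets gctx->tls_aad_len and switches the next
     * cipher call into the tls_cipher() path above; for GCM the control
     * returns the 16-byte tag expansion, or a negative value on error. */
    static int gcm_set_tls_aad(EVP_CIPHER_CTX *ctx,
                               unsigned char aad[EVP_AEAD_TLS1_AAD_LEN])
    {
        int pad = EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_TLS1_AAD,
                                      EVP_AEAD_TLS1_AAD_LEN, aad);
        return pad < 0 ? 0 : pad;
    }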
3019 static int aes_gcm_iv_generate(EVP_AES_GCM_CTX *gctx, int offset) in aes_gcm_iv_generate() argument
3021 int sz = gctx->ivlen - offset; in aes_gcm_iv_generate()
3024 if (sz <= 0 || gctx->ivlen < 12) in aes_gcm_iv_generate()
3028 if (RAND_bytes(gctx->iv + offset, sz) <= 0) in aes_gcm_iv_generate()
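
aes_gcm_iv_generate() is a thin wrapper around RAND_bytes(); the same call serves for caller-generated 96-bit IVs (the helper name is illustrative):

    #include <openssl/rand.h>

    /* Fill a 96-bit GCM IV with fresh randomness, mirroring the
     * RAND_bytes() call in aes_gcm_iv_generate() above.  RAND_bytes()
     * returns 1 on success. */
    static int make_random_gcm_iv(unsigned char iv[12])
    {
        return RAND_bytes(iv, 12) > 0;
    }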
3037 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx); in aes_gcm_cipher() local
3040 if (!gctx->key_set) in aes_gcm_cipher()
3043 if (gctx->tls_aad_len >= 0) in aes_gcm_cipher()
3053 if (!gctx->iv_set) { in aes_gcm_cipher()
3054 if (!EVP_CIPHER_CTX_is_encrypting(ctx) || !aes_gcm_iv_generate(gctx, 0)) in aes_gcm_cipher()
3056 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen); in aes_gcm_cipher()
3057 gctx->iv_set = 1; in aes_gcm_cipher()
3058 gctx->iv_gen_rand = 1; in aes_gcm_cipher()
3061 if (!gctx->iv_set) in aes_gcm_cipher()
3067 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len)) in aes_gcm_cipher()
3070 if (gctx->ctr) { in aes_gcm_cipher()
3073 if (len >= 32 && AES_GCM_ASM(gctx)) { in aes_gcm_cipher()
3074 size_t res = (16 - gctx->gcm.mres) % 16; in aes_gcm_cipher()
3076 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res)) in aes_gcm_cipher()
3081 gctx->gcm.key, gctx->gcm.Yi.c, in aes_gcm_cipher()
3082 gctx->gcm.Xi.u); in aes_gcm_cipher()
3083 gctx->gcm.len.u[1] += bulk; in aes_gcm_cipher()
3087 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, in aes_gcm_cipher()
3090 len - bulk, gctx->ctr)) in aes_gcm_cipher()
3095 if (len >= 32 && AES_GCM_ASM2(gctx)) { in aes_gcm_cipher()
3096 size_t res = (16 - gctx->gcm.mres) % 16; in aes_gcm_cipher()
3098 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res)) in aes_gcm_cipher()
3103 gctx->gcm.key, gctx->gcm.Yi.c, in aes_gcm_cipher()
3104 gctx->gcm.Xi.u); in aes_gcm_cipher()
3105 gctx->gcm.len.u[1] += bulk; in aes_gcm_cipher()
3109 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in aes_gcm_cipher()
3114 if (gctx->ctr) { in aes_gcm_cipher()
3117 if (len >= 16 && AES_GCM_ASM(gctx)) { in aes_gcm_cipher()
3118 size_t res = (16 - gctx->gcm.mres) % 16; in aes_gcm_cipher()
3120 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res)) in aes_gcm_cipher()
3125 gctx->gcm.key, in aes_gcm_cipher()
3126 gctx->gcm.Yi.c, gctx->gcm.Xi.u); in aes_gcm_cipher()
3127 gctx->gcm.len.u[1] += bulk; in aes_gcm_cipher()
3131 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, in aes_gcm_cipher()
3134 len - bulk, gctx->ctr)) in aes_gcm_cipher()
3139 if (len >= 16 && AES_GCM_ASM2(gctx)) { in aes_gcm_cipher()
3140 size_t res = (16 - gctx->gcm.mres) % 16; in aes_gcm_cipher()
3142 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res)) in aes_gcm_cipher()
3147 gctx->gcm.key, in aes_gcm_cipher()
3148 gctx->gcm.Yi.c, gctx->gcm.Xi.u); in aes_gcm_cipher()
3149 gctx->gcm.len.u[1] += bulk; in aes_gcm_cipher()
3153 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in aes_gcm_cipher()
3161 if (gctx->taglen < 0) in aes_gcm_cipher()
3163 if (CRYPTO_gcm128_finish(&gctx->gcm, in aes_gcm_cipher()
3165 gctx->taglen) != 0) in aes_gcm_cipher()
3167 gctx->iv_set = 0; in aes_gcm_cipher()
3170 CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx), 16); in aes_gcm_cipher()
3171 gctx->taglen = 16; in aes_gcm_cipher()
3173 gctx->iv_set = 0; in aes_gcm_cipher()
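
The decrypt-side finalisation above compares the computed tag against the one the caller supplied. A hedged public-API counterpart, where the expected tag is installed with EVP_CTRL_AEAD_SET_TAG before the final call (the helper name and sizes are assumptions):

    #include <openssl/evp.h>

    /* GCM decrypt with tag check: EVP_DecryptFinal_ex() runs the
     * CRYPTO_gcm128_finish() comparison seen above and returns 0 on a
     * tag mismatch, so its result must be treated as the authenticity
     * verdict. */
    static int gcm_decrypt(const unsigned char *key, const unsigned char *iv,
                           const unsigned char *aad, int aadlen,
                           const unsigned char *in, int inlen,
                           unsigned char tag[16], unsigned char *out)
    {
        EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
        int len, ok = 0;

        if (ctx == NULL)
            return 0;
        if (EVP_DecryptInit_ex(ctx, EVP_aes_128_gcm(), NULL, key, iv)
            && EVP_DecryptUpdate(ctx, NULL, &len, aad, aadlen)
            && EVP_DecryptUpdate(ctx, out, &len, in, inlen)
            && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, 16, tag)
            && EVP_DecryptFinal_ex(ctx, out + len, &len))
            ok = 1;                      /* tag verified */
        EVP_CIPHER_CTX_free(ctx);
        return ok;
    }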