1/*
2 * Copyright 2021-2022 The OpenSSL Project Authors. All Rights Reserved.
3 * Copyright (c) 2021, Intel Corporation. All Rights Reserved.
4 *
5 * Licensed under the Apache License 2.0 (the "License").  You may not use
6 * this file except in compliance with the License.  You can obtain a copy
7 * in the file LICENSE in the source distribution or at
8 * https://www.openssl.org/source/license.html
9 */
10
/*-
 * AVX512 VAES + VPCLMULQDQ support for AES GCM.
 * This file is included by cipher_aes_gcm_hw_aesni.inc
 */
15
16#undef VAES_GCM_ENABLED
17#if (defined(__x86_64) || defined(__x86_64__) || \
18     defined(_M_AMD64) || defined(_M_X64))
19# define VAES_GCM_ENABLED
20
/* Returns non-zero when the AVX512F + VAES + VPCLMULQDQ combination is available */
22int ossl_vaes_vpclmulqdq_capable(void);
23
/*
 * Prototype generator for the assembly bulk-update entry points:
 * ossl_aes_gcm_encrypt_avx512() / ossl_aes_gcm_decrypt_avx512().
 * 'pblocklen' is the in/out count of bytes in the current partial block.
 */
# define OSSL_AES_GCM_UPDATE(direction)                                 \
    void ossl_aes_gcm_ ## direction ## _avx512(const void *ks,          \
                                               void *gcm128ctx,         \
                                               unsigned int *pblocklen, \
                                               const unsigned char *in, \
                                               size_t len,              \
                                               unsigned char *out);

OSSL_AES_GCM_UPDATE(encrypt)
OSSL_AES_GCM_UPDATE(decrypt)

/* Precompute the GHASH key powers for the expanded key schedule 'ks' */
void ossl_aes_gcm_init_avx512(const void *ks, void *gcm128ctx);
/* Derive the initial counter block (Y0) from an arbitrary-length IV */
void ossl_aes_gcm_setiv_avx512(const void *ks, void *gcm128ctx,
                               const unsigned char *iv, size_t ivlen);
/* Absorb full blocks of AAD into the GHASH state */
void ossl_aes_gcm_update_aad_avx512(void *gcm128ctx, const unsigned char *aad,
                                    size_t aadlen);
/* Fold the bit lengths into GHASH and encrypt the final tag */
void ossl_aes_gcm_finalize_avx512(void *gcm128ctx, unsigned int pblocklen);

/* Single GHASH multiplication of Xi by the hash key */
void ossl_gcm_gmult_avx512(u64 Xi[2], const void *gcm128ctx);
43
44static int vaes_gcm_setkey(PROV_GCM_CTX *ctx, const unsigned char *key,
45                           size_t keylen)
46{
47    GCM128_CONTEXT *gcmctx = &ctx->gcm;
48    PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
49    AES_KEY *ks = &actx->ks.ks;
50
51    aesni_set_encrypt_key(key, keylen * 8, ks);
52    memset(gcmctx, 0, sizeof(*gcmctx));
53    gcmctx->key = ks;
54    ctx->key_set = 1;
55
56    ossl_aes_gcm_init_avx512(ks, gcmctx);
57
58    return 1;
59}
60
61static int vaes_gcm_setiv(PROV_GCM_CTX *ctx, const unsigned char *iv,
62                          size_t ivlen)
63{
64    GCM128_CONTEXT *gcmctx = &ctx->gcm;
65
66    gcmctx->Yi.u[0] = 0;           /* Current counter */
67    gcmctx->Yi.u[1] = 0;
68    gcmctx->Xi.u[0] = 0;           /* AAD hash */
69    gcmctx->Xi.u[1] = 0;
70    gcmctx->len.u[0] = 0;          /* AAD length */
71    gcmctx->len.u[1] = 0;          /* Message length */
72    gcmctx->ares = 0;
73    gcmctx->mres = 0;
74
75    /* IV is limited by 2^64 bits, thus 2^61 bytes */
76    if (ivlen > (U64(1) << 61))
77        return 0;
78
79    ossl_aes_gcm_setiv_avx512(gcmctx->key, gcmctx, iv, ivlen);
80
81    return 1;
82}
83
/*
 * Absorb additional authenticated data into the GHASH state.
 * May be called multiple times, but only before any message data has been
 * processed. Partial-block bytes are carried between calls in gcmctx->ares.
 * Returns 1 on success, 0 on bad call sequence or AAD length overflow.
 */
static int vaes_gcm_aadupdate(PROV_GCM_CTX *ctx,
                              const unsigned char *aad,
                              size_t aad_len)
{
    GCM128_CONTEXT *gcmctx = &ctx->gcm;
    u64 alen = gcmctx->len.u[0];
    unsigned int ares;
    size_t i, lenBlks;

    /* Bad sequence: call of AAD update after message processing */
    if (gcmctx->len.u[1] > 0)
        return 0;

    alen += aad_len;
    /* AAD is limited by 2^64 bits, thus 2^61 bytes (wrap check via alen < aad_len) */
    if ((alen > (U64(1) << 61)) || (alen < aad_len))
        return 0;

    gcmctx->len.u[0] = alen;

    ares = gcmctx->ares;
    /* Partial AAD block left from previous AAD update calls */
    if (ares > 0) {
        /*
         * Fill partial block buffer till full block
         * (note, the hash is stored reflected, hence the 15 - ares index)
         */
        while (ares > 0 && aad_len > 0) {
            gcmctx->Xi.c[15 - ares] ^= *(aad++);
            --aad_len;
            /* modulo wraps ares to 0 exactly when the block is complete */
            ares = (ares + 1) % AES_BLOCK_SIZE;
        }
        /* Full block gathered */
        if (ares == 0) {
            ossl_gcm_gmult_avx512(gcmctx->Xi.u, gcmctx);
        } else { /* no more AAD */
            gcmctx->ares = ares;
            return 1;
        }
    }

    /* Bulk AAD processing: round aad_len down to a multiple of the block size */
    lenBlks = aad_len & ((size_t)(-AES_BLOCK_SIZE));
    if (lenBlks > 0) {
        ossl_aes_gcm_update_aad_avx512(gcmctx, aad, lenBlks);
        aad += lenBlks;
        aad_len -= lenBlks;
    }

    /* Add remaining AAD to the hash (note, the hash is stored reflected) */
    if (aad_len > 0) {
        ares = aad_len;
        for (i = 0; i < aad_len; i++)
            gcmctx->Xi.c[15 - i] ^= aad[i];
    }

    /* Remember how many bytes of a partial block are pending (0 if none) */
    gcmctx->ares = ares;

    return 1;
}
144
145static int vaes_gcm_cipherupdate(PROV_GCM_CTX *ctx, const unsigned char *in,
146                                 size_t len, unsigned char *out)
147{
148    GCM128_CONTEXT *gcmctx = &ctx->gcm;
149    u64 mlen = gcmctx->len.u[1];
150
151    mlen += len;
152    if (mlen > ((U64(1) << 36) - 32) || (mlen < len))
153        return 0;
154
155    gcmctx->len.u[1] = mlen;
156
157    /* Finalize GHASH(AAD) if AAD partial blocks left unprocessed */
158    if (gcmctx->ares > 0) {
159        ossl_gcm_gmult_avx512(gcmctx->Xi.u, gcmctx);
160        gcmctx->ares = 0;
161    }
162
163    if (ctx->enc)
164        ossl_aes_gcm_encrypt_avx512(gcmctx->key, gcmctx, &gcmctx->mres, in, len, out);
165    else
166        ossl_aes_gcm_decrypt_avx512(gcmctx->key, gcmctx, &gcmctx->mres, in, len, out);
167
168    return 1;
169}
170
171static int vaes_gcm_cipherfinal(PROV_GCM_CTX *ctx, unsigned char *tag)
172{
173    GCM128_CONTEXT *gcmctx = &ctx->gcm;
174    unsigned int *res = &gcmctx->mres;
175
176    /* Finalize AAD processing */
177    if (gcmctx->ares > 0)
178        res = &gcmctx->ares;
179
180    ossl_aes_gcm_finalize_avx512(gcmctx, *res);
181
182    if (ctx->enc) {
183        ctx->taglen = GCM_TAG_MAX_SIZE;
184        memcpy(tag, gcmctx->Xi.c,
185               ctx->taglen <= sizeof(gcmctx->Xi.c) ? ctx->taglen :
186               sizeof(gcmctx->Xi.c));
187        *res = 0;
188    } else {
189        return !CRYPTO_memcmp(gcmctx->Xi.c, tag, ctx->taglen);
190    }
191
192    return 1;
193}
194
/* AVX512 VAES GCM dispatch table (positional order matches PROV_GCM_HW) */
static const PROV_GCM_HW vaes_gcm = {
    vaes_gcm_setkey,        /* setkey */
    vaes_gcm_setiv,         /* setiv */
    vaes_gcm_aadupdate,     /* aadupdate */
    vaes_gcm_cipherupdate,  /* cipherupdate */
    vaes_gcm_cipherfinal,   /* cipherfinal */
    ossl_gcm_one_shot       /* oneshot — generic fallback shared with other impls */
};
203
204#endif
205