/*
 * Copyright 2019-2021 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License").  You may not use
 * this file except in compliance with the License.  You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

/*
 * AES low level APIs are deprecated for public use, but still ok for internal
 * use where we're using them to implement the higher level EVP interface, as is
 * the case here.
 */
#include "internal/deprecated.h"

#include <openssl/evp.h>
#include <internal/endian.h>
#include <prov/implementations.h>
#include "cipher_aes_gcm_siv.h"

static int aes_gcm_siv_ctr32(PROV_AES_GCM_SIV_CTX *ctx, const unsigned char *init_counter,
                             unsigned char *out, const unsigned char *in, size_t len);

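/*
 * Derive the per-nonce keys as described in RFC 8452, Section 4: each
 * derivation block is a 32-bit little-endian counter followed by the 96-bit
 * nonce, encrypted under the key-generating key with AES-ECB, keeping only
 * the first 8 bytes of each ciphertext block.  Counters 0-1 produce the
 * 128-bit message-authentication (POLYVAL) key; the following counters
 * produce the message-encryption key, whose length matches the AES key length.
 */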
static int aes_gcm_siv_initkey(void *vctx)
{
    PROV_AES_GCM_SIV_CTX *ctx = (PROV_AES_GCM_SIV_CTX *)vctx;
    uint8_t output[BLOCK_SIZE];
    uint32_t counter = 0x0;
    size_t i;
    union {
        uint32_t counter;
        uint8_t block[BLOCK_SIZE];
    } data;
    int out_len;
    EVP_CIPHER *ecb = NULL;
    DECLARE_IS_ENDIAN;

    switch (ctx->key_len) {
    case 16:
        ecb = EVP_CIPHER_fetch(ctx->libctx, "AES-128-ECB", NULL);
        break;
    case 24:
        ecb = EVP_CIPHER_fetch(ctx->libctx, "AES-192-ECB", NULL);
        break;
    case 32:
        ecb = EVP_CIPHER_fetch(ctx->libctx, "AES-256-ECB", NULL);
        break;
    default:
        goto err;
    }

    if (ctx->ecb_ctx == NULL && (ctx->ecb_ctx = EVP_CIPHER_CTX_new()) == NULL)
        goto err;
    if (!EVP_EncryptInit_ex2(ctx->ecb_ctx, ecb, ctx->key_gen_key, NULL, NULL))
        goto err;

    memset(&data, 0, sizeof(data));
    memcpy(&data.block[sizeof(data.counter)], ctx->nonce, NONCE_SIZE);

    /* msg_auth_key is always 16 bytes in size, regardless of AES128/AES256 */
    /* counter is stored little-endian */
    for (i = 0; i < BLOCK_SIZE; i += 8) {
        if (IS_LITTLE_ENDIAN) {
            data.counter = counter;
        } else {
            data.counter = GSWAP4(counter);
        }
        /* Block size is 16 (128 bits), but only 8 bytes are used */
        out_len = BLOCK_SIZE;
        if (!EVP_EncryptUpdate(ctx->ecb_ctx, output, &out_len, data.block, BLOCK_SIZE))
            goto err;
        memcpy(&ctx->msg_auth_key[i], output, 8);
        counter++;
    }

    /* msg_enc_key length is directly tied to key length AES128/AES256 */
    for (i = 0; i < ctx->key_len; i += 8) {
        if (IS_LITTLE_ENDIAN) {
            data.counter = counter;
        } else {
            data.counter = GSWAP4(counter);
        }
        /* Block size is 16 bytes (128 bits), but only 8 bytes are used */
        out_len = BLOCK_SIZE;
        if (!EVP_EncryptUpdate(ctx->ecb_ctx, output, &out_len, data.block, BLOCK_SIZE))
            goto err;
        memcpy(&ctx->msg_enc_key[i], output, 8);
        counter++;
    }

    if (!EVP_EncryptInit_ex2(ctx->ecb_ctx, ecb, ctx->msg_enc_key, NULL, NULL))
        goto err;

    /* Freshen up the state */
    ctx->used_enc = 0;
    ctx->used_dec = 0;
    EVP_CIPHER_free(ecb);
    return 1;
 err:
    EVP_CIPHER_CTX_free(ctx->ecb_ctx);
    EVP_CIPHER_free(ecb);
    ctx->ecb_ctx = NULL;
    return 0;
}

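/*
 * Buffer the AAD for the final POLYVAL computation.  The buffer is grown and
 * rounded up to a multiple of 16 bytes (zero padded) so it can later be
 * hashed in whole blocks; RFC 8452 caps the AAD at 2^36 bytes.
 * A zero-length call resets any AAD collected so far.
 */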
static int aes_gcm_siv_aad(PROV_AES_GCM_SIV_CTX *ctx,
                           const unsigned char *aad, size_t len)
{
    size_t to_alloc;
    uint8_t *ptr;
    uint64_t len64;

    /* length of 0 resets the AAD */
    if (len == 0) {
        OPENSSL_free(ctx->aad);
        ctx->aad = NULL;
        ctx->aad_len = 0;
        return 1;
    }
    to_alloc = UP16(ctx->aad_len + len);
    /* need to check the size of the AAD per RFC8452 */
    len64 = to_alloc;
    if (len64 > ((uint64_t)1 << 36))
        return 0;
    ptr = OPENSSL_realloc(ctx->aad, to_alloc);
    if (ptr == NULL)
        return 0;
    ctx->aad = ptr;
    memcpy(&ctx->aad[ctx->aad_len], aad, len);
    ctx->aad_len += len;
    if (to_alloc > ctx->aad_len)
        memset(&ctx->aad[ctx->aad_len], 0, to_alloc - ctx->aad_len);
    return 1;
}

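/*
 * Final step of the one-shot operation.  For encryption this reports whether
 * a tag was successfully generated; for decryption it succeeds only if a
 * user-supplied tag is present and matches the tag recomputed from the
 * decrypted data, compared in constant time with CRYPTO_memcmp().
 */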
static int aes_gcm_siv_finish(PROV_AES_GCM_SIV_CTX *ctx)
{
    int ret = 0;

    if (ctx->enc)
        return ctx->generated_tag;
    ret = !CRYPTO_memcmp(ctx->tag, ctx->user_tag, sizeof(ctx->tag));
    ret &= ctx->have_user_tag;
    return ret;
}

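/*
 * Per RFC 8452, Section 4, the tag is derived from the plaintext before
 * encryption.  POLYVAL, keyed with the message-authentication key, is run
 * over the zero-padded AAD, the zero-padded plaintext and a final block
 * holding the bit lengths of both.  The first 12 bytes of the result are
 * XORed with the nonce, the most significant bit of the last byte is cleared,
 * and the block is encrypted with the message-encryption key to give the tag.
 * The ciphertext is then produced with 32-bit CTR mode, starting from the tag
 * with that bit set.
 */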
static int aes_gcm_siv_encrypt(PROV_AES_GCM_SIV_CTX *ctx, const unsigned char *in,
                               unsigned char *out, size_t len)
{
    uint64_t len_blk[2];
    uint8_t S_s[TAG_SIZE];
    uint8_t counter_block[TAG_SIZE];
    uint8_t padding[BLOCK_SIZE];
    size_t i;
    int64_t len64 = len;
    int out_len;
    int error = 0;
    DECLARE_IS_ENDIAN;

    ctx->generated_tag = 0;
    if (!ctx->speed && ctx->used_enc)
        return 0;
    /* need to check the size of the input! */
    if (len64 > ((int64_t)1 << 36) || len == 0)
        return 0;

    if (IS_LITTLE_ENDIAN) {
        len_blk[0] = (uint64_t)ctx->aad_len * 8;
        len_blk[1] = (uint64_t)len * 8;
    } else {
        len_blk[0] = GSWAP8((uint64_t)ctx->aad_len * 8);
        len_blk[1] = GSWAP8((uint64_t)len * 8);
    }
    memset(S_s, 0, TAG_SIZE);
    ossl_polyval_ghash_init(ctx->Htable, (const uint64_t*)ctx->msg_auth_key);

    if (ctx->aad != NULL) {
        /* AAD is allocated with padding, but need to adjust length */
        ossl_polyval_ghash_hash(ctx->Htable, S_s, ctx->aad, UP16(ctx->aad_len));
    }
    if (DOWN16(len) > 0)
        ossl_polyval_ghash_hash(ctx->Htable, S_s, (uint8_t *) in, DOWN16(len));
    if (!IS16(len)) {
        /* deal with padding - probably easier to memset the padding first rather than calculate */
        memset(padding, 0, sizeof(padding));
        memcpy(padding, &in[DOWN16(len)], REMAINDER16(len));
        ossl_polyval_ghash_hash(ctx->Htable, S_s, padding, sizeof(padding));
    }
    ossl_polyval_ghash_hash(ctx->Htable, S_s, (uint8_t *) len_blk, sizeof(len_blk));

    for (i = 0; i < NONCE_SIZE; i++)
        S_s[i] ^= ctx->nonce[i];

    S_s[TAG_SIZE - 1] &= 0x7f;
    out_len = sizeof(ctx->tag);
    error |= !EVP_EncryptUpdate(ctx->ecb_ctx, ctx->tag, &out_len, S_s, sizeof(S_s));
    memcpy(counter_block, ctx->tag, TAG_SIZE);
    counter_block[TAG_SIZE - 1] |= 0x80;

    error |= !aes_gcm_siv_ctr32(ctx, counter_block, out, in, len);

    ctx->generated_tag = !error;
    /* Regardless of error */
    ctx->used_enc = 1;
    return !error;
}

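/*
 * Decryption reverses the order: the ciphertext is first decrypted with
 * 32-bit CTR mode keyed from the received tag (with the most significant bit
 * of its last byte set), then the expected tag is recomputed over the AAD and
 * the recovered plaintext exactly as in encryption.  The comparison against
 * the user-supplied tag happens in aes_gcm_siv_finish().
 */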
static int aes_gcm_siv_decrypt(PROV_AES_GCM_SIV_CTX *ctx, const unsigned char *in,
                               unsigned char *out, size_t len)
{
    uint8_t counter_block[TAG_SIZE];
    uint64_t len_blk[2];
    uint8_t S_s[TAG_SIZE];
    size_t i;
    uint64_t padding[2];
    int64_t len64 = len;
    int out_len;
    int error = 0;
    DECLARE_IS_ENDIAN;

    ctx->generated_tag = 0;
    if (!ctx->speed && ctx->used_dec)
        return 0;
    /* need to check the size of the input! */
    if (len64 > ((int64_t)1 << 36) || len == 0)
        return 0;

    memcpy(counter_block, ctx->user_tag, sizeof(counter_block));
    counter_block[TAG_SIZE - 1] |= 0x80;

    error |= !aes_gcm_siv_ctr32(ctx, counter_block, out, in, len);

    if (IS_LITTLE_ENDIAN) {
        len_blk[0] = (uint64_t)ctx->aad_len * 8;
        len_blk[1] = (uint64_t)len * 8;
    } else {
        len_blk[0] = GSWAP8((uint64_t)ctx->aad_len * 8);
        len_blk[1] = GSWAP8((uint64_t)len * 8);
    }
    memset(S_s, 0, TAG_SIZE);
    ossl_polyval_ghash_init(ctx->Htable, (const uint64_t*)ctx->msg_auth_key);
    if (ctx->aad != NULL) {
        /* AAD allocated with padding, but need to adjust length */
        ossl_polyval_ghash_hash(ctx->Htable, S_s, ctx->aad, UP16(ctx->aad_len));
    }
    if (DOWN16(len) > 0)
        ossl_polyval_ghash_hash(ctx->Htable, S_s, out, DOWN16(len));
    if (!IS16(len)) {
        /* deal with padding - probably easier to "memset" the padding first rather than calculate */
        padding[0] = padding[1] = 0;
        memcpy(padding, &out[DOWN16(len)], REMAINDER16(len));
        ossl_polyval_ghash_hash(ctx->Htable, S_s, (uint8_t *)padding, sizeof(padding));
    }
    ossl_polyval_ghash_hash(ctx->Htable, S_s, (uint8_t *)len_blk, TAG_SIZE);

    for (i = 0; i < NONCE_SIZE; i++)
        S_s[i] ^= ctx->nonce[i];

    S_s[TAG_SIZE - 1] &= 0x7f;

    /*
     * In the ctx, user_tag is the one received/set by the user,
     * and tag is generated from the input
     */
    out_len = sizeof(ctx->tag);
    error |= !EVP_EncryptUpdate(ctx->ecb_ctx, ctx->tag, &out_len, S_s, sizeof(S_s));
    ctx->generated_tag = !error;
    /* Regardless of error */
    ctx->used_dec = 1;
    return !error;
}

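/*
 * Single entry point used by the provider: a NULL input signals the final
 * call (tag generation/check), a NULL output buffer passes associated data,
 * and otherwise the data is encrypted or decrypted in one shot.
 */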
static int aes_gcm_siv_cipher(void *vctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    PROV_AES_GCM_SIV_CTX *ctx = (PROV_AES_GCM_SIV_CTX *)vctx;

    /* EncryptFinal or DecryptFinal */
    if (in == NULL)
        return aes_gcm_siv_finish(ctx);

    /* Deal with associated data */
    if (out == NULL)
        return aes_gcm_siv_aad(ctx, in, len);

    if (ctx->enc)
        return aes_gcm_siv_encrypt(ctx, in, out, len);

    return aes_gcm_siv_decrypt(ctx, in, out, len);
}

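/* Release the inner ECB cipher context when the provider context is cleaned up */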
static void aes_gcm_siv_clean_ctx(void *vctx)
{
    PROV_AES_GCM_SIV_CTX *ctx = (PROV_AES_GCM_SIV_CTX *)vctx;

    EVP_CIPHER_CTX_free(ctx->ecb_ctx);
    ctx->ecb_ctx = NULL;
}

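/* Duplicate the inner ECB cipher context when the provider context is copied */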
static int aes_gcm_siv_dup_ctx(void *vdst, void *vsrc)
{
    PROV_AES_GCM_SIV_CTX *dst = (PROV_AES_GCM_SIV_CTX *)vdst;
    PROV_AES_GCM_SIV_CTX *src = (PROV_AES_GCM_SIV_CTX *)vsrc;

    dst->ecb_ctx = NULL;
    if (src->ecb_ctx != NULL) {
        if ((dst->ecb_ctx = EVP_CIPHER_CTX_new()) == NULL)
            goto err;
        if (!EVP_CIPHER_CTX_copy(dst->ecb_ctx, src->ecb_ctx))
            goto err;
    }
    return 1;

 err:
    EVP_CIPHER_CTX_free(dst->ecb_ctx);
    dst->ecb_ctx = NULL;
    return 0;
}

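/* Dispatch table returned by ossl_prov_cipher_hw_aes_gcm_siv() for all key sizes */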
static const PROV_CIPHER_HW_AES_GCM_SIV aes_gcm_siv_hw =
{
    aes_gcm_siv_initkey,
    aes_gcm_siv_cipher,
    aes_gcm_siv_dup_ctx,
    aes_gcm_siv_clean_ctx,
};

const PROV_CIPHER_HW_AES_GCM_SIV *ossl_prov_cipher_hw_aes_gcm_siv(size_t keybits)
{
    return &aes_gcm_siv_hw;
}

/* AES-GCM-SIV needs AES-CTR32, which is different from the regular AES-CTR implementation */
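/*
 * Per RFC 8452: the counter is the 32-bit little-endian integer formed by the
 * first four bytes of the block.  It is incremented modulo 2^32 for every
 * keystream block while the remaining twelve bytes stay fixed.
 */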
static int aes_gcm_siv_ctr32(PROV_AES_GCM_SIV_CTX *ctx, const unsigned char *init_counter,
                             unsigned char *out, const unsigned char *in, size_t len)
{
    uint8_t keystream[BLOCK_SIZE];
    int out_len;
    size_t i;
    size_t j;
    size_t todo;
    uint32_t counter;
    int error = 0;
    union {
        uint32_t x32[BLOCK_SIZE / sizeof(uint32_t)];
        uint8_t x8[BLOCK_SIZE];
    } block;
    DECLARE_IS_ENDIAN;

    memcpy(&block, init_counter, sizeof(block));
    if (IS_BIG_ENDIAN) {
        counter = GSWAP4(block.x32[0]);
    }

    for (i = 0; i < len; i += sizeof(block)) {
        out_len = BLOCK_SIZE;
        error |= !EVP_EncryptUpdate(ctx->ecb_ctx, keystream, &out_len, (uint8_t*)&block, sizeof(block));
        if (IS_LITTLE_ENDIAN) {
            block.x32[0]++;
        } else {
            counter++;
            block.x32[0] = GSWAP4(counter);
        }
        todo = len - i;
        if (todo > sizeof(keystream))
            todo = sizeof(keystream);
        /* Non optimal, but avoids alignment issues */
        for (j = 0; j < todo; j++)
            out[i + j] = in[i + j] ^ keystream[j];
    }
    return !error;
}