/*
 * Copyright 2017-2021 The OpenSSL Project Authors. All Rights Reserved.
 * Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
 *
 * Licensed under the Apache License 2.0 (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

#include "internal/deprecated.h"

#include "internal/cryptlib.h"
#ifndef OPENSSL_NO_ARIA
# include <openssl/evp.h>
# include <openssl/modes.h>
# include <openssl/rand.h>
# include "crypto/aria.h"
# include "crypto/evp.h"
# include "crypto/modes.h"
# include "evp_local.h"

/* ARIA subkey Structure */
typedef struct {
    ARIA_KEY ks;
} EVP_ARIA_KEY;

/* ARIA GCM context */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        ARIA_KEY ks;
    } ks;                       /* ARIA subkey to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    GCM128_CONTEXT gcm;
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int taglen;
    int iv_gen;                 /* It is OK to generate IVs */
    int tls_aad_len;            /* TLS AAD length */
} EVP_ARIA_GCM_CTX;

/* ARIA CCM context */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        ARIA_KEY ks;
    } ks;                       /* ARIA key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    int tls_aad_len;            /* TLS AAD length */
    CCM128_CONTEXT ccm;
    ccm128_f str;
} EVP_ARIA_CCM_CTX;

/* Generate the ARIA key schedule (subkeys). */
static int aria_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                         const unsigned char *iv, int enc)
{
    int ret;
    int mode = EVP_CIPHER_CTX_get_mode(ctx);

    if (enc || (mode != EVP_CIPH_ECB_MODE && mode != EVP_CIPH_CBC_MODE))
        ret = ossl_aria_set_encrypt_key(key,
                                        EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                        EVP_CIPHER_CTX_get_cipher_data(ctx));
    else
        ret = ossl_aria_set_decrypt_key(key,
                                        EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                        EVP_CIPHER_CTX_get_cipher_data(ctx));
    if (ret < 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_ARIA_KEY_SETUP_FAILED);
        return 0;
    }
    return 1;
}

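/*
 * Mode wrappers around the raw ossl_aria_encrypt block function. These are
 * referenced by name from the IMPLEMENT_BLOCK_CIPHER and IMPLEMENT_CFBR
 * macro invocations further below.
 */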
static void aria_cbc_encrypt(const unsigned char *in, unsigned char *out,
                             size_t len, const ARIA_KEY *key,
                             unsigned char *ivec, const int enc)
{

    if (enc)
        CRYPTO_cbc128_encrypt(in, out, len, key, ivec,
                              (block128_f) ossl_aria_encrypt);
    else
        CRYPTO_cbc128_decrypt(in, out, len, key, ivec,
                              (block128_f) ossl_aria_encrypt);
}

static void aria_cfb128_encrypt(const unsigned char *in, unsigned char *out,
                                size_t length, const ARIA_KEY *key,
                                unsigned char *ivec, int *num, const int enc)
{

    CRYPTO_cfb128_encrypt(in, out, length, key, ivec, num, enc,
                          (block128_f) ossl_aria_encrypt);
}

static void aria_cfb1_encrypt(const unsigned char *in, unsigned char *out,
                              size_t length, const ARIA_KEY *key,
                              unsigned char *ivec, int *num, const int enc)
{
    CRYPTO_cfb128_1_encrypt(in, out, length, key, ivec, num, enc,
                            (block128_f) ossl_aria_encrypt);
}

static void aria_cfb8_encrypt(const unsigned char *in, unsigned char *out,
                              size_t length, const ARIA_KEY *key,
                              unsigned char *ivec, int *num, const int enc)
{
    CRYPTO_cfb128_8_encrypt(in, out, length, key, ivec, num, enc,
                            (block128_f) ossl_aria_encrypt);
}

static void aria_ecb_encrypt(const unsigned char *in, unsigned char *out,
                             const ARIA_KEY *key, const int enc)
{
    ossl_aria_encrypt(in, out, key);
}

static void aria_ofb128_encrypt(const unsigned char *in, unsigned char *out,
                                size_t length, const ARIA_KEY *key,
                                unsigned char *ivec, int *num)
{
    CRYPTO_ofb128_encrypt(in, out, length, key, ivec, num,
                          (block128_f) ossl_aria_encrypt);
}

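/*
 * Generate the EVP_CIPHER definitions and EVP_aria_*() getter functions for
 * the basic block modes at each key size (128, 192 and 256 bits).
 */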
IMPLEMENT_BLOCK_CIPHER(aria_128, ks, aria, EVP_ARIA_KEY,
                       NID_aria_128, 16, 16, 16, 128,
                       0, aria_init_key, NULL,
                       EVP_CIPHER_set_asn1_iv,
                       EVP_CIPHER_get_asn1_iv,
                       NULL)
IMPLEMENT_BLOCK_CIPHER(aria_192, ks, aria, EVP_ARIA_KEY,
                       NID_aria_192, 16, 24, 16, 128,
                       0, aria_init_key, NULL,
                       EVP_CIPHER_set_asn1_iv,
                       EVP_CIPHER_get_asn1_iv,
                       NULL)
IMPLEMENT_BLOCK_CIPHER(aria_256, ks, aria, EVP_ARIA_KEY,
                       NID_aria_256, 16, 32, 16, 128,
                       0, aria_init_key, NULL,
                       EVP_CIPHER_set_asn1_iv,
                       EVP_CIPHER_get_asn1_iv,
                       NULL)

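/* 1-bit and 8-bit CFB variants for each key size */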
# define IMPLEMENT_ARIA_CFBR(ksize,cbits) \
                IMPLEMENT_CFBR(aria,aria,EVP_ARIA_KEY,ks,ksize,cbits,16,0)
IMPLEMENT_ARIA_CFBR(128,1)
IMPLEMENT_ARIA_CFBR(192,1)
IMPLEMENT_ARIA_CFBR(256,1)
IMPLEMENT_ARIA_CFBR(128,8)
IMPLEMENT_ARIA_CFBR(192,8)
IMPLEMENT_ARIA_CFBR(256,8)

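/*
 * Generic EVP_CIPHER definition and EVP_aria_<keylen>_<mode>() getter;
 * in this file it is only used for the CTR mode ciphers below.
 */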
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        EVP_ORIG_GLOBAL, \
        aria_init_key, \
        aria_##mode##_cipher, \
        NULL, \
        sizeof(EVP_ARIA_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return &aria_##keylen##_##mode; }

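/*
 * CTR mode: the same code handles both directions, always using the
 * encryption key schedule.
 */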
static int aria_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    int n = EVP_CIPHER_CTX_get_num(ctx);
    unsigned int num;
    EVP_ARIA_KEY *dat = EVP_C_DATA(EVP_ARIA_KEY, ctx);

    if (n < 0)
        return 0;
    num = (unsigned int)n;

    CRYPTO_ctr128_encrypt(in, out, len, &dat->ks, ctx->iv,
                          EVP_CIPHER_CTX_buf_noconst(ctx), &num,
                          (block128_f) ossl_aria_encrypt);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}

BLOCK_CIPHER_generic(NID_aria, 128, 1, 16, ctr, ctr, CTR, 0)
BLOCK_CIPHER_generic(NID_aria, 192, 1, 16, ctr, ctr, CTR, 0)
BLOCK_CIPHER_generic(NID_aria, 256, 1, 16, ctr, ctr, CTR, 0)

/* Authenticated cipher modes (GCM/CCM) */

/* increment counter (64-bit int) by 1 */
static void ctr64_inc(unsigned char *counter)
{
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)
            return;
    } while (n);
}

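/*
 * Initialise the GCM cipher: set up the key schedule and/or the IV,
 * whichever has been supplied.
 */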
static int aria_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                             const unsigned char *iv, int enc)
{
    int ret;
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX, ctx);

    if (!iv && !key)
        return 1;
    if (key) {
        ret = ossl_aria_set_encrypt_key(key,
                                        EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                        &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                           (block128_f) ossl_aria_encrypt);
        if (ret < 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_ARIA_KEY_SETUP_FAILED);
            return 0;
        }

        /*
         * If we have an IV we can set it directly, otherwise use the
         * saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If the key is set use the IV directly, otherwise save a copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

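/* Handler for the EVP_CTRL_* requests supported by the GCM ciphers */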
static int aria_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX, c);

    switch (type) {
    case EVP_CTRL_INIT:
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = EVP_CIPHER_get_iv_length(c->cipher);
        gctx->iv = c->iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = gctx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;
        /* Allocate memory for IV if needed */
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
            if (gctx->iv != c->iv)
                OPENSSL_free(gctx->iv);
            if ((gctx->iv = OPENSSL_malloc(arg)) == NULL)
                return 0;
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if (arg <= 0 || arg > 16 || EVP_CIPHER_CTX_is_encrypting(c))
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (arg <= 0 || arg > 16 || !EVP_CIPHER_CTX_is_encrypting(c)
            || gctx->taglen < 0)
            return 0;
        memcpy(ptr, EVP_CIPHER_CTX_buf_noconst(c), arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;
        if (arg)
            memcpy(gctx->iv, ptr, arg);
        if (EVP_CIPHER_CTX_is_encrypting(c)
            && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;
        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        if (gctx->iv_gen == 0 || gctx->key_set == 0
            || EVP_CIPHER_CTX_is_encrypting(c))
            return 0;
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        gctx->tls_aad_len = arg;
        {
            unsigned int len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting, correct for the tag too */
            if (!EVP_CIPHER_CTX_is_encrypting(c)) {
                if (len < EVP_GCM_TLS_TAG_LEN)
                    return 0;
                len -= EVP_GCM_TLS_TAG_LEN;
            }
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_ARIA_GCM_CTX *gctx_out = EVP_C_DATA(EVP_ARIA_GCM_CTX, out);
            if (gctx->gcm.key) {
                if (gctx->gcm.key != &gctx->ks)
                    return 0;
                gctx_out->gcm.key = &gctx_out->ks;
            }
            if (gctx->iv == c->iv)
                gctx_out->iv = out->iv;
            else {
                if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL)
                    return 0;
                memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
            }
            return 1;
        }

    default:
        return -1;

    }
}

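/*
 * Handle a complete TLS record in one call: set or generate the explicit
 * IV, process the saved AAD, encrypt or decrypt the payload in place and
 * append or verify the tag.
 */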
static int aria_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                               const unsigned char *in, size_t len)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX, ctx);
    int rv = -1;

    /* Encrypt/decrypt must be performed in place */
    if (out != in
        || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;
    /*
     * Set IV from start of buffer or generate IV and write to start of
     * buffer.
     */
    if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CIPHER_CTX_is_encrypting(ctx) ?
                            EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;
    /* Use saved AAD */
    if (CRYPTO_gcm128_aad(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
                          gctx->tls_aad_len))
        goto err;
    /* Fix buffer and length to point to payload */
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
        /* Encrypt payload */
        if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
            goto err;
        out += len;
        /* Finally write tag */
        CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        /* Decrypt */
        if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
            goto err;
        /* Retrieve tag */
        CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
                          EVP_GCM_TLS_TAG_LEN);
        /* If tag mismatch wipe buffer */
        if (CRYPTO_memcmp(EVP_CIPHER_CTX_buf_noconst(ctx), in + len,
                          EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }

 err:
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}

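/* Generic GCM handler (AAD, payload, tag) for the non-TLS code path */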
static int aria_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX, ctx);

    /* If not set up, return error */
    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return aria_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;
    if (in) {
        if (out == NULL) {
            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
                return -1;
        } else if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
            if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
                return -1;
        } else {
            if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
                return -1;
        }
        return len;
    }
    if (!EVP_CIPHER_CTX_is_encrypting(ctx)) {
        if (gctx->taglen < 0)
            return -1;
        if (CRYPTO_gcm128_finish(&gctx->gcm,
                                 EVP_CIPHER_CTX_buf_noconst(ctx),
                                 gctx->taglen) != 0)
            return -1;
        gctx->iv_set = 0;
        return 0;
    }
    CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx), 16);
    gctx->taglen = 16;
    /* Don't reuse the IV */
    gctx->iv_set = 0;
    return 0;
}

static int aria_gcm_cleanup(EVP_CIPHER_CTX *ctx)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX, ctx);

    if (gctx->iv != ctx->iv)
        OPENSSL_free(gctx->iv);

    return 1;
}

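/*
 * Initialise the CCM cipher: set up the key schedule and/or the IV,
 * whichever has been supplied.
 */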
static int aria_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                             const unsigned char *iv, int enc)
{
    int ret;
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX, ctx);

    if (!iv && !key)
        return 1;

    if (key) {
        ret = ossl_aria_set_encrypt_key(key,
                                        EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                        &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) ossl_aria_encrypt);
        if (ret < 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_ARIA_KEY_SETUP_FAILED);
            return 0;
        }
        cctx->str = NULL;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(ctx->iv, iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

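/* Handler for the EVP_CTRL_* requests supported by the CCM ciphers */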
static int aria_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX, c);

    switch (type) {
    case EVP_CTRL_INIT:
        cctx->key_set = 0;
        cctx->iv_set = 0;
        cctx->L = 8;
        cctx->M = 12;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        cctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = 15 - cctx->L;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        cctx->tls_aad_len = arg;
        {
            uint16_t len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting, correct for the tag too */
            if (!EVP_CIPHER_CTX_is_encrypting(c)) {
                if (len < cctx->M)
                    return 0;
                len -= cctx->M;
            }
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return cctx->M;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        /* Sanity check length */
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;
        /* Just copy to first part of IV */
        memcpy(c->iv, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        arg = 15 - arg;
        /* fall through */
    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;
        cctx->L = arg;
        return 1;
    case EVP_CTRL_AEAD_SET_TAG:
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;
        if (EVP_CIPHER_CTX_is_encrypting(c) && ptr)
            return 0;
        if (ptr) {
            cctx->tag_set = 1;
            memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        }
        cctx->M = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (!EVP_CIPHER_CTX_is_encrypting(c) || !cctx->tag_set)
            return 0;
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
            return 0;
        cctx->tag_set = 0;
        cctx->iv_set = 0;
        cctx->len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_ARIA_CCM_CTX *cctx_out = EVP_C_DATA(EVP_ARIA_CCM_CTX, out);
            if (cctx->ccm.key) {
                if (cctx->ccm.key != &cctx->ks)
                    return 0;
                cctx_out->ccm.key = &cctx_out->ks;
            }
            return 1;
        }

    default:
        return -1;
    }
}

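/*
 * Handle a complete TLS record in one call: fill in the explicit IV, set
 * the message length, feed the saved AAD and then encrypt or decrypt the
 * payload in place, appending or verifying the tag.
 */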
static int aria_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                               const unsigned char *in, size_t len)
{
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX, ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;

    /* Encrypt/decrypt must be performed in place */
    if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
        return -1;
    /* If encrypting set explicit IV from sequence number (start of AAD) */
    if (EVP_CIPHER_CTX_is_encrypting(ctx))
        memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
               EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Get rest of IV from explicit IV */
    memcpy(ctx->iv + EVP_CCM_TLS_FIXED_IV_LEN, in,
           EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Correct length value */
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L,
                            len))
        return -1;
    /* Use saved AAD */
    CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx),
                      cctx->tls_aad_len);
    /* Fix buffer to point to payload */
    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, cctx->str)
            : CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
            return -1;
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    } else {
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len, cctx->str)
            : !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, in + len, cctx->M))
                    return len;
            }
        }
        OPENSSL_cleanse(out, len);
        return -1;
    }
}

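/* Generic CCM handler (AAD, payload, tag) for the non-TLS code path */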
static int aria_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX, ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;

    /* If not set up, return error */
    if (!cctx->key_set)
        return -1;

    if (cctx->tls_aad_len >= 0)
        return aria_ccm_tls_cipher(ctx, out, in, len);

    /* EVP_*Final() doesn't return any data */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->iv_set)
        return -1;

    if (!out) {
        if (!in) {
            if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
                return -1;
            cctx->len_set = 1;
            return len;
        }
        /* If we have AAD, we need the message length */
        if (!cctx->len_set && len)
            return -1;
        CRYPTO_ccm128_aad(ccm, in, len);
        return len;
    }

    /* The tag must be set before actually decrypting data */
    if (!EVP_CIPHER_CTX_is_encrypting(ctx) && !cctx->tag_set)
        return -1;

    /* If the length has not been set yet, do it now */
    if (!cctx->len_set) {
        if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
            return -1;
        cctx->len_set = 1;
    }
    if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, cctx->str)
            : CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        cctx->tag_set = 1;
        return len;
    } else {
        int rv = -1;
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
                                   cctx->M))
                    rv = len;
            }
        }
        if (rv == -1)
            OPENSSL_cleanse(out, len);
        cctx->iv_set = 0;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return rv;
    }
}

#define aria_ccm_cleanup NULL

#define ARIA_AUTH_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                         | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                         | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                         | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_FLAG_AEAD_CIPHER \
                         | EVP_CIPH_CUSTOM_IV_LENGTH)

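/*
 * EVP_CIPHER definition and EVP_aria_<keylen>_<gcm|ccm>() getter for the
 * AEAD modes.
 */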
#define BLOCK_CIPHER_aead(keylen,mode,MODE) \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        NID_aria_##keylen##_##mode, \
        1, keylen/8, 12, \
        ARIA_AUTH_FLAGS|EVP_CIPH_##MODE##_MODE, \
        EVP_ORIG_GLOBAL, \
        aria_##mode##_init_key, \
        aria_##mode##_cipher, \
        aria_##mode##_cleanup, \
        sizeof(EVP_ARIA_##MODE##_CTX), \
        NULL,NULL,aria_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return (EVP_CIPHER*)&aria_##keylen##_##mode; }

BLOCK_CIPHER_aead(128, gcm, GCM)
BLOCK_CIPHER_aead(192, gcm, GCM)
BLOCK_CIPHER_aead(256, gcm, GCM)

BLOCK_CIPHER_aead(128, ccm, CCM)
BLOCK_CIPHER_aead(192, ccm, CCM)
BLOCK_CIPHER_aead(256, ccm, CCM)
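
/*-
 * Example (an illustrative sketch, not part of this file): the AEAD ciphers
 * defined above are normally driven through the generic EVP interface;
 * "key", "iv", "aad", "pt"/"ct", "tag" and the length variables below are
 * caller-supplied placeholders.
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     EVP_EncryptInit_ex(c, EVP_aria_128_gcm(), NULL, key, iv);
 *     EVP_EncryptUpdate(c, NULL, &outl, aad, aad_len);   (AAD only)
 *     EVP_EncryptUpdate(c, ct, &outl, pt, pt_len);       (payload)
 *     EVP_EncryptFinal_ex(c, ct + outl, &tmplen);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 *     EVP_CIPHER_CTX_free(c);
 *
 * Error checking is omitted for brevity.
 */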

#endif