/*
 * Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License").  You may not use
 * this file except in compliance with the License.  You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

/* Dispatch functions for AES GCM mode */

/*
 * This file uses the low level AES functions (which are deprecated for
 * non-internal use) in order to implement provider AES ciphers.
 */
#include "internal/deprecated.h"

#include "cipher_aes_gcm.h"

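/*
 * Choose the key schedule, block cipher and CTR routines for this key at
 * runtime: hardware AES (HWAES) is preferred, then bit-sliced (BSAES) and
 * vector-permutation (VPAES) code where available, with the generic
 * AES_set_encrypt_key/AES_encrypt pair as the final fallback.  When no ctr32
 * routine is set, the update path below uses the plain block function via
 * CRYPTO_gcm128_encrypt()/CRYPTO_gcm128_decrypt().
 */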
static int aes_gcm_initkey(PROV_GCM_CTX *ctx, const unsigned char *key,
                           size_t keylen)
{
    PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
    AES_KEY *ks = &actx->ks.ks;

# ifdef HWAES_CAPABLE
    if (HWAES_CAPABLE) {
#  ifdef HWAES_ctr32_encrypt_blocks
        GCM_HW_SET_KEY_CTR_FN(ks, HWAES_set_encrypt_key, HWAES_encrypt,
                              HWAES_ctr32_encrypt_blocks);
#  else
        GCM_HW_SET_KEY_CTR_FN(ks, HWAES_set_encrypt_key, HWAES_encrypt, NULL);
#  endif /* HWAES_ctr32_encrypt_blocks */
    } else
# endif /* HWAES_CAPABLE */

# ifdef BSAES_CAPABLE
    if (BSAES_CAPABLE) {
        GCM_HW_SET_KEY_CTR_FN(ks, AES_set_encrypt_key, AES_encrypt,
                              ossl_bsaes_ctr32_encrypt_blocks);
    } else
# endif /* BSAES_CAPABLE */

# ifdef VPAES_CAPABLE
    if (VPAES_CAPABLE) {
        GCM_HW_SET_KEY_CTR_FN(ks, vpaes_set_encrypt_key, vpaes_encrypt, NULL);
    } else
# endif /* VPAES_CAPABLE */

    {
# ifdef AES_CTR_ASM
        GCM_HW_SET_KEY_CTR_FN(ks, AES_set_encrypt_key, AES_encrypt,
                              AES_ctr32_encrypt);
# else
        GCM_HW_SET_KEY_CTR_FN(ks, AES_set_encrypt_key, AES_encrypt, NULL);
# endif /* AES_CTR_ASM */
    }
    return 1;
}

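/*
 * Encrypt or decrypt |len| bytes from |in| to |out|.  When the stitched
 * AES-GCM assembly is available (AES_GCM_ASM) and the input is large enough,
 * any partially processed block is completed first, the bulk of the data is
 * handed to AES_gcm_encrypt()/AES_gcm_decrypt(), and the remaining tail goes
 * through the ctr32 path.  Returns 1 on success, 0 on failure.
 */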
static int generic_aes_gcm_cipher_update(PROV_GCM_CTX *ctx, const unsigned char *in,
                                         size_t len, unsigned char *out)
{
    if (ctx->enc) {
        if (ctx->ctr != NULL) {
#if defined(AES_GCM_ASM)
            size_t bulk = 0;

            if (len >= AES_GCM_ENC_BYTES && AES_GCM_ASM(ctx)) {
                size_t res = (16 - ctx->gcm.mres) % 16;

                if (CRYPTO_gcm128_encrypt(&ctx->gcm, in, out, res))
                    return 0;

                bulk = AES_gcm_encrypt(in + res, out + res, len - res,
                                       ctx->gcm.key,
                                       ctx->gcm.Yi.c, ctx->gcm.Xi.u);

                ctx->gcm.len.u[1] += bulk;
                bulk += res;
            }
            if (CRYPTO_gcm128_encrypt_ctr32(&ctx->gcm, in + bulk, out + bulk,
                                            len - bulk, ctx->ctr))
                return 0;
#else
            if (CRYPTO_gcm128_encrypt_ctr32(&ctx->gcm, in, out, len, ctx->ctr))
                return 0;
#endif /* AES_GCM_ASM */
        } else {
            if (CRYPTO_gcm128_encrypt(&ctx->gcm, in, out, len))
                return 0;
        }
    } else {
        if (ctx->ctr != NULL) {
#if defined(AES_GCM_ASM)
            size_t bulk = 0;

            if (len >= AES_GCM_DEC_BYTES && AES_GCM_ASM(ctx)) {
                size_t res = (16 - ctx->gcm.mres) % 16;

                if (CRYPTO_gcm128_decrypt(&ctx->gcm, in, out, res))
                    return 0;

                bulk = AES_gcm_decrypt(in + res, out + res, len - res,
                                       ctx->gcm.key,
                                       ctx->gcm.Yi.c, ctx->gcm.Xi.u);

                ctx->gcm.len.u[1] += bulk;
                bulk += res;
            }
            if (CRYPTO_gcm128_decrypt_ctr32(&ctx->gcm, in + bulk, out + bulk,
                                            len - bulk, ctx->ctr))
                return 0;
#else
            if (CRYPTO_gcm128_decrypt_ctr32(&ctx->gcm, in, out, len, ctx->ctr))
                return 0;
#endif /* AES_GCM_ASM */
        } else {
            if (CRYPTO_gcm128_decrypt(&ctx->gcm, in, out, len))
                return 0;
        }
    }
    return 1;
}

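/* Generic dispatch table wiring the provider GCM entry points to the routines above */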
static const PROV_GCM_HW aes_gcm = {
    aes_gcm_initkey,
    ossl_gcm_setiv,
    ossl_gcm_aad_update,
    generic_aes_gcm_cipher_update,
    ossl_gcm_cipher_final,
    ossl_gcm_one_shot
};

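/*
 * Pick a platform-specific backend where one exists; each included .inc file
 * supplies its own ossl_prov_aes_hw_gcm().  Otherwise fall back to returning
 * the generic dispatch table defined above.
 */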
#if defined(S390X_aes_128_CAPABLE)
# include "cipher_aes_gcm_hw_s390x.inc"
#elif defined(AESNI_CAPABLE)
# include "cipher_aes_gcm_hw_aesni.inc"
#elif defined(SPARC_AES_CAPABLE)
# include "cipher_aes_gcm_hw_t4.inc"
#elif defined(AES_PMULL_CAPABLE) && defined(AES_GCM_ASM)
# include "cipher_aes_gcm_hw_armv8.inc"
#elif defined(PPC_AES_GCM_CAPABLE) && defined(_ARCH_PPC64)
# include "cipher_aes_gcm_hw_ppc.inc"
#elif defined(OPENSSL_CPUID_OBJ) && defined(__riscv) && __riscv_xlen == 64
# include "cipher_aes_gcm_hw_rv64i.inc"
#elif defined(OPENSSL_CPUID_OBJ) && defined(__riscv) && __riscv_xlen == 32
# include "cipher_aes_gcm_hw_rv32i.inc"
#else
const PROV_GCM_HW *ossl_prov_aes_hw_gcm(size_t keybits)
{
    return &aes_gcm;
}
#endif