/*
 * Copyright 2019-2023 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License").  You may not use
 * this file except in compliance with the License.  You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

/*
 * Crypto extension support for AES GCM.
 * This file is included by cipher_aes_gcm_hw.c
 */

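/*
 * Bulk encryption of the aligned part of the input.  Only whole 16-byte
 * blocks are handled here; any unaligned tail is left for the generic GCM
 * code.  The fused AES-GCM kernels take the input length in bits, hence
 * the align_bytes * 8 below.  Returns the number of bytes processed.
 */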
size_t armv8_aes_gcm_encrypt(const unsigned char *in, unsigned char *out,
                             size_t len, const void *key,
                             unsigned char ivec[16], u64 *Xi)
{
    /* Round the length down to a multiple of the 16-byte AES block size. */
    size_t align_bytes = len - len % 16;
    const AES_KEY *aes_key = (const AES_KEY *)key;

    /* 10, 12 and 14 rounds correspond to AES-128, AES-192 and AES-256. */
    switch (aes_key->rounds) {
        case 10:
            if (IS_CPU_SUPPORT_UNROLL8_EOR3()) {
                unroll8_eor3_aes_gcm_enc_128_kernel(in, align_bytes * 8, out,
                                                    (uint64_t *)Xi, ivec, key);
            } else {
                aes_gcm_enc_128_kernel(in, align_bytes * 8, out,
                                       (uint64_t *)Xi, ivec, key);
            }
            break;
        case 12:
            if (IS_CPU_SUPPORT_UNROLL8_EOR3()) {
                unroll8_eor3_aes_gcm_enc_192_kernel(in, align_bytes * 8, out,
                                                    (uint64_t *)Xi, ivec, key);
            } else {
                aes_gcm_enc_192_kernel(in, align_bytes * 8, out,
                                       (uint64_t *)Xi, ivec, key);
            }
            break;
        case 14:
            if (IS_CPU_SUPPORT_UNROLL8_EOR3()) {
                unroll8_eor3_aes_gcm_enc_256_kernel(in, align_bytes * 8, out,
                                                    (uint64_t *)Xi, ivec, key);
            } else {
                aes_gcm_enc_256_kernel(in, align_bytes * 8, out,
                                       (uint64_t *)Xi, ivec, key);
            }
            break;
    }
    return align_bytes;
}
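/*
 * Bulk decryption counterpart of armv8_aes_gcm_encrypt(): processes only
 * the whole 16-byte blocks of the input and returns the number of bytes
 * handled, leaving any tail to the generic GCM code.
 */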
size_t armv8_aes_gcm_decrypt(const unsigned char *in, unsigned char *out,
                             size_t len, const void *key,
                             unsigned char ivec[16], u64 *Xi)
{
    /* Round the length down to a multiple of the 16-byte AES block size. */
    size_t align_bytes = len - len % 16;
    const AES_KEY *aes_key = (const AES_KEY *)key;

    /* 10, 12 and 14 rounds correspond to AES-128, AES-192 and AES-256. */
    switch (aes_key->rounds) {
        case 10:
            if (IS_CPU_SUPPORT_UNROLL8_EOR3()) {
                unroll8_eor3_aes_gcm_dec_128_kernel(in, align_bytes * 8, out,
                                                    (uint64_t *)Xi, ivec, key);
            } else {
                aes_gcm_dec_128_kernel(in, align_bytes * 8, out,
                                       (uint64_t *)Xi, ivec, key);
            }
            break;
        case 12:
            if (IS_CPU_SUPPORT_UNROLL8_EOR3()) {
                unroll8_eor3_aes_gcm_dec_192_kernel(in, align_bytes * 8, out,
                                                    (uint64_t *)Xi, ivec, key);
            } else {
                aes_gcm_dec_192_kernel(in, align_bytes * 8, out,
                                       (uint64_t *)Xi, ivec, key);
            }
            break;
        case 14:
            if (IS_CPU_SUPPORT_UNROLL8_EOR3()) {
                unroll8_eor3_aes_gcm_dec_256_kernel(in, align_bytes * 8, out,
                                                    (uint64_t *)Xi, ivec, key);
            } else {
                aes_gcm_dec_256_kernel(in, align_bytes * 8, out,
                                       (uint64_t *)Xi, ivec, key);
            }
            break;
    }
    return align_bytes;
}
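/*
 * Set up the AES key schedule and pick the CTR kernel: the EOR3-based
 * unroll12 kernel is used when the CPU advertises support for it,
 * otherwise the plain Crypto Extensions CTR kernel is used.
 */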
static int armv8_aes_gcm_initkey(PROV_GCM_CTX *ctx, const unsigned char *key,
                                 size_t keylen)
{
    PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
    AES_KEY *ks = &actx->ks.ks;

    if (AES_UNROLL12_EOR3_CAPABLE) {
        GCM_HW_SET_KEY_CTR_FN(ks, aes_v8_set_encrypt_key, aes_v8_encrypt,
                              aes_v8_ctr32_encrypt_blocks_unroll12_eor3);
    } else {
        GCM_HW_SET_KEY_CTR_FN(ks, aes_v8_set_encrypt_key, aes_v8_encrypt,
                              aes_v8_ctr32_encrypt_blocks);
    }
    return 1;
}
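/* Hardware dispatch table: ARMv8 key setup wired to the generic GCM helpers. */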
static const PROV_GCM_HW armv8_aes_gcm = {
    armv8_aes_gcm_initkey,
    ossl_gcm_setiv,
    ossl_gcm_aad_update,
    generic_aes_gcm_cipher_update,
    ossl_gcm_cipher_final,
    ossl_gcm_one_shot
};
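/*
 * Return the ARMv8 implementation when the PMULL (polynomial multiply)
 * instructions used for GHASH are available, otherwise fall back to the
 * generic AES-GCM table.
 */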
const PROV_GCM_HW *ossl_prov_aes_hw_gcm(size_t keybits)
{
    return AES_PMULL_CAPABLE ? &armv8_aes_gcm : &aes_gcm;
}