1 /* ====================================================================
2 * Copyright (c) 2001-2011 The OpenSSL Project. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
11 * 2. Redistributions in binary form must reproduce the above copyright
12 * notice, this list of conditions and the following disclaimer in
13 * the documentation and/or other materials provided with the
16 * 3. All advertising materials mentioning features or use of this
17 * software must display the following acknowledgment:
18 * "This product includes software developed by the OpenSSL Project
19 * for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
21 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
22 * endorse or promote products derived from this software without
23 * prior written permission. For written permission, please contact
24 * openssl-core@openssl.org.
26 * 5. Products derived from this software may not be called "OpenSSL"
27 * nor may "OpenSSL" appear in their names without prior written
28 * permission of the OpenSSL Project.
30 * 6. Redistributions of any form whatsoever must retain the following
32 * "This product includes software developed by the OpenSSL Project
33 * for use in the OpenSSL Toolkit (http://www.openssl.org/)"
35 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
36 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
37 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
38 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
39 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
40 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
41 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
42 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
43 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
44 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
45 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
46 * OF THE POSSIBILITY OF SUCH DAMAGE.
47 * ==================================================================== */
51 #include <openssl/aead.h>
52 #include <openssl/aes.h>
53 #include <openssl/cipher.h>
54 #include <openssl/cpu.h>
55 #include <openssl/err.h>
56 #include <openssl/mem.h>
57 #include <openssl/nid.h>
58 #include <openssl/rand.h>
61 #include "../../internal.h"
62 #include "../aes/internal.h"
63 #include "../modes/internal.h"
64 #include "../delocate.h"
66 #if defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64)
67 #include <openssl/arm_arch.h>
71 OPENSSL_MSVC_PRAGMA(warning(disable: 4702)) // Unreachable code.
89 } ks; // AES key schedule to use
90 int key_set; // Set if key initialised
91 int iv_set; // Set if an iv is set
93 uint8_t *iv; // Temporary IV store
94 int ivlen; // IV length
96 int iv_gen; // It is OK to generate IVs
100 #if !defined(OPENSSL_NO_ASM) && \
101 (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
103 static char vpaes_capable(void) {
104 return (OPENSSL_ia32cap_P[1] & (1 << (41 - 32))) != 0;
107 #if defined(OPENSSL_X86_64)
109 static char bsaes_capable(void) {
110 return vpaes_capable();
114 #elif !defined(OPENSSL_NO_ASM) && \
115 (defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64))
117 #if defined(OPENSSL_ARM) && __ARM_MAX_ARCH__ >= 7
119 static char bsaes_capable(void) {
120 return CRYPTO_is_NEON_capable();
128 // On platforms where BSAES gets defined (just above), then these functions are
130 void bsaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
131 const AES_KEY *key, uint8_t ivec[16], int enc);
132 void bsaes_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
133 const AES_KEY *key, const uint8_t ivec[16]);
135 static char bsaes_capable(void) {
139 // On other platforms, bsaes_capable() will always return false and so the
140 // following will never be called.
141 static void bsaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
142 const AES_KEY *key, uint8_t ivec[16], int enc) {
146 static void bsaes_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out,
147 size_t len, const AES_KEY *key,
148 const uint8_t ivec[16]) {
154 // On platforms where VPAES gets defined (just above), then these functions are
156 int vpaes_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
157 int vpaes_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
159 void vpaes_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
160 void vpaes_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
162 void vpaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
163 const AES_KEY *key, uint8_t *ivec, int enc);
165 static char vpaes_capable(void) {
169 // On other platforms, vpaes_capable() will always return false and so the
170 // following will never be called.
171 static int vpaes_set_encrypt_key(const uint8_t *userKey, int bits,
175 static int vpaes_set_decrypt_key(const uint8_t *userKey, int bits,
179 static void vpaes_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
182 static void vpaes_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
185 static void vpaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
186 const AES_KEY *key, uint8_t *ivec, int enc) {
191 #if !defined(OPENSSL_NO_ASM) && \
192 (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
193 int aesni_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
194 int aesni_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
196 void aesni_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
197 void aesni_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
199 void aesni_ecb_encrypt(const uint8_t *in, uint8_t *out, size_t length,
200 const AES_KEY *key, int enc);
201 void aesni_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
202 const AES_KEY *key, uint8_t *ivec, int enc);
206 // On other platforms, aesni_capable() will always return false and so the
207 // following will never be called.
208 static void aesni_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
211 static int aesni_set_encrypt_key(const uint8_t *userKey, int bits,
215 static void aesni_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out,
216 size_t blocks, const void *key,
217 const uint8_t *ivec) {
223 static int aes_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
224 const uint8_t *iv, int enc) {
226 EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
228 mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
229 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
230 if (hwaes_capable()) {
231 ret = aes_hw_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
232 dat->block = (block128_f)aes_hw_decrypt;
233 dat->stream.cbc = NULL;
234 if (mode == EVP_CIPH_CBC_MODE) {
235 dat->stream.cbc = (cbc128_f)aes_hw_cbc_encrypt;
237 } else if (bsaes_capable() && mode == EVP_CIPH_CBC_MODE) {
238 ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
239 dat->block = (block128_f)AES_decrypt;
240 dat->stream.cbc = (cbc128_f)bsaes_cbc_encrypt;
241 } else if (vpaes_capable()) {
242 ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
243 dat->block = (block128_f)vpaes_decrypt;
245 mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
247 ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
248 dat->block = (block128_f)AES_decrypt;
250 mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
252 } else if (hwaes_capable()) {
253 ret = aes_hw_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
254 dat->block = (block128_f)aes_hw_encrypt;
255 dat->stream.cbc = NULL;
256 if (mode == EVP_CIPH_CBC_MODE) {
257 dat->stream.cbc = (cbc128_f)aes_hw_cbc_encrypt;
258 } else if (mode == EVP_CIPH_CTR_MODE) {
259 dat->stream.ctr = (ctr128_f)aes_hw_ctr32_encrypt_blocks;
261 } else if (bsaes_capable() && mode == EVP_CIPH_CTR_MODE) {
262 ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
263 dat->block = (block128_f)AES_encrypt;
264 dat->stream.ctr = (ctr128_f)bsaes_ctr32_encrypt_blocks;
265 } else if (vpaes_capable()) {
266 ret = vpaes_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
267 dat->block = (block128_f)vpaes_encrypt;
269 mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
271 ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
272 dat->block = (block128_f)AES_encrypt;
274 mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
278 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
285 static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
287 EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
289 if (dat->stream.cbc) {
290 (*dat->stream.cbc)(in, out, len, &dat->ks, ctx->iv, ctx->encrypt);
291 } else if (ctx->encrypt) {
292 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
294 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
300 static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
302 size_t bl = ctx->cipher->block_size;
303 EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
310 for (size_t i = 0; i <= len; i += bl) {
311 (*dat->block)(in + i, out + i, &dat->ks);
317 static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
319 EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
321 if (dat->stream.ctr) {
322 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks, ctx->iv, ctx->buf,
323 &ctx->num, dat->stream.ctr);
325 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks, ctx->iv, ctx->buf, &ctx->num,
331 static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
333 EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
335 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks, ctx->iv, &ctx->num, dat->block);
339 static char aesni_capable(void);
341 ctr128_f aes_ctr_set_key(AES_KEY *aes_key, GCM128_CONTEXT *gcm_ctx,
342 block128_f *out_block, const uint8_t *key,
344 if (aesni_capable()) {
345 aesni_set_encrypt_key(key, key_bytes * 8, aes_key);
346 if (gcm_ctx != NULL) {
347 CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)aesni_encrypt, 1);
350 *out_block = (block128_f) aesni_encrypt;
352 return (ctr128_f)aesni_ctr32_encrypt_blocks;
355 if (hwaes_capable()) {
356 aes_hw_set_encrypt_key(key, key_bytes * 8, aes_key);
357 if (gcm_ctx != NULL) {
358 CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)aes_hw_encrypt, 0);
361 *out_block = (block128_f) aes_hw_encrypt;
363 return (ctr128_f)aes_hw_ctr32_encrypt_blocks;
366 if (bsaes_capable()) {
367 AES_set_encrypt_key(key, key_bytes * 8, aes_key);
368 if (gcm_ctx != NULL) {
369 CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt, 0);
372 *out_block = (block128_f) AES_encrypt;
374 return (ctr128_f)bsaes_ctr32_encrypt_blocks;
377 if (vpaes_capable()) {
378 vpaes_set_encrypt_key(key, key_bytes * 8, aes_key);
380 *out_block = (block128_f) vpaes_encrypt;
382 if (gcm_ctx != NULL) {
383 CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)vpaes_encrypt, 0);
388 AES_set_encrypt_key(key, key_bytes * 8, aes_key);
389 if (gcm_ctx != NULL) {
390 CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt, 0);
393 *out_block = (block128_f) AES_encrypt;
398 static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
399 const uint8_t *iv, int enc) {
400 EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
406 aes_ctr_set_key(&gctx->ks.ks, &gctx->gcm, NULL, key, ctx->key_len);
407 // If we have an iv can set it directly, otherwise use saved IV.
408 if (iv == NULL && gctx->iv_set) {
412 CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
417 // If key set use IV, otherwise copy
419 CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
421 OPENSSL_memcpy(gctx->iv, iv, gctx->ivlen);
429 static void aes_gcm_cleanup(EVP_CIPHER_CTX *c) {
430 EVP_AES_GCM_CTX *gctx = c->cipher_data;
431 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
432 if (gctx->iv != c->iv) {
433 OPENSSL_free(gctx->iv);
// increment counter (64-bit int) by 1
// ctr64_inc treats the final 8 bytes of the 16-byte |counter| as a big-endian
// 64-bit integer and increments it, propagating the carry but never touching
// the first 8 bytes.
static void ctr64_inc(uint8_t *counter) {
  int n = 8;
  uint8_t c;

  counter += 8;
  do {
    --n;
    c = counter[n];
    ++c;
    counter[n] = c;
    if (c) {
      return;  // no carry — done.
    }
  } while (n);
}
453 static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) {
454 EVP_AES_GCM_CTX *gctx = c->cipher_data;
459 gctx->ivlen = c->cipher->iv_len;
465 case EVP_CTRL_GCM_SET_IVLEN:
470 // Allocate memory for IV if needed
471 if (arg > EVP_MAX_IV_LENGTH && arg > gctx->ivlen) {
472 if (gctx->iv != c->iv) {
473 OPENSSL_free(gctx->iv);
475 gctx->iv = OPENSSL_malloc(arg);
483 case EVP_CTRL_GCM_SET_TAG:
484 if (arg <= 0 || arg > 16 || c->encrypt) {
487 OPENSSL_memcpy(c->buf, ptr, arg);
491 case EVP_CTRL_GCM_GET_TAG:
492 if (arg <= 0 || arg > 16 || !c->encrypt || gctx->taglen < 0) {
495 OPENSSL_memcpy(ptr, c->buf, arg);
498 case EVP_CTRL_GCM_SET_IV_FIXED:
499 // Special case: -1 length restores whole IV
501 OPENSSL_memcpy(gctx->iv, ptr, gctx->ivlen);
505 // Fixed field must be at least 4 bytes and invocation field
507 if (arg < 4 || (gctx->ivlen - arg) < 8) {
511 OPENSSL_memcpy(gctx->iv, ptr, arg);
513 if (c->encrypt && !RAND_bytes(gctx->iv + arg, gctx->ivlen - arg)) {
519 case EVP_CTRL_GCM_IV_GEN:
520 if (gctx->iv_gen == 0 || gctx->key_set == 0) {
523 CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, gctx->iv, gctx->ivlen);
524 if (arg <= 0 || arg > gctx->ivlen) {
527 OPENSSL_memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
528 // Invocation field will be at least 8 bytes in size and
529 // so no need to check wrap around or increment more than
531 ctr64_inc(gctx->iv + gctx->ivlen - 8);
535 case EVP_CTRL_GCM_SET_IV_INV:
536 if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt) {
539 OPENSSL_memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
540 CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, gctx->iv, gctx->ivlen);
544 case EVP_CTRL_COPY: {
545 EVP_CIPHER_CTX *out = ptr;
546 EVP_AES_GCM_CTX *gctx_out = out->cipher_data;
547 if (gctx->iv == c->iv) {
548 gctx_out->iv = out->iv;
550 gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
554 OPENSSL_memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
564 static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
566 EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
568 // If not set up, return error
569 if (!gctx->key_set) {
578 if (!CRYPTO_gcm128_aad(&gctx->gcm, in, len)) {
581 } else if (ctx->encrypt) {
583 if (!CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, &gctx->ks.ks, in, out, len,
588 if (!CRYPTO_gcm128_encrypt(&gctx->gcm, &gctx->ks.ks, in, out, len)) {
594 if (!CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, &gctx->ks.ks, in, out, len,
599 if (!CRYPTO_gcm128_decrypt(&gctx->gcm, &gctx->ks.ks, in, out, len)) {
607 if (gctx->taglen < 0 ||
608 !CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen)) {
614 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
616 // Don't reuse the IV
622 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_cbc_generic) {
623 memset(out, 0, sizeof(EVP_CIPHER));
625 out->nid = NID_aes_128_cbc;
626 out->block_size = 16;
629 out->ctx_size = sizeof(EVP_AES_KEY);
630 out->flags = EVP_CIPH_CBC_MODE;
631 out->init = aes_init_key;
632 out->cipher = aes_cbc_cipher;
635 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_ctr_generic) {
636 memset(out, 0, sizeof(EVP_CIPHER));
638 out->nid = NID_aes_128_ctr;
642 out->ctx_size = sizeof(EVP_AES_KEY);
643 out->flags = EVP_CIPH_CTR_MODE;
644 out->init = aes_init_key;
645 out->cipher = aes_ctr_cipher;
648 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_ecb_generic) {
649 memset(out, 0, sizeof(EVP_CIPHER));
651 out->nid = NID_aes_128_ecb;
652 out->block_size = 16;
654 out->ctx_size = sizeof(EVP_AES_KEY);
655 out->flags = EVP_CIPH_ECB_MODE;
656 out->init = aes_init_key;
657 out->cipher = aes_ecb_cipher;
660 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_ofb_generic) {
661 memset(out, 0, sizeof(EVP_CIPHER));
663 out->nid = NID_aes_128_ofb128;
667 out->ctx_size = sizeof(EVP_AES_KEY);
668 out->flags = EVP_CIPH_OFB_MODE;
669 out->init = aes_init_key;
670 out->cipher = aes_ofb_cipher;
673 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_gcm_generic) {
674 memset(out, 0, sizeof(EVP_CIPHER));
676 out->nid = NID_aes_128_gcm;
680 out->ctx_size = sizeof(EVP_AES_GCM_CTX);
681 out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV |
682 EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
683 EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
684 out->init = aes_gcm_init_key;
685 out->cipher = aes_gcm_cipher;
686 out->cleanup = aes_gcm_cleanup;
687 out->ctrl = aes_gcm_ctrl;
690 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_cbc_generic) {
691 memset(out, 0, sizeof(EVP_CIPHER));
693 out->nid = NID_aes_192_cbc;
694 out->block_size = 16;
697 out->ctx_size = sizeof(EVP_AES_KEY);
698 out->flags = EVP_CIPH_CBC_MODE;
699 out->init = aes_init_key;
700 out->cipher = aes_cbc_cipher;
703 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_ctr_generic) {
704 memset(out, 0, sizeof(EVP_CIPHER));
706 out->nid = NID_aes_192_ctr;
710 out->ctx_size = sizeof(EVP_AES_KEY);
711 out->flags = EVP_CIPH_CTR_MODE;
712 out->init = aes_init_key;
713 out->cipher = aes_ctr_cipher;
716 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_ecb_generic) {
717 memset(out, 0, sizeof(EVP_CIPHER));
719 out->nid = NID_aes_192_ecb;
720 out->block_size = 16;
722 out->ctx_size = sizeof(EVP_AES_KEY);
723 out->flags = EVP_CIPH_ECB_MODE;
724 out->init = aes_init_key;
725 out->cipher = aes_ecb_cipher;
728 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_gcm_generic) {
729 memset(out, 0, sizeof(EVP_CIPHER));
731 out->nid = NID_aes_192_gcm;
735 out->ctx_size = sizeof(EVP_AES_GCM_CTX);
736 out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV |
737 EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
738 EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
739 out->init = aes_gcm_init_key;
740 out->cipher = aes_gcm_cipher;
741 out->cleanup = aes_gcm_cleanup;
742 out->ctrl = aes_gcm_ctrl;
745 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_cbc_generic) {
746 memset(out, 0, sizeof(EVP_CIPHER));
748 out->nid = NID_aes_256_cbc;
749 out->block_size = 16;
752 out->ctx_size = sizeof(EVP_AES_KEY);
753 out->flags = EVP_CIPH_CBC_MODE;
754 out->init = aes_init_key;
755 out->cipher = aes_cbc_cipher;
758 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_ctr_generic) {
759 memset(out, 0, sizeof(EVP_CIPHER));
761 out->nid = NID_aes_256_ctr;
765 out->ctx_size = sizeof(EVP_AES_KEY);
766 out->flags = EVP_CIPH_CTR_MODE;
767 out->init = aes_init_key;
768 out->cipher = aes_ctr_cipher;
771 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_ecb_generic) {
772 memset(out, 0, sizeof(EVP_CIPHER));
774 out->nid = NID_aes_256_ecb;
775 out->block_size = 16;
777 out->ctx_size = sizeof(EVP_AES_KEY);
778 out->flags = EVP_CIPH_ECB_MODE;
779 out->init = aes_init_key;
780 out->cipher = aes_ecb_cipher;
783 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_ofb_generic) {
784 memset(out, 0, sizeof(EVP_CIPHER));
786 out->nid = NID_aes_256_ofb128;
790 out->ctx_size = sizeof(EVP_AES_KEY);
791 out->flags = EVP_CIPH_OFB_MODE;
792 out->init = aes_init_key;
793 out->cipher = aes_ofb_cipher;
796 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_gcm_generic) {
797 memset(out, 0, sizeof(EVP_CIPHER));
799 out->nid = NID_aes_256_gcm;
803 out->ctx_size = sizeof(EVP_AES_GCM_CTX);
804 out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV |
805 EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
806 EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
807 out->init = aes_gcm_init_key;
808 out->cipher = aes_gcm_cipher;
809 out->cleanup = aes_gcm_cleanup;
810 out->ctrl = aes_gcm_ctrl;
813 #if !defined(OPENSSL_NO_ASM) && \
814 (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
818 static char aesni_capable(void) {
819 return (OPENSSL_ia32cap_P[1] & (1 << (57 - 32))) != 0;
822 static int aesni_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
823 const uint8_t *iv, int enc) {
825 EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
827 mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
828 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
829 ret = aesni_set_decrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
830 dat->block = (block128_f)aesni_decrypt;
832 mode == EVP_CIPH_CBC_MODE ? (cbc128_f)aesni_cbc_encrypt : NULL;
834 ret = aesni_set_encrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
835 dat->block = (block128_f)aesni_encrypt;
836 if (mode == EVP_CIPH_CBC_MODE) {
837 dat->stream.cbc = (cbc128_f)aesni_cbc_encrypt;
838 } else if (mode == EVP_CIPH_CTR_MODE) {
839 dat->stream.ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
841 dat->stream.cbc = NULL;
846 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
853 static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
854 const uint8_t *in, size_t len) {
855 aesni_cbc_encrypt(in, out, len, ctx->cipher_data, ctx->iv, ctx->encrypt);
860 static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
861 const uint8_t *in, size_t len) {
862 size_t bl = ctx->cipher->block_size;
868 aesni_ecb_encrypt(in, out, len, ctx->cipher_data, ctx->encrypt);
873 static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
874 const uint8_t *iv, int enc) {
875 EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
880 aesni_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
881 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f)aesni_encrypt, 1);
882 gctx->ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
883 // If we have an iv can set it directly, otherwise use
885 if (iv == NULL && gctx->iv_set) {
889 CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
894 // If key set use IV, otherwise copy
896 CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
898 OPENSSL_memcpy(gctx->iv, iv, gctx->ivlen);
906 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_128_cbc) {
907 memset(out, 0, sizeof(EVP_CIPHER));
909 out->nid = NID_aes_128_cbc;
910 out->block_size = 16;
913 out->ctx_size = sizeof(EVP_AES_KEY);
914 out->flags = EVP_CIPH_CBC_MODE;
915 out->init = aesni_init_key;
916 out->cipher = aesni_cbc_cipher;
919 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_128_ctr) {
920 memset(out, 0, sizeof(EVP_CIPHER));
922 out->nid = NID_aes_128_ctr;
926 out->ctx_size = sizeof(EVP_AES_KEY);
927 out->flags = EVP_CIPH_CTR_MODE;
928 out->init = aesni_init_key;
929 out->cipher = aes_ctr_cipher;
932 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_128_ecb) {
933 memset(out, 0, sizeof(EVP_CIPHER));
935 out->nid = NID_aes_128_ecb;
936 out->block_size = 16;
938 out->ctx_size = sizeof(EVP_AES_KEY);
939 out->flags = EVP_CIPH_ECB_MODE;
940 out->init = aesni_init_key;
941 out->cipher = aesni_ecb_cipher;
944 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_128_ofb) {
945 memset(out, 0, sizeof(EVP_CIPHER));
947 out->nid = NID_aes_128_ofb128;
951 out->ctx_size = sizeof(EVP_AES_KEY);
952 out->flags = EVP_CIPH_OFB_MODE;
953 out->init = aesni_init_key;
954 out->cipher = aes_ofb_cipher;
957 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_128_gcm) {
958 memset(out, 0, sizeof(EVP_CIPHER));
960 out->nid = NID_aes_128_gcm;
964 out->ctx_size = sizeof(EVP_AES_GCM_CTX);
965 out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV |
966 EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
967 EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
968 out->init = aesni_gcm_init_key;
969 out->cipher = aes_gcm_cipher;
970 out->cleanup = aes_gcm_cleanup;
971 out->ctrl = aes_gcm_ctrl;
974 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_192_cbc) {
975 memset(out, 0, sizeof(EVP_CIPHER));
977 out->nid = NID_aes_192_cbc;
978 out->block_size = 16;
981 out->ctx_size = sizeof(EVP_AES_KEY);
982 out->flags = EVP_CIPH_CBC_MODE;
983 out->init = aesni_init_key;
984 out->cipher = aesni_cbc_cipher;
987 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_192_ctr) {
988 memset(out, 0, sizeof(EVP_CIPHER));
990 out->nid = NID_aes_192_ctr;
994 out->ctx_size = sizeof(EVP_AES_KEY);
995 out->flags = EVP_CIPH_CTR_MODE;
996 out->init = aesni_init_key;
997 out->cipher = aes_ctr_cipher;
1000 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_192_ecb) {
1001 memset(out, 0, sizeof(EVP_CIPHER));
1003 out->nid = NID_aes_192_ecb;
1004 out->block_size = 16;
1006 out->ctx_size = sizeof(EVP_AES_KEY);
1007 out->flags = EVP_CIPH_ECB_MODE;
1008 out->init = aesni_init_key;
1009 out->cipher = aesni_ecb_cipher;
1012 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_192_gcm) {
1013 memset(out, 0, sizeof(EVP_CIPHER));
1015 out->nid = NID_aes_192_gcm;
1016 out->block_size = 1;
1019 out->ctx_size = sizeof(EVP_AES_GCM_CTX);
1020 out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV |
1021 EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
1022 EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
1023 out->init = aesni_gcm_init_key;
1024 out->cipher = aes_gcm_cipher;
1025 out->cleanup = aes_gcm_cleanup;
1026 out->ctrl = aes_gcm_ctrl;
1029 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_256_cbc) {
1030 memset(out, 0, sizeof(EVP_CIPHER));
1032 out->nid = NID_aes_256_cbc;
1033 out->block_size = 16;
1036 out->ctx_size = sizeof(EVP_AES_KEY);
1037 out->flags = EVP_CIPH_CBC_MODE;
1038 out->init = aesni_init_key;
1039 out->cipher = aesni_cbc_cipher;
1042 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_256_ctr) {
1043 memset(out, 0, sizeof(EVP_CIPHER));
1045 out->nid = NID_aes_256_ctr;
1046 out->block_size = 1;
1049 out->ctx_size = sizeof(EVP_AES_KEY);
1050 out->flags = EVP_CIPH_CTR_MODE;
1051 out->init = aesni_init_key;
1052 out->cipher = aes_ctr_cipher;
1055 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_256_ecb) {
1056 memset(out, 0, sizeof(EVP_CIPHER));
1058 out->nid = NID_aes_256_ecb;
1059 out->block_size = 16;
1061 out->ctx_size = sizeof(EVP_AES_KEY);
1062 out->flags = EVP_CIPH_ECB_MODE;
1063 out->init = aesni_init_key;
1064 out->cipher = aesni_ecb_cipher;
1067 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_256_ofb) {
1068 memset(out, 0, sizeof(EVP_CIPHER));
1070 out->nid = NID_aes_256_ofb128;
1071 out->block_size = 1;
1074 out->ctx_size = sizeof(EVP_AES_KEY);
1075 out->flags = EVP_CIPH_OFB_MODE;
1076 out->init = aesni_init_key;
1077 out->cipher = aes_ofb_cipher;
1080 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_256_gcm) {
1081 memset(out, 0, sizeof(EVP_CIPHER));
1083 out->nid = NID_aes_256_gcm;
1084 out->block_size = 1;
1087 out->ctx_size = sizeof(EVP_AES_GCM_CTX);
1088 out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV |
1089 EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
1090 EVP_CIPH_CTRL_INIT | EVP_CIPH_CUSTOM_COPY |
1091 EVP_CIPH_FLAG_AEAD_CIPHER;
1092 out->init = aesni_gcm_init_key;
1093 out->cipher = aes_gcm_cipher;
1094 out->cleanup = aes_gcm_cleanup;
1095 out->ctrl = aes_gcm_ctrl;
1098 #define EVP_CIPHER_FUNCTION(keybits, mode) \
1099 const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
1100 if (aesni_capable()) { \
1101 return aesni_##keybits##_##mode(); \
1103 return aes_##keybits##_##mode##_generic(); \
1107 #else // ^^^ OPENSSL_X86_64 || OPENSSL_X86
1109 static char aesni_capable(void) {
1113 #define EVP_CIPHER_FUNCTION(keybits, mode) \
1114 const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
1115 return aes_##keybits##_##mode##_generic(); \
1120 EVP_CIPHER_FUNCTION(128, cbc)
1121 EVP_CIPHER_FUNCTION(128, ctr)
1122 EVP_CIPHER_FUNCTION(128, ecb)
1123 EVP_CIPHER_FUNCTION(128, ofb)
1124 EVP_CIPHER_FUNCTION(128, gcm)
1126 EVP_CIPHER_FUNCTION(192, cbc)
1127 EVP_CIPHER_FUNCTION(192, ctr)
1128 EVP_CIPHER_FUNCTION(192, ecb)
1129 EVP_CIPHER_FUNCTION(192, gcm)
1131 EVP_CIPHER_FUNCTION(256, cbc)
1132 EVP_CIPHER_FUNCTION(256, ctr)
1133 EVP_CIPHER_FUNCTION(256, ecb)
1134 EVP_CIPHER_FUNCTION(256, ofb)
1135 EVP_CIPHER_FUNCTION(256, gcm)
1138 #define EVP_AEAD_AES_GCM_TAG_LEN 16
1140 struct aead_aes_gcm_ctx {
1149 struct aead_aes_gcm_tls12_ctx {
1150 struct aead_aes_gcm_ctx gcm_ctx;
1151 uint64_t min_next_nonce;
1154 static int aead_aes_gcm_init_impl(struct aead_aes_gcm_ctx *gcm_ctx,
1155 size_t *out_tag_len, const uint8_t *key,
1156 size_t key_len, size_t tag_len) {
1157 const size_t key_bits = key_len * 8;
1159 if (key_bits != 128 && key_bits != 256) {
1160 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
1161 return 0; // EVP_AEAD_CTX_init should catch this.
1164 if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
1165 tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
1168 if (tag_len > EVP_AEAD_AES_GCM_TAG_LEN) {
1169 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TAG_TOO_LARGE);
1174 aes_ctr_set_key(&gcm_ctx->ks.ks, &gcm_ctx->gcm, NULL, key, key_len);
1175 *out_tag_len = tag_len;
1179 static int aead_aes_gcm_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
1180 size_t key_len, size_t requested_tag_len) {
1181 struct aead_aes_gcm_ctx *gcm_ctx;
1182 gcm_ctx = OPENSSL_malloc(sizeof(struct aead_aes_gcm_ctx));
1183 if (gcm_ctx == NULL) {
1187 size_t actual_tag_len;
1188 if (!aead_aes_gcm_init_impl(gcm_ctx, &actual_tag_len, key, key_len,
1189 requested_tag_len)) {
1190 OPENSSL_free(gcm_ctx);
1194 ctx->aead_state = gcm_ctx;
1195 ctx->tag_len = actual_tag_len;
1199 static void aead_aes_gcm_cleanup(EVP_AEAD_CTX *ctx) {
1200 OPENSSL_free(ctx->aead_state);
1203 static int aead_aes_gcm_seal_scatter(const EVP_AEAD_CTX *ctx, uint8_t *out,
1204 uint8_t *out_tag, size_t *out_tag_len,
1205 size_t max_out_tag_len,
1206 const uint8_t *nonce, size_t nonce_len,
1207 const uint8_t *in, size_t in_len,
1208 const uint8_t *extra_in,
1209 size_t extra_in_len,
1210 const uint8_t *ad, size_t ad_len) {
1211 const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
1214 if (extra_in_len + ctx->tag_len < ctx->tag_len) {
1215 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
1218 if (max_out_tag_len < extra_in_len + ctx->tag_len) {
1219 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
1222 if (nonce_len == 0) {
1223 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
1227 const AES_KEY *key = &gcm_ctx->ks.ks;
1229 OPENSSL_memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
1230 CRYPTO_gcm128_setiv(&gcm, key, nonce, nonce_len);
1232 if (ad_len > 0 && !CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
1237 if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, key, in, out, in_len,
1242 if (!CRYPTO_gcm128_encrypt(&gcm, key, in, out, in_len)) {
1249 if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, key, extra_in, out_tag,
1250 extra_in_len, gcm_ctx->ctr)) {
1254 if (!CRYPTO_gcm128_encrypt(&gcm, key, extra_in, out_tag, extra_in_len)) {
1260 CRYPTO_gcm128_tag(&gcm, out_tag + extra_in_len, ctx->tag_len);
1261 *out_tag_len = ctx->tag_len + extra_in_len;
1266 static int aead_aes_gcm_open_gather(const EVP_AEAD_CTX *ctx, uint8_t *out,
1267 const uint8_t *nonce, size_t nonce_len,
1268 const uint8_t *in, size_t in_len,
1269 const uint8_t *in_tag, size_t in_tag_len,
1270 const uint8_t *ad, size_t ad_len) {
1271 const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
1272 uint8_t tag[EVP_AEAD_AES_GCM_TAG_LEN];
1275 if (nonce_len == 0) {
1276 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
1280 if (in_tag_len != ctx->tag_len) {
1281 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
1285 const AES_KEY *key = &gcm_ctx->ks.ks;
1287 OPENSSL_memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
1288 CRYPTO_gcm128_setiv(&gcm, key, nonce, nonce_len);
1290 if (!CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
1295 if (!CRYPTO_gcm128_decrypt_ctr32(&gcm, key, in, out, in_len,
1300 if (!CRYPTO_gcm128_decrypt(&gcm, key, in, out, in_len)) {
1305 CRYPTO_gcm128_tag(&gcm, tag, ctx->tag_len);
1306 if (CRYPTO_memcmp(tag, in_tag, ctx->tag_len) != 0) {
1307 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
1314 DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm) {
1315 memset(out, 0, sizeof(EVP_AEAD));
1318 out->nonce_len = 12;
1319 out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
1320 out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
1321 out->seal_scatter_supports_extra_in = 1;
1323 out->init = aead_aes_gcm_init;
1324 out->cleanup = aead_aes_gcm_cleanup;
1325 out->seal_scatter = aead_aes_gcm_seal_scatter;
1326 out->open_gather = aead_aes_gcm_open_gather;
1329 DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm) {
1330 memset(out, 0, sizeof(EVP_AEAD));
1333 out->nonce_len = 12;
1334 out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
1335 out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
1336 out->seal_scatter_supports_extra_in = 1;
1338 out->init = aead_aes_gcm_init;
1339 out->cleanup = aead_aes_gcm_cleanup;
1340 out->seal_scatter = aead_aes_gcm_seal_scatter;
1341 out->open_gather = aead_aes_gcm_open_gather;
1344 static int aead_aes_gcm_tls12_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
1345 size_t key_len, size_t requested_tag_len) {
1346 struct aead_aes_gcm_tls12_ctx *gcm_ctx;
1347 gcm_ctx = OPENSSL_malloc(sizeof(struct aead_aes_gcm_tls12_ctx));
1348 if (gcm_ctx == NULL) {
1352 gcm_ctx->min_next_nonce = 0;
1354 size_t actual_tag_len;
1355 if (!aead_aes_gcm_init_impl(&gcm_ctx->gcm_ctx, &actual_tag_len, key, key_len,
1356 requested_tag_len)) {
1357 OPENSSL_free(gcm_ctx);
1361 ctx->aead_state = gcm_ctx;
1362 ctx->tag_len = actual_tag_len;
1366 static void aead_aes_gcm_tls12_cleanup(EVP_AEAD_CTX *ctx) {
1367 OPENSSL_free(ctx->aead_state);
1370 static int aead_aes_gcm_tls12_seal_scatter(
1371 const EVP_AEAD_CTX *ctx, uint8_t *out, uint8_t *out_tag,
1372 size_t *out_tag_len, size_t max_out_tag_len, const uint8_t *nonce,
1373 size_t nonce_len, const uint8_t *in, size_t in_len, const uint8_t *extra_in,
1374 size_t extra_in_len, const uint8_t *ad, size_t ad_len) {
1375 struct aead_aes_gcm_tls12_ctx *gcm_ctx = ctx->aead_state;
1376 if (nonce_len != 12) {
1377 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
1381 // The given nonces must be strictly monotonically increasing.
1382 uint64_t given_counter;
1383 OPENSSL_memcpy(&given_counter, nonce + nonce_len - sizeof(given_counter),
1384 sizeof(given_counter));
1385 given_counter = CRYPTO_bswap8(given_counter);
1386 if (given_counter == UINT64_MAX ||
1387 given_counter < gcm_ctx->min_next_nonce) {
1388 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE);
1392 gcm_ctx->min_next_nonce = given_counter + 1;
1394 return aead_aes_gcm_seal_scatter(ctx, out, out_tag, out_tag_len,
1395 max_out_tag_len, nonce, nonce_len, in,
1396 in_len, extra_in, extra_in_len, ad, ad_len);
1399 DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm_tls12) {
1400 memset(out, 0, sizeof(EVP_AEAD));
1403 out->nonce_len = 12;
1404 out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
1405 out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
1406 out->seal_scatter_supports_extra_in = 1;
1408 out->init = aead_aes_gcm_tls12_init;
1409 out->cleanup = aead_aes_gcm_tls12_cleanup;
1410 out->seal_scatter = aead_aes_gcm_tls12_seal_scatter;
1411 out->open_gather = aead_aes_gcm_open_gather;
1414 DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm_tls12) {
1415 memset(out, 0, sizeof(EVP_AEAD));
1418 out->nonce_len = 12;
1419 out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
1420 out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
1421 out->seal_scatter_supports_extra_in = 1;
1423 out->init = aead_aes_gcm_tls12_init;
1424 out->cleanup = aead_aes_gcm_tls12_cleanup;
1425 out->seal_scatter = aead_aes_gcm_tls12_seal_scatter;
1426 out->open_gather = aead_aes_gcm_open_gather;
1429 int EVP_has_aes_hardware(void) {
1430 #if defined(OPENSSL_X86) || defined(OPENSSL_X86_64)
1431 return aesni_capable() && crypto_gcm_clmul_enabled();
1432 #elif defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64)
1433 return hwaes_capable() && CRYPTO_is_ARMv8_PMULL_capable();