/* ====================================================================
 * Copyright (c) 2001-2011 The OpenSSL Project.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    openssl-core@openssl.org.
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ==================================================================== */

#include <assert.h>
#include <string.h>

#include <openssl/aead.h>
#include <openssl/aes.h>
#include <openssl/cipher.h>
#include <openssl/cpu.h>
#include <openssl/err.h>
#include <openssl/mem.h>
#include <openssl/nid.h>
#include <openssl/rand.h>

#include "internal.h"
#include "../../internal.h"
#include "../aes/internal.h"
#include "../modes/internal.h"
#include "../delocate.h"


OPENSSL_MSVC_PRAGMA(warning(push))
OPENSSL_MSVC_PRAGMA(warning(disable: 4702))  // Unreachable code.

typedef struct {
  union {
    double align;
    AES_KEY ks;
  } ks;
  block128_f block;
  union {
    cbc128_f cbc;
    ctr128_f ctr;
  } stream;
} EVP_AES_KEY;

typedef struct {
  GCM128_CONTEXT gcm;
  union {
    double align;
    AES_KEY ks;
  } ks;         // AES key schedule to use
  int key_set;  // Set if key initialised
  int iv_set;   // Set if an iv is set
  uint8_t *iv;  // Temporary IV store
  int ivlen;    // IV length
  int taglen;
  int iv_gen;   // It is OK to generate IVs
  ctr128_f ctr;
} EVP_AES_GCM_CTX;

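// aes_init_key selects the fastest AES implementation available at runtime,
// preferring hardware AES (hwaes), then the bit-sliced (bsaes) and
// vector-permutation (vpaes) implementations, and finally the portable
// aes_nohw fallback.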
static int aes_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                        const uint8_t *iv, int enc) {
  int ret, mode;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
  if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
    if (hwaes_capable()) {
      ret = aes_hw_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = aes_hw_decrypt;
      dat->stream.cbc = NULL;
      if (mode == EVP_CIPH_CBC_MODE) {
        dat->stream.cbc = aes_hw_cbc_encrypt;
      }
    } else if (bsaes_capable() && mode == EVP_CIPH_CBC_MODE) {
      ret = aes_nohw_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      // If |dat->stream.cbc| is provided, |dat->block| is never used.
      dat->block = NULL;
      dat->stream.cbc = bsaes_cbc_encrypt;
    } else if (vpaes_capable()) {
      ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = vpaes_decrypt;
      dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? vpaes_cbc_encrypt : NULL;
    } else {
      ret = aes_nohw_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = aes_nohw_decrypt;
      dat->stream.cbc = NULL;
#if defined(AES_NOHW_CBC)
      if (mode == EVP_CIPH_CBC_MODE) {
        dat->stream.cbc = aes_nohw_cbc_encrypt;
      }
#endif
    }
  } else if (hwaes_capable()) {
    ret = aes_hw_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = aes_hw_encrypt;
    dat->stream.cbc = NULL;
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = aes_hw_cbc_encrypt;
    } else if (mode == EVP_CIPH_CTR_MODE) {
      dat->stream.ctr = aes_hw_ctr32_encrypt_blocks;
    }
  } else if (bsaes_capable() && mode == EVP_CIPH_CTR_MODE) {
    ret = aes_nohw_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    // If |dat->stream.ctr| is provided, |dat->block| is never used.
    dat->block = NULL;
    dat->stream.ctr = bsaes_ctr32_encrypt_blocks;
  } else if (vpaes_capable()) {
    ret = vpaes_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = vpaes_encrypt;
    dat->stream.cbc = NULL;
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = vpaes_cbc_encrypt;
    }
#if defined(VPAES_CTR32)
    if (mode == EVP_CIPH_CTR_MODE) {
      dat->stream.ctr = vpaes_ctr32_encrypt_blocks;
    }
#endif
  } else {
    ret = aes_nohw_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = aes_nohw_encrypt;
    dat->stream.cbc = NULL;
#if defined(AES_NOHW_CBC)
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = aes_nohw_cbc_encrypt;
    }
#endif
  }

  if (ret < 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  return 1;
}

static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (dat->stream.cbc) {
    (*dat->stream.cbc)(in, out, len, &dat->ks.ks, ctx->iv, ctx->encrypt);
  } else if (ctx->encrypt) {
    CRYPTO_cbc128_encrypt(in, out, len, &dat->ks.ks, ctx->iv, dat->block);
  } else {
    CRYPTO_cbc128_decrypt(in, out, len, &dat->ks.ks, ctx->iv, dat->block);
  }

  return 1;
}

static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  size_t bl = ctx->cipher->block_size;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (len < bl) {
    return 1;
  }

  len -= bl;
  for (size_t i = 0; i <= len; i += bl) {
    (*dat->block)(in + i, out + i, &dat->ks.ks);
  }

  return 1;
}

static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (dat->stream.ctr) {
    CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks.ks, ctx->iv, ctx->buf,
                                &ctx->num, dat->stream.ctr);
  } else {
    CRYPTO_ctr128_encrypt(in, out, len, &dat->ks.ks, ctx->iv, ctx->buf,
                          &ctx->num, dat->block);
  }
  return 1;
}

static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  CRYPTO_ofb128_encrypt(in, out, len, &dat->ks.ks, ctx->iv, &ctx->num,
                        dat->block);
  return 1;
}

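// aes_ctr_set_key initialises |*aes_key| with an encryption key schedule for
// |key| and, when the corresponding out-pointers are non-NULL, also
// initialises |*gcm_key| and |*out_block|. It returns a ctr32 function when
// the selected implementation provides one, and NULL otherwise, in which case
// callers must fall back to the block function.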
ctr128_f aes_ctr_set_key(AES_KEY *aes_key, GCM128_KEY *gcm_key,
                         block128_f *out_block, const uint8_t *key,
                         size_t key_bytes) {
  if (hwaes_capable()) {
    aes_hw_set_encrypt_key(key, key_bytes * 8, aes_key);
    if (gcm_key != NULL) {
      CRYPTO_gcm128_init_key(gcm_key, aes_key, aes_hw_encrypt, 1);
    }
    if (out_block) {
      *out_block = aes_hw_encrypt;
    }
    return aes_hw_ctr32_encrypt_blocks;
  }

  if (bsaes_capable()) {
    aes_nohw_set_encrypt_key(key, key_bytes * 8, aes_key);
    if (gcm_key != NULL) {
      CRYPTO_gcm128_init_key(gcm_key, aes_key, aes_nohw_encrypt, 0);
    }
    if (out_block) {
      *out_block = aes_nohw_encrypt;
    }
    return bsaes_ctr32_encrypt_blocks;
  }

  if (vpaes_capable()) {
    vpaes_set_encrypt_key(key, key_bytes * 8, aes_key);
    if (out_block) {
      *out_block = vpaes_encrypt;
    }
    if (gcm_key != NULL) {
      CRYPTO_gcm128_init_key(gcm_key, aes_key, vpaes_encrypt, 0);
    }
#if defined(VPAES_CTR32)
    return vpaes_ctr32_encrypt_blocks;
#else
    return NULL;
#endif
  }

  aes_nohw_set_encrypt_key(key, key_bytes * 8, aes_key);
  if (gcm_key != NULL) {
    CRYPTO_gcm128_init_key(gcm_key, aes_key, aes_nohw_encrypt, 0);
  }
  if (out_block) {
    *out_block = aes_nohw_encrypt;
  }
  return NULL;
}

#if defined(OPENSSL_32_BIT)
#define EVP_AES_GCM_CTX_PADDING (4 + 8)
#else
#define EVP_AES_GCM_CTX_PADDING 8
#endif
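// The padding above gives aes_gcm_from_cipher_ctx room to realign
// |cipher_data|: up to 4 bytes to reach 8-byte alignment on 32-bit systems,
// plus up to 8 bytes to reach 16-byte alignment.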

static EVP_AES_GCM_CTX *aes_gcm_from_cipher_ctx(EVP_CIPHER_CTX *ctx) {
#if defined(__GNUC__) || defined(__clang__)
  OPENSSL_STATIC_ASSERT(
      alignof(EVP_AES_GCM_CTX) <= 16,
      "EVP_AES_GCM_CTX needs more alignment than this function provides");
#endif

  // |malloc| guarantees up to 4-byte alignment on 32-bit and 8-byte alignment
  // on 64-bit systems, so we need to adjust to reach 16-byte alignment.
  assert(ctx->cipher->ctx_size ==
         sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING);

  char *ptr = ctx->cipher_data;
#if defined(OPENSSL_32_BIT)
  assert((uintptr_t)ptr % 4 == 0);
  ptr += (uintptr_t)ptr & 4;
#endif
  assert((uintptr_t)ptr % 8 == 0);
  ptr += (uintptr_t)ptr & 8;
  return (EVP_AES_GCM_CTX *)ptr;
}
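
// A worked example of the adjustment above: if |cipher_data| is 8-byte
// aligned with (uintptr_t)ptr % 16 == 8, then ((uintptr_t)ptr & 8) == 8 and
// |ptr| advances 8 bytes to the next 16-byte boundary; if |ptr| is already
// 16-byte aligned, the masked value is zero and the pointer is unchanged. The
// EVP_AES_GCM_CTX_PADDING slack keeps the adjusted pointer in bounds.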

static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                            const uint8_t *iv, int enc) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(ctx);
  if (!iv && !key) {
    return 1;
  }
  if (key) {
    OPENSSL_memset(&gctx->gcm, 0, sizeof(gctx->gcm));
    gctx->ctr = aes_ctr_set_key(&gctx->ks.ks, &gctx->gcm.gcm_key, NULL, key,
                                ctx->key_len);
    // If an IV was supplied, set it directly; otherwise fall back to any
    // previously saved IV.
    if (iv == NULL && gctx->iv_set) {
      iv = gctx->iv;
    }
    if (iv) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
      gctx->iv_set = 1;
    }
    gctx->key_set = 1;
  } else {
    // If the key is already set, apply the IV now; otherwise save it for when
    // the key is set.
    if (gctx->key_set) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
    } else {
      OPENSSL_memcpy(gctx->iv, iv, gctx->ivlen);
    }
    gctx->iv_set = 1;
    gctx->iv_gen = 0;
  }
  return 1;
}

static void aes_gcm_cleanup(EVP_CIPHER_CTX *c) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(c);
  OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
  if (gctx->iv != c->iv) {
    OPENSSL_free(gctx->iv);
  }
}

// ctr64_inc increments the 64-bit big-endian integer in |counter| by one.
static void ctr64_inc(uint8_t *counter) {
  int n = 8;
  uint8_t c;

  do {
    --n;
    c = counter[n];
    ++c;
    counter[n] = c;
    if (c) {
      return;
    }
  } while (n);
}
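
// For example, a counter ending in the bytes 0x01 0xff increments to
// 0x02 0x00: the final byte wraps to zero, the carry propagates one byte to
// the left, and the loop stops at the first non-zero result.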

static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(c);
  switch (type) {
    case EVP_CTRL_INIT:
      gctx->key_set = 0;
      gctx->iv_set = 0;
      gctx->ivlen = c->cipher->iv_len;
      gctx->iv = c->iv;
      gctx->taglen = -1;
      gctx->iv_gen = 0;
      return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
      if (arg <= 0) {
        return 0;
      }

      // Allocate memory for the IV if needed.
      if (arg > EVP_MAX_IV_LENGTH && arg > gctx->ivlen) {
        if (gctx->iv != c->iv) {
          OPENSSL_free(gctx->iv);
        }
        gctx->iv = OPENSSL_malloc(arg);
        if (!gctx->iv) {
          return 0;
        }
      }
      gctx->ivlen = arg;
      return 1;

    case EVP_CTRL_AEAD_SET_TAG:
      if (arg <= 0 || arg > 16 || c->encrypt) {
        return 0;
      }
      OPENSSL_memcpy(c->buf, ptr, arg);
      gctx->taglen = arg;
      return 1;

    case EVP_CTRL_AEAD_GET_TAG:
      if (arg <= 0 || arg > 16 || !c->encrypt || gctx->taglen < 0) {
        return 0;
      }
      OPENSSL_memcpy(ptr, c->buf, arg);
      return 1;

    case EVP_CTRL_AEAD_SET_IV_FIXED:
      // Special case: a length of -1 restores the whole IV.
      if (arg == -1) {
        OPENSSL_memcpy(gctx->iv, ptr, gctx->ivlen);
        gctx->iv_gen = 1;
        return 1;
      }
      // The fixed field must be at least 4 bytes and the invocation field at
      // least 8.
      if (arg < 4 || (gctx->ivlen - arg) < 8) {
        return 0;
      }
      if (arg) {
        OPENSSL_memcpy(gctx->iv, ptr, arg);
      }
      if (c->encrypt && !RAND_bytes(gctx->iv + arg, gctx->ivlen - arg)) {
        return 0;
      }
      gctx->iv_gen = 1;
      return 1;

    case EVP_CTRL_GCM_IV_GEN:
      if (gctx->iv_gen == 0 || gctx->key_set == 0) {
        return 0;
      }
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, gctx->iv, gctx->ivlen);
      if (arg <= 0 || arg > gctx->ivlen) {
        arg = gctx->ivlen;
      }
      OPENSSL_memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
      // The invocation field is at least 8 bytes, so incrementing only the
      // last 8 bytes suffices and no wraparound into the fixed field needs to
      // be checked.
      ctr64_inc(gctx->iv + gctx->ivlen - 8);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
      if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt) {
        return 0;
      }
      OPENSSL_memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, gctx->iv, gctx->ivlen);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_COPY: {
      EVP_CIPHER_CTX *out = ptr;
      EVP_AES_GCM_CTX *gctx_out = aes_gcm_from_cipher_ctx(out);
      // |EVP_CIPHER_CTX_copy| copies this generically, but we must redo it in
      // case |out->cipher_data| and |in->cipher_data| are differently aligned.
      OPENSSL_memcpy(gctx_out, gctx, sizeof(EVP_AES_GCM_CTX));
      if (gctx->iv == c->iv) {
        gctx_out->iv = out->iv;
      } else {
        gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
        if (!gctx_out->iv) {
          return 0;
        }
        OPENSSL_memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
      }
      return 1;
    }

    default:
      return -1;
  }
}

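// aes_gcm_cipher follows the EVP_CIPH_FLAG_CUSTOM_CIPHER calling convention:
// it returns the number of bytes written, zero for a successful final call,
// or -1 on error. A call with |out| == NULL supplies additional data (AAD).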
static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(ctx);

  // If not set up, return error
  if (!gctx->key_set) {
    return -1;
  }
  if (!gctx->iv_set) {
    return -1;
  }

  if (in) {
    if (out == NULL) {
      if (!CRYPTO_gcm128_aad(&gctx->gcm, in, len)) {
        return -1;
      }
    } else if (ctx->encrypt) {
      if (gctx->ctr) {
        if (!CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, &gctx->ks.ks, in, out, len,
                                         gctx->ctr)) {
          return -1;
        }
      } else {
        if (!CRYPTO_gcm128_encrypt(&gctx->gcm, &gctx->ks.ks, in, out, len)) {
          return -1;
        }
      }
    } else {
      if (gctx->ctr) {
        if (!CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, &gctx->ks.ks, in, out, len,
                                         gctx->ctr)) {
          return -1;
        }
      } else {
        if (!CRYPTO_gcm128_decrypt(&gctx->gcm, &gctx->ks.ks, in, out, len)) {
          return -1;
        }
      }
    }
    return len;
  } else {
    if (!ctx->encrypt) {
      if (gctx->taglen < 0 ||
          !CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen)) {
        return -1;
      }
      gctx->iv_set = 0;
      return 0;
    }
    CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
    gctx->taglen = 16;
    // Don't reuse the IV
    gctx->iv_set = 0;
    return 0;
  }
}
536
537DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_cbc_generic) {
538 memset(out, 0, sizeof(EVP_CIPHER));
539
540 out->nid = NID_aes_128_cbc;
541 out->block_size = 16;
542 out->key_len = 16;
543 out->iv_len = 16;
544 out->ctx_size = sizeof(EVP_AES_KEY);
545 out->flags = EVP_CIPH_CBC_MODE;
546 out->init = aes_init_key;
547 out->cipher = aes_cbc_cipher;
548}
549
550DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_ctr_generic) {
551 memset(out, 0, sizeof(EVP_CIPHER));
552
553 out->nid = NID_aes_128_ctr;
554 out->block_size = 1;
555 out->key_len = 16;
556 out->iv_len = 16;
557 out->ctx_size = sizeof(EVP_AES_KEY);
558 out->flags = EVP_CIPH_CTR_MODE;
559 out->init = aes_init_key;
560 out->cipher = aes_ctr_cipher;
561}
562
563DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_ecb_generic) {
564 memset(out, 0, sizeof(EVP_CIPHER));
565
566 out->nid = NID_aes_128_ecb;
567 out->block_size = 16;
568 out->key_len = 16;
569 out->ctx_size = sizeof(EVP_AES_KEY);
570 out->flags = EVP_CIPH_ECB_MODE;
571 out->init = aes_init_key;
572 out->cipher = aes_ecb_cipher;
573}
574
575DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_ofb_generic) {
576 memset(out, 0, sizeof(EVP_CIPHER));
577
578 out->nid = NID_aes_128_ofb128;
579 out->block_size = 1;
580 out->key_len = 16;
581 out->iv_len = 16;
582 out->ctx_size = sizeof(EVP_AES_KEY);
583 out->flags = EVP_CIPH_OFB_MODE;
584 out->init = aes_init_key;
585 out->cipher = aes_ofb_cipher;
586}
587
588DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_gcm_generic) {
589 memset(out, 0, sizeof(EVP_CIPHER));
590
591 out->nid = NID_aes_128_gcm;
592 out->block_size = 1;
593 out->key_len = 16;
594 out->iv_len = 12;
595 out->ctx_size = sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING;
596 out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_CUSTOM_COPY |
597 EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
598 EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
599 out->init = aes_gcm_init_key;
600 out->cipher = aes_gcm_cipher;
601 out->cleanup = aes_gcm_cleanup;
602 out->ctrl = aes_gcm_ctrl;
603}
604
605DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_cbc_generic) {
606 memset(out, 0, sizeof(EVP_CIPHER));
607
608 out->nid = NID_aes_192_cbc;
609 out->block_size = 16;
610 out->key_len = 24;
611 out->iv_len = 16;
612 out->ctx_size = sizeof(EVP_AES_KEY);
613 out->flags = EVP_CIPH_CBC_MODE;
614 out->init = aes_init_key;
615 out->cipher = aes_cbc_cipher;
616}
617
618DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_ctr_generic) {
619 memset(out, 0, sizeof(EVP_CIPHER));
620
621 out->nid = NID_aes_192_ctr;
622 out->block_size = 1;
623 out->key_len = 24;
624 out->iv_len = 16;
625 out->ctx_size = sizeof(EVP_AES_KEY);
626 out->flags = EVP_CIPH_CTR_MODE;
627 out->init = aes_init_key;
628 out->cipher = aes_ctr_cipher;
629}
630
631DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_ecb_generic) {
632 memset(out, 0, sizeof(EVP_CIPHER));
633
634 out->nid = NID_aes_192_ecb;
635 out->block_size = 16;
636 out->key_len = 24;
637 out->ctx_size = sizeof(EVP_AES_KEY);
638 out->flags = EVP_CIPH_ECB_MODE;
639 out->init = aes_init_key;
640 out->cipher = aes_ecb_cipher;
641}
642
643DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_ofb_generic) {
644 memset(out, 0, sizeof(EVP_CIPHER));
645
646 out->nid = NID_aes_192_ofb128;
647 out->block_size = 1;
648 out->key_len = 24;
649 out->iv_len = 16;
650 out->ctx_size = sizeof(EVP_AES_KEY);
651 out->flags = EVP_CIPH_OFB_MODE;
652 out->init = aes_init_key;
653 out->cipher = aes_ofb_cipher;
654}
655
656DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_gcm_generic) {
657 memset(out, 0, sizeof(EVP_CIPHER));
658
659 out->nid = NID_aes_192_gcm;
660 out->block_size = 1;
661 out->key_len = 24;
662 out->iv_len = 12;
663 out->ctx_size = sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING;
664 out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_CUSTOM_COPY |
665 EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
666 EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
667 out->init = aes_gcm_init_key;
668 out->cipher = aes_gcm_cipher;
669 out->cleanup = aes_gcm_cleanup;
670 out->ctrl = aes_gcm_ctrl;
671}
672
673DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_cbc_generic) {
674 memset(out, 0, sizeof(EVP_CIPHER));
675
676 out->nid = NID_aes_256_cbc;
677 out->block_size = 16;
678 out->key_len = 32;
679 out->iv_len = 16;
680 out->ctx_size = sizeof(EVP_AES_KEY);
681 out->flags = EVP_CIPH_CBC_MODE;
682 out->init = aes_init_key;
683 out->cipher = aes_cbc_cipher;
684}
685
686DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_ctr_generic) {
687 memset(out, 0, sizeof(EVP_CIPHER));
688
689 out->nid = NID_aes_256_ctr;
690 out->block_size = 1;
691 out->key_len = 32;
692 out->iv_len = 16;
693 out->ctx_size = sizeof(EVP_AES_KEY);
694 out->flags = EVP_CIPH_CTR_MODE;
695 out->init = aes_init_key;
696 out->cipher = aes_ctr_cipher;
697}
698
699DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_ecb_generic) {
700 memset(out, 0, sizeof(EVP_CIPHER));
701
702 out->nid = NID_aes_256_ecb;
703 out->block_size = 16;
704 out->key_len = 32;
705 out->ctx_size = sizeof(EVP_AES_KEY);
706 out->flags = EVP_CIPH_ECB_MODE;
707 out->init = aes_init_key;
708 out->cipher = aes_ecb_cipher;
709}
710
711DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_ofb_generic) {
712 memset(out, 0, sizeof(EVP_CIPHER));
713
714 out->nid = NID_aes_256_ofb128;
715 out->block_size = 1;
716 out->key_len = 32;
717 out->iv_len = 16;
718 out->ctx_size = sizeof(EVP_AES_KEY);
719 out->flags = EVP_CIPH_OFB_MODE;
720 out->init = aes_init_key;
721 out->cipher = aes_ofb_cipher;
722}
723
724DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_gcm_generic) {
725 memset(out, 0, sizeof(EVP_CIPHER));
726
727 out->nid = NID_aes_256_gcm;
728 out->block_size = 1;
729 out->key_len = 32;
730 out->iv_len = 12;
731 out->ctx_size = sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING;
732 out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_CUSTOM_COPY |
733 EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
734 EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
735 out->init = aes_gcm_init_key;
736 out->cipher = aes_gcm_cipher;
737 out->cleanup = aes_gcm_cleanup;
738 out->ctrl = aes_gcm_ctrl;
739}
740
741#if defined(HWAES_ECB)
742
743static int aes_hw_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
744 const uint8_t *in, size_t len) {
745 size_t bl = ctx->cipher->block_size;
746
747 if (len < bl) {
748 return 1;
749 }
750
751 aes_hw_ecb_encrypt(in, out, len, ctx->cipher_data, ctx->encrypt);
752
753 return 1;
754}
755
756DEFINE_LOCAL_DATA(EVP_CIPHER, aes_hw_128_ecb) {
757 memset(out, 0, sizeof(EVP_CIPHER));
758
759 out->nid = NID_aes_128_ecb;
760 out->block_size = 16;
761 out->key_len = 16;
762 out->ctx_size = sizeof(EVP_AES_KEY);
763 out->flags = EVP_CIPH_ECB_MODE;
764 out->init = aes_init_key;
765 out->cipher = aes_hw_ecb_cipher;
766}
767
768DEFINE_LOCAL_DATA(EVP_CIPHER, aes_hw_192_ecb) {
769 memset(out, 0, sizeof(EVP_CIPHER));
770
771 out->nid = NID_aes_192_ecb;
772 out->block_size = 16;
773 out->key_len = 24;
774 out->ctx_size = sizeof(EVP_AES_KEY);
775 out->flags = EVP_CIPH_ECB_MODE;
776 out->init = aes_init_key;
777 out->cipher = aes_hw_ecb_cipher;
778}
779
780DEFINE_LOCAL_DATA(EVP_CIPHER, aes_hw_256_ecb) {
781 memset(out, 0, sizeof(EVP_CIPHER));
782
783 out->nid = NID_aes_256_ecb;
784 out->block_size = 16;
785 out->key_len = 32;
786 out->ctx_size = sizeof(EVP_AES_KEY);
787 out->flags = EVP_CIPH_ECB_MODE;
788 out->init = aes_init_key;
789 out->cipher = aes_hw_ecb_cipher;
790}
791
792#define EVP_ECB_CIPHER_FUNCTION(keybits) \
793 const EVP_CIPHER *EVP_aes_##keybits##_ecb(void) { \
794 if (hwaes_capable()) { \
795 return aes_hw_##keybits##_ecb(); \
796 } \
797 return aes_##keybits##_ecb_generic(); \
798 }
799
800#else
801
802#define EVP_ECB_CIPHER_FUNCTION(keybits) \
803 const EVP_CIPHER *EVP_aes_##keybits##_ecb(void) { \
804 return aes_##keybits##_ecb_generic(); \
805 }
806
807#endif // HWAES_ECB
808
809#define EVP_CIPHER_FUNCTION(keybits, mode) \
810 const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
811 return aes_##keybits##_##mode##_generic(); \
812 }
813
814EVP_CIPHER_FUNCTION(128, cbc)
815EVP_CIPHER_FUNCTION(128, ctr)
816EVP_CIPHER_FUNCTION(128, ofb)
817EVP_CIPHER_FUNCTION(128, gcm)
818
819EVP_CIPHER_FUNCTION(192, cbc)
820EVP_CIPHER_FUNCTION(192, ctr)
821EVP_CIPHER_FUNCTION(192, ofb)
822EVP_CIPHER_FUNCTION(192, gcm)
823
824EVP_CIPHER_FUNCTION(256, cbc)
825EVP_CIPHER_FUNCTION(256, ctr)
826EVP_CIPHER_FUNCTION(256, ofb)
827EVP_CIPHER_FUNCTION(256, gcm)
828
829EVP_ECB_CIPHER_FUNCTION(128)
830EVP_ECB_CIPHER_FUNCTION(192)
831EVP_ECB_CIPHER_FUNCTION(256)
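
// A minimal usage sketch for the getters generated above, assuming
// hypothetical |key| and |iv| buffers of the right length and with error
// checking elided:
//
//   EVP_CIPHER_CTX ctx;
//   EVP_CIPHER_CTX_init(&ctx);
//   EVP_EncryptInit_ex(&ctx, EVP_aes_128_cbc(), NULL, key, iv);
//   // ... EVP_EncryptUpdate / EVP_EncryptFinal_ex ...
//   EVP_CIPHER_CTX_cleanup(&ctx);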


#define EVP_AEAD_AES_GCM_TAG_LEN 16

struct aead_aes_gcm_ctx {
  union {
    double align;
    AES_KEY ks;
  } ks;
  GCM128_KEY gcm_key;
  ctr128_f ctr;
};

static int aead_aes_gcm_init_impl(struct aead_aes_gcm_ctx *gcm_ctx,
                                  size_t *out_tag_len, const uint8_t *key,
                                  size_t key_len, size_t tag_len) {
  const size_t key_bits = key_len * 8;

  if (key_bits != 128 && key_bits != 256) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
    return 0;  // EVP_AEAD_CTX_init should catch this.
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  }

  if (tag_len > EVP_AEAD_AES_GCM_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TAG_TOO_LARGE);
    return 0;
  }

  gcm_ctx->ctr =
      aes_ctr_set_key(&gcm_ctx->ks.ks, &gcm_ctx->gcm_key, NULL, key, key_len);
  *out_tag_len = tag_len;
  return 1;
}

OPENSSL_STATIC_ASSERT(sizeof(((EVP_AEAD_CTX *)NULL)->state) >=
                          sizeof(struct aead_aes_gcm_ctx),
                      "AEAD state is too small");
#if defined(__GNUC__) || defined(__clang__)
OPENSSL_STATIC_ASSERT(alignof(union evp_aead_ctx_st_state) >=
                          alignof(struct aead_aes_gcm_ctx),
                      "AEAD state has insufficient alignment");
#endif

static int aead_aes_gcm_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                             size_t key_len, size_t requested_tag_len) {
  struct aead_aes_gcm_ctx *gcm_ctx = (struct aead_aes_gcm_ctx *)&ctx->state;

  size_t actual_tag_len;
  if (!aead_aes_gcm_init_impl(gcm_ctx, &actual_tag_len, key, key_len,
                              requested_tag_len)) {
    return 0;
  }

  ctx->tag_len = actual_tag_len;
  return 1;
}

static void aead_aes_gcm_cleanup(EVP_AEAD_CTX *ctx) {}

static int aead_aes_gcm_seal_scatter(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                     uint8_t *out_tag, size_t *out_tag_len,
                                     size_t max_out_tag_len,
                                     const uint8_t *nonce, size_t nonce_len,
                                     const uint8_t *in, size_t in_len,
                                     const uint8_t *extra_in,
                                     size_t extra_in_len,
                                     const uint8_t *ad, size_t ad_len) {
  struct aead_aes_gcm_ctx *gcm_ctx = (struct aead_aes_gcm_ctx *)&ctx->state;

  if (extra_in_len + ctx->tag_len < ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }
  if (max_out_tag_len < extra_in_len + ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }
  if (nonce_len == 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
    return 0;
  }

  const AES_KEY *key = &gcm_ctx->ks.ks;

  GCM128_CONTEXT gcm;
  OPENSSL_memset(&gcm, 0, sizeof(gcm));
  OPENSSL_memcpy(&gcm.gcm_key, &gcm_ctx->gcm_key, sizeof(gcm.gcm_key));
  CRYPTO_gcm128_setiv(&gcm, key, nonce, nonce_len);

  if (ad_len > 0 && !CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }

  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, key, in, out, in_len,
                                     gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_encrypt(&gcm, key, in, out, in_len)) {
      return 0;
    }
  }

  if (extra_in_len) {
    if (gcm_ctx->ctr) {
      if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, key, extra_in, out_tag,
                                       extra_in_len, gcm_ctx->ctr)) {
        return 0;
      }
    } else {
      if (!CRYPTO_gcm128_encrypt(&gcm, key, extra_in, out_tag, extra_in_len)) {
        return 0;
      }
    }
  }

  CRYPTO_gcm128_tag(&gcm, out_tag + extra_in_len, ctx->tag_len);
  *out_tag_len = ctx->tag_len + extra_in_len;

  return 1;
}

static int aead_aes_gcm_open_gather(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                    const uint8_t *nonce, size_t nonce_len,
                                    const uint8_t *in, size_t in_len,
                                    const uint8_t *in_tag, size_t in_tag_len,
                                    const uint8_t *ad, size_t ad_len) {
  struct aead_aes_gcm_ctx *gcm_ctx = (struct aead_aes_gcm_ctx *)&ctx->state;
  uint8_t tag[EVP_AEAD_AES_GCM_TAG_LEN];

  if (nonce_len == 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
    return 0;
  }

  if (in_tag_len != ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  const AES_KEY *key = &gcm_ctx->ks.ks;

  GCM128_CONTEXT gcm;
  OPENSSL_memset(&gcm, 0, sizeof(gcm));
  OPENSSL_memcpy(&gcm.gcm_key, &gcm_ctx->gcm_key, sizeof(gcm.gcm_key));
  CRYPTO_gcm128_setiv(&gcm, key, nonce, nonce_len);

  if (!CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }

  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_decrypt_ctr32(&gcm, key, in, out, in_len,
                                     gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_decrypt(&gcm, key, in, out, in_len)) {
      return 0;
    }
  }

  CRYPTO_gcm128_tag(&gcm, tag, ctx->tag_len);
  if (CRYPTO_memcmp(tag, in_tag, ctx->tag_len) != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  return 1;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 16;
  out->nonce_len = 12;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 32;
  out->nonce_len = 12;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}
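
// A minimal sealing sketch for the AEADs above, assuming a 16-byte |key|, a
// 12-byte |nonce|, and an |out| buffer with room for |in_len| bytes plus
// EVP_AEAD_AES_GCM_TAG_LEN bytes of overhead (error checking elided):
//
//   EVP_AEAD_CTX aead;
//   EVP_AEAD_CTX_init(&aead, EVP_aead_aes_128_gcm(), key, 16,
//                     EVP_AEAD_DEFAULT_TAG_LENGTH, NULL);
//   EVP_AEAD_CTX_seal(&aead, out, &out_len, max_out_len, nonce, 12, in,
//                     in_len, ad, ad_len);
//   EVP_AEAD_CTX_cleanup(&aead);
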
1037
1038struct aead_aes_gcm_tls12_ctx {
1039 struct aead_aes_gcm_ctx gcm_ctx;
1040 uint64_t min_next_nonce;
1041};
1042
1043OPENSSL_STATIC_ASSERT(sizeof(((EVP_AEAD_CTX *)NULL)->state) >=
1044 sizeof(struct aead_aes_gcm_tls12_ctx),
1045 "AEAD state is too small");
1046#if defined(__GNUC__) || defined(__clang__)
1047OPENSSL_STATIC_ASSERT(alignof(union evp_aead_ctx_st_state) >=
1048 alignof(struct aead_aes_gcm_tls12_ctx),
1049 "AEAD state has insufficient alignment");
1050#endif
1051
1052static int aead_aes_gcm_tls12_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
1053 size_t key_len, size_t requested_tag_len) {
1054 struct aead_aes_gcm_tls12_ctx *gcm_ctx =
1055 (struct aead_aes_gcm_tls12_ctx *) &ctx->state;
1056
1057 gcm_ctx->min_next_nonce = 0;
1058
1059 size_t actual_tag_len;
1060 if (!aead_aes_gcm_init_impl(&gcm_ctx->gcm_ctx, &actual_tag_len, key, key_len,
1061 requested_tag_len)) {
1062 return 0;
1063 }
1064
1065 ctx->tag_len = actual_tag_len;
1066 return 1;
1067}
1068
1069static int aead_aes_gcm_tls12_seal_scatter(
1070 const EVP_AEAD_CTX *ctx, uint8_t *out, uint8_t *out_tag,
1071 size_t *out_tag_len, size_t max_out_tag_len, const uint8_t *nonce,
1072 size_t nonce_len, const uint8_t *in, size_t in_len, const uint8_t *extra_in,
1073 size_t extra_in_len, const uint8_t *ad, size_t ad_len) {
1074 struct aead_aes_gcm_tls12_ctx *gcm_ctx =
1075 (struct aead_aes_gcm_tls12_ctx *) &ctx->state;
1076
1077 if (nonce_len != 12) {
1078 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
1079 return 0;
1080 }
1081
1082 // The given nonces must be strictly monotonically increasing.
1083 uint64_t given_counter;
1084 OPENSSL_memcpy(&given_counter, nonce + nonce_len - sizeof(given_counter),
1085 sizeof(given_counter));
1086 given_counter = CRYPTO_bswap8(given_counter);
1087 if (given_counter == UINT64_MAX ||
1088 given_counter < gcm_ctx->min_next_nonce) {
1089 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE);
1090 return 0;
1091 }
1092
1093 gcm_ctx->min_next_nonce = given_counter + 1;
1094
1095 return aead_aes_gcm_seal_scatter(ctx, out, out_tag, out_tag_len,
1096 max_out_tag_len, nonce, nonce_len, in,
1097 in_len, extra_in, extra_in_len, ad, ad_len);
1098}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm_tls12) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 16;
  out->nonce_len = 12;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls12_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls12_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm_tls12) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 32;
  out->nonce_len = 12;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls12_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls12_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

struct aead_aes_gcm_tls13_ctx {
  struct aead_aes_gcm_ctx gcm_ctx;
  uint64_t min_next_nonce;
  uint64_t mask;
  uint8_t first;
};

OPENSSL_STATIC_ASSERT(sizeof(((EVP_AEAD_CTX *)NULL)->state) >=
                          sizeof(struct aead_aes_gcm_tls13_ctx),
                      "AEAD state is too small");
#if defined(__GNUC__) || defined(__clang__)
OPENSSL_STATIC_ASSERT(alignof(union evp_aead_ctx_st_state) >=
                          alignof(struct aead_aes_gcm_tls13_ctx),
                      "AEAD state has insufficient alignment");
#endif

static int aead_aes_gcm_tls13_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                   size_t key_len, size_t requested_tag_len) {
  struct aead_aes_gcm_tls13_ctx *gcm_ctx =
      (struct aead_aes_gcm_tls13_ctx *)&ctx->state;

  gcm_ctx->min_next_nonce = 0;
  gcm_ctx->first = 1;

  size_t actual_tag_len;
  if (!aead_aes_gcm_init_impl(&gcm_ctx->gcm_ctx, &actual_tag_len, key, key_len,
                              requested_tag_len)) {
    return 0;
  }

  ctx->tag_len = actual_tag_len;
  return 1;
}

static int aead_aes_gcm_tls13_seal_scatter(
    const EVP_AEAD_CTX *ctx, uint8_t *out, uint8_t *out_tag,
    size_t *out_tag_len, size_t max_out_tag_len, const uint8_t *nonce,
    size_t nonce_len, const uint8_t *in, size_t in_len, const uint8_t *extra_in,
    size_t extra_in_len, const uint8_t *ad, size_t ad_len) {
  struct aead_aes_gcm_tls13_ctx *gcm_ctx =
      (struct aead_aes_gcm_tls13_ctx *)&ctx->state;

  if (nonce_len != 12) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  // The given nonces must be strictly monotonically increasing. See
  // https://tools.ietf.org/html/rfc8446#section-5.3 for details of the TLS 1.3
  // nonce construction.
  uint64_t given_counter;
  OPENSSL_memcpy(&given_counter, nonce + nonce_len - sizeof(given_counter),
                 sizeof(given_counter));
  given_counter = CRYPTO_bswap8(given_counter);

  if (gcm_ctx->first) {
    // In the first call the sequence number will be zero and therefore the
    // given nonce will be 0 ^ mask = mask.
    gcm_ctx->mask = given_counter;
    gcm_ctx->first = 0;
  }
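  // For example, in the first record the sequence number is zero, so the
  // nonce equals the static IV and |mask| records that IV; for the next
  // record the XOR below recovers the counter value one.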
  given_counter ^= gcm_ctx->mask;

  if (given_counter == UINT64_MAX ||
      given_counter < gcm_ctx->min_next_nonce) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE);
    return 0;
  }

  gcm_ctx->min_next_nonce = given_counter + 1;

  return aead_aes_gcm_seal_scatter(ctx, out, out_tag, out_tag_len,
                                   max_out_tag_len, nonce, nonce_len, in,
                                   in_len, extra_in, extra_in_len, ad, ad_len);
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm_tls13) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 16;
  out->nonce_len = 12;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls13_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls13_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm_tls13) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 32;
  out->nonce_len = 12;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls13_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls13_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

int EVP_has_aes_hardware(void) {
#if defined(OPENSSL_X86) || defined(OPENSSL_X86_64)
  return hwaes_capable() && crypto_gcm_clmul_enabled();
#elif defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64)
  return hwaes_capable() && CRYPTO_is_ARMv8_PMULL_capable();
#else
  return 0;
#endif
}

OPENSSL_MSVC_PRAGMA(warning(pop))