/*
 * Copyright 2001-2019 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

#include <openssl/opensslconf.h>
#include <openssl/crypto.h>
#include <openssl/evp.h>
#include <openssl/err.h>
#include <openssl/aes.h>
#include <openssl/rand.h>
#include <openssl/cmac.h>
#include "crypto/evp.h"
#include "internal/cryptlib.h"
#include "crypto/modes.h"
#include "crypto/siv.h"
#include "crypto/aes_platform.h"
#include "evp_local.h"
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int iv_gen;                 /* It is OK to generate IVs */
    int iv_gen_rand;            /* No IV was specified, so generate a rand IV */
    int tls_aad_len;            /* TLS AAD length */
    uint64_t tls_enc_records;   /* Number of TLS records encrypted */

    } ks1, ks2;                 /* AES key schedules to use */
    void (*stream) (const unsigned char *in,
                    unsigned char *out, size_t length,
                    const AES_KEY *key1, const AES_KEY *key2,
                    const unsigned char iv[16]);
#ifdef FIPS_MODE
static const int allow_insecure_decrypt = 0;
#else
static const int allow_insecure_decrypt = 1;
#endif
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
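    /*
     * Per RFC 3610: M is the tag length in bytes (an even value from 4 to
     * 16) and L is the size of the message-length field in bytes (2 to 8),
     * so the nonce occupies the remaining 15 - L bytes of the first block.
     */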
    int tls_aad_len;            /* TLS AAD length */

#ifndef OPENSSL_NO_OCB
    } ksenc;                    /* AES key schedule to use for encryption */
    } ksdec;                    /* AES key schedule to use for decryption */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    unsigned char *iv;          /* Temporary IV store */
    unsigned char tag[16];
    unsigned char data_buf[16]; /* Store partial data blocks */
    unsigned char aad_buf[16];  /* Store partial AAD blocks */
    int ivlen;                  /* IV length */

#define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))
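/*
 * MAXBITCHUNK bounds how much data one bit-oriented CFB1 call may process:
 * lengths are handed to CRYPTO_cfb128_1_encrypt() in bits, so capping a
 * chunk at 2^(bits-of-size_t - 4) bytes guarantees that len * 8 cannot
 * overflow a size_t (e.g. 2^60 bytes on an LP64 platform).
 */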
/* increment counter (64-bit int) by 1 */
static void ctr64_inc(unsigned char *counter)
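/* (body elided in the excerpt; a minimal sketch of the big-endian carry) */
{
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)                  /* byte did not wrap to zero: carry stops */
            return;
    } while (n > 0);
}
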
#if defined(AESNI_CAPABLE)
# if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
#  define AES_GCM_ASM2(gctx)      (gctx->gcm.block==(block128_f)aesni_encrypt && \
                                   gctx->gcm.ghash==gcm_ghash_avx)
#  undef AES_GCM_ASM2           /* minor size optimization */

static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                          const unsigned char *iv, int enc)
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
        dat->block = (block128_f) aesni_decrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) aesni_cbc_encrypt : NULL;
        ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
        dat->block = (block128_f) aesni_encrypt;
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
        else if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
            dat->stream.cbc = NULL;

        EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);

static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
    aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
                      EVP_CIPHER_CTX_iv_noconst(ctx),
                      EVP_CIPHER_CTX_encrypting(ctx));

static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
    size_t bl = EVP_CIPHER_CTX_block_size(ctx);

    aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
                      EVP_CIPHER_CTX_encrypting(ctx));

# define aesni_ofb_cipher aes_ofb_cipher
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb_cipher aes_cfb_cipher
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb8_cipher aes_cfb8_cipher
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_cfb1_cipher aes_cfb1_cipher
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_ctr_cipher aes_ctr_cipher
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);

        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
        gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        /*
         * If we have an IV we can set it directly, otherwise use the saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        /* If the key is set use the IV, otherwise save a copy of it */
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            memcpy(gctx->iv, iv, gctx->ivlen);
# define aesni_gcm_cipher aes_gcm_cipher
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

        /* The key is two half length keys in reality */
        const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
        const int bits = bytes * 8;

        /*
         * Verify that the two keys are different.
         *
         * This addresses Rogaway's vulnerability.
         * See comment in aes_xts_init_key() below.
         */
        if ((!allow_insecure_decrypt || enc)
                && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
            EVPerr(EVP_F_AESNI_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);

        /* key_len is two AES keys */
            aesni_set_encrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_encrypt;
            xctx->stream = aesni_xts_encrypt;
            aesni_set_decrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_decrypt;
            xctx->stream = aesni_xts_decrypt;

        aesni_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aesni_encrypt;

        xctx->xts.key1 = &xctx->ks1;

        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);

# define aesni_xts_cipher aes_xts_cipher
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);

        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aesni_encrypt);
        cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
            (ccm128_f) aesni_ccm64_decrypt_blocks;

        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);

# define aesni_ccm_cipher aes_ccm_cipher
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# ifndef OPENSSL_NO_OCB
static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

        /*
         * We set both the encrypt and decrypt key here because decrypt
         * needs both. We could possibly optimise to remove setting the
         * decrypt for an encryption operation.
         */
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
        aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
        if (!CRYPTO_ocb128_init(&octx->ocb,
                                &octx->ksenc.ks, &octx->ksdec.ks,
                                (block128_f) aesni_encrypt,
                                (block128_f) aesni_decrypt,
                                enc ? aesni_ocb_encrypt
                                    : aesni_ocb_decrypt))
        /*
         * If we have an IV we can set it directly, otherwise use the saved IV.
         */
        if (iv == NULL && octx->iv_set)
        if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
        /* If the key is set use the IV, otherwise save a copy of it */
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
            memcpy(octx->iv, iv, octx->ivlen);
# define aesni_ocb_cipher aes_ocb_cipher
static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
# endif                         /* OPENSSL_NO_OCB */

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aesni_##mode##_cipher, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_cipher, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aesni_##mode##_init_key, \
        aesni_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
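
/*
 * Illustration: instantiated as, say, BLOCK_CIPHER_custom(NID_aes, 256, 1,
 * 12, gcm, GCM, flags), the getter above reduces to a one-line runtime
 * dispatch on CPU capability:
 *
 *     const EVP_CIPHER *EVP_aes_256_gcm(void)
 *     {
 *         return AESNI_CAPABLE ? &aesni_256_gcm : &aes_256_gcm;
 *     }
 */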
#elif defined(SPARC_AES_CAPABLE)

static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                           const unsigned char *iv, int enc)
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes128_t4_cbc_decrypt : NULL;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes192_t4_cbc_decrypt : NULL;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes256_t4_cbc_decrypt : NULL;
        aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_encrypt;
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
                dat->stream.cbc = NULL;
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
                dat->stream.cbc = NULL;
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
                dat->stream.cbc = NULL;

        EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);

# define aes_t4_cbc_cipher aes_cbc_cipher
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ecb_cipher aes_ecb_cipher
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ofb_cipher aes_ofb_cipher
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb_cipher aes_cfb_cipher
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb8_cipher aes_cfb8_cipher
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_cfb1_cipher aes_cfb1_cipher
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_ctr_cipher aes_ctr_cipher
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);

        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;

        aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                           (block128_f) aes_t4_encrypt);
            gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
        /*
         * If we have an IV we can set it directly, otherwise use the saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        /* If the key is set use the IV, otherwise save a copy of it */
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            memcpy(gctx->iv, iv, gctx->ivlen);
# define aes_t4_gcm_cipher aes_gcm_cipher
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

        /* The key is two half length keys in reality */
        const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
        const int bits = bytes * 8;

        /*
         * Verify that the two keys are different.
         *
         * This addresses Rogaway's vulnerability.
         * See comment in aes_xts_init_key() below.
         */
        if ((!allow_insecure_decrypt || enc)
                && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
            EVPerr(EVP_F_AES_T4_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);

        /* key_len is two AES keys */
            aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_encrypt;
                xctx->stream = aes128_t4_xts_encrypt;
                xctx->stream = aes256_t4_xts_encrypt;
            aes_t4_set_decrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_decrypt;
                xctx->stream = aes128_t4_xts_decrypt;
                xctx->stream = aes256_t4_xts_decrypt;

        aes_t4_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aes_t4_encrypt;

        xctx->xts.key1 = &xctx->ks1;

        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);

# define aes_t4_xts_cipher aes_xts_cipher
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);

        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;

        aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aes_t4_encrypt);

        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);

# define aes_t4_ccm_cipher aes_ccm_cipher
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# ifndef OPENSSL_NO_OCB
static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

        /*
         * We set both the encrypt and decrypt key here because decrypt
         * needs both. We could possibly optimise to remove setting the
         * decrypt for an encryption operation.
         */
        aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
        aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
        if (!CRYPTO_ocb128_init(&octx->ocb,
                                &octx->ksenc.ks, &octx->ksdec.ks,
                                (block128_f) aes_t4_encrypt,
                                (block128_f) aes_t4_decrypt,
        /*
         * If we have an IV we can set it directly, otherwise use the saved IV.
         */
        if (iv == NULL && octx->iv_set)
        if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
        /* If the key is set use the IV, otherwise save a copy of it */
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
            memcpy(octx->iv, iv, octx->ivlen);
# define aes_t4_ocb_cipher aes_ocb_cipher
static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# endif                         /* OPENSSL_NO_OCB */

# ifndef OPENSSL_NO_SIV
#  define aes_t4_siv_init_key aes_siv_init_key
#  define aes_t4_siv_cipher aes_siv_cipher
# endif                         /* OPENSSL_NO_SIV */

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_t4_##mode##_cipher, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_cipher, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_t4_##mode##_init_key, \
        aes_t4_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
#elif defined(S390X_aes_128_CAPABLE)
/* IBM S390X support */

/*
 * KM-AES parameter block - begin
 * (see z/Architecture Principles of Operation >= SA22-7832-06)
 */
/* KM-AES parameter block - end */

/*
 * KMO-AES parameter block - begin
 * (see z/Architecture Principles of Operation >= SA22-7832-08)
 */
    unsigned char cv[16];
/* KMO-AES parameter block - end */

/*
 * KMF-AES parameter block - begin
 * (see z/Architecture Principles of Operation >= SA22-7832-08)
 */
    unsigned char cv[16];
/* KMF-AES parameter block - end */

/*
 * KMA-GCM-AES parameter block - begin
 * (see z/Architecture Principles of Operation >= SA22-7832-11)
 */
    unsigned char reserved[12];
            unsigned long long g[2];
    unsigned long long taadl;
    unsigned long long tpcl;
            unsigned long long g[2];
/* KMA-GCM-AES parameter block - end */

    unsigned char ares[16];
    unsigned char mres[16];
    unsigned char kres[16];
    uint64_t tls_enc_records;   /* Number of TLS records encrypted */

    /*
     * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
     * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
     * rounds field is used to store the function code and that the key
     * schedule is not stored (if aes hardware support is detected).
     */
    unsigned char pad[16];

/*
 * KMAC-AES parameter block - begin
 * (see z/Architecture Principles of Operation >= SA22-7832-08)
 */
            unsigned long long g[2];
/* KMAC-AES parameter block - end */

            unsigned long long g[2];
            unsigned long long g[2];
    unsigned long long blocks;
    unsigned char pad[140];

# define s390x_aes_init_key aes_init_key
static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc);

# define S390X_AES_CBC_CTX EVP_AES_KEY

# define s390x_aes_cbc_init_key aes_init_key

# define s390x_aes_cbc_cipher aes_cbc_cipher
static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);

    cctx->fc = S390X_AES_FC(keylen);
        cctx->fc |= S390X_DECRYPT;

    memcpy(cctx->km.param.k, key, keylen);

static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);

    s390x_km(in, len, out, cctx->fc, &cctx->km.param);

static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
    const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    memcpy(cctx->kmo.param.cv, iv, ivlen);
    memcpy(cctx->kmo.param.k, key, keylen);
    cctx->fc = S390X_AES_FC(keylen);

static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);

            *out = *in ^ cctx->kmo.param.cv[n];

    len &= ~(size_t)0xf;
        s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);

        s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
            out[n] = in[n] ^ cctx->kmo.param.cv[n];

static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    cctx->fc = S390X_AES_FC(keylen);
    cctx->fc |= 16 << 24;       /* 16 bytes cipher feedback */
        cctx->fc |= S390X_DECRYPT;

    memcpy(cctx->kmf.param.cv, iv, ivlen);
    memcpy(cctx->kmf.param.k, key, keylen);

static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);

            *out = cctx->kmf.param.cv[n] ^ tmp;
            cctx->kmf.param.cv[n] = enc ? *out : tmp;

    len &= ~(size_t)0xf;
        s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);

        s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
                 S390X_AES_FC(keylen), cctx->kmf.param.k);
            out[n] = cctx->kmf.param.cv[n] ^ tmp;
            cctx->kmf.param.cv[n] = enc ? out[n] : tmp;

static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
                                   const unsigned char *key,
                                   const unsigned char *ivec, int enc)
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    cctx->fc = S390X_AES_FC(keylen);
    cctx->fc |= 1 << 24;        /* 1 byte cipher feedback */
        cctx->fc |= S390X_DECRYPT;

    memcpy(cctx->kmf.param.cv, iv, ivlen);
    memcpy(cctx->kmf.param.k, key, keylen);

static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len)
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);

    s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);

# define s390x_aes_cfb1_init_key aes_init_key

# define s390x_aes_cfb1_cipher aes_cfb1_cipher
static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len);

# define S390X_AES_CTR_CTX EVP_AES_KEY

# define s390x_aes_ctr_init_key aes_init_key

# define s390x_aes_ctr_cipher aes_ctr_cipher
static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

/* iv + padding length for iv lengths != 12 */
# define S390X_gcm_ivpadlen(i)  ((((i) + 15) >> 4 << 4) + 16)
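/*
 * E.g. S390X_gcm_ivpadlen(12) = S390X_gcm_ivpadlen(16) = 32 and
 * S390X_gcm_ivpadlen(17) = 48: the iv is padded up to a multiple of 16
 * bytes, plus one extra 16-byte block whose last 8 bytes hold the iv
 * length in bits (see EVP_CTRL_AEAD_SET_IVLEN below).
 */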
/*
 * Process additional authenticated data. Returns 0 on success. Code is
 * big-endian.
 */
static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
    unsigned long long alen;

    if (ctx->kma.param.tpcl)

    alen = ctx->kma.param.taadl + len;
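    /*
     * GCM caps the AAD at 2^64 - 1 bits, i.e. at most 2^61 bytes; the
     * second clause below rejects a wrap of the running total.
     */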
    if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
    ctx->kma.param.taadl = alen;

        ctx->ares[n] = *aad;

    /* ctx->ares contains a complete block if offset has wrapped around */
        s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;

    len &= ~(size_t)0xf;
        s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;

            ctx->ares[rem] = aad[rem];

/*
 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
 * success. Code is big-endian.
 */
static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len)
    const unsigned char *inptr;
    unsigned long long mlen;
    unsigned char b[16];

    mlen = ctx->kma.param.tpcl + len;
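    /*
     * GCM caps the plaintext at 2^39 - 256 bits, i.e. 2^36 - 32 bytes;
     * again the second clause guards against a wrap of the running total.
     */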
    if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
    ctx->kma.param.tpcl = mlen;

    while (n && inlen) {
        ctx->mres[n] = *inptr;

    /* ctx->mres contains a complete block if offset has wrapped around */
        s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
                  ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;
        /* previous call already encrypted/decrypted its remainder,
         * see comment below */

    len &= ~(size_t)0xf;
        s390x_kma(ctx->ares, ctx->areslen, in, len, out,
                  ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;

        /*
         * If there is a remainder, it has to be saved such that it can be
         * processed by kma later. However, we also have to do the for-now
         * unauthenticated encryption/decryption part here and now...
         */
        if (!ctx->mreslen) {
            buf.w[0] = ctx->kma.param.j0.w[0];
            buf.w[1] = ctx->kma.param.j0.w[1];
            buf.w[2] = ctx->kma.param.j0.w[2];
            buf.w[3] = ctx->kma.param.cv.w + 1;
            s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);

        for (i = 0; i < rem; i++) {
            ctx->mres[n + i] = in[i];
            out[i] = in[i] ^ ctx->kres[n + i];

        ctx->mreslen += rem;

/*
 * Initialize context structure. Code is big-endian.
 */
static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
                                const unsigned char *iv)
    ctx->kma.param.t.g[0] = 0;
    ctx->kma.param.t.g[1] = 0;
    ctx->kma.param.tpcl = 0;
    ctx->kma.param.taadl = 0;

    if (ctx->ivlen == 12) {
        memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
        ctx->kma.param.j0.w[3] = 1;
        ctx->kma.param.cv.w = 1;
        /* ctx->iv has the right size and is already padded. */
        memcpy(ctx->iv, iv, ctx->ivlen);
        s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
                  ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;

        ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
        ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
        ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
        ctx->kma.param.t.g[0] = 0;
        ctx->kma.param.t.g[1] = 0;

/*
 * Performs various operations on the context structure depending on control
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
 * Code is big-endian.
 */
static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    S390X_AES_GCM_CTX *gctx_out;
    EVP_CIPHER_CTX *out;
    unsigned char *buf, *iv;
    int ivlen, enc, len;

        ivlen = EVP_CIPHER_iv_length(c->cipher);
        iv = EVP_CIPHER_CTX_iv_noconst(c);
        gctx->ivlen = ivlen;
        gctx->tls_aad_len = -1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = gctx->ivlen;

    case EVP_CTRL_AEAD_SET_IVLEN:
        iv = EVP_CIPHER_CTX_iv_noconst(c);
        len = S390X_gcm_ivpadlen(arg);

        /* Allocate memory for iv if needed. */
        if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
            OPENSSL_free(gctx->iv);
            if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
                EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);

        memset(gctx->iv + arg, 0, len - arg - 8);
        *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;

    case EVP_CTRL_AEAD_SET_TAG:
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (arg <= 0 || arg > 16 || enc)
        memcpy(buf, ptr, arg);

    case EVP_CTRL_AEAD_GET_TAG:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
        memcpy(ptr, gctx->kma.param.t.b, arg);

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole iv */
            memcpy(gctx->iv, ptr, gctx->ivlen);
        /*
         * Fixed field must be at least 4 bytes and invocation field at
         * least 8 bytes.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
        memcpy(gctx->iv, ptr, arg);

        enc = EVP_CIPHER_CTX_encrypting(c);
        if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
        s390x_aes_gcm_setiv(gctx, gctx->iv);

        if (arg <= 0 || arg > gctx->ivlen)
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);

    case EVP_CTRL_GCM_SET_IV_INV:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        s390x_aes_gcm_setiv(gctx, gctx->iv);

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the aad for later use. */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)

        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        gctx->tls_aad_len = arg;
        gctx->tls_enc_records = 0;
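        /*
         * The last two bytes of the 13-byte TLS AAD
         * (seq_num || type || version || length) carry the record length,
         * big-endian; it is rewritten below to the plaintext length.
         */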
        len = buf[arg - 2] << 8 | buf[arg - 1];
        /* Correct length for explicit iv. */
        if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
        len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;

        /* If decrypting correct for tag too. */
        enc = EVP_CIPHER_CTX_encrypting(c);
            if (len < EVP_GCM_TLS_TAG_LEN)
            len -= EVP_GCM_TLS_TAG_LEN;

        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;
        /* Extra padding: tag appended to record. */
        return EVP_GCM_TLS_TAG_LEN;

        gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
        iv = EVP_CIPHER_CTX_iv_noconst(c);

        if (gctx->iv == iv) {
            gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
            len = S390X_gcm_ivpadlen(gctx->ivlen);
            if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
                EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
            memcpy(gctx_out->iv, gctx->iv, len);
/*
 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
 */
static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);

    if (iv == NULL && key == NULL)

        keylen = EVP_CIPHER_CTX_key_length(ctx);
        memcpy(&gctx->kma.param.k, key, keylen);

        gctx->fc = S390X_AES_FC(keylen);
            gctx->fc |= S390X_DECRYPT;

        if (iv == NULL && gctx->iv_set)
            s390x_aes_gcm_setiv(gctx, iv);
            s390x_aes_gcm_setiv(gctx, iv);
            memcpy(gctx->iv, iv, gctx->ivlen);

/*
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
 * if successful. Otherwise -1 is returned. Code is big-endian.
 */
static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);

    if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
    /*
     * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
     * Requirements from SP 800-38D". The requirement is for one party to the
     * communication to fail after 2^64 - 1 keys. We do this on the encrypting
     * side only.
     */
    if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
        EVPerr(EVP_F_S390X_AES_GCM_TLS_CIPHER, EVP_R_TOO_MANY_RECORDS);

    if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
                                     : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)

    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
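    /* The KMA instruction takes the total AAD and plaintext lengths in
     * bits, hence the << 3 conversions below. */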
    gctx->kma.param.taadl = gctx->tls_aad_len << 3;
    gctx->kma.param.tpcl = len << 3;
    s390x_kma(buf, gctx->tls_aad_len, in, len, out,
              gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);

        memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
        if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
                          EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);

    gctx->tls_aad_len = -1;

/*
 * Called from EVP layer to initialize context, process additional
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
 * ciphertext or process a TLS packet, depending on context. Returns bytes
 * written on success. Otherwise -1 is returned. Code is big-endian.
 */
static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    unsigned char *buf, tmp[16];

    if (gctx->tls_aad_len >= 0)
        return s390x_aes_gcm_tls_cipher(ctx, out, in, len);

            if (s390x_aes_gcm_aad(gctx, in, len))
            if (s390x_aes_gcm(gctx, in, out, len))

            gctx->kma.param.taadl <<= 3;
            gctx->kma.param.tpcl <<= 3;
            s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
                      gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
            /* recall that we already did en-/decrypt gctx->mres
             * and returned it to caller... */
            OPENSSL_cleanse(tmp, gctx->mreslen);

            enc = EVP_CIPHER_CTX_encrypting(ctx);
                if (gctx->taglen < 0)
                buf = EVP_CIPHER_CTX_buf_noconst(ctx);
                if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))

static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    const unsigned char *iv;

    iv = EVP_CIPHER_CTX_iv(c);
        OPENSSL_free(gctx->iv);

    OPENSSL_cleanse(gctx, sizeof(*gctx));

# define S390X_AES_XTS_CTX EVP_AES_XTS_CTX

# define s390x_aes_xts_init_key aes_xts_init_key
static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc);
# define s390x_aes_xts_cipher aes_xts_cipher
static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define s390x_aes_xts_ctrl aes_xts_ctrl
static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
# define s390x_aes_xts_cleanup aes_xts_cleanup

/*
 * Set nonce and length fields. Code is big-endian.
 */
static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
                                       const unsigned char *nonce,
    ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
    ctx->aes.ccm.nonce.g[1] = mlen;
    memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);

/*
 * Process additional authenticated data. Code is big-endian.
 */
static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,

    ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;

    /* Suppress 'type-punned pointer dereference' warning. */
    ptr = ctx->aes.ccm.buf.b;
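    /*
     * Encode the AAD length as per RFC 3610: lengths below 0xff00 take two
     * bytes; 0xff || 0xfe introduces a 32-bit length; 0xff || 0xff a 64-bit
     * length. Big-endian s390x lets the markers be stored as uint16_t.
     */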
    if (alen < ((1 << 16) - (1 << 8))) {
        *(uint16_t *)ptr = alen;
    } else if (sizeof(alen) == 8
               && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
        *(uint16_t *)ptr = 0xffff;
        *(uint64_t *)(ptr + 2) = alen;
        *(uint16_t *)ptr = 0xfffe;
        *(uint32_t *)(ptr + 2) = alen;

    while (i < 16 && alen) {
        ctx->aes.ccm.buf.b[i] = *aad;
        ctx->aes.ccm.buf.b[i] = 0;

    ctx->aes.ccm.kmac_param.icv.g[0] = 0;
    ctx->aes.ccm.kmac_param.icv.g[1] = 0;
    s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
               &ctx->aes.ccm.kmac_param);
    ctx->aes.ccm.blocks += 2;

    alen &= ~(size_t)0xf;
        s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        ctx->aes.ccm.blocks += alen >> 4;

        for (i = 0; i < rem; i++)
            ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];

        s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                 ctx->aes.ccm.kmac_param.k);
        ctx->aes.ccm.blocks++;
/*
 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for
 * success.
 */
static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len, int enc)
    unsigned int i, l, num;
    unsigned char flags;

    flags = ctx->aes.ccm.nonce.b[0];
    if (!(flags & S390X_CCM_AAD_FLAG)) {
        s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
                 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
        ctx->aes.ccm.blocks++;

    ctx->aes.ccm.nonce.b[0] = l;

    /*
     * Reconstruct length from encoded length field
     * and initialize it with counter value.
     */
    for (i = 15 - l; i < 15; i++) {
        n |= ctx->aes.ccm.nonce.b[i];
        ctx->aes.ccm.nonce.b[i] = 0;

    n |= ctx->aes.ccm.nonce.b[15];
    ctx->aes.ccm.nonce.b[15] = 1;

        return -1;              /* length mismatch */

    /* Two operations per block plus one for tag encryption */
    ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
    if (ctx->aes.ccm.blocks > (1ULL << 61))
        return -2;              /* too much data */

    len &= ~(size_t)0xf;

    /* mac-then-encrypt */
        s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
            for (i = 0; i < rem; i++)
                ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];
            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                     ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                     ctx->aes.ccm.kmac_param.k);
        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
                                    ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
                                    &num, (ctr128_f)AES_ctr32_encrypt);
    /* decrypt-then-mac */
        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
                                    ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
                                    &num, (ctr128_f)AES_ctr32_encrypt);
        s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
            for (i = 0; i < rem; i++)
                ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];
            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                     ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                     ctx->aes.ccm.kmac_param.k);

    for (i = 15 - l; i < 16; i++)
        ctx->aes.ccm.nonce.b[i] = 0;

    s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
             ctx->aes.ccm.kmac_param.k);
    ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
    ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];

    ctx->aes.ccm.nonce.b[0] = flags;    /* restore flags field */
/*
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
 * if successful. Otherwise -1 is returned.
 */
static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    unsigned char *ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
    unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);

        || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))

        /* Set explicit iv (sequence number). */
        memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);

    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
    /*
     * Get explicit iv (sequence number). We already have fixed iv
     * (server/client_write_iv) here.
     */
    memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
    s390x_aes_ccm_setiv(cctx, ivec, len);

    /* Process aad (sequence number|type|version|length) */
    s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);

    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;

        if (s390x_aes_ccm(cctx, in, out, len, enc))
        memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
        if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
            if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,

        OPENSSL_cleanse(out, len);
/*
 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is
 * returned.
 */
static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    unsigned char *ivec;

    if (iv == NULL && key == NULL)

        keylen = EVP_CIPHER_CTX_key_length(ctx);
        cctx->aes.ccm.fc = S390X_AES_FC(keylen);
        memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);

        /* Store encoded m and l. */
        cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
                                 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
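        /*
         * The line above builds the RFC 3610 flags octet: bits 0-2 hold
         * L - 1, bits 3-5 hold (M - 2) / 2; bit 6 (the AAD flag) is set
         * separately in s390x_aes_ccm_aad() above.
         */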
        memset(cctx->aes.ccm.nonce.b + 1, 0,
               sizeof(cctx->aes.ccm.nonce.b));
        cctx->aes.ccm.blocks = 0;

        cctx->aes.ccm.key_set = 1;

        ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
        memcpy(ivec, iv, 15 - cctx->aes.ccm.l);

        cctx->aes.ccm.iv_set = 1;

/*
 * Called from EVP layer to initialize context, process additional
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
 * plaintext or process a TLS packet, depending on context. Returns bytes
 * written on success. Otherwise -1 is returned.
 */
static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
    unsigned char *buf, *ivec;

    if (!cctx->aes.ccm.key_set)

    if (cctx->aes.ccm.tls_aad_len >= 0)
        return s390x_aes_ccm_tls_cipher(ctx, out, in, len);

    /*
     * Final(): Does not return any data. Recall that ccm is mac-then-encrypt
     * so integrity must be checked already at Update() i.e., before
     * potentially corrupted data is output.
     */
    if (in == NULL && out != NULL)

    if (!cctx->aes.ccm.iv_set)

        /* Update(): Pass message length. */
        ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
        s390x_aes_ccm_setiv(cctx, ivec, len);

        cctx->aes.ccm.len_set = 1;

        /* Update(): Process aad. */
        if (!cctx->aes.ccm.len_set && len)

        s390x_aes_ccm_aad(cctx, in, len);

    /* The tag must be set before actually decrypting data */
    if (!enc && !cctx->aes.ccm.tag_set)

    /* Update(): Process message. */
        if (!cctx->aes.ccm.len_set) {
            /*
             * In case message length was not previously set explicitly via
             * Update(), set it now.
             */
            ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
            s390x_aes_ccm_setiv(cctx, ivec, len);

            cctx->aes.ccm.len_set = 1;

            if (s390x_aes_ccm(cctx, in, out, len, enc))
            cctx->aes.ccm.tag_set = 1;

            if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
                buf = EVP_CIPHER_CTX_buf_noconst(ctx);
                if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,

            OPENSSL_cleanse(out, len);
            cctx->aes.ccm.iv_set = 0;
            cctx->aes.ccm.tag_set = 0;
            cctx->aes.ccm.len_set = 0;

/*
 * Performs various operations on the context structure depending on control
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
 * Code is big-endian.
 */
static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
    unsigned char *buf, *iv;

        cctx->aes.ccm.key_set = 0;
        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.l = 8;
        cctx->aes.ccm.m = 12;
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.len_set = 0;
        cctx->aes.ccm.tls_aad_len = -1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = 15 - cctx->aes.ccm.l;

    case EVP_CTRL_AEAD_TLS1_AAD:
        if (arg != EVP_AEAD_TLS1_AAD_LEN)

        /* Save the aad for later use. */
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        cctx->aes.ccm.tls_aad_len = arg;

        len = buf[arg - 2] << 8 | buf[arg - 1];
        if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)

        /* Correct length for explicit iv. */
        len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;

        enc = EVP_CIPHER_CTX_encrypting(c);
            if (len < cctx->aes.ccm.m)

            /* Correct length for tag. */
            len -= cctx->aes.ccm.m;

        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;

        /* Extra padding: tag appended to record. */
        return cctx->aes.ccm.m;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)

        /* Copy to first part of the iv. */
        iv = EVP_CIPHER_CTX_iv_noconst(c);
        memcpy(iv, ptr, arg);

    case EVP_CTRL_AEAD_SET_IVLEN:

    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
        cctx->aes.ccm.l = arg;

    case EVP_CTRL_AEAD_SET_TAG:
        if ((arg & 1) || arg < 4 || arg > 16)

        enc = EVP_CIPHER_CTX_encrypting(c);

            cctx->aes.ccm.tag_set = 1;
            buf = EVP_CIPHER_CTX_buf_noconst(c);
            memcpy(buf, ptr, arg);

        cctx->aes.ccm.m = arg;

    case EVP_CTRL_AEAD_GET_TAG:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (!enc || !cctx->aes.ccm.tag_set)

        if (arg < cctx->aes.ccm.m)

        memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.len_set = 0;
# define s390x_aes_ccm_cleanup aes_ccm_cleanup

# ifndef OPENSSL_NO_OCB
# define S390X_AES_OCB_CTX EVP_AES_OCB_CTX

# define s390x_aes_ocb_init_key aes_ocb_init_key
static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                                  const unsigned char *iv, int enc);
# define s390x_aes_ocb_cipher aes_ocb_cipher
static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define s390x_aes_ocb_cleanup aes_ocb_cleanup
static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
# define s390x_aes_ocb_ctrl aes_ocb_ctrl
static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);

# ifndef OPENSSL_NO_SIV
# define S390X_AES_SIV_CTX EVP_AES_SIV_CTX

# define s390x_aes_siv_init_key aes_siv_init_key
# define s390x_aes_siv_cipher aes_siv_cipher
# define s390x_aes_siv_cleanup aes_siv_cleanup
# define s390x_aes_siv_ctrl aes_siv_ctrl

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \
static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        flags | EVP_CIPH_##MODE##_MODE, \
        s390x_aes_##mode##_init_key, \
        s390x_aes_##mode##_cipher, \
        sizeof(S390X_AES_##MODE##_CTX), \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode, \
        flags | EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_cipher, \
        sizeof(EVP_AES_KEY), \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
    return S390X_aes_##keylen##_##mode##_CAPABLE ? \
           &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
        flags | EVP_CIPH_##MODE##_MODE, \
        s390x_aes_##mode##_init_key, \
        s390x_aes_##mode##_cipher, \
        s390x_aes_##mode##_cleanup, \
        sizeof(S390X_AES_##MODE##_CTX), \
        s390x_aes_##mode##_ctrl, \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
        flags | EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        aes_##mode##_ctrl, \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
    return S390X_aes_##keylen##_##mode##_CAPABLE ? \
           &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \

#else

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_cipher, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }

#define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
        BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
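
/*
 * Note: each BLOCK_CIPHER_generic_pack() expansion below therefore emits
 * seven EVP_CIPHER tables and getters per key length: cbc, ecb, ofb128,
 * cfb128, cfb1, cfb8 and ctr.
 */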
2290 static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2291 const unsigned char *iv, int enc)
2294 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2296 mode = EVP_CIPHER_CTX_mode(ctx);
2297 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
2299 #ifdef HWAES_CAPABLE
2300 if (HWAES_CAPABLE) {
2301 ret = HWAES_set_decrypt_key(key,
2302 EVP_CIPHER_CTX_key_length(ctx) * 8,
2304 dat->block = (block128_f) HWAES_decrypt;
2305 dat->stream.cbc = NULL;
2306 # ifdef HWAES_cbc_encrypt
2307 if (mode == EVP_CIPH_CBC_MODE)
2308 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2312 #ifdef BSAES_CAPABLE
2313 if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
2314 ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2316 dat->block = (block128_f) AES_decrypt;
2317 dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
2320 #ifdef VPAES_CAPABLE
2321 if (VPAES_CAPABLE) {
2322 ret = vpaes_set_decrypt_key(key,
2323 EVP_CIPHER_CTX_key_length(ctx) * 8,
2325 dat->block = (block128_f) vpaes_decrypt;
2326 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2327 (cbc128_f) vpaes_cbc_encrypt : NULL;
2331 ret = AES_set_decrypt_key(key,
2332 EVP_CIPHER_CTX_key_length(ctx) * 8,
2334 dat->block = (block128_f) AES_decrypt;
2335 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2336 (cbc128_f) AES_cbc_encrypt : NULL;
2339 #ifdef HWAES_CAPABLE
2340 if (HWAES_CAPABLE) {
2341 ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2343 dat->block = (block128_f) HWAES_encrypt;
2344 dat->stream.cbc = NULL;
2345 # ifdef HWAES_cbc_encrypt
2346 if (mode == EVP_CIPH_CBC_MODE)
2347 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2350 # ifdef HWAES_ctr32_encrypt_blocks
2351 if (mode == EVP_CIPH_CTR_MODE)
2352 dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2355 (void)0; /* terminate potentially open 'else' */
2358 #ifdef BSAES_CAPABLE
2359 if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
2360 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2362 dat->block = (block128_f) AES_encrypt;
2363 dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
2366 #ifdef VPAES_CAPABLE
2367 if (VPAES_CAPABLE) {
2368 ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2370 dat->block = (block128_f) vpaes_encrypt;
2371 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2372 (cbc128_f) vpaes_cbc_encrypt : NULL;
2376 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2378 dat->block = (block128_f) AES_encrypt;
2379 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2380 (cbc128_f) AES_cbc_encrypt : NULL;
2382 if (mode == EVP_CIPH_CTR_MODE)
2383 dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
2388 EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
2395 static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2396 const unsigned char *in, size_t len)
2398 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2400 if (dat->stream.cbc)
2401 (*dat->stream.cbc) (in, out, len, &dat->ks,
2402 EVP_CIPHER_CTX_iv_noconst(ctx),
2403 EVP_CIPHER_CTX_encrypting(ctx));
2404 else if (EVP_CIPHER_CTX_encrypting(ctx))
2405 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks,
2406 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2408 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
2409 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2414 static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2415 const unsigned char *in, size_t len)
2417 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
2419 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2424 for (i = 0, len -= bl; i <= len; i += bl)
2425 (*dat->block) (in + i, out + i, &dat->ks);
2430 static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2431 const unsigned char *in, size_t len)
2433 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2435 int num = EVP_CIPHER_CTX_num(ctx);
2436 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
2437 EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block);
2438 EVP_CIPHER_CTX_set_num(ctx, num);
2442 static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2443 const unsigned char *in, size_t len)
2445 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2447 int num = EVP_CIPHER_CTX_num(ctx);
2448 CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
2449 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2450 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2451 EVP_CIPHER_CTX_set_num(ctx, num);
2455 static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2456 const unsigned char *in, size_t len)
2458 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2460 int num = EVP_CIPHER_CTX_num(ctx);
2461 CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
2462 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2463 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2464 EVP_CIPHER_CTX_set_num(ctx, num);
2468 static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2469 const unsigned char *in, size_t len)
2471 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2473 if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
2474 int num = EVP_CIPHER_CTX_num(ctx);
2475 CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
2476 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2477 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2478 EVP_CIPHER_CTX_set_num(ctx, num);
2482 while (len >= MAXBITCHUNK) {
2483 int num = EVP_CIPHER_CTX_num(ctx);
2484 CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
2485 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2486 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2487 EVP_CIPHER_CTX_set_num(ctx, num);
2493 int num = EVP_CIPHER_CTX_num(ctx);
2494 CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
2495 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2496 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2497 EVP_CIPHER_CTX_set_num(ctx, num);
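/*
 * Why the chunking above: CFB1 counts in bits, so each call passes len * 8
 * to CRYPTO_cfb128_1_encrypt(). MAXBITCHUNK is 2^(8*sizeof(size_t)-4)
 * bytes (2^60 on a 64-bit platform), so multiplying a chunk by 8 can
 * never overflow a size_t.
 */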
2503 static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2504 const unsigned char *in, size_t len)
2506 unsigned int num = EVP_CIPHER_CTX_num(ctx);
2507 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2509 if (dat->stream.ctr)
2510 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
2511 EVP_CIPHER_CTX_iv_noconst(ctx),
2512 EVP_CIPHER_CTX_buf_noconst(ctx),
2513 &num, dat->stream.ctr);
2515 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
2516 EVP_CIPHER_CTX_iv_noconst(ctx),
2517 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
2519 EVP_CIPHER_CTX_set_num(ctx, num);
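/*-
 * A short usage sketch (placeholder buffer names): CTR is a stream mode,
 * so encryption and decryption are the same operation and no padding is
 * involved:
 *
 *   EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *   EVP_EncryptInit_ex(c, EVP_aes_128_ctr(), NULL, key, iv);
 *   EVP_EncryptUpdate(c, out, &outl, in, inlen);
 *   EVP_CIPHER_CTX_free(c);
 *
 * The residual keystream position lives in the context's num field, which
 * is why aes_ctr_cipher() stores it back with EVP_CIPHER_CTX_set_num().
 */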
2523 BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
2524 BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
2525 BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
2527 static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
2529 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2532 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
2533 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
2534 OPENSSL_free(gctx->iv);
2538 static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2540 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2545 gctx->ivlen = EVP_CIPHER_iv_length(c->cipher);
2549 gctx->tls_aad_len = -1;
2552 case EVP_CTRL_GET_IVLEN:
2553 *(int *)ptr = gctx->ivlen;
2556 case EVP_CTRL_AEAD_SET_IVLEN:
2559 /* Allocate memory for IV if needed */
2560 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
2561 if (gctx->iv != c->iv)
2562 OPENSSL_free(gctx->iv);
2563 if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
2564 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
2571 case EVP_CTRL_AEAD_SET_TAG:
2572 if (arg <= 0 || arg > 16 || c->encrypt)
2574 memcpy(c->buf, ptr, arg);
2578 case EVP_CTRL_AEAD_GET_TAG:
2579 if (arg <= 0 || arg > 16 || !c->encrypt
2580 || gctx->taglen < 0)
2582 memcpy(ptr, c->buf, arg);
2585 case EVP_CTRL_GET_IV:
2586 if (gctx->iv_gen != 1 && gctx->iv_gen_rand != 1)
2588 if (gctx->ivlen != arg)
2590 memcpy(ptr, gctx->iv, arg);
2593 case EVP_CTRL_GCM_SET_IV_FIXED:
2594 /* Special case: -1 length restores whole IV */
2596 memcpy(gctx->iv, ptr, gctx->ivlen);
2601 * Fixed field must be at least 4 bytes and the invocation field at least 8.
2604 if ((arg < 4) || (gctx->ivlen - arg) < 8)
2607 memcpy(gctx->iv, ptr, arg);
2608 if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
2613 case EVP_CTRL_GCM_IV_GEN:
2614 if (gctx->iv_gen == 0 || gctx->key_set == 0)
2616 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2617 if (arg <= 0 || arg > gctx->ivlen)
2619 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
2621 * The invocation field will be at least 8 bytes in size, so there is no
2622 * need to check for wraparound or to increment more than the last 8 bytes.
2624 ctr64_inc(gctx->iv + gctx->ivlen - 8);
2628 case EVP_CTRL_GCM_SET_IV_INV:
2629 if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
2631 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
2632 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
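/*-
 * Illustration of the three IV controls above: with the default 12-byte
 * GCM IV the fixed/invocation split matches the TLS layout of RFC 5288,
 * a 4-byte implicit salt from the key block followed by an 8-byte
 * explicit per-record nonce. A sketch of the encrypt side, with 'salt'
 * assumed to be 4 bytes and 'rec_iv' an 8-byte output buffer:
 *
 *   EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_GCM_SET_IV_FIXED, 4, salt);
 *   EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_GCM_IV_GEN, 8, rec_iv);
 *
 * Each EVP_CTRL_GCM_IV_GEN call hands back the explicit part and then
 * increments the 64-bit invocation counter for the next record.
 */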
2636 case EVP_CTRL_AEAD_TLS1_AAD:
2637 /* Save the AAD for later use */
2638 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2640 memcpy(c->buf, ptr, arg);
2641 gctx->tls_aad_len = arg;
2642 gctx->tls_enc_records = 0;
2644 unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
2645 /* Correct length for explicit IV */
2646 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
2648 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
2649 /* If decrypting, correct for the tag too */
2651 if (len < EVP_GCM_TLS_TAG_LEN)
2653 len -= EVP_GCM_TLS_TAG_LEN;
2655 c->buf[arg - 2] = len >> 8;
2656 c->buf[arg - 1] = len & 0xff;
2658 /* Extra padding: tag appended to record */
2659 return EVP_GCM_TLS_TAG_LEN;
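/*
 * Worked example for the adjustment above: if the incoming 13-byte TLS
 * AAD encodes a record length of 256, decryption rewrites it to
 * 256 - 8 (explicit IV) - 16 (tag) = 232, the true plaintext length.
 */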
2663 EVP_CIPHER_CTX *out = ptr;
2664 EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
2665 if (gctx->gcm.key) {
2666 if (gctx->gcm.key != &gctx->ks)
2668 gctx_out->gcm.key = &gctx_out->ks;
2670 if (gctx->iv == c->iv)
2671 gctx_out->iv = out->iv;
2673 if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
2674 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
2677 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
2688 static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2689 const unsigned char *iv, int enc)
2691 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
2696 #ifdef HWAES_CAPABLE
2697 if (HWAES_CAPABLE) {
2698 HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
2699 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2700 (block128_f) HWAES_encrypt);
2701 # ifdef HWAES_ctr32_encrypt_blocks
2702 gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2709 #ifdef BSAES_CAPABLE
2710 if (BSAES_CAPABLE) {
2711 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
2712 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2713 (block128_f) AES_encrypt);
2714 gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
2718 #ifdef VPAES_CAPABLE
2719 if (VPAES_CAPABLE) {
2720 vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
2721 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2722 (block128_f) vpaes_encrypt);
2727 (void)0; /* terminate potentially open 'else' */
2729 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
2730 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2731 (block128_f) AES_encrypt);
2733 gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
2740 * If we have an IV we can set it directly, otherwise use the saved IV.
2742 if (iv == NULL && gctx->iv_set)
2745 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
2750 /* If the key is set use the IV, otherwise copy it */
2752 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
2754 memcpy(gctx->iv, iv, gctx->ivlen);
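/*
 * Note on the branches above: since the cipher is flagged
 * EVP_CIPH_ALWAYS_CALL_INIT, the key and IV may arrive in separate
 * EVP_CipherInit_ex() calls in either order, so the IV is either
 * programmed into the GCM context immediately or cached until the key
 * turns up.
 */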
2762 * Handle the TLS GCM packet format. This consists of the last portion of
2763 * the IV followed by the payload and finally the tag. On encrypt generate
2764 * the IV, encrypt the payload and write the tag. On verify retrieve the IV, decrypt the payload and verify the tag.
2768 static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2769 const unsigned char *in, size_t len)
2771 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
2773 /* Encrypt/decrypt must be performed in place */
2775 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
2779 * Check for too many TLS records as per FIPS 140-2 IG A.5 "Key/IV Pair
2780 * Uniqueness Requirements from SP 800-38D". The requirement is for one
2781 * party to the communication to fail after 2^64 - 1 records. We do this on the encrypting side only.
2784 if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
2785 EVPerr(EVP_F_AES_GCM_TLS_CIPHER, EVP_R_TOO_MANY_RECORDS);
2790 * Set the IV from the start of the buffer, or generate an IV and write it to the start of the buffer.
2793 if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN
2794 : EVP_CTRL_GCM_SET_IV_INV,
2795 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
2798 if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
2800 /* Fix buffer and length to point to payload */
2801 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
2802 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
2803 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
2805 /* Encrypt payload */
2808 #if defined(AES_GCM_ASM)
2809 if (len >= 32 && AES_GCM_ASM(gctx)) {
2810 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
2813 bulk = AES_gcm_encrypt(in, out, len,
2815 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2816 gctx->gcm.len.u[1] += bulk;
2819 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
2822 len - bulk, gctx->ctr))
2826 #if defined(AES_GCM_ASM2)
2827 if (len >= 32 && AES_GCM_ASM2(gctx)) {
2828 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
2831 bulk = AES_gcm_encrypt(in, out, len,
2833 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2834 gctx->gcm.len.u[1] += bulk;
2837 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
2838 in + bulk, out + bulk, len - bulk))
2842 /* Finally write tag */
2843 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
2844 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
2849 #if defined(AES_GCM_ASM)
2850 if (len >= 16 && AES_GCM_ASM(gctx)) {
2851 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
2854 bulk = AES_gcm_decrypt(in, out, len,
2856 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2857 gctx->gcm.len.u[1] += bulk;
2860 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
2863 len - bulk, gctx->ctr))
2867 #if defined(AES_GCM_ASM2)
2868 if (len >= 16 && AES_GCM_ASM2(gctx)) {
2869 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
2872 bulk = AES_gcm_decrypt(in, out, len,
2874 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2875 gctx->gcm.len.u[1] += bulk;
2878 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
2879 in + bulk, out + bulk, len - bulk))
2883 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
2884 /* If the tag does not match, wipe the buffer */
2885 if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
2886 OPENSSL_cleanse(out, len);
2894 gctx->tls_aad_len = -1;
2900 * See SP800-38D (GCM) Section 8 "Uniqueness requirement on IVs and keys"
2902 * See also 8.2.2 RBG-based construction.
2903 * The random construction consists of a free field (which can be NULL) and a
2904 * random field which will use a DRBG that can return at least 96 bits of
2905 * entropy strength. (The DRBG must be seeded by the FIPS module.)
2907 static int aes_gcm_iv_generate(EVP_AES_GCM_CTX *gctx, int offset)
2909 int sz = gctx->ivlen - offset;
2911 /* Must be at least 96 bits */
2912 if (sz <= 0 || gctx->ivlen < 12)
2915 /* Use the DRBG to generate a random IV */
2916 if (RAND_bytes(gctx->iv + offset, sz) <= 0)
2920 #endif /* FIPS_MODE */
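/*-
 * A sketch of the RBG-based construction described above, assuming no
 * free field so the whole 12-byte IV comes from the DRBG:
 *
 *   unsigned char iv[12];
 *   if (RAND_bytes(iv, sizeof(iv)) <= 0)
 *       return 0;                       -- treat DRBG failure as fatal
 *
 * which is what aes_gcm_iv_generate(gctx, 0) amounts to.
 */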
2922 static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2923 const unsigned char *in, size_t len)
2925 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
2927 /* If not set up, return error */
2931 if (gctx->tls_aad_len >= 0)
2932 return aes_gcm_tls_cipher(ctx, out, in, len);
2936 * FIPS requires generation of AES-GCM IVs inside the FIPS module.
2937 * The IV can still be set externally (the security policy will state that
2938 * this is not FIPS compliant). There are some applications where setting
2939 * the IV externally is the only option available.
2941 if (!gctx->iv_set) {
2942 if (!ctx->encrypt || !aes_gcm_iv_generate(gctx, 0))
2944 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2946 gctx->iv_gen_rand = 1;
2951 #endif /* FIPS_MODE */
2955 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
2957 } else if (ctx->encrypt) {
2960 #if defined(AES_GCM_ASM)
2961 if (len >= 32 && AES_GCM_ASM(gctx)) {
2962 size_t res = (16 - gctx->gcm.mres) % 16;
2964 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
2967 bulk = AES_gcm_encrypt(in + res,
2968 out + res, len - res,
2969 gctx->gcm.key, gctx->gcm.Yi.c,
2971 gctx->gcm.len.u[1] += bulk;
2975 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
2978 len - bulk, gctx->ctr))
2982 #if defined(AES_GCM_ASM2)
2983 if (len >= 32 && AES_GCM_ASM2(gctx)) {
2984 size_t res = (16 - gctx->gcm.mres) % 16;
2986 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
2989 bulk = AES_gcm_encrypt(in + res,
2990 out + res, len - res,
2991 gctx->gcm.key, gctx->gcm.Yi.c,
2993 gctx->gcm.len.u[1] += bulk;
2997 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
2998 in + bulk, out + bulk, len - bulk))
3004 #if defined(AES_GCM_ASM)
3005 if (len >= 16 && AES_GCM_ASM(gctx)) {
3006 size_t res = (16 - gctx->gcm.mres) % 16;
3008 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3011 bulk = AES_gcm_decrypt(in + res,
3012 out + res, len - res,
3014 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3015 gctx->gcm.len.u[1] += bulk;
3019 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3022 len - bulk, gctx->ctr))
3026 #if defined(AES_GCM_ASM2)
3027 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3028 size_t res = (16 - gctx->gcm.mres) % 16;
3030 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3033 bulk = AES_gcm_decrypt(in + res,
3034 out + res, len - res,
3036 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3037 gctx->gcm.len.u[1] += bulk;
3041 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3042 in + bulk, out + bulk, len - bulk))
3048 if (!ctx->encrypt) {
3049 if (gctx->taglen < 0)
3051 if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
3056 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
3058 /* Don't reuse the IV */
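/*-
 * A minimal one-shot AES-GCM encryption through the public EVP interface
 * (buffer names are placeholders; a 12-byte IV and 16-byte tag assumed):
 *
 *   EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *   EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, key, iv);
 *   EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen);   -- AAD: out == NULL
 *   EVP_EncryptUpdate(c, ct, &outl, pt, ptlen);
 *   EVP_EncryptFinal_ex(c, ct + outl, &tmplen);
 *   EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 *   EVP_CIPHER_CTX_free(c);
 *
 * Decryption mirrors this, except that EVP_CTRL_AEAD_SET_TAG must be
 * issued before EVP_DecryptFinal_ex(), whose return value carries the
 * authentication result.
 */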
3065 #define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
3066 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3067 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3068 | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_CUSTOM_IV_LENGTH)
3070 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
3071 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3072 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
3073 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3074 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
3075 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3077 static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3079 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, c);
3081 if (type == EVP_CTRL_COPY) {
3082 EVP_CIPHER_CTX *out = ptr;
3083 EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);
3085 if (xctx->xts.key1) {
3086 if (xctx->xts.key1 != &xctx->ks1)
3088 xctx_out->xts.key1 = &xctx_out->ks1;
3090 if (xctx->xts.key2) {
3091 if (xctx->xts.key2 != &xctx->ks2)
3093 xctx_out->xts.key2 = &xctx_out->ks2;
3096 } else if (type != EVP_CTRL_INIT)
3098 /* key1 and key2 are used as an indicator that both key and IV are set */
3099 xctx->xts.key1 = NULL;
3100 xctx->xts.key2 = NULL;
3104 static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3105 const unsigned char *iv, int enc)
3107 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3114 /* In reality the key consists of two half-length keys */
3115 const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
3116 const int bits = bytes * 8;
3119 * Verify that the two keys are different.
3121 * This addresses the vulnerability described in Rogaway's
3122 * September 2004 paper:
3124 * "Efficient Instantiations of Tweakable Blockciphers and
3125 * Refinements to Modes OCB and PMAC".
3126 * (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf)
3128 * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states:
3130 * "The check for Key_1 != Key_2 shall be done at any place
3131 * BEFORE using the keys in the XTS-AES algorithm to process data with them."
3134 if ((!allow_insecure_decrypt || enc)
3135 && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
3136 EVPerr(EVP_F_AES_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
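/*-
 * A sketch of how a caller can satisfy the Key_1 != Key_2 requirement
 * enforced above, e.g. for AES-256-XTS (64-byte combined key; error
 * handling reduced to a bare break):
 *
 *   unsigned char k[64];
 *   do {
 *       if (RAND_bytes(k, sizeof(k)) <= 0)
 *           break;                      -- RNG failure, handle properly
 *   } while (CRYPTO_memcmp(k, k + 32, 32) == 0);
 *
 * With a properly seeded DRBG the loop all but never repeats.
 */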
3141 xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
3143 xctx->stream = NULL;
3145 /* key_len is two AES keys */
3146 #ifdef HWAES_CAPABLE
3147 if (HWAES_CAPABLE) {
3149 HWAES_set_encrypt_key(key, bits, &xctx->ks1.ks);
3150 xctx->xts.block1 = (block128_f) HWAES_encrypt;
3151 # ifdef HWAES_xts_encrypt
3152 xctx->stream = HWAES_xts_encrypt;
3155 HWAES_set_decrypt_key(key, bits, &xctx->ks1.ks);
3156 xctx->xts.block1 = (block128_f) HWAES_decrypt;
3157 # ifdef HWAES_xts_decrypt
3158 xctx->stream = HWAES_xts_decrypt;
3162 HWAES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
3163 xctx->xts.block2 = (block128_f) HWAES_encrypt;
3165 xctx->xts.key1 = &xctx->ks1;
3169 #ifdef BSAES_CAPABLE
3171 xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
3174 #ifdef VPAES_CAPABLE
3175 if (VPAES_CAPABLE) {
3177 vpaes_set_encrypt_key(key, bits, &xctx->ks1.ks);
3178 xctx->xts.block1 = (block128_f) vpaes_encrypt;
3180 vpaes_set_decrypt_key(key, bits, &xctx->ks1.ks);
3181 xctx->xts.block1 = (block128_f) vpaes_decrypt;
3184 vpaes_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
3185 xctx->xts.block2 = (block128_f) vpaes_encrypt;
3187 xctx->xts.key1 = &xctx->ks1;
3191 (void)0; /* terminate potentially open 'else' */
3194 AES_set_encrypt_key(key, bits, &xctx->ks1.ks);
3195 xctx->xts.block1 = (block128_f) AES_encrypt;
3197 AES_set_decrypt_key(key, bits, &xctx->ks1.ks);
3198 xctx->xts.block1 = (block128_f) AES_decrypt;
3201 AES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
3202 xctx->xts.block2 = (block128_f) AES_encrypt;
3204 xctx->xts.key1 = &xctx->ks1;
3209 xctx->xts.key2 = &xctx->ks2;
3210 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
3216 static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3217 const unsigned char *in, size_t len)
3219 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3221 if (xctx->xts.key1 == NULL
3222 || xctx->xts.key2 == NULL
3225 || len < AES_BLOCK_SIZE)
3229 * Impose a limit of 2^20 blocks per data unit as specified by
3230 * IEEE Std 1619-2018. The earlier and obsolete IEEE Std 1619-2007
3231 * indicated that this was a SHOULD NOT rather than a MUST NOT.
3232 * NIST SP 800-38E mandates the same limit.
3234 if (len > XTS_MAX_BLOCKS_PER_DATA_UNIT * AES_BLOCK_SIZE) {
3235 EVPerr(EVP_F_AES_XTS_CIPHER, EVP_R_XTS_DATA_UNIT_IS_TOO_LARGE);
3240 (*xctx->stream) (in, out, len,
3241 xctx->xts.key1, xctx->xts.key2,
3242 EVP_CIPHER_CTX_iv_noconst(ctx));
3243 else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx),
3245 EVP_CIPHER_CTX_encrypting(ctx)))
3250 #define aes_xts_cleanup NULL
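/*-
 * Illustrative convention (not mandated by this file): in disk encryption
 * the 16-byte XTS IV carries the data-unit (sector) number, commonly
 * little-endian in the first 8 bytes with the remainder zero:
 *
 *   unsigned char tweak[16] = { 0 };
 *   for (i = 0; i < 8; i++)
 *       tweak[i] = (unsigned char)(sector >> (8 * i));
 *   EVP_EncryptInit_ex(c, EVP_aes_256_xts(), NULL, key64, tweak);
 *   EVP_EncryptUpdate(c, out, &outl, in, 512);
 *
 * where 'sector' is a placeholder uint64_t data-unit number.
 */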
3252 #define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
3253 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3254 | EVP_CIPH_CUSTOM_COPY)
3256 BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
3257 BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
3259 static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3261 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
3270 cctx->tls_aad_len = -1;
3273 case EVP_CTRL_GET_IVLEN:
3274 *(int *)ptr = 15 - cctx->L;
3277 case EVP_CTRL_AEAD_TLS1_AAD:
3278 /* Save the AAD for later use */
3279 if (arg != EVP_AEAD_TLS1_AAD_LEN)
3281 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3282 cctx->tls_aad_len = arg;
3285 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
3286 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
3287 /* Correct length for explicit IV */
3288 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
3290 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
3291 /* If decrypting, correct for the tag too */
3292 if (!EVP_CIPHER_CTX_encrypting(c)) {
3297 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
3298 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
3300 /* Extra padding: tag appended to record */
3303 case EVP_CTRL_CCM_SET_IV_FIXED:
3304 /* Sanity check length */
3305 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
3307 /* Just copy to first part of IV */
3308 memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
3311 case EVP_CTRL_AEAD_SET_IVLEN:
3314 case EVP_CTRL_CCM_SET_L:
3315 if (arg < 2 || arg > 8)
3320 case EVP_CTRL_AEAD_SET_TAG:
3321 if ((arg & 1) || arg < 4 || arg > 16)
3323 if (EVP_CIPHER_CTX_encrypting(c) && ptr)
3327 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3332 case EVP_CTRL_AEAD_GET_TAG:
3333 if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
3335 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
3344 EVP_CIPHER_CTX *out = ptr;
3345 EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
3346 if (cctx->ccm.key) {
3347 if (cctx->ccm.key != &cctx->ks)
3349 cctx_out->ccm.key = &cctx_out->ks;
3360 static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3361 const unsigned char *iv, int enc)
3363 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3368 #ifdef HWAES_CAPABLE
3369 if (HWAES_CAPABLE) {
3370 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3373 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3374 &cctx->ks, (block128_f) HWAES_encrypt);
3380 #ifdef VPAES_CAPABLE
3381 if (VPAES_CAPABLE) {
3382 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3384 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3385 &cctx->ks, (block128_f) vpaes_encrypt);
3391 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3393 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3394 &cctx->ks, (block128_f) AES_encrypt);
3399 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
3405 static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3406 const unsigned char *in, size_t len)
3408 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3409 CCM128_CONTEXT *ccm = &cctx->ccm;
3410 /* Encrypt/decrypt must be performed in place */
3411 if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
3413 /* If encrypting, set the explicit IV from the sequence number (start of AAD) */
3414 if (EVP_CIPHER_CTX_encrypting(ctx))
3415 memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
3416 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3417 /* Get rest of IV from explicit IV */
3418 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
3419 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3420 /* Correct length value */
3421 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3422 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
3426 CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
3427 /* Fix buffer to point to payload */
3428 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3429 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3430 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3431 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3433 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3435 if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
3437 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3439 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3441 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3442 unsigned char tag[16];
3443 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3444 if (!CRYPTO_memcmp(tag, in + len, cctx->M))
3448 OPENSSL_cleanse(out, len);
3453 static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3454 const unsigned char *in, size_t len)
3456 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3457 CCM128_CONTEXT *ccm = &cctx->ccm;
3458 /* If not set up, return error */
3462 if (cctx->tls_aad_len >= 0)
3463 return aes_ccm_tls_cipher(ctx, out, in, len);
3465 /* EVP_*Final() doesn't return any data */
3466 if (in == NULL && out != NULL)
3474 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3480 /* If we have AAD we need the message length */
3481 if (!cctx->len_set && len)
3483 CRYPTO_ccm128_aad(ccm, in, len);
3487 /* The tag must be set before actually decrypting data */
3488 if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
3491 /* If the length is not set yet, do it now */
3492 if (!cctx->len_set) {
3493 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3498 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3499 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3501 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3507 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3509 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3510 unsigned char tag[16];
3511 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3512 if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
3518 OPENSSL_cleanse(out, len);
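/*-
 * CCM has a stricter call protocol than GCM, which is what the length and
 * tag bookkeeping above implements. An encrypt-side sketch, assuming a
 * 12-byte nonce and 16-byte tag (placeholder buffers):
 *
 *   EVP_EncryptInit_ex(c, EVP_aes_128_ccm(), NULL, NULL, NULL);
 *   EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL);
 *   EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_TAG, 16, NULL);
 *   EVP_EncryptInit_ex(c, NULL, NULL, key, nonce);
 *   EVP_EncryptUpdate(c, NULL, &outl, NULL, ptlen);   -- total length first
 *   EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen);   -- then the AAD
 *   EVP_EncryptUpdate(c, ct, &outl, pt, ptlen);
 *   EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 */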
3526 #define aes_ccm_cleanup NULL
3528 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
3529 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3530 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
3531 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3532 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
3533 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3540 /* Indicates if IV has been set */
3544 static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3545 const unsigned char *iv, int enc)
3547 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3551 if (EVP_CIPHER_CTX_encrypting(ctx))
3552 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3555 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3561 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, EVP_CIPHER_CTX_iv_length(ctx));
3562 wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx);
3567 static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3568 const unsigned char *in, size_t inlen)
3570 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3572 /* AES wrap with padding has an IV length of 4, without padding 8 */
3573 int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;
3574 /* No final operation so always return zero length */
3577 /* Input length must always be non-zero */
3580 /* If decrypting, we need at least 16 bytes and a multiple of 8 */
3581 if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
3583 /* If not padding, the input must be a multiple of 8 */
3584 if (!pad && inlen & 0x7)
3586 if (is_partially_overlapping(out, in, inlen)) {
3587 EVPerr(EVP_F_AES_WRAP_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
3591 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3592 /* If padding, round up to a multiple of 8 */
3594 inlen = (inlen + 7) / 8 * 8;
3599 * If not padding, the output will be exactly 8 bytes smaller than the
3600 * input. If padding, it will be at least 8 bytes smaller, but we
3601 * don't know by how much.
3607 if (EVP_CIPHER_CTX_encrypting(ctx))
3608 rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
3610 (block128_f) AES_encrypt);
3612 rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
3614 (block128_f) AES_decrypt);
3616 if (EVP_CIPHER_CTX_encrypting(ctx))
3617 rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
3618 out, in, inlen, (block128_f) AES_encrypt);
3620 rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
3621 out, in, inlen, (block128_f) AES_decrypt);
3623 return rv ? (int)rv : -1;
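/*-
 * Usage sketch: wrap-mode ciphers must be explicitly enabled on the
 * context. Wrapping a 16-byte key under RFC 3394 appends the 8-byte
 * integrity check value, so 'outl' below comes back as 24:
 *
 *   EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *   EVP_CIPHER_CTX_set_flags(c, EVP_CIPHER_CTX_FLAG_WRAP_ALLOW);
 *   EVP_EncryptInit_ex(c, EVP_aes_256_wrap(), NULL, kek, NULL);
 *   EVP_EncryptUpdate(c, wrapped, &outl, keydata, 16);
 *   EVP_CIPHER_CTX_free(c);
 *
 * With a NULL IV the default RFC 3394 constant A6A6A6A6A6A6A6A6 is used.
 */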
3626 #define WRAP_FLAGS (EVP_CIPH_WRAP_MODE \
3627 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3628 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)
3630 static const EVP_CIPHER aes_128_wrap = {
3632 8, 16, 8, WRAP_FLAGS,
3633 aes_wrap_init_key, aes_wrap_cipher,
3635 sizeof(EVP_AES_WRAP_CTX),
3636 NULL, NULL, NULL, NULL
3639 const EVP_CIPHER *EVP_aes_128_wrap(void)
3641 return &aes_128_wrap;
3644 static const EVP_CIPHER aes_192_wrap = {
3646 8, 24, 8, WRAP_FLAGS,
3647 aes_wrap_init_key, aes_wrap_cipher,
3649 sizeof(EVP_AES_WRAP_CTX),
3650 NULL, NULL, NULL, NULL
3653 const EVP_CIPHER *EVP_aes_192_wrap(void)
3655 return &aes_192_wrap;
3658 static const EVP_CIPHER aes_256_wrap = {
3660 8, 32, 8, WRAP_FLAGS,
3661 aes_wrap_init_key, aes_wrap_cipher,
3663 sizeof(EVP_AES_WRAP_CTX),
3664 NULL, NULL, NULL, NULL
3667 const EVP_CIPHER *EVP_aes_256_wrap(void)
3669 return &aes_256_wrap;
3672 static const EVP_CIPHER aes_128_wrap_pad = {
3673 NID_id_aes128_wrap_pad,
3674 8, 16, 4, WRAP_FLAGS,
3675 aes_wrap_init_key, aes_wrap_cipher,
3677 sizeof(EVP_AES_WRAP_CTX),
3678 NULL, NULL, NULL, NULL
3681 const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
3683 return &aes_128_wrap_pad;
3686 static const EVP_CIPHER aes_192_wrap_pad = {
3687 NID_id_aes192_wrap_pad,
3688 8, 24, 4, WRAP_FLAGS,
3689 aes_wrap_init_key, aes_wrap_cipher,
3691 sizeof(EVP_AES_WRAP_CTX),
3692 NULL, NULL, NULL, NULL
3695 const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
3697 return &aes_192_wrap_pad;
3700 static const EVP_CIPHER aes_256_wrap_pad = {
3701 NID_id_aes256_wrap_pad,
3702 8, 32, 4, WRAP_FLAGS,
3703 aes_wrap_init_key, aes_wrap_cipher,
3705 sizeof(EVP_AES_WRAP_CTX),
3706 NULL, NULL, NULL, NULL
3709 const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
3711 return &aes_256_wrap_pad;
3714 #ifndef OPENSSL_NO_OCB
3715 static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3717 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
3718 EVP_CIPHER_CTX *newc;
3719 EVP_AES_OCB_CTX *new_octx;
3725 octx->ivlen = EVP_CIPHER_iv_length(c->cipher);
3726 octx->iv = EVP_CIPHER_CTX_iv_noconst(c);
3728 octx->data_buf_len = 0;
3729 octx->aad_buf_len = 0;
3732 case EVP_CTRL_GET_IVLEN:
3733 *(int *)ptr = octx->ivlen;
3736 case EVP_CTRL_AEAD_SET_IVLEN:
3737 /* IV len must be 1 to 15 */
3738 if (arg <= 0 || arg > 15)
3744 case EVP_CTRL_AEAD_SET_TAG:
3746 /* Tag len must be 0 to 16 */
3747 if (arg < 0 || arg > 16)
3753 if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c))
3755 memcpy(octx->tag, ptr, arg);
3758 case EVP_CTRL_AEAD_GET_TAG:
3759 if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c))
3762 memcpy(ptr, octx->tag, arg);
3766 newc = (EVP_CIPHER_CTX *)ptr;
3767 new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
3768 return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
3769 &new_octx->ksenc.ks,
3770 &new_octx->ksdec.ks);
3778 static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3779 const unsigned char *iv, int enc)
3781 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
3787 * We set both the encrypt and decrypt key here because decrypt
3788 * needs both. We could possibly optimise to avoid setting the
3789 * decrypt key for an encryption-only operation.
3791 # ifdef HWAES_CAPABLE
3792 if (HWAES_CAPABLE) {
3793 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3795 HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3797 if (!CRYPTO_ocb128_init(&octx->ocb,
3798 &octx->ksenc.ks, &octx->ksdec.ks,
3799 (block128_f) HWAES_encrypt,
3800 (block128_f) HWAES_decrypt,
3801 enc ? HWAES_ocb_encrypt
3802 : HWAES_ocb_decrypt))
3807 # ifdef VPAES_CAPABLE
3808 if (VPAES_CAPABLE) {
3809 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3811 vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3813 if (!CRYPTO_ocb128_init(&octx->ocb,
3814 &octx->ksenc.ks, &octx->ksdec.ks,
3815 (block128_f) vpaes_encrypt,
3816 (block128_f) vpaes_decrypt,
3822 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3824 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3826 if (!CRYPTO_ocb128_init(&octx->ocb,
3827 &octx->ksenc.ks, &octx->ksdec.ks,
3828 (block128_f) AES_encrypt,
3829 (block128_f) AES_decrypt,
3836 * If we have an IV we can set it directly, otherwise use the saved IV.
3838 if (iv == NULL && octx->iv_set)
3841 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
3848 /* If the key is set use the IV, otherwise copy it */
3850 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
3852 memcpy(octx->iv, iv, octx->ivlen);
3858 static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3859 const unsigned char *in, size_t len)
3863 int written_len = 0;
3864 size_t trailing_len;
3865 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
3867 /* If the IV or key is not set then return an error */
3876 * We need to ensure we only pass full blocks to the low-level OCB
3877 * routines. We do it here rather than in EVP_EncryptUpdate/
3878 * EVP_DecryptUpdate because we need to pass full blocks of AAD too,
3879 * and those routines don't support that.
3882 /* Are we dealing with AAD or normal data here? */
3884 buf = octx->aad_buf;
3885 buf_len = &(octx->aad_buf_len);
3887 buf = octx->data_buf;
3888 buf_len = &(octx->data_buf_len);
3890 if (is_partially_overlapping(out + *buf_len, in, len)) {
3891 EVPerr(EVP_F_AES_OCB_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
3897 * If we've got a partially filled buffer from a previous call then
3898 * use that data first
3901 unsigned int remaining;
3903 remaining = AES_BLOCK_SIZE - (*buf_len);
3904 if (remaining > len) {
3905 memcpy(buf + (*buf_len), in, len);
3909 memcpy(buf + (*buf_len), in, remaining);
3912 * If we get here we've filled the buffer, so process it
3917 if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
3919 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
3920 if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
3924 if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
3928 written_len = AES_BLOCK_SIZE;
3931 out += AES_BLOCK_SIZE;
3934 /* Do we have a partial block to handle at the end? */
3935 trailing_len = len % AES_BLOCK_SIZE;
3938 * If we've got some full blocks to handle, then process these first
3940 if (len != trailing_len) {
3942 if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
3944 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
3945 if (!CRYPTO_ocb128_encrypt
3946 (&octx->ocb, in, out, len - trailing_len))
3949 if (!CRYPTO_ocb128_decrypt
3950 (&octx->ocb, in, out, len - trailing_len))
3953 written_len += len - trailing_len;
3954 in += len - trailing_len;
3957 /* Handle any trailing partial block */
3958 if (trailing_len > 0) {
3959 memcpy(buf, in, trailing_len);
3960 *buf_len = trailing_len;
3966 * First of all, empty the buffer of any partial block that we might
3967 * have been provided, both for data and AAD.
3969 if (octx->data_buf_len > 0) {
3970 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3971 if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
3972 octx->data_buf_len))
3975 if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
3976 octx->data_buf_len))
3979 written_len = octx->data_buf_len;
3980 octx->data_buf_len = 0;
3982 if (octx->aad_buf_len > 0) {
3983 if (!CRYPTO_ocb128_aad
3984 (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
3986 octx->aad_buf_len = 0;
3988 /* If decrypting then verify */
3989 if (!EVP_CIPHER_CTX_encrypting(ctx)) {
3990 if (octx->taglen < 0)
3992 if (CRYPTO_ocb128_finish(&octx->ocb,
3993 octx->tag, octx->taglen) != 0)
3998 /* If encrypting then just get the tag */
3999 if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
4001 /* Don't reuse the IV */
4007 static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
4009 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
4010 CRYPTO_ocb128_cleanup(&octx->ocb);
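/*
 * Through the EVP interface OCB is driven much like GCM, with one
 * difference visible in aes_ocb_ctrl() above: on decryption the expected
 * tag must be supplied via EVP_CTRL_AEAD_SET_TAG (non-NULL pointer,
 * length matching the configured taglen) before the final call; passing
 * a NULL pointer instead selects the tag length, 16 by default.
 */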
4014 BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
4015 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4016 BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
4017 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4018 BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
4019 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4020 #endif /* OPENSSL_NO_OCB */
4023 #ifndef OPENSSL_NO_SIV
4025 typedef SIV128_CONTEXT EVP_AES_SIV_CTX;
4027 #define aesni_siv_init_key aes_siv_init_key
4028 static int aes_siv_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
4029 const unsigned char *iv, int enc)
4031 const EVP_CIPHER *ctr;
4032 const EVP_CIPHER *cbc;
4033 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, ctx);
4034 int klen = EVP_CIPHER_CTX_key_length(ctx) / 2;
4041 cbc = EVP_aes_128_cbc();
4042 ctr = EVP_aes_128_ctr();
4045 cbc = EVP_aes_192_cbc();
4046 ctr = EVP_aes_192_ctr();
4049 cbc = EVP_aes_256_cbc();
4050 ctr = EVP_aes_256_ctr();
4056 /* klen is the key length of the underlying cipher, not that of the input
4057 key, which should be twice as long */
4058 return CRYPTO_siv128_init(sctx, key, klen, cbc, ctr);
4061 #define aesni_siv_cipher aes_siv_cipher
4062 static int aes_siv_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
4063 const unsigned char *in, size_t len)
4065 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, ctx);
4067 /* EncryptFinal or DecryptFinal */
4069 return CRYPTO_siv128_finish(sctx);
4071 /* Deal with associated data */
4073 return CRYPTO_siv128_aad(sctx, in, len);
4075 if (EVP_CIPHER_CTX_encrypting(ctx))
4076 return CRYPTO_siv128_encrypt(sctx, in, out, len);
4078 return CRYPTO_siv128_decrypt(sctx, in, out, len);
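/*
 * Per RFC 5297 the SIV input key is double length, which is why
 * aes_siv_init_key() halves key_length before handing the two halves to
 * CRYPTO_siv128_init(): the first half keys the S2V (CMAC) derivation of
 * the synthetic IV and the second half keys the CTR encryption. With no
 * nonce (supplied as the last associated-data item) the mode is fully
 * deterministic.
 */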
4081 #define aesni_siv_cleanup aes_siv_cleanup
4082 static int aes_siv_cleanup(EVP_CIPHER_CTX *c)
4084 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, c);
4086 return CRYPTO_siv128_cleanup(sctx);
4090 #define aesni_siv_ctrl aes_siv_ctrl
4091 static int aes_siv_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
4093 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, c);
4094 SIV128_CONTEXT *sctx_out;
4098 return CRYPTO_siv128_cleanup(sctx);
4100 case EVP_CTRL_SET_SPEED:
4101 return CRYPTO_siv128_speed(sctx, arg);
4103 case EVP_CTRL_AEAD_SET_TAG:
4104 if (!EVP_CIPHER_CTX_encrypting(c))
4105 return CRYPTO_siv128_set_tag(sctx, ptr, arg);
4108 case EVP_CTRL_AEAD_GET_TAG:
4109 if (!EVP_CIPHER_CTX_encrypting(c))
4111 return CRYPTO_siv128_get_tag(sctx, ptr, arg);
4114 sctx_out = EVP_C_DATA(SIV128_CONTEXT, (EVP_CIPHER_CTX*)ptr);
4115 return CRYPTO_siv128_copy_ctx(sctx_out, sctx);
4123 #define SIV_FLAGS (EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_DEFAULT_ASN1 \
4124 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
4125 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CUSTOM_COPY \
4126 | EVP_CIPH_CTRL_INIT)
4128 BLOCK_CIPHER_custom(NID_aes, 128, 1, 0, siv, SIV, SIV_FLAGS)
4129 BLOCK_CIPHER_custom(NID_aes, 192, 1, 0, siv, SIV, SIV_FLAGS)
4130 BLOCK_CIPHER_custom(NID_aes, 256, 1, 0, siv, SIV, SIV_FLAGS)