/*
 * Copyright 2001-2018 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the OpenSSL license (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */
10 #include <openssl/opensslconf.h>
11 #include <openssl/crypto.h>
12 #include <openssl/evp.h>
13 #include <openssl/err.h>
16 #include <openssl/aes.h>
17 #include "internal/evp_int.h"
18 #include "modes_lcl.h"
19 #include <openssl/rand.h>
20 #include <openssl/rand_drbg.h>
39 } ks; /* AES key schedule to use */
40 int key_set; /* Set if key initialised */
41 int iv_set; /* Set if an iv is set */
43 unsigned char *iv; /* Temporary IV store */
44 int ivlen; /* IV length */
46 int iv_gen; /* It is OK to generate IVs */
47 int tls_aad_len; /* TLS AAD length */
55 } ks1, ks2; /* AES key schedules to use */
57 void (*stream) (const unsigned char *in,
58 unsigned char *out, size_t length,
59 const AES_KEY *key1, const AES_KEY *key2,
60 const unsigned char iv[16]);
67 } ks; /* AES key schedule to use */
68 int key_set; /* Set if key initialised */
69 int iv_set; /* Set if an iv is set */
70 int tag_set; /* Set if tag is valid */
71 int len_set; /* Set if message length set */
72 int L, M; /* L and M parameters from RFC3610 */
73 int tls_aad_len; /* TLS AAD length */
78 #ifndef OPENSSL_NO_OCB
83 } ksenc; /* AES key schedule to use for encryption */
87 } ksdec; /* AES key schedule to use for decryption */
88 int key_set; /* Set if key initialised */
89 int iv_set; /* Set if an iv is set */
91 unsigned char *iv; /* Temporary IV store */
92 unsigned char tag[16];
93 unsigned char data_buf[16]; /* Store partial data blocks */
94 unsigned char aad_buf[16]; /* Store partial AAD blocks */
97 int ivlen; /* IV length */
102 #define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))
105 int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
107 int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
110 void vpaes_encrypt(const unsigned char *in, unsigned char *out,
112 void vpaes_decrypt(const unsigned char *in, unsigned char *out,
115 void vpaes_cbc_encrypt(const unsigned char *in,
118 const AES_KEY *key, unsigned char *ivec, int enc);
121 void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
122 size_t length, const AES_KEY *key,
123 unsigned char ivec[16], int enc);
124 void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
125 size_t len, const AES_KEY *key,
126 const unsigned char ivec[16]);
127 void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
128 size_t len, const AES_KEY *key1,
129 const AES_KEY *key2, const unsigned char iv[16]);
130 void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
131 size_t len, const AES_KEY *key1,
132 const AES_KEY *key2, const unsigned char iv[16]);
135 void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
136 size_t blocks, const AES_KEY *key,
137 const unsigned char ivec[AES_BLOCK_SIZE]);
140 void AES_xts_encrypt(const unsigned char *inp, unsigned char *out, size_t len,
141 const AES_KEY *key1, const AES_KEY *key2,
142 const unsigned char iv[16]);
143 void AES_xts_decrypt(const unsigned char *inp, unsigned char *out, size_t len,
144 const AES_KEY *key1, const AES_KEY *key2,
145 const unsigned char iv[16]);
148 #if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
149 # include "ppc_arch.h"
151 # define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
153 # define HWAES_CAPABLE (OPENSSL_ppccap_P & PPC_CRYPTO207)
154 # define HWAES_set_encrypt_key aes_p8_set_encrypt_key
155 # define HWAES_set_decrypt_key aes_p8_set_decrypt_key
156 # define HWAES_encrypt aes_p8_encrypt
157 # define HWAES_decrypt aes_p8_decrypt
158 # define HWAES_cbc_encrypt aes_p8_cbc_encrypt
159 # define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
160 # define HWAES_xts_encrypt aes_p8_xts_encrypt
161 # define HWAES_xts_decrypt aes_p8_xts_decrypt
164 #if defined(AES_ASM) && !defined(I386_ONLY) && ( \
165 ((defined(__i386) || defined(__i386__) || \
166 defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2))|| \
167 defined(__x86_64) || defined(__x86_64__) || \
168 defined(_M_AMD64) || defined(_M_X64) )
170 extern unsigned int OPENSSL_ia32cap_P[];
173 # define VPAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
176 # define BSAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
181 # define AESNI_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(57-32)))
183 int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
185 int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
188 void aesni_encrypt(const unsigned char *in, unsigned char *out,
190 void aesni_decrypt(const unsigned char *in, unsigned char *out,
193 void aesni_ecb_encrypt(const unsigned char *in,
195 size_t length, const AES_KEY *key, int enc);
196 void aesni_cbc_encrypt(const unsigned char *in,
199 const AES_KEY *key, unsigned char *ivec, int enc);
201 void aesni_ctr32_encrypt_blocks(const unsigned char *in,
204 const void *key, const unsigned char *ivec);
206 void aesni_xts_encrypt(const unsigned char *in,
209 const AES_KEY *key1, const AES_KEY *key2,
210 const unsigned char iv[16]);
212 void aesni_xts_decrypt(const unsigned char *in,
215 const AES_KEY *key1, const AES_KEY *key2,
216 const unsigned char iv[16]);
218 void aesni_ccm64_encrypt_blocks(const unsigned char *in,
222 const unsigned char ivec[16],
223 unsigned char cmac[16]);
225 void aesni_ccm64_decrypt_blocks(const unsigned char *in,
229 const unsigned char ivec[16],
230 unsigned char cmac[16]);
232 # if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
233 size_t aesni_gcm_encrypt(const unsigned char *in,
236 const void *key, unsigned char ivec[16], u64 *Xi);
237 # define AES_gcm_encrypt aesni_gcm_encrypt
238 size_t aesni_gcm_decrypt(const unsigned char *in,
241 const void *key, unsigned char ivec[16], u64 *Xi);
242 # define AES_gcm_decrypt aesni_gcm_decrypt
243 void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in,
245 # define AES_GCM_ASM(gctx) (gctx->ctr==aesni_ctr32_encrypt_blocks && \
246 gctx->gcm.ghash==gcm_ghash_avx)
247 # define AES_GCM_ASM2(gctx) (gctx->gcm.block==(block128_f)aesni_encrypt && \
248 gctx->gcm.ghash==gcm_ghash_avx)
249 # undef AES_GCM_ASM2 /* minor size optimization */
/*
 * aesni_init_key: install an AES-NI key schedule for this EVP context and
 * select the block/stream helper routines matching the cipher mode and
 * direction.
 * NOTE(review): this listing is subsampled -- interleaved source lines are
 * missing -- so only the visible lines are documented; all code lines are
 * kept byte-identical.
 */
252 static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
253                           const unsigned char *iv, int enc)
    /* Per-context AES key data allocated by the EVP layer. */
256     EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
258     mode = EVP_CIPHER_CTX_mode(ctx);
    /* ECB/CBC decryption needs the AES *decrypt* key schedule; every other
     * mode (and all encryption) uses the encrypt schedule further below. */
259     if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
261         ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
263         dat->block = (block128_f) aesni_decrypt;
264         dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
265             (cbc128_f) aesni_cbc_encrypt : NULL;
267         ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
269         dat->block = (block128_f) aesni_encrypt;
    /* Fast bulk helpers: CBC uses the asm CBC routine, CTR the asm 32-bit
     * counter routine; otherwise no stream helper is set. */
270         if (mode == EVP_CIPH_CBC_MODE)
271             dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
272         else if (mode == EVP_CIPH_CTR_MODE)
273             dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
275             dat->stream.cbc = NULL;
    /* NOTE(review): presumably reached when the key-setup calls above
     * returned nonzero -- the guarding condition line is not visible here;
     * confirm against the full source. */
279         EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
/*
 * aesni_cbc_cipher: one-shot CBC en/decryption of len bytes via the AES-NI
 * assembly routine, using the IV and direction stored in the EVP context.
 * (Listing is subsampled; code lines kept byte-identical.)
 */
286 static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
287                             const unsigned char *in, size_t len)
289     aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
290                       EVP_CIPHER_CTX_iv_noconst(ctx),
291                       EVP_CIPHER_CTX_encrypting(ctx));
/*
 * aesni_ecb_cipher: ECB en/decryption of len bytes via the AES-NI assembly
 * routine. bl is the cipher block size; the short-input handling between
 * these lines is not visible in this subsampled listing.
 */
296 static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
297                             const unsigned char *in, size_t len)
299     size_t bl = EVP_CIPHER_CTX_block_size(ctx);
304     aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
305                       EVP_CIPHER_CTX_encrypting(ctx));
310 # define aesni_ofb_cipher aes_ofb_cipher
311 static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
312 const unsigned char *in, size_t len);
314 # define aesni_cfb_cipher aes_cfb_cipher
315 static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
316 const unsigned char *in, size_t len);
318 # define aesni_cfb8_cipher aes_cfb8_cipher
319 static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
320 const unsigned char *in, size_t len);
322 # define aesni_cfb1_cipher aes_cfb1_cipher
323 static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
324 const unsigned char *in, size_t len);
326 # define aesni_ctr_cipher aes_ctr_cipher
327 static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
328 const unsigned char *in, size_t len);
/*
 * aesni_gcm_init_key: (re)initialise the GCM context with an AES-NI encrypt
 * schedule and/or a new IV. Key and IV may be supplied independently; a
 * previously saved IV is reused when only the key changes.
 * (Listing is subsampled; code lines kept byte-identical.)
 */
330 static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
331                               const unsigned char *iv, int enc)
333     EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    /* GCM only ever uses the forward (encrypt) AES transform. */
337         aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
339         CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
340         gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
342          * If we have an iv we can set it directly, otherwise use saved IV.
344         if (iv == NULL && gctx->iv_set)
347             CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
352         /* If key set use IV, otherwise copy */
354             CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
356             memcpy(gctx->iv, iv, gctx->ivlen);
363 # define aesni_gcm_cipher aes_gcm_cipher
364 static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
365 const unsigned char *in, size_t len);
/*
 * aesni_xts_init_key: set up the two AES-NI key schedules used by XTS.
 * ks1 is the data key (encrypt or decrypt schedule per direction); ks2 is
 * the tweak key, which is always an encrypt schedule. The EVP key length
 * covers both keys, hence the "* 4" (= half the key, in bits).
 * (Listing is subsampled; code lines kept byte-identical.)
 */
367 static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
368                               const unsigned char *iv, int enc)
370     EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
375         /* key_len is two AES keys */
377             aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
379             xctx->xts.block1 = (block128_f) aesni_encrypt;
380             xctx->stream = aesni_xts_encrypt;
382             aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
384             xctx->xts.block1 = (block128_f) aesni_decrypt;
385             xctx->stream = aesni_xts_decrypt;
    /* Second half of the key material: the tweak key (encrypt-only). */
388         aesni_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
389                               EVP_CIPHER_CTX_key_length(ctx) * 4,
391         xctx->xts.block2 = (block128_f) aesni_encrypt;
393         xctx->xts.key1 = &xctx->ks1;
397         xctx->xts.key2 = &xctx->ks2;
    /* XTS tweak/IV is always 16 bytes. */
398         memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
404 # define aesni_xts_cipher aes_xts_cipher
405 static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
406 const unsigned char *in, size_t len);
/*
 * aesni_ccm_init_key: set up CCM with an AES-NI encrypt schedule (CCM uses
 * only the forward transform) and the direction-specific ccm64 bulk helper.
 * The nonce copied into the EVP IV buffer is 15 - L bytes (RFC 3610).
 * (Listing is subsampled; code lines kept byte-identical.)
 */
408 static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
409                               const unsigned char *iv, int enc)
411     EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
415         aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
417         CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
418                            &cctx->ks, (block128_f) aesni_encrypt);
419         cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
420             (ccm128_f) aesni_ccm64_decrypt_blocks;
424         memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
430 # define aesni_ccm_cipher aes_ccm_cipher
431 static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
432 const unsigned char *in, size_t len);
434 # ifndef OPENSSL_NO_OCB
435 void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out,
436 size_t blocks, const void *key,
437 size_t start_block_num,
438 unsigned char offset_i[16],
439 const unsigned char L_[][16],
440 unsigned char checksum[16]);
441 void aesni_ocb_decrypt(const unsigned char *in, unsigned char *out,
442 size_t blocks, const void *key,
443 size_t start_block_num,
444 unsigned char offset_i[16],
445 const unsigned char L_[][16],
446 unsigned char checksum[16]);
/*
 * aesni_ocb_init_key: set up OCB with both AES-NI key schedules (decryption
 * needs the decrypt schedule; see comment below) and the direction-specific
 * OCB bulk routine. Key and IV may be supplied independently.
 * (Listing is subsampled; code lines kept byte-identical.)
 */
448 static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
449                               const unsigned char *iv, int enc)
451     EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
457          * We set both the encrypt and decrypt key here because decrypt
458          * needs both. We could possibly optimise to remove setting the
459          * decrypt for an encryption operation.
461         aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
463         aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
465         if (!CRYPTO_ocb128_init(&octx->ocb,
466                                 &octx->ksenc.ks, &octx->ksdec.ks,
467                                 (block128_f) aesni_encrypt,
468                                 (block128_f) aesni_decrypt,
469                                 enc ? aesni_ocb_encrypt
470                                     : aesni_ocb_decrypt))
476          * If we have an iv we can set it directly, otherwise use saved IV.
478         if (iv == NULL && octx->iv_set)
    /* Setting the IV can fail (e.g. bad iv/tag length combination). */
481         if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
488         /* If key set use IV, otherwise copy */
490             CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
492             memcpy(octx->iv, iv, octx->ivlen);
498 # define aesni_ocb_cipher aes_ocb_cipher
499 static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
500 const unsigned char *in, size_t len);
501 # endif /* OPENSSL_NO_OCB */
503 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
504 static const EVP_CIPHER aesni_##keylen##_##mode = { \
505 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
506 flags|EVP_CIPH_##MODE##_MODE, \
508 aesni_##mode##_cipher, \
510 sizeof(EVP_AES_KEY), \
511 NULL,NULL,NULL,NULL }; \
512 static const EVP_CIPHER aes_##keylen##_##mode = { \
513 nid##_##keylen##_##nmode,blocksize, \
515 flags|EVP_CIPH_##MODE##_MODE, \
517 aes_##mode##_cipher, \
519 sizeof(EVP_AES_KEY), \
520 NULL,NULL,NULL,NULL }; \
521 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
522 { return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
524 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
525 static const EVP_CIPHER aesni_##keylen##_##mode = { \
526 nid##_##keylen##_##mode,blocksize, \
527 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
528 flags|EVP_CIPH_##MODE##_MODE, \
529 aesni_##mode##_init_key, \
530 aesni_##mode##_cipher, \
531 aes_##mode##_cleanup, \
532 sizeof(EVP_AES_##MODE##_CTX), \
533 NULL,NULL,aes_##mode##_ctrl,NULL }; \
534 static const EVP_CIPHER aes_##keylen##_##mode = { \
535 nid##_##keylen##_##mode,blocksize, \
536 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
537 flags|EVP_CIPH_##MODE##_MODE, \
538 aes_##mode##_init_key, \
539 aes_##mode##_cipher, \
540 aes_##mode##_cleanup, \
541 sizeof(EVP_AES_##MODE##_CTX), \
542 NULL,NULL,aes_##mode##_ctrl,NULL }; \
543 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
544 { return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
546 #elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))
548 # include "sparc_arch.h"
550 extern unsigned int OPENSSL_sparcv9cap_P[];
553 * Initial Fujitsu SPARC64 X support
555 # define HWAES_CAPABLE (OPENSSL_sparcv9cap_P[0] & SPARCV9_FJAESX)
556 # define HWAES_set_encrypt_key aes_fx_set_encrypt_key
557 # define HWAES_set_decrypt_key aes_fx_set_decrypt_key
558 # define HWAES_encrypt aes_fx_encrypt
559 # define HWAES_decrypt aes_fx_decrypt
560 # define HWAES_cbc_encrypt aes_fx_cbc_encrypt
561 # define HWAES_ctr32_encrypt_blocks aes_fx_ctr32_encrypt_blocks
563 # define SPARC_AES_CAPABLE (OPENSSL_sparcv9cap_P[1] & CFR_AES)
565 void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
566 void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
567 void aes_t4_encrypt(const unsigned char *in, unsigned char *out,
569 void aes_t4_decrypt(const unsigned char *in, unsigned char *out,
572 * Key-length specific subroutines were chosen for following reason.
573 * Each SPARC T4 core can execute up to 8 threads which share core's
574 * resources. Loading as much key material to registers allows to
575 * minimize references to shared memory interface, as well as amount
576 * of instructions in inner loops [much needed on T4]. But then having
577 * non-key-length specific routines would require conditional branches
578 * either in inner loops or on subroutines' entries. Former is hardly
579 * acceptable, while latter means code size increase to size occupied
580 * by multiple key-length specific subroutines, so why fight?
582 void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
583 size_t len, const AES_KEY *key,
584 unsigned char *ivec);
585 void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
586 size_t len, const AES_KEY *key,
587 unsigned char *ivec);
588 void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
589 size_t len, const AES_KEY *key,
590 unsigned char *ivec);
591 void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
592 size_t len, const AES_KEY *key,
593 unsigned char *ivec);
594 void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
595 size_t len, const AES_KEY *key,
596 unsigned char *ivec);
597 void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
598 size_t len, const AES_KEY *key,
599 unsigned char *ivec);
600 void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
601 size_t blocks, const AES_KEY *key,
602 unsigned char *ivec);
603 void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
604 size_t blocks, const AES_KEY *key,
605 unsigned char *ivec);
606 void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
607 size_t blocks, const AES_KEY *key,
608 unsigned char *ivec);
609 void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
610 size_t blocks, const AES_KEY *key1,
611 const AES_KEY *key2, const unsigned char *ivec);
612 void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
613 size_t blocks, const AES_KEY *key1,
614 const AES_KEY *key2, const unsigned char *ivec);
615 void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
616 size_t blocks, const AES_KEY *key1,
617 const AES_KEY *key2, const unsigned char *ivec);
618 void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
619 size_t blocks, const AES_KEY *key1,
620 const AES_KEY *key2, const unsigned char *ivec);
/*
 * aes_t4_init_key: SPARC T4 analogue of aesni_init_key -- install a T4 key
 * schedule and select key-length-specific CBC/CTR bulk routines (see the
 * rationale comment earlier in the file about per-key-length subroutines).
 * NOTE(review): this listing is subsampled -- the switch/case and else
 * lines between the visible statements are missing; code lines are kept
 * byte-identical.
 */
622 static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
623                            const unsigned char *iv, int enc)
626     EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
628     mode = EVP_CIPHER_CTX_mode(ctx);
629     bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
    /* ECB/CBC decryption uses the decrypt schedule... */
630     if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
633         aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
634         dat->block = (block128_f) aes_t4_decrypt;
    /* ...with a CBC bulk routine matched to the key size (128/192/256). */
637             dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
638                 (cbc128_f) aes128_t4_cbc_decrypt : NULL;
641             dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
642                 (cbc128_f) aes192_t4_cbc_decrypt : NULL;
645             dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
646                 (cbc128_f) aes256_t4_cbc_decrypt : NULL;
    /* All other cases use the encrypt schedule. */
653         aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
654         dat->block = (block128_f) aes_t4_encrypt;
    /* Key-length-specific CBC/CTR bulk helpers for encryption. */
657             if (mode == EVP_CIPH_CBC_MODE)
658                 dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
659             else if (mode == EVP_CIPH_CTR_MODE)
660                 dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
662                 dat->stream.cbc = NULL;
665             if (mode == EVP_CIPH_CBC_MODE)
666                 dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
667             else if (mode == EVP_CIPH_CTR_MODE)
668                 dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
670                 dat->stream.cbc = NULL;
673             if (mode == EVP_CIPH_CBC_MODE)
674                 dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
675             else if (mode == EVP_CIPH_CTR_MODE)
676                 dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
678                 dat->stream.cbc = NULL;
    /* NOTE(review): error path; the guarding condition line is not
     * visible in this listing -- confirm against the full source. */
686         EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
693 # define aes_t4_cbc_cipher aes_cbc_cipher
694 static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
695 const unsigned char *in, size_t len);
697 # define aes_t4_ecb_cipher aes_ecb_cipher
698 static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
699 const unsigned char *in, size_t len);
701 # define aes_t4_ofb_cipher aes_ofb_cipher
702 static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
703 const unsigned char *in, size_t len);
705 # define aes_t4_cfb_cipher aes_cfb_cipher
706 static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
707 const unsigned char *in, size_t len);
709 # define aes_t4_cfb8_cipher aes_cfb8_cipher
710 static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
711 const unsigned char *in, size_t len);
713 # define aes_t4_cfb1_cipher aes_cfb1_cipher
714 static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
715 const unsigned char *in, size_t len);
717 # define aes_t4_ctr_cipher aes_ctr_cipher
718 static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
719 const unsigned char *in, size_t len);
/*
 * aes_t4_gcm_init_key: SPARC T4 GCM setup -- forward key schedule only,
 * with a key-length-specific CTR bulk routine. Key and IV may be supplied
 * independently; a previously saved IV is reused when only the key changes.
 * (Listing is subsampled; code lines kept byte-identical.)
 */
721 static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
722                                const unsigned char *iv, int enc)
724     EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
728         int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
729         aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
730         CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
731                            (block128_f) aes_t4_encrypt);
    /* CTR helper chosen by key size (128/192/256). */
734             gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
737             gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
740             gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
746          * If we have an iv we can set it directly, otherwise use saved IV.
748         if (iv == NULL && gctx->iv_set)
751             CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
756         /* If key set use IV, otherwise copy */
758             CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
760             memcpy(gctx->iv, iv, gctx->ivlen);
767 # define aes_t4_gcm_cipher aes_gcm_cipher
768 static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
769 const unsigned char *in, size_t len);
/*
 * aes_t4_xts_init_key: SPARC T4 XTS setup. ks1 holds the data key (encrypt
 * or decrypt schedule per direction), ks2 the always-encrypt tweak key.
 * The EVP key length covers both keys, hence "* 4" = half the key in bits.
 * NOTE(review): only 128- and 256-bit stream helpers are visible, which
 * matches XTS supporting aes-128/aes-256 only.
 * (Listing is subsampled; code lines kept byte-identical.)
 */
771 static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
772                                const unsigned char *iv, int enc)
774     EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
779         int bits = EVP_CIPHER_CTX_key_length(ctx) * 4;
781         /* key_len is two AES keys */
783             aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
784             xctx->xts.block1 = (block128_f) aes_t4_encrypt;
787                 xctx->stream = aes128_t4_xts_encrypt;
790                 xctx->stream = aes256_t4_xts_encrypt;
796             aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
798             xctx->xts.block1 = (block128_f) aes_t4_decrypt;
801                 xctx->stream = aes128_t4_xts_decrypt;
804                 xctx->stream = aes256_t4_xts_decrypt;
    /* Second half of the key material: the tweak key (encrypt-only). */
811         aes_t4_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
812                                EVP_CIPHER_CTX_key_length(ctx) * 4,
814         xctx->xts.block2 = (block128_f) aes_t4_encrypt;
816         xctx->xts.key1 = &xctx->ks1;
820         xctx->xts.key2 = &xctx->ks2;
    /* XTS tweak/IV is always 16 bytes. */
821         memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
827 # define aes_t4_xts_cipher aes_xts_cipher
828 static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
829 const unsigned char *in, size_t len);
/*
 * aes_t4_ccm_init_key: SPARC T4 CCM setup -- forward key schedule only;
 * the nonce copied into the EVP IV buffer is 15 - L bytes (RFC 3610).
 * (Listing is subsampled; code lines kept byte-identical.)
 */
831 static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
832                                const unsigned char *iv, int enc)
834     EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
838         int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
839         aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
840         CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
841                            &cctx->ks, (block128_f) aes_t4_encrypt);
846         memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
852 # define aes_t4_ccm_cipher aes_ccm_cipher
853 static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
854 const unsigned char *in, size_t len);
856 # ifndef OPENSSL_NO_OCB
/*
 * aes_t4_ocb_init_key: SPARC T4 OCB setup with both key schedules
 * (decryption needs the decrypt schedule; see comment below). Key and IV
 * may be supplied independently.
 * (Listing is subsampled; code lines kept byte-identical.)
 */
857 static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
858                                const unsigned char *iv, int enc)
860     EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
866          * We set both the encrypt and decrypt key here because decrypt
867          * needs both. We could possibly optimise to remove setting the
868          * decrypt for an encryption operation.
870         aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
872         aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
874         if (!CRYPTO_ocb128_init(&octx->ocb,
875                                 &octx->ksenc.ks, &octx->ksdec.ks,
876                                 (block128_f) aes_t4_encrypt,
877                                 (block128_f) aes_t4_decrypt,
884          * If we have an iv we can set it directly, otherwise use saved IV.
886         if (iv == NULL && octx->iv_set)
    /* Setting the IV can fail (e.g. bad iv/tag length combination). */
889         if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
896         /* If key set use IV, otherwise copy */
898             CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
900             memcpy(octx->iv, iv, octx->ivlen);
906 # define aes_t4_ocb_cipher aes_ocb_cipher
907 static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
908 const unsigned char *in, size_t len);
909 # endif /* OPENSSL_NO_OCB */
911 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
912 static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
913 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
914 flags|EVP_CIPH_##MODE##_MODE, \
916 aes_t4_##mode##_cipher, \
918 sizeof(EVP_AES_KEY), \
919 NULL,NULL,NULL,NULL }; \
920 static const EVP_CIPHER aes_##keylen##_##mode = { \
921 nid##_##keylen##_##nmode,blocksize, \
923 flags|EVP_CIPH_##MODE##_MODE, \
925 aes_##mode##_cipher, \
927 sizeof(EVP_AES_KEY), \
928 NULL,NULL,NULL,NULL }; \
929 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
930 { return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
932 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
933 static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
934 nid##_##keylen##_##mode,blocksize, \
935 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
936 flags|EVP_CIPH_##MODE##_MODE, \
937 aes_t4_##mode##_init_key, \
938 aes_t4_##mode##_cipher, \
939 aes_##mode##_cleanup, \
940 sizeof(EVP_AES_##MODE##_CTX), \
941 NULL,NULL,aes_##mode##_ctrl,NULL }; \
942 static const EVP_CIPHER aes_##keylen##_##mode = { \
943 nid##_##keylen##_##mode,blocksize, \
944 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
945 flags|EVP_CIPH_##MODE##_MODE, \
946 aes_##mode##_init_key, \
947 aes_##mode##_cipher, \
948 aes_##mode##_cleanup, \
949 sizeof(EVP_AES_##MODE##_CTX), \
950 NULL,NULL,aes_##mode##_ctrl,NULL }; \
951 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
952 { return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
954 #elif defined(OPENSSL_CPUID_OBJ) && defined(__s390__)
958 # include "s390x_arch.h"
964 * KMA-GCM-AES parameter block - begin
965 * (see z/Architecture Principles of Operation >= SA22-7832-11)
968 unsigned char reserved[12];
974 unsigned long long g[2];
978 unsigned long long taadl;
979 unsigned long long tpcl;
981 unsigned long long g[2];
986 /* KMA-GCM-AES parameter block - end */
998 unsigned char ares[16];
999 unsigned char mres[16];
1000 unsigned char kres[16];
1006 } S390X_AES_GCM_CTX;
1012 * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
1013 * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
1014 * rounds field is used to store the function code and that the key
1015 * schedule is not stored (if aes hardware support is detected).
1018 unsigned char pad[16];
1024 * KMAC-AES parameter block - begin
1025 * (see z/Architecture Principles of Operation >= SA22-7832-08)
1029 unsigned long long g[2];
1030 unsigned char b[16];
1032 unsigned char k[32];
1034     /* KMAC-AES parameter block - end */
1037 unsigned long long g[2];
1038 unsigned char b[16];
1041 unsigned long long g[2];
1042 unsigned char b[16];
1045 unsigned long long blocks;
1054 unsigned char pad[140];
1058 } S390X_AES_CCM_CTX;
1060 # define S390X_aes_128_CAPABLE ((OPENSSL_s390xcap_P.km[0] & \
1061 S390X_CAPBIT(S390X_AES_128)) &&\
1062 (OPENSSL_s390xcap_P.kmc[0] & \
1063 S390X_CAPBIT(S390X_AES_128)))
1064 # define S390X_aes_192_CAPABLE ((OPENSSL_s390xcap_P.km[0] & \
1065 S390X_CAPBIT(S390X_AES_192)) &&\
1066 (OPENSSL_s390xcap_P.kmc[0] & \
1067 S390X_CAPBIT(S390X_AES_192)))
1068 # define S390X_aes_256_CAPABLE ((OPENSSL_s390xcap_P.km[0] & \
1069 S390X_CAPBIT(S390X_AES_256)) &&\
1070 (OPENSSL_s390xcap_P.kmc[0] & \
1071 S390X_CAPBIT(S390X_AES_256)))
1073 # define s390x_aes_init_key aes_init_key
1074 static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
1075 const unsigned char *iv, int enc);
1077 # define S390X_aes_128_cbc_CAPABLE 1 /* checked by callee */
1078 # define S390X_aes_192_cbc_CAPABLE 1
1079 # define S390X_aes_256_cbc_CAPABLE 1
1081 # define s390x_aes_cbc_cipher aes_cbc_cipher
1082 static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1083 const unsigned char *in, size_t len);
1085 # define S390X_aes_128_ecb_CAPABLE 0
1086 # define S390X_aes_192_ecb_CAPABLE 0
1087 # define S390X_aes_256_ecb_CAPABLE 0
1089 # define s390x_aes_ecb_cipher aes_ecb_cipher
1090 static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1091 const unsigned char *in, size_t len);
1093 # define S390X_aes_128_ofb_CAPABLE 0
1094 # define S390X_aes_192_ofb_CAPABLE 0
1095 # define S390X_aes_256_ofb_CAPABLE 0
1097 # define s390x_aes_ofb_cipher aes_ofb_cipher
1098 static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1099 const unsigned char *in, size_t len);
1101 # define S390X_aes_128_cfb_CAPABLE 0
1102 # define S390X_aes_192_cfb_CAPABLE 0
1103 # define S390X_aes_256_cfb_CAPABLE 0
1105 # define s390x_aes_cfb_cipher aes_cfb_cipher
1106 static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1107 const unsigned char *in, size_t len);
1109 # define S390X_aes_128_cfb8_CAPABLE 0
1110 # define S390X_aes_192_cfb8_CAPABLE 0
1111 # define S390X_aes_256_cfb8_CAPABLE 0
1113 # define s390x_aes_cfb8_cipher aes_cfb8_cipher
1114 static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1115 const unsigned char *in, size_t len);
1117 # define S390X_aes_128_cfb1_CAPABLE 0
1118 # define S390X_aes_192_cfb1_CAPABLE 0
1119 # define S390X_aes_256_cfb1_CAPABLE 0
1121 # define s390x_aes_cfb1_cipher aes_cfb1_cipher
1122 static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1123 const unsigned char *in, size_t len);
1125 # define S390X_aes_128_ctr_CAPABLE 1 /* checked by callee */
1126 # define S390X_aes_192_ctr_CAPABLE 1
1127 # define S390X_aes_256_ctr_CAPABLE 1
1129 # define s390x_aes_ctr_cipher aes_ctr_cipher
1130 static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1131 const unsigned char *in, size_t len);
1133 # define S390X_aes_128_gcm_CAPABLE (S390X_aes_128_CAPABLE && \
1134 (OPENSSL_s390xcap_P.kma[0] & \
1135 S390X_CAPBIT(S390X_AES_128)))
1136 # define S390X_aes_192_gcm_CAPABLE (S390X_aes_192_CAPABLE && \
1137 (OPENSSL_s390xcap_P.kma[0] & \
1138 S390X_CAPBIT(S390X_AES_192)))
1139 # define S390X_aes_256_gcm_CAPABLE (S390X_aes_256_CAPABLE && \
1140 (OPENSSL_s390xcap_P.kma[0] & \
1141 S390X_CAPBIT(S390X_AES_256)))
1143 /* iv + padding length for iv lengths != 12 */
1144 # define S390X_gcm_ivpadlen(i) ((((i) + 15) >> 4 << 4) + 16)
1147 * Process additional authenticated data. Returns 0 on success. Code is
/*
 * s390x_aes_gcm_aad: feed additional authenticated data to the KMA-based
 * GCM context. Buffers partial blocks in ctx->ares; rejects AAD after
 * payload has started (tpcl != 0) and enforces the GCM AAD length limit
 * (2^61 bytes) with overflow detection. Returns 0 on success. Big-endian.
 * NOTE(review): loop/brace lines between the visible statements are
 * missing from this subsampled listing; code lines kept byte-identical.
 */
1150 static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
1153     unsigned long long alen;
    /* AAD must be supplied before any payload has been processed. */
1156     if (ctx->kma.param.tpcl)
    /* Running AAD total; the second clause detects unsigned wrap-around. */
1159     alen = ctx->kma.param.taadl + len;
1160     if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
1162     ctx->kma.param.taadl = alen;
    /* Top up a previously buffered partial block byte-by-byte. */
1167             ctx->ares[n] = *aad;
1172         /* ctx->ares contains a complete block if offset has wrapped around */
1174             s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1175             ctx->fc |= S390X_KMA_HS;
    /* Bulk-process the full blocks of the remaining AAD via KMA. */
1184         s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1186         ctx->fc |= S390X_KMA_HS;
    /* Stash the trailing partial block for the next call. */
1194         ctx->ares[rem] = aad[rem];
1201 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
1202 * success. Code is big-endian.
/*
 * s390x_aes_gcm: en/de-crypt payload and update the authentication state
 * using the KMA instruction. Enforces the GCM plaintext limit
 * (2^36 - 32 bytes) with overflow detection, buffers partial blocks in
 * ctx->mres, and encrypts/decrypts a trailing partial block itself by
 * XORing against a freshly computed keystream block (ctx->kres) so that
 * KMA can authenticate it later. Returns 0 on success. Big-endian.
 * NOTE(review): loop/brace lines between the visible statements are
 * missing from this subsampled listing; code lines kept byte-identical.
 */
1204 static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
1205                          unsigned char *out, size_t len)
1207     const unsigned char *inptr;
1208     unsigned long long mlen;
1211         unsigned char b[16];
    /* Running payload total; the second clause detects unsigned wrap. */
1216     mlen = ctx->kma.param.tpcl + len;
1217     if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
1219     ctx->kma.param.tpcl = mlen;
    /* Top up a previously buffered partial plaintext/ciphertext block. */
1225         while (n && inlen) {
1226             ctx->mres[n] = *inptr;
1231         /* ctx->mres contains a complete block if offset has wrapped around */
1233             s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
1234                       ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1235             ctx->fc |= S390X_KMA_HS;
1238             /* previous call already encrypted/decrypted its remainder,
1239              * see comment below */
    /* Bulk-process the whole blocks of the remaining input via KMA. */
1256     s390x_kma(ctx->ares, ctx->areslen, in, len, out,
1257               ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1260         ctx->fc |= S390X_KMA_HS;
1265      * If there is a remainder, it has to be saved such that it can be
1266      * processed by kma later. However, we also have to do the for-now
1267      * unauthenticated encryption/decryption part here and now...
    /* Derive the keystream block for the current counter value with KM. */
1270         if (!ctx->mreslen) {
1271             buf.w[0] = ctx->kma.param.j0.w[0];
1272             buf.w[1] = ctx->kma.param.j0.w[1];
1273             buf.w[2] = ctx->kma.param.j0.w[2];
1274             buf.w[3] = ctx->kma.param.cv.w + 1;
1275             s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
    /* Buffer the remainder and XOR it against the keystream now. */
1279         for (i = 0; i < rem; i++) {
1280             ctx->mres[n + i] = in[i];
1281             out[i] = in[i] ^ ctx->kres[n + i];
1284         ctx->mreslen += rem;
1290 * Initialize context structure. Code is big-endian.
1292 static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
1293 const unsigned char *iv)
/* Reset tag accumulator and AAD/payload length counters for a new message. */
1295 ctx->kma.param.t.g[0] = 0;
1296 ctx->kma.param.t.g[1] = 0;
1297 ctx->kma.param.tpcl = 0;
1298 ctx->kma.param.taadl = 0;
/* 96-bit iv: J0 = iv || 0^31 || 1, no GHASH of the iv required. */
1303 if (ctx->ivlen == 12) {
1304 memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
1305 ctx->kma.param.j0.w[3] = 1;
1306 ctx->kma.param.cv.w = 1;
/* Other iv lengths: derive J0 by GHASHing the padded iv via KMA. */
1308 /* ctx->iv has the right size and is already padded. */
1309 memcpy(ctx->iv, iv, ctx->ivlen);
1310 s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
1311 ctx->fc, &ctx->kma.param);
1312 ctx->fc |= S390X_KMA_HS;
1314 ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
1315 ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
1316 ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
/* The tag field was used as GHASH scratch above; clear it again. */
1317 ctx->kma.param.t.g[0] = 0;
1318 ctx->kma.param.t.g[1] = 0;
1323 * Performs various operations on the context structure depending on control
1324 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
1325 * Code is big-endian.
1327 static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
1329 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1330 S390X_AES_GCM_CTX *gctx_out;
1331 EVP_CIPHER_CTX *out;
1332 unsigned char *buf, *iv;
1333 int ivlen, enc, len;
/* EVP_CTRL_INIT: reset to the default 12-byte iv, no TLS AAD pending. */
1337 ivlen = EVP_CIPHER_CTX_iv_length(c);
1338 iv = EVP_CIPHER_CTX_iv_noconst(c);
1341 gctx->ivlen = ivlen;
1345 gctx->tls_aad_len = -1;
1348 case EVP_CTRL_AEAD_SET_IVLEN:
1353 iv = EVP_CIPHER_CTX_iv_noconst(c);
1354 len = S390X_gcm_ivpadlen(arg);
1356 /* Allocate memory for iv if needed. */
1357 if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
1359 OPENSSL_free(gctx->iv);
1361 gctx->iv = OPENSSL_malloc(len);
1362 if (gctx->iv == NULL)
/* Zero the padding and store the iv bit length in the final 8 bytes. */
1366 memset(gctx->iv + arg, 0, len - arg - 8);
1367 *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
/* Tag may only be set when decrypting. */
1372 case EVP_CTRL_AEAD_SET_TAG:
1373 buf = EVP_CIPHER_CTX_buf_noconst(c);
1374 enc = EVP_CIPHER_CTX_encrypting(c);
1375 if (arg <= 0 || arg > 16 || enc)
1378 memcpy(buf, ptr, arg);
/* Tag may only be retrieved when encrypting and after it was computed. */
1382 case EVP_CTRL_AEAD_GET_TAG:
1383 enc = EVP_CIPHER_CTX_encrypting(c);
1384 if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
1387 memcpy(ptr, gctx->kma.param.t.b, arg);
1390 case EVP_CTRL_GCM_SET_IV_FIXED:
1391 /* Special case: -1 length restores whole iv */
1393 memcpy(gctx->iv, ptr, gctx->ivlen);
1398 * Fixed field must be at least 4 bytes and invocation field at least
1401 if ((arg < 4) || (gctx->ivlen - arg) < 8)
1405 memcpy(gctx->iv, ptr, arg);
/* When encrypting, randomize the invocation field (DRBG if available). */
1407 enc = EVP_CIPHER_CTX_encrypting(c);
1409 if (c->drbg != NULL) {
1410 if (RAND_DRBG_bytes(c->drbg, gctx->iv + arg, gctx->ivlen - arg) == 0)
1412 } else if (RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0) {
1420 case EVP_CTRL_GCM_IV_GEN:
1421 if (gctx->iv_gen == 0 || gctx->key_set == 0)
1424 s390x_aes_gcm_setiv(gctx, gctx->iv);
1426 if (arg <= 0 || arg > gctx->ivlen)
1429 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
1431 * Invocation field will be at least 8 bytes in size and so no need
1432 * to check wrap around or increment more than last 8 bytes.
1434 (*(unsigned long long *)(gctx->iv + gctx->ivlen - 8))++;
/* Restore an explicit iv received from the peer (decrypt side only). */
1438 case EVP_CTRL_GCM_SET_IV_INV:
1439 enc = EVP_CIPHER_CTX_encrypting(c);
1440 if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
1443 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
1444 s390x_aes_gcm_setiv(gctx, gctx->iv);
1448 case EVP_CTRL_AEAD_TLS1_AAD:
1449 /* Save the aad for later use. */
1450 if (arg != EVP_AEAD_TLS1_AAD_LEN)
1453 buf = EVP_CIPHER_CTX_buf_noconst(c);
1454 memcpy(buf, ptr, arg);
1455 gctx->tls_aad_len = arg;
/* Patch the record length in the AAD: strip explicit iv (and tag). */
1457 len = buf[arg - 2] << 8 | buf[arg - 1];
1458 /* Correct length for explicit iv. */
1459 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
1461 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
1463 /* If decrypting correct for tag too. */
1464 enc = EVP_CIPHER_CTX_encrypting(c);
1466 if (len < EVP_GCM_TLS_TAG_LEN)
1468 len -= EVP_GCM_TLS_TAG_LEN;
1470 buf[arg - 2] = len >> 8;
1471 buf[arg - 1] = len & 0xff;
1472 /* Extra padding: tag appended to record. */
1473 return EVP_GCM_TLS_TAG_LEN;
/* EVP_CTRL_COPY: deep-copy the iv buffer when it was heap-allocated. */
1477 gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
1478 iv = EVP_CIPHER_CTX_iv_noconst(c);
1480 if (gctx->iv == iv) {
1481 gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
1483 len = S390X_gcm_ivpadlen(gctx->ivlen);
1485 gctx_out->iv = OPENSSL_malloc(len);
1486 if (gctx_out->iv == NULL)
1489 memcpy(gctx_out->iv, gctx->iv, len);
1499 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
1501 static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
1502 const unsigned char *key,
1503 const unsigned char *iv, int enc)
1505 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1508 if (iv == NULL && key == NULL)
/* Load the raw key into the KMA parameter block. */
1512 keylen = EVP_CIPHER_CTX_key_length(ctx);
1513 memcpy(&gctx->kma.param.k, key, keylen);
1515 /* Convert key size to function code: 16/24/32 bytes -> fc 18/19/20. */
1516 gctx->fc = S390X_AES_128 + (((keylen << 3) - 128) >> 6);
1518 gctx->fc |= S390X_DECRYPT;
/* Re-use a previously set iv if only the key is being (re)loaded. */
1520 if (iv == NULL && gctx->iv_set)
1524 s390x_aes_gcm_setiv(gctx, iv);
/* iv-only path: initialize the context and remember the iv. */
1530 s390x_aes_gcm_setiv(gctx, iv);
1532 memcpy(gctx->iv, iv, gctx->ivlen);
1541 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1542 * if successful. Otherwise -1 is returned. Code is big-endian.
1544 static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1545 const unsigned char *in, size_t len)
1547 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1548 const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1549 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
/* TLS path only supports in-place operation on a full record. */
1552 if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
/* Generate the explicit iv (encrypt) or take it from the record (decrypt). */
1555 if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
1556 : EVP_CTRL_GCM_SET_IV_INV,
1557 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
1560 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1561 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1562 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
/* One-shot KMA over AAD and payload; lengths are in bits. */
1564 gctx->kma.param.taadl = gctx->tls_aad_len << 3;
1565 gctx->kma.param.tpcl = len << 3;
1566 s390x_kma(buf, gctx->tls_aad_len, in, len, out,
1567 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
/* Encrypt: append the tag. Decrypt: verify it in constant time. */
1570 memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
1571 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1573 if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
1574 EVP_GCM_TLS_TAG_LEN)) {
/* Tag mismatch: wipe the plaintext before returning failure. */
1575 OPENSSL_cleanse(out, len);
1582 gctx->tls_aad_len = -1;
1587 * Called from EVP layer to initialize context, process additional
1588 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1589 * ciphertext or process a TLS packet, depending on context. Returns bytes
1590 * written on success. Otherwise -1 is returned. Code is big-endian.
1592 static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1593 const unsigned char *in, size_t len)
1595 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1596 unsigned char *buf, tmp[16];
/* A pending TLS AAD means this is a TLS record: use the one-shot path. */
1602 if (gctx->tls_aad_len >= 0)
1603 return s390x_aes_gcm_tls_cipher(ctx, out, in, len);
/* out == NULL: pure AAD update; otherwise en/de-crypt payload. */
1610 if (s390x_aes_gcm_aad(gctx, in, len))
1613 if (s390x_aes_gcm(gctx, in, out, len))
/* Final(): flush buffered remainders and compute the tag (bit lengths). */
1618 gctx->kma.param.taadl <<= 3;
1619 gctx->kma.param.tpcl <<= 3;
1620 s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
1621 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1622 /* recall that we already did en-/decrypt gctx->mres
1623 * and returned it to caller... */
1624 OPENSSL_cleanse(tmp, gctx->mreslen);
/* Decrypt: compare computed tag against the one set by the caller. */
1627 enc = EVP_CIPHER_CTX_encrypting(ctx);
1631 if (gctx->taglen < 0)
1634 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1635 if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
/* Release the heap-allocated iv (if any) and wipe all key material. */
1642 static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
1644 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1645 const unsigned char *iv;
/* Only free gctx->iv when it does not alias the EVP context's iv buffer. */
1650 iv = EVP_CIPHER_CTX_iv(c);
1652 OPENSSL_free(gctx->iv);
1654 OPENSSL_cleanse(gctx, sizeof(*gctx));
/* XTS has no dedicated s390x implementation here: fall through to the
 * generic aes_xts_* routines (hardware use is decided by the callee). */
1658 # define S390X_AES_XTS_CTX EVP_AES_XTS_CTX
1659 # define S390X_aes_128_xts_CAPABLE 1 /* checked by callee */
1660 # define S390X_aes_256_xts_CAPABLE 1
1662 # define s390x_aes_xts_init_key aes_xts_init_key
1663 static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
1664 const unsigned char *key,
1665 const unsigned char *iv, int enc);
1666 # define s390x_aes_xts_cipher aes_xts_cipher
1667 static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1668 const unsigned char *in, size_t len);
1669 # define s390x_aes_xts_ctrl aes_xts_ctrl
1670 static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
1671 # define s390x_aes_xts_cleanup aes_xts_cleanup
/* CCM needs KMAC support for the respective AES function code. */
1673 # define S390X_aes_128_ccm_CAPABLE (S390X_aes_128_CAPABLE && \
1674 (OPENSSL_s390xcap_P.kmac[0] & \
1675 S390X_CAPBIT(S390X_AES_128)))
1676 # define S390X_aes_192_ccm_CAPABLE (S390X_aes_192_CAPABLE && \
1677 (OPENSSL_s390xcap_P.kmac[0] & \
1678 S390X_CAPBIT(S390X_AES_192)))
1679 # define S390X_aes_256_ccm_CAPABLE (S390X_aes_256_CAPABLE && \
1680 (OPENSSL_s390xcap_P.kmac[0] & \
1681 S390X_CAPBIT(S390X_AES_256)))
/* Flag bit in the CCM B0 block signalling that AAD is present (RFC 3610). */
1683 # define S390X_CCM_AAD_FLAG 0x40
1686 * Set nonce and length fields. Code is big-endian.
1688 static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
1689 const unsigned char *nonce,
/* Clear the AAD flag, encode the message length, then copy the nonce
 * into bytes 1..15-L of the B0 block. */
1692 ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
1693 ctx->aes.ccm.nonce.g[1] = mlen;
1694 memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
1698 * Process additional authenticated data. Code is big-endian.
1700 static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
/* Mark B0 as having AAD, then encode the AAD length per RFC 3610:
 * 2 bytes for short lengths, 0xfffe/0xffff prefixes for 4/8-byte forms. */
1709 ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;
1711 /* Suppress 'type-punned pointer dereference' warning. */
1712 ptr = ctx->aes.ccm.buf.b;
1714 if (alen < ((1 << 16) - (1 << 8))) {
1715 *(uint16_t *)ptr = alen;
1717 } else if (sizeof(alen) == 8
1718 && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
1719 *(uint16_t *)ptr = 0xffff;
1720 *(uint64_t *)(ptr + 2) = alen;
1723 *(uint16_t *)ptr = 0xfffe;
1724 *(uint32_t *)(ptr + 2) = alen;
/* Fill the first CBC-MAC block with as much AAD as fits, zero-pad rest. */
1728 while (i < 16 && alen) {
1729 ctx->aes.ccm.buf.b[i] = *aad;
1735 ctx->aes.ccm.buf.b[i] = 0;
/* MAC the B0 block and the first AAD block in one KMAC call. */
1739 ctx->aes.ccm.kmac_param.icv.g[0] = 0;
1740 ctx->aes.ccm.kmac_param.icv.g[1] = 0;
1741 s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
1742 &ctx->aes.ccm.kmac_param);
1743 ctx->aes.ccm.blocks += 2;
/* MAC remaining complete AAD blocks, then fold in the partial tail. */
1748 s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1749 ctx->aes.ccm.blocks += alen >> 4;
1753 for (i = 0; i < rem; i++)
1754 ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];
1756 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1757 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1758 ctx->aes.ccm.kmac_param.k);
1759 ctx->aes.ccm.blocks++;
1764 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for
1767 static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
1768 unsigned char *out, size_t len, int enc)
1771 unsigned int i, l, num;
1772 unsigned char flags;
/* If no AAD was processed, B0 has not been MAC'd yet: do it now. */
1774 flags = ctx->aes.ccm.nonce.b[0];
1775 if (!(flags & S390X_CCM_AAD_FLAG)) {
1776 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
1777 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
1778 ctx->aes.ccm.blocks++;
/* Re-purpose the nonce block as the CTR A_i block: flags byte = L-1. */
1781 ctx->aes.ccm.nonce.b[0] = l;
1784 * Reconstruct length from encoded length field
1785 * and initialize it with counter value.
1788 for (i = 15 - l; i < 15; i++) {
1789 n |= ctx->aes.ccm.nonce.b[i];
1790 ctx->aes.ccm.nonce.b[i] = 0;
1793 n |= ctx->aes.ccm.nonce.b[15];
1794 ctx->aes.ccm.nonce.b[15] = 1;
/* Encoded length must match the actual payload length. */
1797 return -1; /* length mismatch */
1800 /* Two operations per block plus one for tag encryption */
1801 ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
1802 if (ctx->aes.ccm.blocks > (1ULL << 61))
1803 return -2; /* too much data */
1811 /* mac-then-encrypt */
1813 s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
/* Fold the partial final plaintext block into the CBC-MAC. */
1815 for (i = 0; i < rem; i++)
1816 ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];
1818 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1819 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1820 ctx->aes.ccm.kmac_param.k);
1823 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
1824 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
1825 &num, (ctr128_f)AES_ctr32_encrypt);
1827 /* decrypt-then-mac */
1828 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
1829 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
1830 &num, (ctr128_f)AES_ctr32_encrypt);
1833 s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1835 for (i = 0; i < rem; i++)
1836 ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];
1838 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1839 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1840 ctx->aes.ccm.kmac_param.k);
/* Encrypt the tag with counter value 0 (A_0), then restore the flags. */
1844 for (i = 15 - l; i < 16; i++)
1845 ctx->aes.ccm.nonce.b[i] = 0;
1847 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
1848 ctx->aes.ccm.kmac_param.k);
1849 ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
1850 ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];
1852 ctx->aes.ccm.nonce.b[0] = flags; /* restore flags field */
1857 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1858 * if successful. Otherwise -1 is returned.
1860 static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1861 const unsigned char *in, size_t len)
1863 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
1864 unsigned char *ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
1865 unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1866 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
/* Record must hold at least the explicit iv plus the M-byte tag. */
1869 || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
1873 /* Set explicit iv (sequence number). */
1874 memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN)
1877 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
1879 * Get explicit iv (sequence number). We already have fixed iv
1880 * (server/client_write_iv) here.
1882 memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
1883 s390x_aes_ccm_setiv(cctx, ivec, len);
1885 /* Process aad (sequence number|type|version|length) */
1886 s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);
1888 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
1889 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
/* Encrypt: run CCM and append the tag to the record. */
1892 if (s390x_aes_ccm(cctx, in, out, len, enc))
1895 memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
1896 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
/* Decrypt: verify the tag in constant time, wipe plaintext on failure. */
1898 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
1899 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
1904 OPENSSL_cleanse(out, len);
1910 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is
1913 static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
1914 const unsigned char *key,
1915 const unsigned char *iv, int enc)
1917 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
1918 unsigned char *ivec;
1921 if (iv == NULL && key == NULL)
1925 keylen = EVP_CIPHER_CTX_key_length(ctx);
1926 /* Convert key size to function code. */
1927 cctx->aes.ccm.fc = S390X_AES_128 + (((keylen << 3) - 128) >> 6);
1928 memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);
1930 /* Store encoded m and l. */
1931 cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
1932 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
/* NOTE(review): memset starts at nonce.b + 1 but uses the full
 * sizeof(nonce.b) as length, which looks one byte too long — confirm
 * against upstream before relying on this. */
1933 memset(cctx->aes.ccm.nonce.b + 1, 0,
1934 sizeof(cctx->aes.ccm.nonce.b));
1935 cctx->aes.ccm.blocks = 0;
1937 cctx->aes.ccm.key_set = 1;
/* Copy the first 15-L nonce bytes into the EVP iv buffer. */
1941 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
1942 memcpy(ivec, iv, 15 - cctx->aes.ccm.l);
1944 cctx->aes.ccm.iv_set = 1;
1951 * Called from EVP layer to initialize context, process additional
1952 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1953 * plaintext or process a TLS packet, depending on context. Returns bytes
1954 * written on success. Otherwise -1 is returned.
1956 static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1957 const unsigned char *in, size_t len)
1959 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
1960 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1962 unsigned char *buf, *ivec;
1964 if (!cctx->aes.ccm.key_set)
/* Pending TLS AAD selects the one-shot TLS record path. */
1967 if (cctx->aes.ccm.tls_aad_len >= 0)
1968 return s390x_aes_ccm_tls_cipher(ctx, out, in, len);
1971 * Final(): Does not return any data. Recall that ccm is mac-then-encrypt
1972 * so integrity must be checked already at Update() i.e., before
1973 * potentially corrupted data is output.
1975 if (in == NULL && out != NULL)
1978 if (!cctx->aes.ccm.iv_set)
/* Decryption requires the expected tag to be set beforehand. */
1981 if (!enc && !cctx->aes.ccm.tag_set)
1985 /* Update(): Pass message length. */
1987 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
1988 s390x_aes_ccm_setiv(cctx, ivec, len);
1990 cctx->aes.ccm.len_set = 1;
1994 /* Update(): Process aad. */
1995 if (!cctx->aes.ccm.len_set && len)
1998 s390x_aes_ccm_aad(cctx, in, len);
2002 /* Update(): Process message. */
2004 if (!cctx->aes.ccm.len_set) {
2006 * In case message length was not previously set explicitely via
2007 * Update(), set it now.
2009 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2010 s390x_aes_ccm_setiv(cctx, ivec, len);
2012 cctx->aes.ccm.len_set = 1;
/* Encrypt: compute ciphertext and tag; tag becomes available via ctrl. */
2016 if (s390x_aes_ccm(cctx, in, out, len, enc))
2019 cctx->aes.ccm.tag_set = 1;
/* Decrypt: verify the tag immediately; wipe output and reset on mismatch. */
2024 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
2025 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
2026 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
2032 OPENSSL_cleanse(out, len);
2034 cctx->aes.ccm.iv_set = 0;
2035 cctx->aes.ccm.tag_set = 0;
2036 cctx->aes.ccm.len_set = 0;
2042 * Performs various operations on the context structure depending on control
2043 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
2044 * Code is big-endian.
2046 static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2048 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
2049 unsigned char *buf, *iv;
/* EVP_CTRL_INIT: defaults per RFC 3610 usage in TLS (L=8, M=12). */
2054 cctx->aes.ccm.key_set = 0;
2055 cctx->aes.ccm.iv_set = 0;
2056 cctx->aes.ccm.l = 8;
2057 cctx->aes.ccm.m = 12;
2058 cctx->aes.ccm.tag_set = 0;
2059 cctx->aes.ccm.len_set = 0;
2060 cctx->aes.ccm.tls_aad_len = -1;
2063 case EVP_CTRL_AEAD_TLS1_AAD:
2064 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2067 /* Save the aad for later use. */
2068 buf = EVP_CIPHER_CTX_buf_noconst(c);
2069 memcpy(buf, ptr, arg);
2070 cctx->aes.ccm.tls_aad_len = arg;
/* Patch the record length in the AAD: strip explicit iv (and tag). */
2072 len = *(uint16_t *)(buf + arg - 2);
2073 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
2076 /* Correct length for explicit iv. */
2077 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
2079 enc = EVP_CIPHER_CTX_encrypting(c);
2081 if (len < cctx->aes.ccm.m)
2084 /* Correct length for tag. */
2085 len -= cctx->aes.ccm.m;
2088 *(uint16_t *)(buf + arg - 2) = len;
2089 /* Extra padding: tag appended to record. */
2090 return cctx->aes.ccm.m;
2092 case EVP_CTRL_CCM_SET_IV_FIXED:
2093 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
2096 /* Copy to first part of the iv. */
2097 iv = EVP_CIPHER_CTX_iv_noconst(c);
2098 memcpy(iv, ptr, arg);
/* SET_IVLEN with nonce length N implies L = 15 - N. */
2101 case EVP_CTRL_AEAD_SET_IVLEN:
2105 case EVP_CTRL_CCM_SET_L:
2106 if (arg < 2 || arg > 8)
2109 cctx->aes.ccm.l = arg;
/* Tag length must be an even value in [4,16]; when decrypting, the
 * expected tag value itself is stored in the context buffer. */
2112 case EVP_CTRL_AEAD_SET_TAG:
2113 if ((arg & 1) || arg < 4 || arg > 16)
2116 enc = EVP_CIPHER_CTX_encrypting(c);
2121 cctx->aes.ccm.tag_set = 1;
2122 buf = EVP_CIPHER_CTX_buf_noconst(c);
2123 memcpy(buf, ptr, arg);
2126 cctx->aes.ccm.m = arg;
/* GET_TAG is only valid when encrypting and after the tag was computed;
 * retrieving it invalidates the current iv/length state. */
2129 case EVP_CTRL_AEAD_GET_TAG:
2130 enc = EVP_CIPHER_CTX_encrypting(c);
2131 if (!enc || !cctx->aes.ccm.tag_set)
2134 if(arg < cctx->aes.ccm.m)
2137 memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2138 cctx->aes.ccm.tag_set = 0;
2139 cctx->aes.ccm.iv_set = 0;
2140 cctx->aes.ccm.len_set = 0;
2151 # define s390x_aes_ccm_cleanup aes_ccm_cleanup
/* No s390x OCB implementation: capability forced to 0 so the generic
 * aes_ocb_* routines are always selected. */
2153 # ifndef OPENSSL_NO_OCB
2154 # define S390X_AES_OCB_CTX EVP_AES_OCB_CTX
2155 # define S390X_aes_128_ocb_CAPABLE 0
2156 # define S390X_aes_192_ocb_CAPABLE 0
2157 # define S390X_aes_256_ocb_CAPABLE 0
2159 # define s390x_aes_ocb_init_key aes_ocb_init_key
2160 static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2161 const unsigned char *iv, int enc);
2162 # define s390x_aes_ocb_cipher aes_ocb_cipher
2163 static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2164 const unsigned char *in, size_t len);
2165 # define s390x_aes_ocb_cleanup aes_ocb_cleanup
2166 static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
2167 # define s390x_aes_ocb_ctrl aes_ocb_ctrl
2168 static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
/* s390x variant: each EVP_aes_* getter picks the hardware-backed cipher
 * at runtime when the corresponding *_CAPABLE test passes, otherwise the
 * generic software implementation. */
2171 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \
2173 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2174 nid##_##keylen##_##nmode,blocksize, \
2177 flags | EVP_CIPH_##MODE##_MODE, \
2178 s390x_aes_init_key, \
2179 s390x_aes_##mode##_cipher, \
2181 sizeof(EVP_AES_KEY), \
2187 static const EVP_CIPHER aes_##keylen##_##mode = { \
2188 nid##_##keylen##_##nmode, \
2192 flags | EVP_CIPH_##MODE##_MODE, \
2194 aes_##mode##_cipher, \
2196 sizeof(EVP_AES_KEY), \
2197 NULL,NULL,NULL,NULL \
2199 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2201 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2202 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
/* Custom (AEAD/XTS) modes: XTS uses a double-length key. */ \
2205 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
2206 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2207 nid##_##keylen##_##mode, \
2209 (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \
2211 flags | EVP_CIPH_##MODE##_MODE, \
2212 s390x_aes_##mode##_init_key, \
2213 s390x_aes_##mode##_cipher, \
2214 s390x_aes_##mode##_cleanup, \
2215 sizeof(S390X_AES_##MODE##_CTX), \
2218 s390x_aes_##mode##_ctrl, \
2221 static const EVP_CIPHER aes_##keylen##_##mode = { \
2222 nid##_##keylen##_##mode,blocksize, \
2223 (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \
2225 flags | EVP_CIPH_##MODE##_MODE, \
2226 aes_##mode##_init_key, \
2227 aes_##mode##_cipher, \
2228 aes_##mode##_cleanup, \
2229 sizeof(EVP_AES_##MODE##_CTX), \
2232 aes_##mode##_ctrl, \
2235 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2237 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2238 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
/* Non-s390x build: getters return the software EVP_CIPHER directly. */
2243 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
2244 static const EVP_CIPHER aes_##keylen##_##mode = { \
2245 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
2246 flags|EVP_CIPH_##MODE##_MODE, \
2248 aes_##mode##_cipher, \
2250 sizeof(EVP_AES_KEY), \
2251 NULL,NULL,NULL,NULL }; \
2252 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2253 { return &aes_##keylen##_##mode; }
2255 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
2256 static const EVP_CIPHER aes_##keylen##_##mode = { \
2257 nid##_##keylen##_##mode,blocksize, \
2258 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
2259 flags|EVP_CIPH_##MODE##_MODE, \
2260 aes_##mode##_init_key, \
2261 aes_##mode##_cipher, \
2262 aes_##mode##_cleanup, \
2263 sizeof(EVP_AES_##MODE##_CTX), \
2264 NULL,NULL,aes_##mode##_ctrl,NULL }; \
2265 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2266 { return &aes_##keylen##_##mode; }
/* ARM/AArch64: map the HWAES_* entry points onto the aes_v8_* assembly
 * routines when the CPU advertises the relevant NEON/AES extensions. */
2270 #if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__))
2271 # include "arm_arch.h"
2272 # if __ARM_MAX_ARCH__>=7
2273 # if defined(BSAES_ASM)
2274 # define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
2276 # if defined(VPAES_ASM)
2277 # define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
2279 # define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
2280 # define HWAES_set_encrypt_key aes_v8_set_encrypt_key
2281 # define HWAES_set_decrypt_key aes_v8_set_decrypt_key
2282 # define HWAES_encrypt aes_v8_encrypt
2283 # define HWAES_decrypt aes_v8_decrypt
2284 # define HWAES_cbc_encrypt aes_v8_cbc_encrypt
2285 # define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
/* Prototypes for the hardware AES primitives provided in assembly. */
2289 #if defined(HWAES_CAPABLE)
2290 int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits,
2292 int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits,
2294 void HWAES_encrypt(const unsigned char *in, unsigned char *out,
2295 const AES_KEY *key);
2296 void HWAES_decrypt(const unsigned char *in, unsigned char *out,
2297 const AES_KEY *key);
2298 void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out,
2299 size_t length, const AES_KEY *key,
2300 unsigned char *ivec, const int enc);
2301 void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
2302 size_t len, const AES_KEY *key,
2303 const unsigned char ivec[16]);
2304 void HWAES_xts_encrypt(const unsigned char *inp, unsigned char *out,
2305 size_t len, const AES_KEY *key1,
2306 const AES_KEY *key2, const unsigned char iv[16]);
2307 void HWAES_xts_decrypt(const unsigned char *inp, unsigned char *out,
2308 size_t len, const AES_KEY *key1,
2309 const AES_KEY *key2, const unsigned char iv[16]);
/* Instantiate all seven basic modes for a given key length in one go. */
2312 #define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
2313 BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2314 BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2315 BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2316 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2317 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
2318 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
2319 BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
/*
 * Generic key schedule setup. Picks the best available implementation at
 * runtime (HWAES > bitsliced > vector-permutation > plain C tables) and
 * stores the matching block/stream function pointers in the EVP data.
 * Returns 1 on success, 0 on key-setup failure.
 */
2321 static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2322 const unsigned char *iv, int enc)
2325 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
/* ECB/CBC decryption needs the decrypt key schedule; all other modes
 * (CTR/OFB/CFB and any encryption) use the encrypt schedule. */
2327 mode = EVP_CIPHER_CTX_mode(ctx);
2328 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
2330 #ifdef HWAES_CAPABLE
2331 if (HWAES_CAPABLE) {
2332 ret = HWAES_set_decrypt_key(key,
2333 EVP_CIPHER_CTX_key_length(ctx) * 8,
2335 dat->block = (block128_f) HWAES_decrypt;
2336 dat->stream.cbc = NULL;
2337 # ifdef HWAES_cbc_encrypt
2338 if (mode == EVP_CIPH_CBC_MODE)
2339 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2343 #ifdef BSAES_CAPABLE
2344 if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
2345 ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2347 dat->block = (block128_f) AES_decrypt;
2348 dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
2351 #ifdef VPAES_CAPABLE
2352 if (VPAES_CAPABLE) {
2353 ret = vpaes_set_decrypt_key(key,
2354 EVP_CIPHER_CTX_key_length(ctx) * 8,
2356 dat->block = (block128_f) vpaes_decrypt;
2357 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2358 (cbc128_f) vpaes_cbc_encrypt : NULL;
/* Fallback: portable C/table implementation. */
2362 ret = AES_set_decrypt_key(key,
2363 EVP_CIPHER_CTX_key_length(ctx) * 8,
2365 dat->block = (block128_f) AES_decrypt;
2366 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2367 (cbc128_f) AES_cbc_encrypt : NULL;
/* Encrypt-schedule path (encryption, or stream-like modes). */
2370 #ifdef HWAES_CAPABLE
2371 if (HWAES_CAPABLE) {
2372 ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2374 dat->block = (block128_f) HWAES_encrypt;
2375 dat->stream.cbc = NULL;
2376 # ifdef HWAES_cbc_encrypt
2377 if (mode == EVP_CIPH_CBC_MODE)
2378 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2381 # ifdef HWAES_ctr32_encrypt_blocks
2382 if (mode == EVP_CIPH_CTR_MODE)
2383 dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2386 (void)0; /* terminate potentially open 'else' */
2389 #ifdef BSAES_CAPABLE
2390 if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
2391 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2393 dat->block = (block128_f) AES_encrypt;
2394 dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
2397 #ifdef VPAES_CAPABLE
2398 if (VPAES_CAPABLE) {
2399 ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2401 dat->block = (block128_f) vpaes_encrypt;
2402 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2403 (cbc128_f) vpaes_cbc_encrypt : NULL;
2407 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2409 dat->block = (block128_f) AES_encrypt;
2410 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2411 (cbc128_f) AES_cbc_encrypt : NULL;
2413 if (mode == EVP_CIPH_CTR_MODE)
2414 dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
2419 EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
/* CBC: prefer the optimized stream routine if one was installed by
 * aes_init_key, otherwise fall back to the generic CBC helpers. */
2426 static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2427 const unsigned char *in, size_t len)
2429 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2431 if (dat->stream.cbc)
2432 (*dat->stream.cbc) (in, out, len, &dat->ks,
2433 EVP_CIPHER_CTX_iv_noconst(ctx),
2434 EVP_CIPHER_CTX_encrypting(ctx));
2435 else if (EVP_CIPHER_CTX_encrypting(ctx))
2436 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks,
2437 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2439 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
2440 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
/* ECB: apply the block function independently to each full block. */
2445 static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2446 const unsigned char *in, size_t len)
2448 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
2450 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
/* len -= bl so the loop bound covers only complete blocks. */
2455 for (i = 0, len -= bl; i <= len; i += bl)
2456 (*dat->block) (in + i, out + i, &dat->ks);
/* OFB: delegate to the generic helper; persist the stream offset in
 * the EVP context's num field across calls. */
2461 static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2462 const unsigned char *in, size_t len)
2464 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2466 int num = EVP_CIPHER_CTX_num(ctx);
2467 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
2468 EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block);
2469 EVP_CIPHER_CTX_set_num(ctx, num);
/* CFB128: generic helper; direction comes from the EVP context. */
2473 static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2474 const unsigned char *in, size_t len)
2476 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2478 int num = EVP_CIPHER_CTX_num(ctx);
2479 CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
2480 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2481 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2482 EVP_CIPHER_CTX_set_num(ctx, num);
/* CFB8: 8-bit feedback variant of CFB. */
2486 static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2487 const unsigned char *in, size_t len)
2489 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2491 int num = EVP_CIPHER_CTX_num(ctx);
2492 CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
2493 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2494 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2495 EVP_CIPHER_CTX_set_num(ctx, num);
/*
 * CFB1: 1-bit feedback. With EVP_CIPH_FLAG_LENGTH_BITS, len is a bit
 * count; otherwise len is bytes and is processed as len*8 bits, chunked
 * to MAXBITCHUNK bytes per call to avoid size_t overflow in the helper.
 */
2499 static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2500 const unsigned char *in, size_t len)
2502 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2504 if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
2505 int num = EVP_CIPHER_CTX_num(ctx);
2506 CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
2507 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2508 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2509 EVP_CIPHER_CTX_set_num(ctx, num);
/* Byte-length mode: process in MAXBITCHUNK-byte chunks. */
2513 while (len >= MAXBITCHUNK) {
2514 int num = EVP_CIPHER_CTX_num(ctx);
2515 CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
2516 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2517 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2518 EVP_CIPHER_CTX_set_num(ctx, num);
/* Remaining tail, if any. */
2524 int num = EVP_CIPHER_CTX_num(ctx);
2525 CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
2526 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2527 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2528 EVP_CIPHER_CTX_set_num(ctx, num);
/* CTR: use the 32-bit-counter fast path when aes_init_key installed a
 * ctr stream routine; otherwise the generic byte-wise CTR helper. */
2534 static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2535 const unsigned char *in, size_t len)
2537 unsigned int num = EVP_CIPHER_CTX_num(ctx);
2538 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2540 if (dat->stream.ctr)
2541 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
2542 EVP_CIPHER_CTX_iv_noconst(ctx),
2543 EVP_CIPHER_CTX_buf_noconst(ctx),
2544 &num, dat->stream.ctr);
2546 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
2547 EVP_CIPHER_CTX_iv_noconst(ctx),
2548 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
2550 EVP_CIPHER_CTX_set_num(ctx, num);
/*
 * Instantiate the standard AES-128/192/256 EVP_CIPHER tables (ECB/CBC/CFB/
 * OFB/CTR variants) via the BLOCK_CIPHER_generic_pack macro.
 */
2554 BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
2555 BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
2556 BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
/*
 * Free per-context GCM state: scrub the GCM128 context (it contains key
 * schedule material) and release the IV buffer, but only when the IV was
 * heap-allocated rather than pointing into the EVP context's built-in
 * IV storage.
 */
2558 static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
2560 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2563 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
2564 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
2565 OPENSSL_free(gctx->iv);
2569 /* increment counter (64-bit int) by 1 */
/*
 * Used by EVP_CTRL_GCM_IV_GEN to advance the invocation field of the IV.
 * Body not visible in this view; presumably a big-endian byte-wise
 * increment over the 8-byte counter — confirm against the full source.
 */
2570 static void ctr64_inc(unsigned char *counter)
/*
 * Control handler for AES-GCM contexts.  Dispatches on 'type':
 * IV length/tag get-set, fixed+invocation IV handling for TLS-style record
 * protection, TLS AAD installation (with length fix-up), and context copy.
 * Returns 1 on success, 0 on error, or a mode-specific positive value
 * (e.g. the TLS tag length for EVP_CTRL_AEAD_TLS1_AAD).
 * NOTE(review): interior error-return lines are elided from this view;
 * code left byte-identical.
 */
2585 static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2587 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
/* (Re)initialise: default IV points at the context's built-in storage */
2592 gctx->ivlen = EVP_CIPHER_CTX_iv_length(c);
2593 gctx->iv = EVP_CIPHER_CTX_iv_noconst(c);
/* -1 marks "no TLS AAD set" */
2596 gctx->tls_aad_len = -1;
2599 case EVP_CTRL_AEAD_SET_IVLEN:
2602 /* Allocate memory for IV if needed */
2603 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
/* Replace any previously heap-allocated IV before growing */
2604 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
2605 OPENSSL_free(gctx->iv);
2606 gctx->iv = OPENSSL_malloc(arg);
2607 if (gctx->iv == NULL)
2613 case EVP_CTRL_AEAD_SET_TAG:
/* Setting an expected tag is only meaningful when decrypting */
2614 if (arg <= 0 || arg > 16 || EVP_CIPHER_CTX_encrypting(c))
2616 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
2620 case EVP_CTRL_AEAD_GET_TAG:
/* Tag can only be fetched after an encrypt has produced one */
2621 if (arg <= 0 || arg > 16 || !EVP_CIPHER_CTX_encrypting(c)
2622 || gctx->taglen < 0)
2624 memcpy(ptr, EVP_CIPHER_CTX_buf_noconst(c), arg);
2627 case EVP_CTRL_GCM_SET_IV_FIXED:
2628 /* Special case: -1 length restores whole IV */
2630 memcpy(gctx->iv, ptr, gctx->ivlen)
2635 * Fixed field must be at least 4 bytes and invocation field at least
2638 if ((arg < 4) || (gctx->ivlen - arg) < 8)
2641 memcpy(gctx->iv, ptr, arg);
2642 if (EVP_CIPHER_CTX_encrypting(c)) {
/* Randomise the invocation field; prefer the context's DRBG if present */
2643 if (c->drbg != NULL) {
2644 if (RAND_DRBG_bytes(c->drbg, gctx->iv + arg, gctx->ivlen - arg) == 0)
2646 } else if (RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0) {
2653 case EVP_CTRL_GCM_IV_GEN:
2654 if (gctx->iv_gen == 0 || gctx->key_set == 0)
2656 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2657 if (arg <= 0 || arg > gctx->ivlen)
/* Hand the (trailing) invocation bytes back to the caller */
2659 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
2661 * Invocation field will be at least 8 bytes in size and so no need
2662 * to check wrap around or increment more than last 8 bytes.
2664 ctr64_inc(gctx->iv + gctx->ivlen - 8);
2668 case EVP_CTRL_GCM_SET_IV_INV:
/* Decrypt-only: install the explicit IV received from the peer */
2669 if (gctx->iv_gen == 0 || gctx->key_set == 0
2670 || EVP_CIPHER_CTX_encrypting(c))
2672 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
2673 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2677 case EVP_CTRL_AEAD_TLS1_AAD:
2678 /* Save the AAD for later use */
2679 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2681 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
2682 gctx->tls_aad_len = arg;
/* Last two AAD bytes carry the TLS record length (big-endian) */
2685 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
2686 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
2687 /* Correct length for explicit IV */
2688 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
2690 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
2691 /* If decrypting correct for tag too */
2692 if (!EVP_CIPHER_CTX_encrypting(c)) {
2693 if (len < EVP_GCM_TLS_TAG_LEN)
2695 len -= EVP_GCM_TLS_TAG_LEN;
/* Write the corrected plaintext length back into the AAD copy */
2697 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
2698 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
2700 /* Extra padding: tag appended to record */
2701 return EVP_GCM_TLS_TAG_LEN;
/* EVP_CTRL_COPY: deep-copy key and IV pointers into the new context */
2705 EVP_CIPHER_CTX *out = ptr;
2706 EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
2707 if (gctx->gcm.key) {
2708 if (gctx->gcm.key != &gctx->ks)
2710 gctx_out->gcm.key = &gctx_out->ks;
2712 if (gctx->iv == EVP_CIPHER_CTX_iv_noconst(c))
2713 gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
/* Source IV was heap-allocated: duplicate it for the copy */
2715 gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
2716 if (gctx_out->iv == NULL)
2718 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
/*
 * Key/IV initialisation for AES-GCM.  Picks the fastest available AES
 * implementation (hardware AES, bit-sliced, vector-permute, then plain C),
 * expands the encrypt key schedule, binds it to the GCM128 context, and
 * records an optional ctr32 stream function for the bulk path.  A NULL key
 * with a non-NULL iv just (re)sets the IV.
 */
2729 static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2730 const unsigned char *iv, int enc)
2732 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
2737 #ifdef HWAES_CAPABLE
2738 if (HWAES_CAPABLE) {
2739 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2741 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2742 (block128_f) HWAES_encrypt);
2743 # ifdef HWAES_ctr32_encrypt_blocks
2744 gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2751 #ifdef BSAES_CAPABLE
2752 if (BSAES_CAPABLE) {
2753 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2755 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2756 (block128_f) AES_encrypt);
/* Bit-sliced implementation only accelerates the CTR bulk path */
2757 gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
2761 #ifdef VPAES_CAPABLE
2762 if (VPAES_CAPABLE) {
2763 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2765 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2766 (block128_f) vpaes_encrypt);
2771 (void)0; /* terminate potentially open 'else' */
/* Portable C fallback */
2773 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2775 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2776 (block128_f) AES_encrypt);
2778 gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
2785 * If we have an iv can set it directly, otherwise use saved IV.
2787 if (iv == NULL && gctx->iv_set)
2790 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
2795 /* If key set use IV, otherwise copy */
2797 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
/* Key not set yet: stash the IV for when it is */
2799 memcpy(gctx->iv, iv, gctx->ivlen);
2807  * Handle TLS GCM packet format. This consists of the last portion of the IV
2808  * followed by the payload and finally the tag. On encrypt generate IV,
2809  * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
/*
 * Returns the number of bytes written (payload + explicit IV + tag on
 * encrypt), or a negative/zero value on failure — exact failure value is
 * outside this view.  Must operate in place (out == in).
 */
2813 static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2814 const unsigned char *in, size_t len)
2816 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
2818 /* Encrypt/decrypt must be performed in place */
2820 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
2823 * Set IV from start of buffer or generate IV and write to start of
2826 if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CIPHER_CTX_encrypting(ctx) ?
2827 EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
2828 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
/* Feed the TLS AAD saved by EVP_CTRL_AEAD_TLS1_AAD */
2831 if (CRYPTO_gcm128_aad(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
2834 /* Fix buffer and length to point to payload */
2835 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
2836 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
2837 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
2838 if (EVP_CIPHER_CTX_encrypting(ctx)) {
2839 /* Encrypt payload */
2842 #if defined(AES_GCM_ASM)
/* Bulk stitched AES-GCM assembly path; remainder handled below */
2843 if (len >= 32 && AES_GCM_ASM(gctx)) {
2844 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
2847 bulk = AES_gcm_encrypt(in, out, len,
2849 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2850 gctx->gcm.len.u[1] += bulk;
2853 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
2856 len - bulk, gctx->ctr))
2860 #if defined(AES_GCM_ASM2)
2861 if (len >= 32 && AES_GCM_ASM2(gctx)) {
2862 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
2865 bulk = AES_gcm_encrypt(in, out, len,
2867 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2868 gctx->gcm.len.u[1] += bulk;
2871 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
2872 in + bulk, out + bulk, len - bulk))
2876 /* Finally write tag */
2877 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
2878 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
/* Decrypt path: same bulk/remainder split, then verify the tag */
2883 #if defined(AES_GCM_ASM)
2884 if (len >= 16 && AES_GCM_ASM(gctx)) {
2885 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
2888 bulk = AES_gcm_decrypt(in, out, len,
2890 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2891 gctx->gcm.len.u[1] += bulk;
2894 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
2897 len - bulk, gctx->ctr))
2901 #if defined(AES_GCM_ASM2)
2902 if (len >= 16 && AES_GCM_ASM2(gctx)) {
2903 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
2906 bulk = AES_gcm_decrypt(in, out, len,
2908 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2909 gctx->gcm.len.u[1] += bulk;
2912 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
2913 in + bulk, out + bulk, len - bulk))
/* Compute the expected tag and compare in constant time */
2917 CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
2918 EVP_GCM_TLS_TAG_LEN);
2919 /* If tag mismatch wipe buffer */
2920 if (CRYPTO_memcmp(EVP_CIPHER_CTX_buf_noconst(ctx), in + len,
2921 EVP_GCM_TLS_TAG_LEN)) {
2922 OPENSSL_cleanse(out, len);
/* Require fresh AAD for the next record */
2930 gctx->tls_aad_len = -1;
/*
 * General-purpose AES-GCM Update/Final entry point.
 * - TLS records (tls_aad_len >= 0) are delegated to aes_gcm_tls_cipher().
 * - out == NULL (with in != NULL) feeds 'in' as AAD.
 * - in != NULL encrypts/decrypts payload, using stitched assembly bulk
 *   paths when available.
 * - in == NULL is the Final call: verify the tag on decrypt, or emit it
 *   into the context buffer on encrypt.
 */
2934 static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2935 const unsigned char *in, size_t len)
2937 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
2938 /* If not set up, return error */
2942 if (gctx->tls_aad_len >= 0)
2943 return aes_gcm_tls_cipher(ctx, out, in, len);
/* AAD-only call (out == NULL) */
2949 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
2951 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
2954 #if defined(AES_GCM_ASM)
2955 if (len >= 32 && AES_GCM_ASM(gctx)) {
/* Align to a block boundary before handing off to the bulk routine */
2956 size_t res = (16 - gctx->gcm.mres) % 16;
2958 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
2961 bulk = AES_gcm_encrypt(in + res,
2962 out + res, len - res,
2963 gctx->gcm.key, gctx->gcm.Yi.c,
2965 gctx->gcm.len.u[1] += bulk;
2969 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
2972 len - bulk, gctx->ctr))
2976 #if defined(AES_GCM_ASM2)
2977 if (len >= 32 && AES_GCM_ASM2(gctx)) {
2978 size_t res = (16 - gctx->gcm.mres) % 16;
2980 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
2983 bulk = AES_gcm_encrypt(in + res,
2984 out + res, len - res,
2985 gctx->gcm.key, gctx->gcm.Yi.c,
2987 gctx->gcm.len.u[1] += bulk;
2991 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
2992 in + bulk, out + bulk, len - bulk))
/* Decrypt payload path, mirroring the encrypt structure */
2998 #if defined(AES_GCM_ASM)
2999 if (len >= 16 && AES_GCM_ASM(gctx)) {
3000 size_t res = (16 - gctx->gcm.mres) % 16;
3002 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3005 bulk = AES_gcm_decrypt(in + res,
3006 out + res, len - res,
3008 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3009 gctx->gcm.len.u[1] += bulk;
3013 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3016 len - bulk, gctx->ctr))
3020 #if defined(AES_GCM_ASM2)
3021 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3022 size_t res = (16 - gctx->gcm.mres) % 16;
3024 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3027 bulk = AES_gcm_decrypt(in + res,
3028 out + res, len - res,
3030 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3031 gctx->gcm.len.u[1] += bulk;
3035 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3036 in + bulk, out + bulk, len - bulk))
/* Final call (in == NULL): verify or emit the authentication tag */
3042 if (!EVP_CIPHER_CTX_encrypting(ctx)) {
3043 if (gctx->taglen < 0)
3045 if (CRYPTO_gcm128_finish(&gctx->gcm,
3046 EVP_CIPHER_CTX_buf_noconst(ctx),
3052 CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx), 16);
3054 /* Don't reuse the IV */
/*
 * Flag set shared by the AEAD/custom-IV modes below: the cipher manages its
 * own IV, has a fully custom cipher routine, must always be init-called,
 * needs ctrl-time init, and supplies a custom copy handler.
 */
3061 #define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
3062 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3063 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3064 | EVP_CIPH_CUSTOM_COPY)
/* AES-GCM EVP_CIPHER tables for 128/192/256-bit keys, 12-byte default IV */
3066 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
3067 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3068 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
3069 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3070 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
3071 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
/*
 * Control handler for AES-XTS: supports context copy (re-pointing the two
 * key-schedule pointers at the destination's own storage when they pointed
 * at the source's) and EVP_CTRL_INIT, which clears key1/key2 — those being
 * non-NULL is how aes_xts_cipher() knows key and IV were both set.
 */
3073 static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3075 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,c);
3076 if (type == EVP_CTRL_COPY) {
3077 EVP_CIPHER_CTX *out = ptr;
3078 EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);
3079 if (xctx->xts.key1) {
3080 if (xctx->xts.key1 != &xctx->ks1)
3082 xctx_out->xts.key1 = &xctx_out->ks1;
3084 if (xctx->xts.key2) {
3085 if (xctx->xts.key2 != &xctx->ks2)
3087 xctx_out->xts.key2 = &xctx_out->ks2;
3090 } else if (type != EVP_CTRL_INIT)
3092 /* key1 and key2 are used as an indicator both key and IV are set */
3093 xctx->xts.key1 = NULL;
3094 xctx->xts.key2 = NULL;
/*
 * Key/IV initialisation for AES-XTS.  The supplied key is two AES keys
 * concatenated: the first half keys the data cipher (encrypt or decrypt
 * schedule depending on direction), the second half always keys the tweak
 * encryptor.  key_length * 4 converts "half the key, in bytes" to bits.
 * Implementation selection mirrors the GCM init: HWAES, then vector-permute,
 * then plain C, with optional dedicated XTS stream routines.
 */
3098 static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3099 const unsigned char *iv, int enc)
3101 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3108 xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
3110 xctx->stream = NULL;
3112 /* key_len is two AES keys */
3113 #ifdef HWAES_CAPABLE
3114 if (HWAES_CAPABLE) {
3116 HWAES_set_encrypt_key(key,
3117 EVP_CIPHER_CTX_key_length(ctx) * 4,
3119 xctx->xts.block1 = (block128_f) HWAES_encrypt;
3120 # ifdef HWAES_xts_encrypt
3121 xctx->stream = HWAES_xts_encrypt;
3124 HWAES_set_decrypt_key(key,
3125 EVP_CIPHER_CTX_key_length(ctx) * 4,
3127 xctx->xts.block1 = (block128_f) HWAES_decrypt;
3128 # ifdef HWAES_xts_decrypt
3129 xctx->stream = HWAES_xts_decrypt;
/* Tweak key (second half) is always an encrypt schedule */
3133 HWAES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3134 EVP_CIPHER_CTX_key_length(ctx) * 4,
3136 xctx->xts.block2 = (block128_f) HWAES_encrypt;
3138 xctx->xts.key1 = &xctx->ks1;
3142 #ifdef BSAES_CAPABLE
3144 xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
3147 #ifdef VPAES_CAPABLE
3148 if (VPAES_CAPABLE) {
3150 vpaes_set_encrypt_key(key,
3151 EVP_CIPHER_CTX_key_length(ctx) * 4,
3153 xctx->xts.block1 = (block128_f) vpaes_encrypt;
3155 vpaes_set_decrypt_key(key,
3156 EVP_CIPHER_CTX_key_length(ctx) * 4,
3158 xctx->xts.block1 = (block128_f) vpaes_decrypt;
3161 vpaes_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3162 EVP_CIPHER_CTX_key_length(ctx) * 4,
3164 xctx->xts.block2 = (block128_f) vpaes_encrypt;
3166 xctx->xts.key1 = &xctx->ks1;
3170 (void)0; /* terminate potentially open 'else' */
/* Portable C fallback */
3173 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
3175 xctx->xts.block1 = (block128_f) AES_encrypt;
3177 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
3179 xctx->xts.block1 = (block128_f) AES_decrypt;
3182 AES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3183 EVP_CIPHER_CTX_key_length(ctx) * 4,
3185 xctx->xts.block2 = (block128_f) AES_encrypt;
3187 xctx->xts.key1 = &xctx->ks1;
/* IV provided: marks the context fully initialised (see aes_xts_ctrl) */
3191 xctx->xts.key2 = &xctx->ks2;
3192 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
/*
 * AES-XTS data path.  Requires both key schedules to be installed and at
 * least one full block of input (XTS is undefined for < 16 bytes).  Uses a
 * dedicated stream routine when one was installed at init, otherwise the
 * generic CRYPTO_xts128_encrypt() (which also handles decrypt via the
 * final 'encrypting' argument).
 */
3198 static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3199 const unsigned char *in, size_t len)
3201 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3202 if (!xctx->xts.key1 || !xctx->xts.key2)
3204 if (!out || !in || len < AES_BLOCK_SIZE)
3207 (*xctx->stream) (in, out, len,
3208 xctx->xts.key1, xctx->xts.key2,
3209 EVP_CIPHER_CTX_iv_noconst(ctx));
3210 else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx),
3212 EVP_CIPHER_CTX_encrypting(ctx)))
/* XTS keeps no heap state, so no cleanup handler is needed */
3217 #define aes_xts_cleanup NULL
3219 #define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
3220 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3221 | EVP_CIPH_CUSTOM_COPY)
/* XTS only exists for 128- and 256-bit (i.e. 256/512-bit combined) keys */
3223 BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
3224 BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
/*
 * Control handler for AES-CCM: TLS AAD installation with record-length
 * fix-up, fixed-IV installation, IV length (via L) and L parameter setting,
 * tag set/get (RFC 3610's M parameter), and context copy.
 * NOTE(review): interior error-return lines are elided from this view;
 * code left byte-identical.
 */
3226 static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3228 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
/* -1 marks "no TLS AAD set" */
3237 cctx->tls_aad_len = -1;
3240 case EVP_CTRL_AEAD_TLS1_AAD:
3241 /* Save the AAD for later use */
3242 if (arg != EVP_AEAD_TLS1_AAD_LEN)
3244 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3245 cctx->tls_aad_len = arg;
/* Last two AAD bytes carry the TLS record length (big-endian) */
3248 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
3249 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
3250 /* Correct length for explicit IV */
3251 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
3253 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
3254 /* If decrypting correct for tag too */
3255 if (!EVP_CIPHER_CTX_encrypting(c)) {
3260 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
3261 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
3263 /* Extra padding: tag appended to record */
3266 case EVP_CTRL_CCM_SET_IV_FIXED:
3267 /* Sanity check length */
3268 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
3270 /* Just copy to first part of IV */
3271 memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
3274 case EVP_CTRL_AEAD_SET_IVLEN:
/* CCM nonce length is expressed through L: nonce = 15 - L bytes */
3277 case EVP_CTRL_CCM_SET_L:
3278 if (arg < 2 || arg > 8)
3283 case EVP_CTRL_AEAD_SET_TAG:
/* Tag length M must be an even value in [4, 16] (RFC 3610) */
3284 if ((arg & 1) || arg < 4 || arg > 16)
3286 if (EVP_CIPHER_CTX_encrypting(c) && ptr)
/* Decrypt: stash the expected tag in the context buffer */
3290 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3295 case EVP_CTRL_AEAD_GET_TAG:
3296 if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
3298 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
/* EVP_CTRL_COPY: repoint the key-schedule pointer at the copy's storage */
3307 EVP_CIPHER_CTX *out = ptr;
3308 EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
3309 if (cctx->ccm.key) {
3310 if (cctx->ccm.key != &cctx->ks)
3312 cctx_out->ccm.key = &cctx_out->ks;
/*
 * Key/IV initialisation for AES-CCM.  Expands the encrypt key schedule with
 * the fastest available implementation (HWAES, vector-permute, plain C) and
 * binds it plus the current M/L parameters to the CCM128 context.  A
 * non-NULL iv is copied into the context's IV storage (first 15 - L bytes).
 */
3323 static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3324 const unsigned char *iv, int enc)
3326 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3331 #ifdef HWAES_CAPABLE
3332 if (HWAES_CAPABLE) {
3333 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3336 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3337 &cctx->ks, (block128_f) HWAES_encrypt);
3343 #ifdef VPAES_CAPABLE
3344 if (VPAES_CAPABLE) {
3345 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3347 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3348 &cctx->ks, (block128_f) vpaes_encrypt);
/* Portable C fallback */
3354 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3356 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3357 &cctx->ks, (block128_f) AES_encrypt);
/* Nonce occupies 15 - L bytes of the IV field */
3362 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
/*
 * TLS record path for AES-CCM: explicit IV || payload || tag, processed in
 * place.  On encrypt the explicit IV is taken from the sequence number at
 * the start of the saved AAD; on both directions the explicit IV completes
 * the nonce.  Returns total record length on encrypt success; on decrypt a
 * tag mismatch wipes the output (failure return value elided from view).
 */
3368 static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3369 const unsigned char *in, size_t len)
3371 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3372 CCM128_CONTEXT *ccm = &cctx->ccm;
3373 /* Encrypt/decrypt must be performed in place */
3374 if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
3376 /* If encrypting set explicit IV from sequence number (start of AAD) */
3377 if (EVP_CIPHER_CTX_encrypting(ctx))
3378 memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
3379 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3380 /* Get rest of IV from explicit IV */
3381 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
3382 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3383 /* Correct length value */
3384 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3385 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
/* Feed the saved TLS AAD into the CCM MAC */
3389 CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
3390 /* Fix buffer to point to payload */
3391 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3392 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3393 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3394 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3396 CRYPTO_ccm128_encrypt(ccm, in, out, len))
/* Append the tag after the payload */
3398 if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
3400 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
/* Decrypt, then verify the trailing tag in constant time */
3402 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3404 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3405 unsigned char tag[16];
3406 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3407 if (!CRYPTO_memcmp(tag, in + len, cctx->M))
/* Authentication failed: do not release the plaintext */
3411 OPENSSL_cleanse(out, len);
/*
 * General-purpose AES-CCM Update/Final entry point.  TLS records are
 * delegated to aes_ccm_tls_cipher().  Because CCM needs the total message
 * length up front, the length must be set (via a setiv) before AAD, and is
 * set lazily on the first payload call otherwise.  out == NULL feeds AAD;
 * in == NULL is the no-op Final (CCM produces no trailing output).
 */
3416 static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3417 const unsigned char *in, size_t len)
3419 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3420 CCM128_CONTEXT *ccm = &cctx->ccm;
3421 /* If not set up, return error */
3425 if (cctx->tls_aad_len >= 0)
3426 return aes_ccm_tls_cipher(ctx, out, in, len);
3428 /* EVP_*Final() doesn't return any data */
3429 if (in == NULL && out != NULL)
/* Decrypt requires the expected tag to have been set beforehand */
3435 if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
3439 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3445 /* If have AAD need message length */
3446 if (!cctx->len_set && len)
3448 CRYPTO_ccm128_aad(ccm, in, len);
3451 /* If not set length yet do it */
3452 if (!cctx->len_set) {
3453 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3458 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3459 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3461 CRYPTO_ccm128_encrypt(ccm, in, out, len))
/* Decrypt, then compare computed tag with the one saved via SET_TAG */
3467 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3469 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3470 unsigned char tag[16];
3471 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3472 if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
/* Authentication failed: wipe the plaintext */
3478 OPENSSL_cleanse(out, len);
/* CCM keeps no heap state, so no cleanup handler is needed */
3486 #define aes_ccm_cleanup NULL
/* AES-CCM EVP_CIPHER tables for 128/192/256-bit keys, 12-byte default IV */
3488 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
3489 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3490 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
3491 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3492 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
3493 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3500 /* Indicates if IV has been set */
/*
 * Key/IV initialisation for AES key wrap (RFC 3394 / RFC 5649).  Wrapping
 * uses an encrypt schedule, unwrapping a decrypt schedule.  A supplied IV
 * is copied into the context and wctx->iv is pointed at it; the IV pointer
 * doubles as the "use caller IV" flag in aes_wrap_cipher().
 */
3504 static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3505 const unsigned char *iv, int enc)
3507 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3511 if (EVP_CIPHER_CTX_encrypting(ctx))
3512 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3515 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3521 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, EVP_CIPHER_CTX_iv_length(ctx));
3522 wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx);
/*
 * AES key wrap/unwrap data path.  The padded variant (RFC 5649) is selected
 * by the cipher's 4-byte IV length; the unpadded variant (RFC 3394) uses an
 * 8-byte IV.  Rejects overlapping buffers and enforces the per-variant
 * length constraints before dispatching to the CRYPTO_128_(un)wrap(_pad)
 * routines.  Returns the produced length, or -1 on failure.
 */
3527 static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3528 const unsigned char *in, size_t inlen)
3530 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3532 /* AES wrap with padding has IV length of 4, without padding 8 */
3533 int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;
3534 /* No final operation so always return zero length */
3537 /* Input length must always be non-zero */
3540 /* If decrypting need at least 16 bytes and multiple of 8 */
3541 if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
3543 /* If not padding input must be multiple of 8 */
3544 if (!pad && inlen & 0x7)
3546 if (is_partially_overlapping(out, in, inlen)) {
3547 EVPerr(EVP_F_AES_WRAP_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
/* out == NULL: caller is querying the output length */
3551 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3552 /* If padding round up to multiple of 8 */
3554 inlen = (inlen + 7) / 8 * 8;
3559 * If not padding output will be exactly 8 bytes smaller than
3560 * input. If padding it will be at least 8 bytes smaller but we
3561 * don't know how much.
/* Padded (RFC 5649) variant */
3567 if (EVP_CIPHER_CTX_encrypting(ctx))
3568 rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
3570 (block128_f) AES_encrypt);
3572 rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
3574 (block128_f) AES_decrypt);
/* Unpadded (RFC 3394) variant */
3576 if (EVP_CIPHER_CTX_encrypting(ctx))
3577 rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
3578 out, in, inlen, (block128_f) AES_encrypt);
3580 rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
3581 out, in, inlen, (block128_f) AES_decrypt);
3583 return rv ? (int)rv : -1;
/*
 * EVP_CIPHER tables and accessors for the six key-wrap variants:
 * {128,192,256}-bit keys, each with and without RFC 5649 padding.
 * Block size is 8 (the wrap algorithms' semiblock); the padded variants
 * advertise a 4-byte IV, the unpadded ones 8 bytes — aes_wrap_cipher()
 * uses that IV length to tell the variants apart.
 */
3586 #define WRAP_FLAGS (EVP_CIPH_WRAP_MODE \
3587 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3588 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)
3590 static const EVP_CIPHER aes_128_wrap = {
3592 8, 16, 8, WRAP_FLAGS,
3593 aes_wrap_init_key, aes_wrap_cipher,
3595 sizeof(EVP_AES_WRAP_CTX),
3596 NULL, NULL, NULL, NULL
3599 const EVP_CIPHER *EVP_aes_128_wrap(void)
3601 return &aes_128_wrap;
3604 static const EVP_CIPHER aes_192_wrap = {
3606 8, 24, 8, WRAP_FLAGS,
3607 aes_wrap_init_key, aes_wrap_cipher,
3609 sizeof(EVP_AES_WRAP_CTX),
3610 NULL, NULL, NULL, NULL
3613 const EVP_CIPHER *EVP_aes_192_wrap(void)
3615 return &aes_192_wrap;
3618 static const EVP_CIPHER aes_256_wrap = {
3620 8, 32, 8, WRAP_FLAGS,
3621 aes_wrap_init_key, aes_wrap_cipher,
3623 sizeof(EVP_AES_WRAP_CTX),
3624 NULL, NULL, NULL, NULL
3627 const EVP_CIPHER *EVP_aes_256_wrap(void)
3629 return &aes_256_wrap;
/* Padded (RFC 5649) variants: 4-byte IV selects wrap-with-padding */
3632 static const EVP_CIPHER aes_128_wrap_pad = {
3633 NID_id_aes128_wrap_pad,
3634 8, 16, 4, WRAP_FLAGS,
3635 aes_wrap_init_key, aes_wrap_cipher,
3637 sizeof(EVP_AES_WRAP_CTX),
3638 NULL, NULL, NULL, NULL
3641 const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
3643 return &aes_128_wrap_pad;
3646 static const EVP_CIPHER aes_192_wrap_pad = {
3647 NID_id_aes192_wrap_pad,
3648 8, 24, 4, WRAP_FLAGS,
3649 aes_wrap_init_key, aes_wrap_cipher,
3651 sizeof(EVP_AES_WRAP_CTX),
3652 NULL, NULL, NULL, NULL
3655 const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
3657 return &aes_192_wrap_pad;
3660 static const EVP_CIPHER aes_256_wrap_pad = {
3661 NID_id_aes256_wrap_pad,
3662 8, 32, 4, WRAP_FLAGS,
3663 aes_wrap_init_key, aes_wrap_cipher,
3665 sizeof(EVP_AES_WRAP_CTX),
3666 NULL, NULL, NULL, NULL
3669 const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
3671 return &aes_256_wrap_pad;
3674 #ifndef OPENSSL_NO_OCB
/*
 * Control handler for AES-OCB: init (defaults + buffer resets), IV length
 * (1..15 per RFC 7253), tag set (decrypt)/get (encrypt) with a length that
 * must match the configured taglen, and context copy via
 * CRYPTO_ocb128_copy_ctx().
 */
3675 static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3677 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
3678 EVP_CIPHER_CTX *newc;
3679 EVP_AES_OCB_CTX *new_octx;
/* EVP_CTRL_INIT: defaults and empty partial-block buffers */
3685 octx->ivlen = EVP_CIPHER_CTX_iv_length(c);
3686 octx->iv = EVP_CIPHER_CTX_iv_noconst(c);
3688 octx->data_buf_len = 0;
3689 octx->aad_buf_len = 0;
3692 case EVP_CTRL_AEAD_SET_IVLEN:
3693 /* IV len must be 1 to 15 */
3694 if (arg <= 0 || arg > 15)
3700 case EVP_CTRL_AEAD_SET_TAG:
3702 /* Tag len must be 0 to 16 */
3703 if (arg < 0 || arg > 16)
/* Supplying tag bytes is decrypt-only and must match taglen */
3709 if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c))
3711 memcpy(octx->tag, ptr, arg);
3714 case EVP_CTRL_AEAD_GET_TAG:
3715 if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c))
3718 memcpy(ptr, octx->tag, arg);
/* EVP_CTRL_COPY: deep-copy OCB state, rebinding key-schedule pointers */
3722 newc = (EVP_CIPHER_CTX *)ptr;
3723 new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
3724 return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
3725 &new_octx->ksenc.ks,
3726 &new_octx->ksdec.ks);
/*
 * Prototypes for optional hardware-assisted OCB bulk routines.  When the
 * platform's assembly does not provide them, the names are defined to a
 * NULL ocb128_f so CRYPTO_ocb128_init() falls back to the generic path.
 */
3734 # ifdef HWAES_CAPABLE
3735 # ifdef HWAES_ocb_encrypt
3736 void HWAES_ocb_encrypt(const unsigned char *in, unsigned char *out,
3737 size_t blocks, const void *key,
3738 size_t start_block_num,
3739 unsigned char offset_i[16],
3740 const unsigned char L_[][16],
3741 unsigned char checksum[16]);
3743 # define HWAES_ocb_encrypt ((ocb128_f)NULL)
3745 # ifdef HWAES_ocb_decrypt
3746 void HWAES_ocb_decrypt(const unsigned char *in, unsigned char *out,
3747 size_t blocks, const void *key,
3748 size_t start_block_num,
3749 unsigned char offset_i[16],
3750 const unsigned char L_[][16],
3751 unsigned char checksum[16]);
3753 # define HWAES_ocb_decrypt ((ocb128_f)NULL)
/*
 * Key/IV initialisation for AES-OCB.  OCB decryption needs both the encrypt
 * and decrypt key schedules, so both are always expanded (see comment
 * below).  Implementation selection: HWAES (with optional bulk OCB
 * routines), vector-permute, then plain C.
 */
3757 static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3758 const unsigned char *iv, int enc)
3760 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
3766 * We set both the encrypt and decrypt key here because decrypt
3767 * needs both. We could possibly optimise to remove setting the
3768 * decrypt for an encryption operation.
3770 # ifdef HWAES_CAPABLE
3771 if (HWAES_CAPABLE) {
3772 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3774 HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3776 if (!CRYPTO_ocb128_init(&octx->ocb,
3777 &octx->ksenc.ks, &octx->ksdec.ks,
3778 (block128_f) HWAES_encrypt,
3779 (block128_f) HWAES_decrypt,
3780 enc ? HWAES_ocb_encrypt
3781 : HWAES_ocb_decrypt))
3786 # ifdef VPAES_CAPABLE
3787 if (VPAES_CAPABLE) {
3788 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3790 vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3792 if (!CRYPTO_ocb128_init(&octx->ocb,
3793 &octx->ksenc.ks, &octx->ksdec.ks,
3794 (block128_f) vpaes_encrypt,
3795 (block128_f) vpaes_decrypt,
/* Portable C fallback */
3801 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3803 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3805 if (!CRYPTO_ocb128_init(&octx->ocb,
3806 &octx->ksenc.ks, &octx->ksdec.ks,
3807 (block128_f) AES_encrypt,
3808 (block128_f) AES_decrypt,
3815 * If we have an iv we can set it directly, otherwise use saved IV.
3817 if (iv == NULL && octx->iv_set)
3820 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
3827 /* If key set use IV, otherwise copy */
3829 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
/* Key not set yet: stash the IV for when it is */
3831 memcpy(octx->iv, iv, octx->ivlen);
/*
 * AES-OCB Update/Final entry point.  The low-level OCB routines only accept
 * whole 16-byte blocks (for both payload and AAD), so partial blocks are
 * buffered in the context (data_buf / aad_buf) across calls and flushed
 * when filled.  out == NULL feeds AAD; in == NULL is the Final call, which
 * flushes both buffers and verifies (decrypt) or produces (encrypt) the tag.
 * Returns the number of bytes written for data calls.
 */
3837 static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3838 const unsigned char *in, size_t len)
3842 int written_len = 0;
3843 size_t trailing_len;
3844 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
3846 /* If IV or Key not set then return error */
3855 * Need to ensure we are only passing full blocks to low level OCB
3856 * routines. We do it here rather than in EVP_EncryptUpdate/
3857 * EVP_DecryptUpdate because we need to pass full blocks of AAD too
3858 * and those routines don't support that
3861 /* Are we dealing with AAD or normal data here? */
3863 buf = octx->aad_buf;
3864 buf_len = &(octx->aad_buf_len);
3866 buf = octx->data_buf;
3867 buf_len = &(octx->data_buf_len);
3869 if (is_partially_overlapping(out + *buf_len, in, len)) {
3870 EVPerr(EVP_F_AES_OCB_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
3876 * If we've got a partially filled buffer from a previous call then
3877 * use that data first
3880 unsigned int remaining;
3882 remaining = AES_BLOCK_SIZE - (*buf_len);
3883 if (remaining > len) {
/* Still not a full block: just accumulate and return */
3884 memcpy(buf + (*buf_len), in, len);
3888 memcpy(buf + (*buf_len), in, remaining);
3891 * If we get here we've filled the buffer, so process it
3896 if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
3898 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
3899 if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
3903 if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
3907 written_len = AES_BLOCK_SIZE;
3910 out += AES_BLOCK_SIZE;
3913 /* Do we have a partial block to handle at the end? */
3914 trailing_len = len % AES_BLOCK_SIZE;
3917 * If we've got some full blocks to handle, then process these first
3919 if (len != trailing_len) {
3921 if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
3923 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
3924 if (!CRYPTO_ocb128_encrypt
3925 (&octx->ocb, in, out, len - trailing_len))
3928 if (!CRYPTO_ocb128_decrypt
3929 (&octx->ocb, in, out, len - trailing_len))
3932 written_len += len - trailing_len;
3933 in += len - trailing_len;
3936 /* Handle any trailing partial block */
3937 if (trailing_len > 0) {
3938 memcpy(buf, in, trailing_len);
3939 *buf_len = trailing_len;
3945 * First of all empty the buffer of any partial block that we might
3946 * have been provided - both for data and AAD
3948 if (octx->data_buf_len > 0) {
3949 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3950 if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
3951 octx->data_buf_len))
3954 if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
3955 octx->data_buf_len))
3958 written_len = octx->data_buf_len;
3959 octx->data_buf_len = 0;
3961 if (octx->aad_buf_len > 0) {
3962 if (!CRYPTO_ocb128_aad
3963 (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
3965 octx->aad_buf_len = 0;
3967 /* If decrypting then verify */
3968 if (!EVP_CIPHER_CTX_encrypting(ctx)) {
3969 if (octx->taglen < 0)
3971 if (CRYPTO_ocb128_finish(&octx->ocb,
3972 octx->tag, octx->taglen) != 0)
3977 /* If encrypting then just get the tag */
3978 if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
3980 /* Don't reuse the IV */
/* Release/scrub per-context OCB state via CRYPTO_ocb128_cleanup() */
3986 static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
3988 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
3989 CRYPTO_ocb128_cleanup(&octx->ocb);
/* AES-OCB EVP_CIPHER tables: 16-byte block, 12-byte default IV */
3993 BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
3994 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3995 BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
3996 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3997 BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
3998 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3999 #endif /* OPENSSL_NO_OCB */