2 * Copyright 2001-2018 The OpenSSL Project Authors. All Rights Reserved.
4 * Licensed under the OpenSSL license (the "License"). You may not use
5 * this file except in compliance with the License. You can obtain a copy
6 * in the file LICENSE in the source distribution or at
7 * https://www.openssl.org/source/license.html
10 #include <openssl/opensslconf.h>
11 #include <openssl/crypto.h>
12 #include <openssl/evp.h>
13 #include <openssl/err.h>
16 #include <openssl/aes.h>
17 #include "internal/evp_int.h"
18 #include "modes_lcl.h"
19 #include <openssl/rand.h>
38 } ks; /* AES key schedule to use */
39 int key_set; /* Set if key initialised */
40 int iv_set; /* Set if an iv is set */
42 unsigned char *iv; /* Temporary IV store */
43 int ivlen; /* IV length */
45 int iv_gen; /* It is OK to generate IVs */
46 int tls_aad_len; /* TLS AAD length */
54 } ks1, ks2; /* AES key schedules to use */
56 void (*stream) (const unsigned char *in,
57 unsigned char *out, size_t length,
58 const AES_KEY *key1, const AES_KEY *key2,
59 const unsigned char iv[16]);
66 } ks; /* AES key schedule to use */
67 int key_set; /* Set if key initialised */
68 int iv_set; /* Set if an iv is set */
69 int tag_set; /* Set if tag is valid */
70 int len_set; /* Set if message length set */
71 int L, M; /* L and M parameters from RFC3610 */
72 int tls_aad_len; /* TLS AAD length */
77 #ifndef OPENSSL_NO_OCB
82 } ksenc; /* AES key schedule to use for encryption */
86 } ksdec; /* AES key schedule to use for decryption */
87 int key_set; /* Set if key initialised */
88 int iv_set; /* Set if an iv is set */
90 unsigned char *iv; /* Temporary IV store */
91 unsigned char tag[16];
92 unsigned char data_buf[16]; /* Store partial data blocks */
93 unsigned char aad_buf[16]; /* Store partial AAD blocks */
96 int ivlen; /* IV length */
101 #define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))
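/*
 * Illustrative values (simple arithmetic, not from the original source):
 * on an LP64 platform sizeof(size_t) == 8, so MAXBITCHUNK is 1 << 60; on a
 * 32-bit platform it is 1 << 28. Presumably this caps the chunk processed
 * per call so that bit-length arithmetic (len * 8) cannot overflow a size_t.
 */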
104 int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
106 int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
109 void vpaes_encrypt(const unsigned char *in, unsigned char *out,
111 void vpaes_decrypt(const unsigned char *in, unsigned char *out,
114 void vpaes_cbc_encrypt(const unsigned char *in,
117 const AES_KEY *key, unsigned char *ivec, int enc);
120 void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
121 size_t length, const AES_KEY *key,
122 unsigned char ivec[16], int enc);
123 void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
124 size_t len, const AES_KEY *key,
125 const unsigned char ivec[16]);
126 void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
127 size_t len, const AES_KEY *key1,
128 const AES_KEY *key2, const unsigned char iv[16]);
129 void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
130 size_t len, const AES_KEY *key1,
131 const AES_KEY *key2, const unsigned char iv[16]);
134 void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
135 size_t blocks, const AES_KEY *key,
136 const unsigned char ivec[AES_BLOCK_SIZE]);
139 void AES_xts_encrypt(const unsigned char *inp, unsigned char *out, size_t len,
140 const AES_KEY *key1, const AES_KEY *key2,
141 const unsigned char iv[16]);
142 void AES_xts_decrypt(const unsigned char *inp, unsigned char *out, size_t len,
143 const AES_KEY *key1, const AES_KEY *key2,
144 const unsigned char iv[16]);
147 #if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
148 # include "ppc_arch.h"
150 # define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
152 # define HWAES_CAPABLE (OPENSSL_ppccap_P & PPC_CRYPTO207)
153 # define HWAES_set_encrypt_key aes_p8_set_encrypt_key
154 # define HWAES_set_decrypt_key aes_p8_set_decrypt_key
155 # define HWAES_encrypt aes_p8_encrypt
156 # define HWAES_decrypt aes_p8_decrypt
157 # define HWAES_cbc_encrypt aes_p8_cbc_encrypt
158 # define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
159 # define HWAES_xts_encrypt aes_p8_xts_encrypt
160 # define HWAES_xts_decrypt aes_p8_xts_decrypt
163 #if defined(AES_ASM) && !defined(I386_ONLY) && ( \
164 ((defined(__i386) || defined(__i386__) || \
165 defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2))|| \
166 defined(__x86_64) || defined(__x86_64__) || \
167 defined(_M_AMD64) || defined(_M_X64) )
169 extern unsigned int OPENSSL_ia32cap_P[];
172 # define VPAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
175 # define BSAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
180 # define AESNI_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(57-32)))
182 int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
184 int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
187 void aesni_encrypt(const unsigned char *in, unsigned char *out,
189 void aesni_decrypt(const unsigned char *in, unsigned char *out,
192 void aesni_ecb_encrypt(const unsigned char *in,
194 size_t length, const AES_KEY *key, int enc);
195 void aesni_cbc_encrypt(const unsigned char *in,
198 const AES_KEY *key, unsigned char *ivec, int enc);
200 void aesni_ctr32_encrypt_blocks(const unsigned char *in,
203 const void *key, const unsigned char *ivec);
205 void aesni_xts_encrypt(const unsigned char *in,
208 const AES_KEY *key1, const AES_KEY *key2,
209 const unsigned char iv[16]);
211 void aesni_xts_decrypt(const unsigned char *in,
214 const AES_KEY *key1, const AES_KEY *key2,
215 const unsigned char iv[16]);
217 void aesni_ccm64_encrypt_blocks(const unsigned char *in,
221 const unsigned char ivec[16],
222 unsigned char cmac[16]);
224 void aesni_ccm64_decrypt_blocks(const unsigned char *in,
228 const unsigned char ivec[16],
229 unsigned char cmac[16]);
231 # if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
232 size_t aesni_gcm_encrypt(const unsigned char *in,
235 const void *key, unsigned char ivec[16], u64 *Xi);
236 # define AES_gcm_encrypt aesni_gcm_encrypt
237 size_t aesni_gcm_decrypt(const unsigned char *in,
240 const void *key, unsigned char ivec[16], u64 *Xi);
241 # define AES_gcm_decrypt aesni_gcm_decrypt
242 void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in,
244 # define AES_GCM_ASM(gctx) (gctx->ctr==aesni_ctr32_encrypt_blocks && \
245 gctx->gcm.ghash==gcm_ghash_avx)
246 # define AES_GCM_ASM2(gctx) (gctx->gcm.block==(block128_f)aesni_encrypt && \
247 gctx->gcm.ghash==gcm_ghash_avx)
248 # undef AES_GCM_ASM2 /* minor size optimization */
251 static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
252 const unsigned char *iv, int enc)
255 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
257 mode = EVP_CIPHER_CTX_mode(ctx);
258 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
260 ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
262 dat->block = (block128_f) aesni_decrypt;
263 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
264 (cbc128_f) aesni_cbc_encrypt : NULL;
266 ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
268 dat->block = (block128_f) aesni_encrypt;
269 if (mode == EVP_CIPH_CBC_MODE)
270 dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
271 else if (mode == EVP_CIPH_CTR_MODE)
272 dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
274 dat->stream.cbc = NULL;
278 EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
285 static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
286 const unsigned char *in, size_t len)
288 aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
289 EVP_CIPHER_CTX_iv_noconst(ctx),
290 EVP_CIPHER_CTX_encrypting(ctx));
295 static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
296 const unsigned char *in, size_t len)
298 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
303 aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
304 EVP_CIPHER_CTX_encrypting(ctx));
309 # define aesni_ofb_cipher aes_ofb_cipher
310 static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
311 const unsigned char *in, size_t len);
313 # define aesni_cfb_cipher aes_cfb_cipher
314 static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
315 const unsigned char *in, size_t len);
317 # define aesni_cfb8_cipher aes_cfb8_cipher
318 static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
319 const unsigned char *in, size_t len);
321 # define aesni_cfb1_cipher aes_cfb1_cipher
322 static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
323 const unsigned char *in, size_t len);
325 # define aesni_ctr_cipher aes_ctr_cipher
326 static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
327 const unsigned char *in, size_t len);
329 static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
330 const unsigned char *iv, int enc)
332 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
336 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
338 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
339 gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
341 * If we have an iv we can set it directly, otherwise use saved IV.
343 if (iv == NULL && gctx->iv_set)
346 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
351 /* If key set use IV, otherwise copy */
353 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
355 memcpy(gctx->iv, iv, gctx->ivlen);
362 # define aesni_gcm_cipher aes_gcm_cipher
363 static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
364 const unsigned char *in, size_t len);
366 static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
367 const unsigned char *iv, int enc)
369 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
374 /* key_len is two AES keys */
376 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
378 xctx->xts.block1 = (block128_f) aesni_encrypt;
379 xctx->stream = aesni_xts_encrypt;
381 aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
383 xctx->xts.block1 = (block128_f) aesni_decrypt;
384 xctx->stream = aesni_xts_decrypt;
387 aesni_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
388 EVP_CIPHER_CTX_key_length(ctx) * 4,
390 xctx->xts.block2 = (block128_f) aesni_encrypt;
392 xctx->xts.key1 = &xctx->ks1;
396 xctx->xts.key2 = &xctx->ks2;
397 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
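/*
 * Worked example of the "two AES keys" split above (a sketch, derived from
 * the BLOCK_CIPHER_custom macro below): for AES-128-XTS the EVP key length
 * is 32 bytes, so key_length * 4 = 128 bits are scheduled for each half and
 * the tweak key starts at key + 16. For AES-256-XTS the figures are
 * 64 bytes, 256 bits and key + 32 respectively.
 */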
403 # define aesni_xts_cipher aes_xts_cipher
404 static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
405 const unsigned char *in, size_t len);
407 static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
408 const unsigned char *iv, int enc)
410 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
414 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
416 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
417 &cctx->ks, (block128_f) aesni_encrypt);
418 cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
419 (ccm128_f) aesni_ccm64_decrypt_blocks;
423 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
429 # define aesni_ccm_cipher aes_ccm_cipher
430 static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
431 const unsigned char *in, size_t len);
433 # ifndef OPENSSL_NO_OCB
434 void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out,
435 size_t blocks, const void *key,
436 size_t start_block_num,
437 unsigned char offset_i[16],
438 const unsigned char L_[][16],
439 unsigned char checksum[16]);
440 void aesni_ocb_decrypt(const unsigned char *in, unsigned char *out,
441 size_t blocks, const void *key,
442 size_t start_block_num,
443 unsigned char offset_i[16],
444 const unsigned char L_[][16],
445 unsigned char checksum[16]);
447 static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
448 const unsigned char *iv, int enc)
450 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
456 * We set both the encrypt and decrypt key here because decrypt
457 * needs both. We could possibly optimise to remove setting the
458 * decrypt for an encryption operation.
460 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
462 aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
464 if (!CRYPTO_ocb128_init(&octx->ocb,
465 &octx->ksenc.ks, &octx->ksdec.ks,
466 (block128_f) aesni_encrypt,
467 (block128_f) aesni_decrypt,
468 enc ? aesni_ocb_encrypt
469 : aesni_ocb_decrypt))
475 * If we have an iv we can set it directly, otherwise use saved IV.
477 if (iv == NULL && octx->iv_set)
480 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
487 /* If key set use IV, otherwise copy */
489 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
491 memcpy(octx->iv, iv, octx->ivlen);
497 # define aesni_ocb_cipher aes_ocb_cipher
498 static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
499 const unsigned char *in, size_t len);
500 # endif /* OPENSSL_NO_OCB */
502 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
503 static const EVP_CIPHER aesni_##keylen##_##mode = { \
504 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
505 flags|EVP_CIPH_##MODE##_MODE, \
507 aesni_##mode##_cipher, \
509 sizeof(EVP_AES_KEY), \
510 NULL,NULL,NULL,NULL }; \
511 static const EVP_CIPHER aes_##keylen##_##mode = { \
512 nid##_##keylen##_##nmode,blocksize, \
514 flags|EVP_CIPH_##MODE##_MODE, \
516 aes_##mode##_cipher, \
518 sizeof(EVP_AES_KEY), \
519 NULL,NULL,NULL,NULL }; \
520 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
521 { return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
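/*
 * Illustrative expansion (a sketch; assumes an invocation such as
 * BLOCK_CIPHER_generic(NID_aes, 128, 16, 16, cbc, cbc, CBC, flags)):
 *
 *     const EVP_CIPHER *EVP_aes_128_cbc(void)
 *     {
 *         return AESNI_CAPABLE ? &aesni_128_cbc : &aes_128_cbc;
 *     }
 *
 * i.e. the AES-NI capability check happens on each EVP_aes_*() call and
 * selects between the two static EVP_CIPHER tables.
 */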
523 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
524 static const EVP_CIPHER aesni_##keylen##_##mode = { \
525 nid##_##keylen##_##mode,blocksize, \
526 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
527 flags|EVP_CIPH_##MODE##_MODE, \
528 aesni_##mode##_init_key, \
529 aesni_##mode##_cipher, \
530 aes_##mode##_cleanup, \
531 sizeof(EVP_AES_##MODE##_CTX), \
532 NULL,NULL,aes_##mode##_ctrl,NULL }; \
533 static const EVP_CIPHER aes_##keylen##_##mode = { \
534 nid##_##keylen##_##mode,blocksize, \
535 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
536 flags|EVP_CIPH_##MODE##_MODE, \
537 aes_##mode##_init_key, \
538 aes_##mode##_cipher, \
539 aes_##mode##_cleanup, \
540 sizeof(EVP_AES_##MODE##_CTX), \
541 NULL,NULL,aes_##mode##_ctrl,NULL }; \
542 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
543 { return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
545 #elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))
547 # include "sparc_arch.h"
549 extern unsigned int OPENSSL_sparcv9cap_P[];
552 * Initial Fujitsu SPARC64 X support
554 # define HWAES_CAPABLE (OPENSSL_sparcv9cap_P[0] & SPARCV9_FJAESX)
555 # define HWAES_set_encrypt_key aes_fx_set_encrypt_key
556 # define HWAES_set_decrypt_key aes_fx_set_decrypt_key
557 # define HWAES_encrypt aes_fx_encrypt
558 # define HWAES_decrypt aes_fx_decrypt
559 # define HWAES_cbc_encrypt aes_fx_cbc_encrypt
560 # define HWAES_ctr32_encrypt_blocks aes_fx_ctr32_encrypt_blocks
562 # define SPARC_AES_CAPABLE (OPENSSL_sparcv9cap_P[1] & CFR_AES)
564 void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
565 void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
566 void aes_t4_encrypt(const unsigned char *in, unsigned char *out,
568 void aes_t4_decrypt(const unsigned char *in, unsigned char *out,
571 * Key-length specific subroutines were chosen for the following reason.
572 * Each SPARC T4 core can execute up to 8 threads which share the core's
573 * resources. Loading as much key material as possible into registers
574 * minimizes references to the shared memory interface, as well as the
575 * number of instructions in inner loops [much needed on T4]. But then
576 * non-key-length-specific routines would require conditional branches
577 * either in inner loops or on subroutine entry. The former is hardly
578 * acceptable, while the latter means the code grows to the size occupied
579 * by multiple key-length specific subroutines anyway, so why fight?
581 void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
582 size_t len, const AES_KEY *key,
583 unsigned char *ivec);
584 void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
585 size_t len, const AES_KEY *key,
586 unsigned char *ivec);
587 void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
588 size_t len, const AES_KEY *key,
589 unsigned char *ivec);
590 void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
591 size_t len, const AES_KEY *key,
592 unsigned char *ivec);
593 void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
594 size_t len, const AES_KEY *key,
595 unsigned char *ivec);
596 void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
597 size_t len, const AES_KEY *key,
598 unsigned char *ivec);
599 void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
600 size_t blocks, const AES_KEY *key,
601 unsigned char *ivec);
602 void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
603 size_t blocks, const AES_KEY *key,
604 unsigned char *ivec);
605 void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
606 size_t blocks, const AES_KEY *key,
607 unsigned char *ivec);
608 void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
609 size_t blocks, const AES_KEY *key1,
610 const AES_KEY *key2, const unsigned char *ivec);
611 void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
612 size_t blocks, const AES_KEY *key1,
613 const AES_KEY *key2, const unsigned char *ivec);
614 void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
615 size_t blocks, const AES_KEY *key1,
616 const AES_KEY *key2, const unsigned char *ivec);
617 void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
618 size_t blocks, const AES_KEY *key1,
619 const AES_KEY *key2, const unsigned char *ivec);
621 static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
622 const unsigned char *iv, int enc)
625 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
627 mode = EVP_CIPHER_CTX_mode(ctx);
628 bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
629 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
632 aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
633 dat->block = (block128_f) aes_t4_decrypt;
636 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
637 (cbc128_f) aes128_t4_cbc_decrypt : NULL;
640 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
641 (cbc128_f) aes192_t4_cbc_decrypt : NULL;
644 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
645 (cbc128_f) aes256_t4_cbc_decrypt : NULL;
652 aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
653 dat->block = (block128_f) aes_t4_encrypt;
656 if (mode == EVP_CIPH_CBC_MODE)
657 dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
658 else if (mode == EVP_CIPH_CTR_MODE)
659 dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
661 dat->stream.cbc = NULL;
664 if (mode == EVP_CIPH_CBC_MODE)
665 dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
666 else if (mode == EVP_CIPH_CTR_MODE)
667 dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
669 dat->stream.cbc = NULL;
672 if (mode == EVP_CIPH_CBC_MODE)
673 dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
674 else if (mode == EVP_CIPH_CTR_MODE)
675 dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
677 dat->stream.cbc = NULL;
685 EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
692 # define aes_t4_cbc_cipher aes_cbc_cipher
693 static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
694 const unsigned char *in, size_t len);
696 # define aes_t4_ecb_cipher aes_ecb_cipher
697 static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
698 const unsigned char *in, size_t len);
700 # define aes_t4_ofb_cipher aes_ofb_cipher
701 static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
702 const unsigned char *in, size_t len);
704 # define aes_t4_cfb_cipher aes_cfb_cipher
705 static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
706 const unsigned char *in, size_t len);
708 # define aes_t4_cfb8_cipher aes_cfb8_cipher
709 static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
710 const unsigned char *in, size_t len);
712 # define aes_t4_cfb1_cipher aes_cfb1_cipher
713 static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
714 const unsigned char *in, size_t len);
716 # define aes_t4_ctr_cipher aes_ctr_cipher
717 static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
718 const unsigned char *in, size_t len);
720 static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
721 const unsigned char *iv, int enc)
723 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
727 int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
728 aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
729 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
730 (block128_f) aes_t4_encrypt);
733 gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
736 gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
739 gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
745 * If we have an iv we can set it directly, otherwise use saved IV.
747 if (iv == NULL && gctx->iv_set)
750 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
755 /* If key set use IV, otherwise copy */
757 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
759 memcpy(gctx->iv, iv, gctx->ivlen);
766 # define aes_t4_gcm_cipher aes_gcm_cipher
767 static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
768 const unsigned char *in, size_t len);
770 static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
771 const unsigned char *iv, int enc)
773 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
778 int bits = EVP_CIPHER_CTX_key_length(ctx) * 4;
780 /* key_len is two AES keys */
782 aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
783 xctx->xts.block1 = (block128_f) aes_t4_encrypt;
786 xctx->stream = aes128_t4_xts_encrypt;
789 xctx->stream = aes256_t4_xts_encrypt;
795 aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
797 xctx->xts.block1 = (block128_f) aes_t4_decrypt;
800 xctx->stream = aes128_t4_xts_decrypt;
803 xctx->stream = aes256_t4_xts_decrypt;
810 aes_t4_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
811 EVP_CIPHER_CTX_key_length(ctx) * 4,
813 xctx->xts.block2 = (block128_f) aes_t4_encrypt;
815 xctx->xts.key1 = &xctx->ks1;
819 xctx->xts.key2 = &xctx->ks2;
820 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
826 # define aes_t4_xts_cipher aes_xts_cipher
827 static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
828 const unsigned char *in, size_t len);
830 static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
831 const unsigned char *iv, int enc)
833 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
837 int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
838 aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
839 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
840 &cctx->ks, (block128_f) aes_t4_encrypt);
845 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
851 # define aes_t4_ccm_cipher aes_ccm_cipher
852 static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
853 const unsigned char *in, size_t len);
855 # ifndef OPENSSL_NO_OCB
856 static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
857 const unsigned char *iv, int enc)
859 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
865 * We set both the encrypt and decrypt key here because decrypt
866 * needs both. We could possibly optimise to remove setting the
867 * decrypt for an encryption operation.
869 aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
871 aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
873 if (!CRYPTO_ocb128_init(&octx->ocb,
874 &octx->ksenc.ks, &octx->ksdec.ks,
875 (block128_f) aes_t4_encrypt,
876 (block128_f) aes_t4_decrypt,
883 * If we have an iv we can set it directly, otherwise use saved IV.
885 if (iv == NULL && octx->iv_set)
888 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
895 /* If key set use IV, otherwise copy */
897 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
899 memcpy(octx->iv, iv, octx->ivlen);
905 # define aes_t4_ocb_cipher aes_ocb_cipher
906 static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
907 const unsigned char *in, size_t len);
908 # endif /* OPENSSL_NO_OCB */
910 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
911 static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
912 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
913 flags|EVP_CIPH_##MODE##_MODE, \
915 aes_t4_##mode##_cipher, \
917 sizeof(EVP_AES_KEY), \
918 NULL,NULL,NULL,NULL }; \
919 static const EVP_CIPHER aes_##keylen##_##mode = { \
920 nid##_##keylen##_##nmode,blocksize, \
922 flags|EVP_CIPH_##MODE##_MODE, \
924 aes_##mode##_cipher, \
926 sizeof(EVP_AES_KEY), \
927 NULL,NULL,NULL,NULL }; \
928 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
929 { return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
931 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
932 static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
933 nid##_##keylen##_##mode,blocksize, \
934 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
935 flags|EVP_CIPH_##MODE##_MODE, \
936 aes_t4_##mode##_init_key, \
937 aes_t4_##mode##_cipher, \
938 aes_##mode##_cleanup, \
939 sizeof(EVP_AES_##MODE##_CTX), \
940 NULL,NULL,aes_##mode##_ctrl,NULL }; \
941 static const EVP_CIPHER aes_##keylen##_##mode = { \
942 nid##_##keylen##_##mode,blocksize, \
943 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
944 flags|EVP_CIPH_##MODE##_MODE, \
945 aes_##mode##_init_key, \
946 aes_##mode##_cipher, \
947 aes_##mode##_cleanup, \
948 sizeof(EVP_AES_##MODE##_CTX), \
949 NULL,NULL,aes_##mode##_ctrl,NULL }; \
950 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
951 { return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
953 #elif defined(OPENSSL_CPUID_OBJ) && defined(__s390__)
957 # include "s390x_arch.h"
963 * KM-AES parameter block - begin
964 * (see z/Architecture Principles of Operation >= SA22-7832-06)
969 /* KM-AES parameter block - end */
978 * KMA-GCM-AES parameter block - begin
979 * (see z/Architecture Principles of Operation >= SA22-7832-11)
982 unsigned char reserved[12];
988 unsigned long long g[2];
992 unsigned long long taadl;
993 unsigned long long tpcl;
995 unsigned long long g[2];
1000 /* KMA-GCM-AES parameter block - end */
1012 unsigned char ares[16];
1013 unsigned char mres[16];
1014 unsigned char kres[16];
1020 } S390X_AES_GCM_CTX;
1026 * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
1027 * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
1028 * rounds field is used to store the function code and that the key
1029 * schedule is not stored (if aes hardware support is detected).
1032 unsigned char pad[16];
1038 * KMAC-AES parameter block - begin
1039 * (see z/Architecture Principles of Operation >= SA22-7832-08)
1043 unsigned long long g[2];
1044 unsigned char b[16];
1046 unsigned char k[32];
1048 /* KMAC-AES parameter block - end */
1051 unsigned long long g[2];
1052 unsigned char b[16];
1055 unsigned long long g[2];
1056 unsigned char b[16];
1059 unsigned long long blocks;
1068 unsigned char pad[140];
1072 } S390X_AES_CCM_CTX;
1074 /* Convert key size to function code: [16,24,32] -> [18,19,20]. */
1075 # define S390X_AES_FC(keylen) (S390X_AES_128 + ((((keylen) << 3) - 128) >> 6))
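/*
 * Worked example of the mapping above (assuming S390X_AES_128 is the KM
 * function code 18, as the comment implies):
 *   keylen 16: 18 + ((128 - 128) >> 6) = 18
 *   keylen 24: 18 + ((192 - 128) >> 6) = 19
 *   keylen 32: 18 + ((256 - 128) >> 6) = 20
 */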
1077 /* Most modes of operation need km for partial block processing. */
1078 # define S390X_aes_128_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
1079 S390X_CAPBIT(S390X_AES_128))
1080 # define S390X_aes_192_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
1081 S390X_CAPBIT(S390X_AES_192))
1082 # define S390X_aes_256_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
1083 S390X_CAPBIT(S390X_AES_256))
1085 # define s390x_aes_init_key aes_init_key
1086 static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
1087 const unsigned char *iv, int enc);
1089 # define S390X_aes_128_cbc_CAPABLE 1 /* checked by callee */
1090 # define S390X_aes_192_cbc_CAPABLE 1
1091 # define S390X_aes_256_cbc_CAPABLE 1
1092 # define S390X_AES_CBC_CTX EVP_AES_KEY
1094 # define s390x_aes_cbc_init_key aes_init_key
1096 # define s390x_aes_cbc_cipher aes_cbc_cipher
1097 static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1098 const unsigned char *in, size_t len);
1100 # define S390X_aes_128_ecb_CAPABLE S390X_aes_128_CAPABLE
1101 # define S390X_aes_192_ecb_CAPABLE S390X_aes_192_CAPABLE
1102 # define S390X_aes_256_ecb_CAPABLE S390X_aes_256_CAPABLE
1104 static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
1105 const unsigned char *key,
1106 const unsigned char *iv, int enc)
1108 S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
1109 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1111 cctx->fc = S390X_AES_FC(keylen);
1113 cctx->fc |= S390X_DECRYPT;
1115 memcpy(cctx->km.param.k, key, keylen);
1119 static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1120 const unsigned char *in, size_t len)
1122 S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
1124 s390x_km(in, len, out, cctx->fc, &cctx->km.param);
1128 # define S390X_aes_128_ofb_CAPABLE 0
1129 # define S390X_aes_192_ofb_CAPABLE 0
1130 # define S390X_aes_256_ofb_CAPABLE 0
1131 # define S390X_AES_OFB_CTX EVP_AES_KEY
1133 # define s390x_aes_ofb_init_key aes_init_key
1135 # define s390x_aes_ofb_cipher aes_ofb_cipher
1136 static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1137 const unsigned char *in, size_t len);
1139 # define S390X_aes_128_cfb_CAPABLE 0
1140 # define S390X_aes_192_cfb_CAPABLE 0
1141 # define S390X_aes_256_cfb_CAPABLE 0
1142 # define S390X_AES_CFB_CTX EVP_AES_KEY
1144 # define s390x_aes_cfb_init_key aes_init_key
1146 # define s390x_aes_cfb_cipher aes_cfb_cipher
1147 static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1148 const unsigned char *in, size_t len);
1150 # define S390X_aes_128_cfb8_CAPABLE 0
1151 # define S390X_aes_192_cfb8_CAPABLE 0
1152 # define S390X_aes_256_cfb8_CAPABLE 0
1154 # define s390x_aes_cfb8_init_key aes_init_key
1156 # define s390x_aes_cfb8_cipher aes_cfb8_cipher
1157 static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1158 const unsigned char *in, size_t len);
1160 # define S390X_aes_128_cfb1_CAPABLE 0
1161 # define S390X_aes_192_cfb1_CAPABLE 0
1162 # define S390X_aes_256_cfb1_CAPABLE 0
1164 # define s390x_aes_cfb1_init_key aes_init_key
1166 # define s390x_aes_cfb1_cipher aes_cfb1_cipher
1167 static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1168 const unsigned char *in, size_t len);
1170 # define S390X_aes_128_ctr_CAPABLE 1 /* checked by callee */
1171 # define S390X_aes_192_ctr_CAPABLE 1
1172 # define S390X_aes_256_ctr_CAPABLE 1
1173 # define S390X_AES_CTR_CTX EVP_AES_KEY
1175 # define s390x_aes_ctr_init_key aes_init_key
1177 # define s390x_aes_ctr_cipher aes_ctr_cipher
1178 static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1179 const unsigned char *in, size_t len);
1181 # define S390X_aes_128_gcm_CAPABLE (S390X_aes_128_CAPABLE && \
1182 (OPENSSL_s390xcap_P.kma[0] & \
1183 S390X_CAPBIT(S390X_AES_128)))
1184 # define S390X_aes_192_gcm_CAPABLE (S390X_aes_192_CAPABLE && \
1185 (OPENSSL_s390xcap_P.kma[0] & \
1186 S390X_CAPBIT(S390X_AES_192)))
1187 # define S390X_aes_256_gcm_CAPABLE (S390X_aes_256_CAPABLE && \
1188 (OPENSSL_s390xcap_P.kma[0] & \
1189 S390X_CAPBIT(S390X_AES_256)))
1191 /* iv + padding length for iv lengths != 12 */
1192 # define S390X_gcm_ivpadlen(i) ((((i) + 15) >> 4 << 4) + 16)
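/*
 * Worked example: the macro rounds the iv length up to a multiple of 16 and
 * adds one extra block for the 64-bit zero pad plus the 64-bit bit-length
 * field, so S390X_gcm_ivpadlen(13) == 32 and S390X_gcm_ivpadlen(17) == 48.
 */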
1195 * Process additional authenticated data. Returns 0 on success. Code is big-endian.
1198 static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
1201 unsigned long long alen;
1204 if (ctx->kma.param.tpcl)
1207 alen = ctx->kma.param.taadl + len;
1208 if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
1210 ctx->kma.param.taadl = alen;
1215 ctx->ares[n] = *aad;
1220 /* ctx->ares contains a complete block if offset has wrapped around */
1222 s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1223 ctx->fc |= S390X_KMA_HS;
1232 s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1234 ctx->fc |= S390X_KMA_HS;
1242 ctx->ares[rem] = aad[rem];
1249 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
1250 * success. Code is big-endian.
1252 static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
1253 unsigned char *out, size_t len)
1255 const unsigned char *inptr;
1256 unsigned long long mlen;
1259 unsigned char b[16];
1264 mlen = ctx->kma.param.tpcl + len;
1265 if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
1267 ctx->kma.param.tpcl = mlen;
1273 while (n && inlen) {
1274 ctx->mres[n] = *inptr;
1279 /* ctx->mres contains a complete block if offset has wrapped around */
1281 s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
1282 ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1283 ctx->fc |= S390X_KMA_HS;
1286 /* previous call already encrypted/decrypted its remainder,
1287 * see comment below */
1304 s390x_kma(ctx->ares, ctx->areslen, in, len, out,
1305 ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1308 ctx->fc |= S390X_KMA_HS;
1313 * If there is a remainder, it has to be saved such that it can be
1314 * processed by kma later. However, we also have to do the for-now
1315 * unauthenticated encryption/decryption part here and now...
1318 if (!ctx->mreslen) {
1319 buf.w[0] = ctx->kma.param.j0.w[0];
1320 buf.w[1] = ctx->kma.param.j0.w[1];
1321 buf.w[2] = ctx->kma.param.j0.w[2];
1322 buf.w[3] = ctx->kma.param.cv.w + 1;
1323 s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
1327 for (i = 0; i < rem; i++) {
1328 ctx->mres[n + i] = in[i];
1329 out[i] = in[i] ^ ctx->kres[n + i];
1332 ctx->mreslen += rem;
1338 * Initialize context structure. Code is big-endian.
1340 static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
1341 const unsigned char *iv)
1343 ctx->kma.param.t.g[0] = 0;
1344 ctx->kma.param.t.g[1] = 0;
1345 ctx->kma.param.tpcl = 0;
1346 ctx->kma.param.taadl = 0;
1351 if (ctx->ivlen == 12) {
1352 memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
1353 ctx->kma.param.j0.w[3] = 1;
1354 ctx->kma.param.cv.w = 1;
1356 /* ctx->iv has the right size and is already padded. */
1357 memcpy(ctx->iv, iv, ctx->ivlen);
1358 s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
1359 ctx->fc, &ctx->kma.param);
1360 ctx->fc |= S390X_KMA_HS;
1362 ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
1363 ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
1364 ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
1365 ctx->kma.param.t.g[0] = 0;
1366 ctx->kma.param.t.g[1] = 0;
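/*
 * Note (standard GCM construction, not specific to this file): for a 96-bit
 * iv, j0 is simply iv || 0x00000001 as set above; for any other length,
 * j0 = GHASH(iv padded with zeros to a block boundary || 64-bit zero field
 * || 64-bit iv bit length), which matches the padded buffer prepared by the
 * EVP_CTRL_AEAD_SET_IVLEN handler below.
 */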
1371 * Performs various operations on the context structure depending on control
1372 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
1373 * Code is big-endian.
1375 static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
1377 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1378 S390X_AES_GCM_CTX *gctx_out;
1379 EVP_CIPHER_CTX *out;
1380 unsigned char *buf, *iv;
1381 int ivlen, enc, len;
1385 ivlen = EVP_CIPHER_CTX_iv_length(c);
1386 iv = EVP_CIPHER_CTX_iv_noconst(c);
1389 gctx->ivlen = ivlen;
1393 gctx->tls_aad_len = -1;
1396 case EVP_CTRL_AEAD_SET_IVLEN:
1401 iv = EVP_CIPHER_CTX_iv_noconst(c);
1402 len = S390X_gcm_ivpadlen(arg);
1404 /* Allocate memory for iv if needed. */
1405 if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
1407 OPENSSL_free(gctx->iv);
1409 gctx->iv = OPENSSL_malloc(len);
1410 if (gctx->iv == NULL)
1414 memset(gctx->iv + arg, 0, len - arg - 8);
1415 *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
1420 case EVP_CTRL_AEAD_SET_TAG:
1421 buf = EVP_CIPHER_CTX_buf_noconst(c);
1422 enc = EVP_CIPHER_CTX_encrypting(c);
1423 if (arg <= 0 || arg > 16 || enc)
1426 memcpy(buf, ptr, arg);
1430 case EVP_CTRL_AEAD_GET_TAG:
1431 enc = EVP_CIPHER_CTX_encrypting(c);
1432 if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
1435 memcpy(ptr, gctx->kma.param.t.b, arg);
1438 case EVP_CTRL_GCM_SET_IV_FIXED:
1439 /* Special case: -1 length restores whole iv */
1441 memcpy(gctx->iv, ptr, gctx->ivlen);
1446 * Fixed field must be at least 4 bytes and invocation field at least 8 bytes.
1449 if ((arg < 4) || (gctx->ivlen - arg) < 8)
1453 memcpy(gctx->iv, ptr, arg);
1455 enc = EVP_CIPHER_CTX_encrypting(c);
1456 if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
1462 case EVP_CTRL_GCM_IV_GEN:
1463 if (gctx->iv_gen == 0 || gctx->key_set == 0)
1466 s390x_aes_gcm_setiv(gctx, gctx->iv);
1468 if (arg <= 0 || arg > gctx->ivlen)
1471 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
1473 * Invocation field will be at least 8 bytes in size and so no need
1474 * to check wrap around or increment more than last 8 bytes.
1476 (*(unsigned long long *)(gctx->iv + gctx->ivlen - 8))++;
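/*
 * Illustrative layout (e.g. TLS with AES-GCM): a 12-byte iv split into a
 * 4-byte fixed field (salt) and an 8-byte invocation field; only the
 * trailing 8 bytes are incremented above, which is why the checks insist on
 * at least 4 fixed and 8 invocation bytes.
 */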
1480 case EVP_CTRL_GCM_SET_IV_INV:
1481 enc = EVP_CIPHER_CTX_encrypting(c);
1482 if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
1485 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
1486 s390x_aes_gcm_setiv(gctx, gctx->iv);
1490 case EVP_CTRL_AEAD_TLS1_AAD:
1491 /* Save the aad for later use. */
1492 if (arg != EVP_AEAD_TLS1_AAD_LEN)
1495 buf = EVP_CIPHER_CTX_buf_noconst(c);
1496 memcpy(buf, ptr, arg);
1497 gctx->tls_aad_len = arg;
1499 len = buf[arg - 2] << 8 | buf[arg - 1];
1500 /* Correct length for explicit iv. */
1501 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
1503 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
1505 /* If decrypting correct for tag too. */
1506 enc = EVP_CIPHER_CTX_encrypting(c);
1508 if (len < EVP_GCM_TLS_TAG_LEN)
1510 len -= EVP_GCM_TLS_TAG_LEN;
1512 buf[arg - 2] = len >> 8;
1513 buf[arg - 1] = len & 0xff;
1514 /* Extra padding: tag appended to record. */
1515 return EVP_GCM_TLS_TAG_LEN;
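/*
 * Illustrative numbers: the saved TLS AAD (sequence number|type|version|
 * length) carries the record length in its last two bytes; it is reduced by
 * the 8-byte explicit nonce and, when decrypting, by the 16-byte tag, so a
 * received record of N bytes authenticates N - 8 - 16 bytes of plaintext.
 */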
1519 gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
1520 iv = EVP_CIPHER_CTX_iv_noconst(c);
1522 if (gctx->iv == iv) {
1523 gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
1525 len = S390X_gcm_ivpadlen(gctx->ivlen);
1527 gctx_out->iv = OPENSSL_malloc(len);
1528 if (gctx_out->iv == NULL)
1531 memcpy(gctx_out->iv, gctx->iv, len);
1541 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
1543 static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
1544 const unsigned char *key,
1545 const unsigned char *iv, int enc)
1547 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1550 if (iv == NULL && key == NULL)
1554 keylen = EVP_CIPHER_CTX_key_length(ctx);
1555 memcpy(&gctx->kma.param.k, key, keylen);
1557 /* Convert key size to function code. */
1558 gctx->fc = S390X_AES_128 + (((keylen << 3) - 128) >> 6);
1560 gctx->fc |= S390X_DECRYPT;
1562 if (iv == NULL && gctx->iv_set)
1566 s390x_aes_gcm_setiv(gctx, iv);
1572 s390x_aes_gcm_setiv(gctx, iv);
1574 memcpy(gctx->iv, iv, gctx->ivlen);
1583 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1584 * if successful. Otherwise -1 is returned. Code is big-endian.
1586 static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1587 const unsigned char *in, size_t len)
1589 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1590 const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1591 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1594 if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
1597 if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
1598 : EVP_CTRL_GCM_SET_IV_INV,
1599 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
1602 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1603 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1604 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1606 gctx->kma.param.taadl = gctx->tls_aad_len << 3;
1607 gctx->kma.param.tpcl = len << 3;
1608 s390x_kma(buf, gctx->tls_aad_len, in, len, out,
1609 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1612 memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
1613 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1615 if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
1616 EVP_GCM_TLS_TAG_LEN)) {
1617 OPENSSL_cleanse(out, len);
1624 gctx->tls_aad_len = -1;
1629 * Called from EVP layer to initialize context, process additional
1630 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1631 * ciphertext or process a TLS packet, depending on context. Returns bytes
1632 * written on success. Otherwise -1 is returned. Code is big-endian.
1634 static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1635 const unsigned char *in, size_t len)
1637 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1638 unsigned char *buf, tmp[16];
1644 if (gctx->tls_aad_len >= 0)
1645 return s390x_aes_gcm_tls_cipher(ctx, out, in, len);
1652 if (s390x_aes_gcm_aad(gctx, in, len))
1655 if (s390x_aes_gcm(gctx, in, out, len))
1660 gctx->kma.param.taadl <<= 3;
1661 gctx->kma.param.tpcl <<= 3;
1662 s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
1663 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1664 /* recall that we already en-/decrypted gctx->mres
1665 * and returned it to the caller... */
1666 OPENSSL_cleanse(tmp, gctx->mreslen);
1669 enc = EVP_CIPHER_CTX_encrypting(ctx);
1673 if (gctx->taglen < 0)
1676 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1677 if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
1684 static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
1686 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1687 const unsigned char *iv;
1692 iv = EVP_CIPHER_CTX_iv(c);
1694 OPENSSL_free(gctx->iv);
1696 OPENSSL_cleanse(gctx, sizeof(*gctx));
1700 # define S390X_AES_XTS_CTX EVP_AES_XTS_CTX
1701 # define S390X_aes_128_xts_CAPABLE 1 /* checked by callee */
1702 # define S390X_aes_256_xts_CAPABLE 1
1704 # define s390x_aes_xts_init_key aes_xts_init_key
1705 static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
1706 const unsigned char *key,
1707 const unsigned char *iv, int enc);
1708 # define s390x_aes_xts_cipher aes_xts_cipher
1709 static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1710 const unsigned char *in, size_t len);
1711 # define s390x_aes_xts_ctrl aes_xts_ctrl
1712 static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
1713 # define s390x_aes_xts_cleanup aes_xts_cleanup
1715 # define S390X_aes_128_ccm_CAPABLE (S390X_aes_128_CAPABLE && \
1716 (OPENSSL_s390xcap_P.kmac[0] & \
1717 S390X_CAPBIT(S390X_AES_128)))
1718 # define S390X_aes_192_ccm_CAPABLE (S390X_aes_192_CAPABLE && \
1719 (OPENSSL_s390xcap_P.kmac[0] & \
1720 S390X_CAPBIT(S390X_AES_192)))
1721 # define S390X_aes_256_ccm_CAPABLE (S390X_aes_256_CAPABLE && \
1722 (OPENSSL_s390xcap_P.kmac[0] & \
1723 S390X_CAPBIT(S390X_AES_256)))
1725 # define S390X_CCM_AAD_FLAG 0x40
1728 * Set nonce and length fields. Code is big-endian.
1730 static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
1731 const unsigned char *nonce,
1734 ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
1735 ctx->aes.ccm.nonce.g[1] = mlen;
1736 memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
1740 * Process additional authenticated data. Code is big-endian.
1742 static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
1751 ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;
1753 /* Suppress 'type-punned pointer dereference' warning. */
1754 ptr = ctx->aes.ccm.buf.b;
1756 if (alen < ((1 << 16) - (1 << 8))) {
1757 *(uint16_t *)ptr = alen;
1759 } else if (sizeof(alen) == 8
1760 && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
1761 *(uint16_t *)ptr = 0xffff;
1762 *(uint64_t *)(ptr + 2) = alen;
1765 *(uint16_t *)ptr = 0xfffe;
1766 *(uint32_t *)(ptr + 2) = alen;
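/*
 * This is the RFC 3610 encoding of the AAD length: values below 0xff00 use
 * the plain 2-byte form, values of 2^32 or more use the 0xffff marker
 * followed by an 8-byte length, and everything in between uses the 0xfffe
 * marker followed by a 4-byte length.
 */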
1770 while (i < 16 && alen) {
1771 ctx->aes.ccm.buf.b[i] = *aad;
1777 ctx->aes.ccm.buf.b[i] = 0;
1781 ctx->aes.ccm.kmac_param.icv.g[0] = 0;
1782 ctx->aes.ccm.kmac_param.icv.g[1] = 0;
1783 s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
1784 &ctx->aes.ccm.kmac_param);
1785 ctx->aes.ccm.blocks += 2;
1790 s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1791 ctx->aes.ccm.blocks += alen >> 4;
1795 for (i = 0; i < rem; i++)
1796 ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];
1798 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1799 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1800 ctx->aes.ccm.kmac_param.k);
1801 ctx->aes.ccm.blocks++;
1806 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for
1809 static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
1810 unsigned char *out, size_t len, int enc)
1813 unsigned int i, l, num;
1814 unsigned char flags;
1816 flags = ctx->aes.ccm.nonce.b[0];
1817 if (!(flags & S390X_CCM_AAD_FLAG)) {
1818 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
1819 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
1820 ctx->aes.ccm.blocks++;
1823 ctx->aes.ccm.nonce.b[0] = l;
1826 * Reconstruct length from encoded length field
1827 * and initialize it with counter value.
1830 for (i = 15 - l; i < 15; i++) {
1831 n |= ctx->aes.ccm.nonce.b[i];
1832 ctx->aes.ccm.nonce.b[i] = 0;
1835 n |= ctx->aes.ccm.nonce.b[15];
1836 ctx->aes.ccm.nonce.b[15] = 1;
1839 return -1; /* length mismatch */
1842 /* Two operations per block plus one for tag encryption */
1843 ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
1844 if (ctx->aes.ccm.blocks > (1ULL << 61))
1845 return -2; /* too much data */
1853 /* mac-then-encrypt */
1855 s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1857 for (i = 0; i < rem; i++)
1858 ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];
1860 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1861 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1862 ctx->aes.ccm.kmac_param.k);
1865 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
1866 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
1867 &num, (ctr128_f)AES_ctr32_encrypt);
1869 /* decrypt-then-mac */
1870 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
1871 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
1872 &num, (ctr128_f)AES_ctr32_encrypt);
1875 s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1877 for (i = 0; i < rem; i++)
1878 ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];
1880 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1881 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1882 ctx->aes.ccm.kmac_param.k);
1886 for (i = 15 - l; i < 16; i++)
1887 ctx->aes.ccm.nonce.b[i] = 0;
1889 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
1890 ctx->aes.ccm.kmac_param.k);
1891 ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
1892 ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];
1894 ctx->aes.ccm.nonce.b[0] = flags; /* restore flags field */
1899 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1900 * if successful. Otherwise -1 is returned.
1902 static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1903 const unsigned char *in, size_t len)
1905 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
1906 unsigned char *ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
1907 unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1908 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1911 || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
1915 /* Set explicit iv (sequence number). */
1916 memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
1919 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
1921 * Get explicit iv (sequence number). We already have fixed iv
1922 * (server/client_write_iv) here.
1924 memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
1925 s390x_aes_ccm_setiv(cctx, ivec, len);
1927 /* Process aad (sequence number|type|version|length) */
1928 s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);
1930 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
1931 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
1934 if (s390x_aes_ccm(cctx, in, out, len, enc))
1937 memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
1938 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
1940 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
1941 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
1946 OPENSSL_cleanse(out, len);
1952 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is returned.
1955 static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
1956 const unsigned char *key,
1957 const unsigned char *iv, int enc)
1959 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
1960 unsigned char *ivec;
1963 if (iv == NULL && key == NULL)
1967 keylen = EVP_CIPHER_CTX_key_length(ctx);
1968 /* Convert key size to function code. */
1969 cctx->aes.ccm.fc = S390X_AES_128 + (((keylen << 3) - 128) >> 6);
1970 memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);
1972 /* Store encoded m and l. */
1973 cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
1974 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
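/*
 * Worked example with the defaults set by s390x_aes_ccm_ctrl (l = 8,
 * m = 12): ((8 - 1) & 0x7) = 0x07 and (((12 - 2) >> 1) & 0x7) << 3 = 0x28,
 * so nonce.b[0] = 0x2f; the AAD flag (0x40) is OR-ed in later by
 * s390x_aes_ccm_aad() when additional data is present.
 */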
1975 memset(cctx->aes.ccm.nonce.b + 1, 0,
1976 sizeof(cctx->aes.ccm.nonce.b));
1977 cctx->aes.ccm.blocks = 0;
1979 cctx->aes.ccm.key_set = 1;
1983 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
1984 memcpy(ivec, iv, 15 - cctx->aes.ccm.l);
1986 cctx->aes.ccm.iv_set = 1;
1993 * Called from EVP layer to initialize context, process additional
1994 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1995 * plaintext or process a TLS packet, depending on context. Returns bytes
1996 * written on success. Otherwise -1 is returned.
1998 static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1999 const unsigned char *in, size_t len)
2001 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
2002 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
2004 unsigned char *buf, *ivec;
2006 if (!cctx->aes.ccm.key_set)
2009 if (cctx->aes.ccm.tls_aad_len >= 0)
2010 return s390x_aes_ccm_tls_cipher(ctx, out, in, len);
2013 * Final(): Does not return any data. Recall that ccm is mac-then-encrypt
2014 * so integrity must be checked already at Update() i.e., before
2015 * potentially corrupted data is output.
2017 if (in == NULL && out != NULL)
2020 if (!cctx->aes.ccm.iv_set)
2023 if (!enc && !cctx->aes.ccm.tag_set)
2027 /* Update(): Pass message length. */
2029 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2030 s390x_aes_ccm_setiv(cctx, ivec, len);
2032 cctx->aes.ccm.len_set = 1;
2036 /* Update(): Process aad. */
2037 if (!cctx->aes.ccm.len_set && len)
2040 s390x_aes_ccm_aad(cctx, in, len);
2044 /* Update(): Process message. */
2046 if (!cctx->aes.ccm.len_set) {
2048 * In case the message length was not previously set explicitly via
2049 * Update(), set it now.
2051 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2052 s390x_aes_ccm_setiv(cctx, ivec, len);
2054 cctx->aes.ccm.len_set = 1;
2058 if (s390x_aes_ccm(cctx, in, out, len, enc))
2061 cctx->aes.ccm.tag_set = 1;
2066 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
2067 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
2068 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
2074 OPENSSL_cleanse(out, len);
2076 cctx->aes.ccm.iv_set = 0;
2077 cctx->aes.ccm.tag_set = 0;
2078 cctx->aes.ccm.len_set = 0;
2084 * Performs various operations on the context structure depending on control
2085 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
2086 * Code is big-endian.
2088 static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2090 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
2091 unsigned char *buf, *iv;
2096 cctx->aes.ccm.key_set = 0;
2097 cctx->aes.ccm.iv_set = 0;
2098 cctx->aes.ccm.l = 8;
2099 cctx->aes.ccm.m = 12;
2100 cctx->aes.ccm.tag_set = 0;
2101 cctx->aes.ccm.len_set = 0;
2102 cctx->aes.ccm.tls_aad_len = -1;
2105 case EVP_CTRL_AEAD_TLS1_AAD:
2106 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2109 /* Save the aad for later use. */
2110 buf = EVP_CIPHER_CTX_buf_noconst(c);
2111 memcpy(buf, ptr, arg);
2112 cctx->aes.ccm.tls_aad_len = arg;
2114 len = *(uint16_t *)(buf + arg - 2);
2115 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
2118 /* Correct length for explicit iv. */
2119 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
2121 enc = EVP_CIPHER_CTX_encrypting(c);
2123 if (len < cctx->aes.ccm.m)
2126 /* Correct length for tag. */
2127 len -= cctx->aes.ccm.m;
2130 *(uint16_t *)(buf + arg - 2) = len;
2131 /* Extra padding: tag appended to record. */
2132 return cctx->aes.ccm.m;
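/*
 * As with GCM above: a received CCM TLS record of N bytes decrypts to
 * N - 8 (explicit iv) - m (tag) bytes of plaintext, and the corrected
 * length is written back into the saved AAD before it is MACed.
 */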
2134 case EVP_CTRL_CCM_SET_IV_FIXED:
2135 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
2138 /* Copy to first part of the iv. */
2139 iv = EVP_CIPHER_CTX_iv_noconst(c);
2140 memcpy(iv, ptr, arg);
2143 case EVP_CTRL_AEAD_SET_IVLEN:
2147 case EVP_CTRL_CCM_SET_L:
2148 if (arg < 2 || arg > 8)
2151 cctx->aes.ccm.l = arg;
2154 case EVP_CTRL_AEAD_SET_TAG:
2155 if ((arg & 1) || arg < 4 || arg > 16)
2158 enc = EVP_CIPHER_CTX_encrypting(c);
2163 cctx->aes.ccm.tag_set = 1;
2164 buf = EVP_CIPHER_CTX_buf_noconst(c);
2165 memcpy(buf, ptr, arg);
2168 cctx->aes.ccm.m = arg;
2171 case EVP_CTRL_AEAD_GET_TAG:
2172 enc = EVP_CIPHER_CTX_encrypting(c);
2173 if (!enc || !cctx->aes.ccm.tag_set)
2176 if (arg < cctx->aes.ccm.m)
2179 memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2180 cctx->aes.ccm.tag_set = 0;
2181 cctx->aes.ccm.iv_set = 0;
2182 cctx->aes.ccm.len_set = 0;
2193 # define s390x_aes_ccm_cleanup aes_ccm_cleanup
2195 # ifndef OPENSSL_NO_OCB
2196 # define S390X_AES_OCB_CTX EVP_AES_OCB_CTX
2197 # define S390X_aes_128_ocb_CAPABLE 0
2198 # define S390X_aes_192_ocb_CAPABLE 0
2199 # define S390X_aes_256_ocb_CAPABLE 0
2201 # define s390x_aes_ocb_init_key aes_ocb_init_key
2202 static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2203 const unsigned char *iv, int enc);
2204 # define s390x_aes_ocb_cipher aes_ocb_cipher
2205 static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2206 const unsigned char *in, size_t len);
2207 # define s390x_aes_ocb_cleanup aes_ocb_cleanup
2208 static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
2209 # define s390x_aes_ocb_ctrl aes_ocb_ctrl
2210 static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
2213 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \
2215 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2216 nid##_##keylen##_##nmode,blocksize, \
2219 flags | EVP_CIPH_##MODE##_MODE, \
2220 s390x_aes_##mode##_init_key, \
2221 s390x_aes_##mode##_cipher, \
2223 sizeof(S390X_AES_##MODE##_CTX), \
2229 static const EVP_CIPHER aes_##keylen##_##mode = { \
2230 nid##_##keylen##_##nmode, \
2234 flags | EVP_CIPH_##MODE##_MODE, \
2236 aes_##mode##_cipher, \
2238 sizeof(EVP_AES_KEY), \
2244 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2246 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2247 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2250 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
2251 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2252 nid##_##keylen##_##mode, \
2254 (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \
2256 flags | EVP_CIPH_##MODE##_MODE, \
2257 s390x_aes_##mode##_init_key, \
2258 s390x_aes_##mode##_cipher, \
2259 s390x_aes_##mode##_cleanup, \
2260 sizeof(S390X_AES_##MODE##_CTX), \
2263 s390x_aes_##mode##_ctrl, \
2266 static const EVP_CIPHER aes_##keylen##_##mode = { \
2267 nid##_##keylen##_##mode,blocksize, \
2268 (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \
2270 flags | EVP_CIPH_##MODE##_MODE, \
2271 aes_##mode##_init_key, \
2272 aes_##mode##_cipher, \
2273 aes_##mode##_cleanup, \
2274 sizeof(EVP_AES_##MODE##_CTX), \
2277 aes_##mode##_ctrl, \
2280 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2282 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2283 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2288 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
2289 static const EVP_CIPHER aes_##keylen##_##mode = { \
2290 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
2291 flags|EVP_CIPH_##MODE##_MODE, \
2293 aes_##mode##_cipher, \
2295 sizeof(EVP_AES_KEY), \
2296 NULL,NULL,NULL,NULL }; \
2297 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2298 { return &aes_##keylen##_##mode; }
2300 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
2301 static const EVP_CIPHER aes_##keylen##_##mode = { \
2302 nid##_##keylen##_##mode,blocksize, \
2303 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
2304 flags|EVP_CIPH_##MODE##_MODE, \
2305 aes_##mode##_init_key, \
2306 aes_##mode##_cipher, \
2307 aes_##mode##_cleanup, \
2308 sizeof(EVP_AES_##MODE##_CTX), \
2309 NULL,NULL,aes_##mode##_ctrl,NULL }; \
2310 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2311 { return &aes_##keylen##_##mode; }
2315 #if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__))
2316 # include "arm_arch.h"
2317 # if __ARM_MAX_ARCH__>=7
2318 # if defined(BSAES_ASM)
2319 # define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
2321 # if defined(VPAES_ASM)
2322 # define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
2324 # define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
2325 # define HWAES_set_encrypt_key aes_v8_set_encrypt_key
2326 # define HWAES_set_decrypt_key aes_v8_set_decrypt_key
2327 # define HWAES_encrypt aes_v8_encrypt
2328 # define HWAES_decrypt aes_v8_decrypt
2329 # define HWAES_cbc_encrypt aes_v8_cbc_encrypt
2330 # define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
2334 #if defined(HWAES_CAPABLE)
2335 int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits,
2337 int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits,
2339 void HWAES_encrypt(const unsigned char *in, unsigned char *out,
2340 const AES_KEY *key);
2341 void HWAES_decrypt(const unsigned char *in, unsigned char *out,
2342 const AES_KEY *key);
2343 void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out,
2344 size_t length, const AES_KEY *key,
2345 unsigned char *ivec, const int enc);
2346 void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
2347 size_t len, const AES_KEY *key,
2348 const unsigned char ivec[16]);
2349 void HWAES_xts_encrypt(const unsigned char *inp, unsigned char *out,
2350 size_t len, const AES_KEY *key1,
2351 const AES_KEY *key2, const unsigned char iv[16]);
2352 void HWAES_xts_decrypt(const unsigned char *inp, unsigned char *out,
2353 size_t len, const AES_KEY *key1,
2354 const AES_KEY *key2, const unsigned char iv[16]);
2357 #define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
2358 BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2359 BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2360 BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2361 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2362 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
2363 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
2364 BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
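/*-
 * Editorial illustration: each BLOCK_CIPHER_generic_pack(NID_aes, keylen, 0)
 * invocation further below is expected to expand to the seven
 * BLOCK_CIPHER_generic lines above, defining the standard-mode AES ciphers
 * (CBC, ECB, OFB128, CFB128, CFB1, CFB8 and CTR) for that key length,
 * together with their public EVP accessors.
 */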
2366 static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2367 const unsigned char *iv, int enc)
2370 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2372 mode = EVP_CIPHER_CTX_mode(ctx);
2373 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
2375 #ifdef HWAES_CAPABLE
2376 if (HWAES_CAPABLE) {
2377 ret = HWAES_set_decrypt_key(key,
2378 EVP_CIPHER_CTX_key_length(ctx) * 8,
2380 dat->block = (block128_f) HWAES_decrypt;
2381 dat->stream.cbc = NULL;
2382 # ifdef HWAES_cbc_encrypt
2383 if (mode == EVP_CIPH_CBC_MODE)
2384 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2388 #ifdef BSAES_CAPABLE
2389 if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
2390 ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2392 dat->block = (block128_f) AES_decrypt;
2393 dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
2396 #ifdef VPAES_CAPABLE
2397 if (VPAES_CAPABLE) {
2398 ret = vpaes_set_decrypt_key(key,
2399 EVP_CIPHER_CTX_key_length(ctx) * 8,
2401 dat->block = (block128_f) vpaes_decrypt;
2402 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2403 (cbc128_f) vpaes_cbc_encrypt : NULL;
2407 ret = AES_set_decrypt_key(key,
2408 EVP_CIPHER_CTX_key_length(ctx) * 8,
2410 dat->block = (block128_f) AES_decrypt;
2411 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2412 (cbc128_f) AES_cbc_encrypt : NULL;
2415 #ifdef HWAES_CAPABLE
2416 if (HWAES_CAPABLE) {
2417 ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2419 dat->block = (block128_f) HWAES_encrypt;
2420 dat->stream.cbc = NULL;
2421 # ifdef HWAES_cbc_encrypt
2422 if (mode == EVP_CIPH_CBC_MODE)
2423 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2426 # ifdef HWAES_ctr32_encrypt_blocks
2427 if (mode == EVP_CIPH_CTR_MODE)
2428 dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2431 (void)0; /* terminate potentially open 'else' */
2434 #ifdef BSAES_CAPABLE
2435 if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
2436 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2438 dat->block = (block128_f) AES_encrypt;
2439 dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
2442 #ifdef VPAES_CAPABLE
2443 if (VPAES_CAPABLE) {
2444 ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2446 dat->block = (block128_f) vpaes_encrypt;
2447 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2448 (cbc128_f) vpaes_cbc_encrypt : NULL;
2452 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2454 dat->block = (block128_f) AES_encrypt;
2455 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2456 (cbc128_f) AES_cbc_encrypt : NULL;
2458 if (mode == EVP_CIPH_CTR_MODE)
2459 dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
2464 EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
2471 static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2472 const unsigned char *in, size_t len)
2474 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2476 if (dat->stream.cbc)
2477 (*dat->stream.cbc) (in, out, len, &dat->ks,
2478 EVP_CIPHER_CTX_iv_noconst(ctx),
2479 EVP_CIPHER_CTX_encrypting(ctx));
2480 else if (EVP_CIPHER_CTX_encrypting(ctx))
2481 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks,
2482 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2484 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
2485 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2490 static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2491 const unsigned char *in, size_t len)
2493 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
2495 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2500 for (i = 0, len -= bl; i <= len; i += bl)
2501 (*dat->block) (in + i, out + i, &dat->ks);
2506 static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2507 const unsigned char *in, size_t len)
2509 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2511 int num = EVP_CIPHER_CTX_num(ctx);
2512 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
2513 EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block);
2514 EVP_CIPHER_CTX_set_num(ctx, num);
2518 static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2519 const unsigned char *in, size_t len)
2521 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2523 int num = EVP_CIPHER_CTX_num(ctx);
2524 CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
2525 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2526 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2527 EVP_CIPHER_CTX_set_num(ctx, num);
2531 static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2532 const unsigned char *in, size_t len)
2534 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2536 int num = EVP_CIPHER_CTX_num(ctx);
2537 CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
2538 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2539 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2540 EVP_CIPHER_CTX_set_num(ctx, num);
2544 static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2545 const unsigned char *in, size_t len)
2547 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2549 if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
2550 int num = EVP_CIPHER_CTX_num(ctx);
2551 CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
2552 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2553 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2554 EVP_CIPHER_CTX_set_num(ctx, num);
2558 while (len >= MAXBITCHUNK) {
2559 int num = EVP_CIPHER_CTX_num(ctx);
2560 CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
2561 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2562 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2563 EVP_CIPHER_CTX_set_num(ctx, num);
2569 int num = EVP_CIPHER_CTX_num(ctx);
2570 CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
2571 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2572 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2573 EVP_CIPHER_CTX_set_num(ctx, num);
2579 static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2580 const unsigned char *in, size_t len)
2582 unsigned int num = EVP_CIPHER_CTX_num(ctx);
2583 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2585 if (dat->stream.ctr)
2586 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
2587 EVP_CIPHER_CTX_iv_noconst(ctx),
2588 EVP_CIPHER_CTX_buf_noconst(ctx),
2589 &num, dat->stream.ctr);
2591 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
2592 EVP_CIPHER_CTX_iv_noconst(ctx),
2593 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
2595 EVP_CIPHER_CTX_set_num(ctx, num);
2599 BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
2600 BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
2601 BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
2603 static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
2605 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2608 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
2609 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
2610 OPENSSL_free(gctx->iv);
2614 /* increment counter (64-bit int) by 1 */
2615 static void ctr64_inc(unsigned char *counter)
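/*-
 * The function body is elided in this excerpt.  A minimal sketch of the
 * intended behaviour (big-endian increment of an 8-byte counter, stopping
 * as soon as a byte does not wrap) would be:
 *
 *     int n = 8;
 *     do {
 *         if (++counter[--n] != 0)
 *             return;
 *     } while (n);
 *
 * Sketch only; the original implementation may differ in style.
 */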
2630 static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2632 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2637 gctx->ivlen = EVP_CIPHER_CTX_iv_length(c);
2638 gctx->iv = EVP_CIPHER_CTX_iv_noconst(c);
2641 gctx->tls_aad_len = -1;
2644 case EVP_CTRL_AEAD_SET_IVLEN:
2647 /* Allocate memory for IV if needed */
2648 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
2649 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
2650 OPENSSL_free(gctx->iv);
2651 gctx->iv = OPENSSL_malloc(arg);
2652 if (gctx->iv == NULL)
2658 case EVP_CTRL_AEAD_SET_TAG:
2659 if (arg <= 0 || arg > 16 || EVP_CIPHER_CTX_encrypting(c))
2661 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
2665 case EVP_CTRL_AEAD_GET_TAG:
2666 if (arg <= 0 || arg > 16 || !EVP_CIPHER_CTX_encrypting(c)
2667 || gctx->taglen < 0)
2669 memcpy(ptr, EVP_CIPHER_CTX_buf_noconst(c), arg);
2672 case EVP_CTRL_GCM_SET_IV_FIXED:
2673 /* Special case: -1 length restores whole IV */
2675 memcpy(gctx->iv, ptr, gctx->ivlen);
2680 * Fixed field must be at least 4 bytes and invocation field at least 8 bytes.
2683 if ((arg < 4) || (gctx->ivlen - arg) < 8)
2686 memcpy(gctx->iv, ptr, arg);
2687 if (EVP_CIPHER_CTX_encrypting(c)
2688 && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
2693 case EVP_CTRL_GCM_IV_GEN:
2694 if (gctx->iv_gen == 0 || gctx->key_set == 0)
2696 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2697 if (arg <= 0 || arg > gctx->ivlen)
2699 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
2701 * The invocation field will be at least 8 bytes in size, so there is no need
2702 * to check for wraparound or to increment more than the last 8 bytes.
2704 ctr64_inc(gctx->iv + gctx->ivlen - 8);
2708 case EVP_CTRL_GCM_SET_IV_INV:
2709 if (gctx->iv_gen == 0 || gctx->key_set == 0
2710 || EVP_CIPHER_CTX_encrypting(c))
2712 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
2713 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2717 case EVP_CTRL_AEAD_TLS1_AAD:
2718 /* Save the AAD for later use */
2719 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2721 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
2722 gctx->tls_aad_len = arg;
2725 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
2726 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
2727 /* Correct length for explicit IV */
2728 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
2730 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
2731 /* If decrypting, correct for the tag too */
2732 if (!EVP_CIPHER_CTX_encrypting(c)) {
2733 if (len < EVP_GCM_TLS_TAG_LEN)
2735 len -= EVP_GCM_TLS_TAG_LEN;
2737 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
2738 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
2740 /* Extra padding: tag appended to record */
2741 return EVP_GCM_TLS_TAG_LEN;
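/*-
 * Worked example (editorial): if the length field in the AAD reads 1000, the
 * 8-byte explicit IV is always removed; on the decrypt side the 16-byte tag
 * is removed as well, so the length rewritten into the AAD becomes
 * 1000 - 8 - 16 = 976, the plaintext length of the record.
 */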
2745 EVP_CIPHER_CTX *out = ptr;
2746 EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
2747 if (gctx->gcm.key) {
2748 if (gctx->gcm.key != &gctx->ks)
2750 gctx_out->gcm.key = &gctx_out->ks;
2752 if (gctx->iv == EVP_CIPHER_CTX_iv_noconst(c))
2753 gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
2755 gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
2756 if (gctx_out->iv == NULL)
2758 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
2769 static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2770 const unsigned char *iv, int enc)
2772 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
2777 #ifdef HWAES_CAPABLE
2778 if (HWAES_CAPABLE) {
2779 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2781 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2782 (block128_f) HWAES_encrypt);
2783 # ifdef HWAES_ctr32_encrypt_blocks
2784 gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2791 #ifdef BSAES_CAPABLE
2792 if (BSAES_CAPABLE) {
2793 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2795 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2796 (block128_f) AES_encrypt);
2797 gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
2801 #ifdef VPAES_CAPABLE
2802 if (VPAES_CAPABLE) {
2803 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2805 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2806 (block128_f) vpaes_encrypt);
2811 (void)0; /* terminate potentially open 'else' */
2813 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2815 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2816 (block128_f) AES_encrypt);
2818 gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
2825 * If we have an IV we can set it directly, otherwise use the saved IV.
2827 if (iv == NULL && gctx->iv_set)
2830 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
2835 /* If the key is set use the IV, otherwise save a copy */
2837 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
2839 memcpy(gctx->iv, iv, gctx->ivlen);
2847 * Handle TLS GCM packet format. This consists of the last portion of the IV
2848 * followed by the payload and finally the tag. On encrypt generate IV,
2849 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload and verify the tag.
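/*-
 * Record layout handled below (editorial illustration):
 *
 *     | explicit IV (8 bytes) | payload | tag (16 bytes) |
 *
 * The whole record is processed in place.
 */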
2853 static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2854 const unsigned char *in, size_t len)
2856 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
2858 /* Encrypt/decrypt must be performed in place */
2860 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
2863 * Set IV from start of buffer, or generate IV and write it to the start of the buffer.
2866 if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CIPHER_CTX_encrypting(ctx) ?
2867 EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
2868 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
2871 if (CRYPTO_gcm128_aad(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
2874 /* Fix buffer and length to point to payload */
2875 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
2876 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
2877 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
2878 if (EVP_CIPHER_CTX_encrypting(ctx)) {
2879 /* Encrypt payload */
2882 #if defined(AES_GCM_ASM)
2883 if (len >= 32 && AES_GCM_ASM(gctx)) {
2884 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
2887 bulk = AES_gcm_encrypt(in, out, len,
2889 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2890 gctx->gcm.len.u[1] += bulk;
2893 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
2896 len - bulk, gctx->ctr))
2900 #if defined(AES_GCM_ASM2)
2901 if (len >= 32 && AES_GCM_ASM2(gctx)) {
2902 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
2905 bulk = AES_gcm_encrypt(in, out, len,
2907 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2908 gctx->gcm.len.u[1] += bulk;
2911 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
2912 in + bulk, out + bulk, len - bulk))
2916 /* Finally write tag */
2917 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
2918 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
2923 #if defined(AES_GCM_ASM)
2924 if (len >= 16 && AES_GCM_ASM(gctx)) {
2925 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
2928 bulk = AES_gcm_decrypt(in, out, len,
2930 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2931 gctx->gcm.len.u[1] += bulk;
2934 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
2937 len - bulk, gctx->ctr))
2941 #if defined(AES_GCM_ASM2)
2942 if (len >= 16 && AES_GCM_ASM2(gctx)) {
2943 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
2946 bulk = AES_gcm_decrypt(in, out, len,
2948 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2949 gctx->gcm.len.u[1] += bulk;
2952 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
2953 in + bulk, out + bulk, len - bulk))
2957 CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
2958 EVP_GCM_TLS_TAG_LEN);
2959 /* If the tag does not match, wipe the output buffer */
2960 if (CRYPTO_memcmp(EVP_CIPHER_CTX_buf_noconst(ctx), in + len,
2961 EVP_GCM_TLS_TAG_LEN)) {
2962 OPENSSL_cleanse(out, len);
2970 gctx->tls_aad_len = -1;
2974 static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2975 const unsigned char *in, size_t len)
2977 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
2978 /* If not set up, return error */
2982 if (gctx->tls_aad_len >= 0)
2983 return aes_gcm_tls_cipher(ctx, out, in, len);
2989 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
2991 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
2994 #if defined(AES_GCM_ASM)
2995 if (len >= 32 && AES_GCM_ASM(gctx)) {
2996 size_t res = (16 - gctx->gcm.mres) % 16;
2998 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3001 bulk = AES_gcm_encrypt(in + res,
3002 out + res, len - res,
3003 gctx->gcm.key, gctx->gcm.Yi.c,
3005 gctx->gcm.len.u[1] += bulk;
3009 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
3012 len - bulk, gctx->ctr))
3016 #if defined(AES_GCM_ASM2)
3017 if (len >= 32 && AES_GCM_ASM2(gctx)) {
3018 size_t res = (16 - gctx->gcm.mres) % 16;
3020 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3023 bulk = AES_gcm_encrypt(in + res,
3024 out + res, len - res,
3025 gctx->gcm.key, gctx->gcm.Yi.c,
3027 gctx->gcm.len.u[1] += bulk;
3031 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
3032 in + bulk, out + bulk, len - bulk))
3038 #if defined(AES_GCM_ASM)
3039 if (len >= 16 && AES_GCM_ASM(gctx)) {
3040 size_t res = (16 - gctx->gcm.mres) % 16;
3042 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3045 bulk = AES_gcm_decrypt(in + res,
3046 out + res, len - res,
3048 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3049 gctx->gcm.len.u[1] += bulk;
3053 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3056 len - bulk, gctx->ctr))
3060 #if defined(AES_GCM_ASM2)
3061 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3062 size_t res = (16 - gctx->gcm.mres) % 16;
3064 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3067 bulk = AES_gcm_decrypt(in + res,
3068 out + res, len - res,
3070 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3071 gctx->gcm.len.u[1] += bulk;
3075 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3076 in + bulk, out + bulk, len - bulk))
3082 if (!EVP_CIPHER_CTX_encrypting(ctx)) {
3083 if (gctx->taglen < 0)
3085 if (CRYPTO_gcm128_finish(&gctx->gcm,
3086 EVP_CIPHER_CTX_buf_noconst(ctx),
3092 CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx), 16);
3094 /* Don't reuse the IV */
3101 #define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
3102 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3103 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3104 | EVP_CIPH_CUSTOM_COPY)
3106 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
3107 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3108 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
3109 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3110 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
3111 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
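/*-
 * Editorial usage sketch (hypothetical application code, not part of this
 * file): driving one of the GCM ciphers defined above through the public EVP
 * interface.  Error checking omitted; key, iv, aad, pt, ct and their lengths
 * are assumed to be supplied by the caller.
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     unsigned char tag[16];
 *     int outl;
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL);
 *     EVP_EncryptInit_ex(c, NULL, NULL, key, iv);
 *     EVP_EncryptUpdate(c, NULL, &outl, aad, aad_len);       (AAD only)
 *     EVP_EncryptUpdate(c, ct, &outl, pt, pt_len);
 *     EVP_EncryptFinal_ex(c, ct + outl, &outl);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, sizeof(tag), tag);
 *     EVP_CIPHER_CTX_free(c);
 */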
3113 static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3115 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,c);
3116 if (type == EVP_CTRL_COPY) {
3117 EVP_CIPHER_CTX *out = ptr;
3118 EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);
3119 if (xctx->xts.key1) {
3120 if (xctx->xts.key1 != &xctx->ks1)
3122 xctx_out->xts.key1 = &xctx_out->ks1;
3124 if (xctx->xts.key2) {
3125 if (xctx->xts.key2 != &xctx->ks2)
3127 xctx_out->xts.key2 = &xctx_out->ks2;
3130 } else if (type != EVP_CTRL_INIT)
3132 /* key1 and key2 are used as an indicator that both key and IV are set */
3133 xctx->xts.key1 = NULL;
3134 xctx->xts.key2 = NULL;
3138 static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3139 const unsigned char *iv, int enc)
3141 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3148 xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
3150 xctx->stream = NULL;
3152 /* key_len is two AES keys */
3153 #ifdef HWAES_CAPABLE
3154 if (HWAES_CAPABLE) {
3156 HWAES_set_encrypt_key(key,
3157 EVP_CIPHER_CTX_key_length(ctx) * 4,
3159 xctx->xts.block1 = (block128_f) HWAES_encrypt;
3160 # ifdef HWAES_xts_encrypt
3161 xctx->stream = HWAES_xts_encrypt;
3164 HWAES_set_decrypt_key(key,
3165 EVP_CIPHER_CTX_key_length(ctx) * 4,
3167 xctx->xts.block1 = (block128_f) HWAES_decrypt;
3168 # ifdef HWAES_xts_decrypt
3169 xctx->stream = HWAES_xts_decrypt;
3173 HWAES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3174 EVP_CIPHER_CTX_key_length(ctx) * 4,
3176 xctx->xts.block2 = (block128_f) HWAES_encrypt;
3178 xctx->xts.key1 = &xctx->ks1;
3182 #ifdef BSAES_CAPABLE
3184 xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
3187 #ifdef VPAES_CAPABLE
3188 if (VPAES_CAPABLE) {
3190 vpaes_set_encrypt_key(key,
3191 EVP_CIPHER_CTX_key_length(ctx) * 4,
3193 xctx->xts.block1 = (block128_f) vpaes_encrypt;
3195 vpaes_set_decrypt_key(key,
3196 EVP_CIPHER_CTX_key_length(ctx) * 4,
3198 xctx->xts.block1 = (block128_f) vpaes_decrypt;
3201 vpaes_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3202 EVP_CIPHER_CTX_key_length(ctx) * 4,
3204 xctx->xts.block2 = (block128_f) vpaes_encrypt;
3206 xctx->xts.key1 = &xctx->ks1;
3210 (void)0; /* terminate potentially open 'else' */
3213 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
3215 xctx->xts.block1 = (block128_f) AES_encrypt;
3217 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
3219 xctx->xts.block1 = (block128_f) AES_decrypt;
3222 AES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3223 EVP_CIPHER_CTX_key_length(ctx) * 4,
3225 xctx->xts.block2 = (block128_f) AES_encrypt;
3227 xctx->xts.key1 = &xctx->ks1;
3231 xctx->xts.key2 = &xctx->ks2;
3232 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
3238 static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3239 const unsigned char *in, size_t len)
3241 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3242 if (!xctx->xts.key1 || !xctx->xts.key2)
3244 if (!out || !in || len < AES_BLOCK_SIZE)
3247 (*xctx->stream) (in, out, len,
3248 xctx->xts.key1, xctx->xts.key2,
3249 EVP_CIPHER_CTX_iv_noconst(ctx));
3250 else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx),
3252 EVP_CIPHER_CTX_encrypting(ctx)))
3257 #define aes_xts_cleanup NULL
3259 #define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
3260 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3261 | EVP_CIPH_CUSTOM_COPY)
3263 BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
3264 BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
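/*-
 * Editorial note: the key length declared above is twice the AES key size
 * (the second half keys the tweak cipher), so EVP_aes_256_xts() expects a
 * 64-byte key and aes_xts_init_key() splits it at
 * EVP_CIPHER_CTX_key_length(ctx) / 2.
 */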
3266 static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3268 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
3277 cctx->tls_aad_len = -1;
3280 case EVP_CTRL_AEAD_TLS1_AAD:
3281 /* Save the AAD for later use */
3282 if (arg != EVP_AEAD_TLS1_AAD_LEN)
3284 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3285 cctx->tls_aad_len = arg;
3288 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
3289 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
3290 /* Correct length for explicit IV */
3291 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
3293 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
3294 /* If decrypting, correct for the tag too */
3295 if (!EVP_CIPHER_CTX_encrypting(c)) {
3300 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
3301 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
3303 /* Extra padding: tag appended to record */
3306 case EVP_CTRL_CCM_SET_IV_FIXED:
3307 /* Sanity check length */
3308 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
3310 /* Just copy to first part of IV */
3311 memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
3314 case EVP_CTRL_AEAD_SET_IVLEN:
3317 case EVP_CTRL_CCM_SET_L:
3318 if (arg < 2 || arg > 8)
3323 case EVP_CTRL_AEAD_SET_TAG:
3324 if ((arg & 1) || arg < 4 || arg > 16)
3326 if (EVP_CIPHER_CTX_encrypting(c) && ptr)
3330 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3335 case EVP_CTRL_AEAD_GET_TAG:
3336 if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
3338 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
3347 EVP_CIPHER_CTX *out = ptr;
3348 EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
3349 if (cctx->ccm.key) {
3350 if (cctx->ccm.key != &cctx->ks)
3352 cctx_out->ccm.key = &cctx_out->ks;
3363 static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3364 const unsigned char *iv, int enc)
3366 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3371 #ifdef HWAES_CAPABLE
3372 if (HWAES_CAPABLE) {
3373 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3376 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3377 &cctx->ks, (block128_f) HWAES_encrypt);
3383 #ifdef VPAES_CAPABLE
3384 if (VPAES_CAPABLE) {
3385 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3387 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3388 &cctx->ks, (block128_f) vpaes_encrypt);
3394 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3396 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3397 &cctx->ks, (block128_f) AES_encrypt);
3402 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
3408 static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3409 const unsigned char *in, size_t len)
3411 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3412 CCM128_CONTEXT *ccm = &cctx->ccm;
3413 /* Encrypt/decrypt must be performed in place */
3414 if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
3416 /* If encrypting set explicit IV from sequence number (start of AAD) */
3417 if (EVP_CIPHER_CTX_encrypting(ctx))
3418 memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
3419 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3420 /* Get rest of IV from explicit IV */
3421 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
3422 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3423 /* Correct length value */
3424 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3425 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
3429 CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
3430 /* Fix buffer to point to payload */
3431 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3432 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3433 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3434 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3436 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3438 if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
3440 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3442 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3444 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3445 unsigned char tag[16];
3446 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3447 if (!CRYPTO_memcmp(tag, in + len, cctx->M))
3451 OPENSSL_cleanse(out, len);
3456 static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3457 const unsigned char *in, size_t len)
3459 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3460 CCM128_CONTEXT *ccm = &cctx->ccm;
3461 /* If not set up, return error */
3465 if (cctx->tls_aad_len >= 0)
3466 return aes_ccm_tls_cipher(ctx, out, in, len);
3468 /* EVP_*Final() doesn't return any data */
3469 if (in == NULL && out != NULL)
3475 if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
3479 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3485 /* If we have AAD we need the message length */
3486 if (!cctx->len_set && len)
3488 CRYPTO_ccm128_aad(ccm, in, len);
3491 /* If the length has not been set yet, do it now */
3492 if (!cctx->len_set) {
3493 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3498 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3499 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3501 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3507 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3509 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3510 unsigned char tag[16];
3511 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3512 if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
3518 OPENSSL_cleanse(out, len);
3526 #define aes_ccm_cleanup NULL
3528 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
3529 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3530 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
3531 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3532 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
3533 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
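/*-
 * Editorial usage sketch (hypothetical caller code): CCM needs the nonce
 * length, tag length and total plaintext length configured up front.  Names
 * such as key, nonce, aad, pt and ct are assumed caller-supplied.
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     unsigned char tag[14];
 *     int outl;
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_128_ccm(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 7, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_TAG, sizeof(tag), NULL);
 *     EVP_EncryptInit_ex(c, NULL, NULL, key, nonce);
 *     EVP_EncryptUpdate(c, NULL, &outl, NULL, pt_len);       (total length)
 *     EVP_EncryptUpdate(c, NULL, &outl, aad, aad_len);
 *     EVP_EncryptUpdate(c, ct, &outl, pt, pt_len);
 *     EVP_EncryptFinal_ex(c, ct + outl, &outl);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, sizeof(tag), tag);
 *     EVP_CIPHER_CTX_free(c);
 */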
3540 /* Indicates if IV has been set */
3544 static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3545 const unsigned char *iv, int enc)
3547 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3551 if (EVP_CIPHER_CTX_encrypting(ctx))
3552 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3555 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3561 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, EVP_CIPHER_CTX_iv_length(ctx));
3562 wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx);
3567 static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3568 const unsigned char *in, size_t inlen)
3570 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3572 /* AES wrap with padding has IV length of 4, without padding 8 */
3573 int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;
3574 /* No final operation so always return zero length */
3577 /* Input length must always be non-zero */
3580 /* If decrypting we need at least 16 bytes and a multiple of 8 */
3581 if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
3583 /* If not padding input must be multiple of 8 */
3584 if (!pad && inlen & 0x7)
3586 if (is_partially_overlapping(out, in, inlen)) {
3587 EVPerr(EVP_F_AES_WRAP_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
3591 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3592 /* If padding round up to multiple of 8 */
3594 inlen = (inlen + 7) / 8 * 8;
3599 * If not padding output will be exactly 8 bytes smaller than
3600 * input. If padding it will be at least 8 bytes smaller but we
3601 * don't know how much.
3607 if (EVP_CIPHER_CTX_encrypting(ctx))
3608 rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
3610 (block128_f) AES_encrypt);
3612 rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
3614 (block128_f) AES_decrypt);
3616 if (EVP_CIPHER_CTX_encrypting(ctx))
3617 rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
3618 out, in, inlen, (block128_f) AES_encrypt);
3620 rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
3621 out, in, inlen, (block128_f) AES_decrypt);
3623 return rv ? (int)rv : -1;
3626 #define WRAP_FLAGS (EVP_CIPH_WRAP_MODE \
3627 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3628 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)
3630 static const EVP_CIPHER aes_128_wrap = {
3632 8, 16, 8, WRAP_FLAGS,
3633 aes_wrap_init_key, aes_wrap_cipher,
3635 sizeof(EVP_AES_WRAP_CTX),
3636 NULL, NULL, NULL, NULL
3639 const EVP_CIPHER *EVP_aes_128_wrap(void)
3641 return &aes_128_wrap;
3644 static const EVP_CIPHER aes_192_wrap = {
3646 8, 24, 8, WRAP_FLAGS,
3647 aes_wrap_init_key, aes_wrap_cipher,
3649 sizeof(EVP_AES_WRAP_CTX),
3650 NULL, NULL, NULL, NULL
3653 const EVP_CIPHER *EVP_aes_192_wrap(void)
3655 return &aes_192_wrap;
3658 static const EVP_CIPHER aes_256_wrap = {
3660 8, 32, 8, WRAP_FLAGS,
3661 aes_wrap_init_key, aes_wrap_cipher,
3663 sizeof(EVP_AES_WRAP_CTX),
3664 NULL, NULL, NULL, NULL
3667 const EVP_CIPHER *EVP_aes_256_wrap(void)
3669 return &aes_256_wrap;
3672 static const EVP_CIPHER aes_128_wrap_pad = {
3673 NID_id_aes128_wrap_pad,
3674 8, 16, 4, WRAP_FLAGS,
3675 aes_wrap_init_key, aes_wrap_cipher,
3677 sizeof(EVP_AES_WRAP_CTX),
3678 NULL, NULL, NULL, NULL
3681 const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
3683 return &aes_128_wrap_pad;
3686 static const EVP_CIPHER aes_192_wrap_pad = {
3687 NID_id_aes192_wrap_pad,
3688 8, 24, 4, WRAP_FLAGS,
3689 aes_wrap_init_key, aes_wrap_cipher,
3691 sizeof(EVP_AES_WRAP_CTX),
3692 NULL, NULL, NULL, NULL
3695 const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
3697 return &aes_192_wrap_pad;
3700 static const EVP_CIPHER aes_256_wrap_pad = {
3701 NID_id_aes256_wrap_pad,
3702 8, 32, 4, WRAP_FLAGS,
3703 aes_wrap_init_key, aes_wrap_cipher,
3705 sizeof(EVP_AES_WRAP_CTX),
3706 NULL, NULL, NULL, NULL
3709 const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
3711 return &aes_256_wrap_pad;
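/*-
 * Editorial usage sketch (hypothetical caller code): the wrap ciphers above
 * only run if the caller opts in via EVP_CIPHER_CTX_FLAG_WRAP_ALLOW.  kek,
 * key_material and wrapped are assumed caller-supplied.
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     int outl;
 *
 *     EVP_CIPHER_CTX_set_flags(c, EVP_CIPHER_CTX_FLAG_WRAP_ALLOW);
 *     EVP_EncryptInit_ex(c, EVP_aes_256_wrap(), NULL, kek, NULL);
 *     EVP_EncryptUpdate(c, wrapped, &outl, key_material, key_material_len);
 *     EVP_CIPHER_CTX_free(c);
 *
 * For the unpadded wrap the input must be a non-zero multiple of 8 bytes and
 * the output is 8 bytes longer; the *_wrap_pad() variants accept any non-zero
 * input length.
 */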
3714 #ifndef OPENSSL_NO_OCB
3715 static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3717 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
3718 EVP_CIPHER_CTX *newc;
3719 EVP_AES_OCB_CTX *new_octx;
3725 octx->ivlen = EVP_CIPHER_CTX_iv_length(c);
3726 octx->iv = EVP_CIPHER_CTX_iv_noconst(c);
3728 octx->data_buf_len = 0;
3729 octx->aad_buf_len = 0;
3732 case EVP_CTRL_AEAD_SET_IVLEN:
3733 /* IV len must be 1 to 15 */
3734 if (arg <= 0 || arg > 15)
3740 case EVP_CTRL_AEAD_SET_TAG:
3742 /* Tag len must be 0 to 16 */
3743 if (arg < 0 || arg > 16)
3749 if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c))
3751 memcpy(octx->tag, ptr, arg);
3754 case EVP_CTRL_AEAD_GET_TAG:
3755 if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c))
3758 memcpy(ptr, octx->tag, arg);
3762 newc = (EVP_CIPHER_CTX *)ptr;
3763 new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
3764 return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
3765 &new_octx->ksenc.ks,
3766 &new_octx->ksdec.ks);
3774 # ifdef HWAES_CAPABLE
3775 # ifdef HWAES_ocb_encrypt
3776 void HWAES_ocb_encrypt(const unsigned char *in, unsigned char *out,
3777 size_t blocks, const void *key,
3778 size_t start_block_num,
3779 unsigned char offset_i[16],
3780 const unsigned char L_[][16],
3781 unsigned char checksum[16]);
3783 # define HWAES_ocb_encrypt ((ocb128_f)NULL)
3785 # ifdef HWAES_ocb_decrypt
3786 void HWAES_ocb_decrypt(const unsigned char *in, unsigned char *out,
3787 size_t blocks, const void *key,
3788 size_t start_block_num,
3789 unsigned char offset_i[16],
3790 const unsigned char L_[][16],
3791 unsigned char checksum[16]);
3793 # define HWAES_ocb_decrypt ((ocb128_f)NULL)
3797 static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3798 const unsigned char *iv, int enc)
3800 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
3806 * We set both the encrypt and decrypt key here because decrypt
3807 * needs both. We could possibly optimise to remove setting the
3808 * decrypt for an encryption operation.
3810 # ifdef HWAES_CAPABLE
3811 if (HWAES_CAPABLE) {
3812 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3814 HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3816 if (!CRYPTO_ocb128_init(&octx->ocb,
3817 &octx->ksenc.ks, &octx->ksdec.ks,
3818 (block128_f) HWAES_encrypt,
3819 (block128_f) HWAES_decrypt,
3820 enc ? HWAES_ocb_encrypt
3821 : HWAES_ocb_decrypt))
3826 # ifdef VPAES_CAPABLE
3827 if (VPAES_CAPABLE) {
3828 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3830 vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3832 if (!CRYPTO_ocb128_init(&octx->ocb,
3833 &octx->ksenc.ks, &octx->ksdec.ks,
3834 (block128_f) vpaes_encrypt,
3835 (block128_f) vpaes_decrypt,
3841 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3843 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3845 if (!CRYPTO_ocb128_init(&octx->ocb,
3846 &octx->ksenc.ks, &octx->ksdec.ks,
3847 (block128_f) AES_encrypt,
3848 (block128_f) AES_decrypt,
3855 * If we have an IV we can set it directly, otherwise use the saved IV.
3857 if (iv == NULL && octx->iv_set)
3860 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
3867 /* If the key is set use the IV, otherwise save a copy */
3869 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
3871 memcpy(octx->iv, iv, octx->ivlen);
3877 static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3878 const unsigned char *in, size_t len)
3882 int written_len = 0;
3883 size_t trailing_len;
3884 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
3886 /* If IV or Key not set then return error */
3895 * Need to ensure we are only passing full blocks to low level OCB
3896 * routines. We do it here rather than in EVP_EncryptUpdate/
3897 * EVP_DecryptUpdate because we need to pass full blocks of AAD too
3898 * and those routines don't support that
3901 /* Are we dealing with AAD or normal data here? */
3903 buf = octx->aad_buf;
3904 buf_len = &(octx->aad_buf_len);
3906 buf = octx->data_buf;
3907 buf_len = &(octx->data_buf_len);
3909 if (is_partially_overlapping(out + *buf_len, in, len)) {
3910 EVPerr(EVP_F_AES_OCB_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
3916 * If we've got a partially filled buffer from a previous call then
3917 * use that data first
3920 unsigned int remaining;
3922 remaining = AES_BLOCK_SIZE - (*buf_len);
3923 if (remaining > len) {
3924 memcpy(buf + (*buf_len), in, len);
3928 memcpy(buf + (*buf_len), in, remaining);
3931 * If we get here we've filled the buffer, so process it
3936 if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
3938 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
3939 if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
3943 if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
3947 written_len = AES_BLOCK_SIZE;
3950 out += AES_BLOCK_SIZE;
3953 /* Do we have a partial block to handle at the end? */
3954 trailing_len = len % AES_BLOCK_SIZE;
3957 * If we've got some full blocks to handle, then process these first
3959 if (len != trailing_len) {
3961 if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
3963 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
3964 if (!CRYPTO_ocb128_encrypt
3965 (&octx->ocb, in, out, len - trailing_len))
3968 if (!CRYPTO_ocb128_decrypt
3969 (&octx->ocb, in, out, len - trailing_len))
3972 written_len += len - trailing_len;
3973 in += len - trailing_len;
3976 /* Handle any trailing partial block */
3977 if (trailing_len > 0) {
3978 memcpy(buf, in, trailing_len);
3979 *buf_len = trailing_len;
3985 * First of all empty the buffer of any partial block that we might
3986 * have been provided - both for data and AAD
3988 if (octx->data_buf_len > 0) {
3989 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3990 if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
3991 octx->data_buf_len))
3994 if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
3995 octx->data_buf_len))
3998 written_len = octx->data_buf_len;
3999 octx->data_buf_len = 0;
4001 if (octx->aad_buf_len > 0) {
4002 if (!CRYPTO_ocb128_aad
4003 (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
4005 octx->aad_buf_len = 0;
4007 /* If decrypting then verify */
4008 if (!EVP_CIPHER_CTX_encrypting(ctx)) {
4009 if (octx->taglen < 0)
4011 if (CRYPTO_ocb128_finish(&octx->ocb,
4012 octx->tag, octx->taglen) != 0)
4017 /* If encrypting then just get the tag */
4018 if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
4020 /* Don't reuse the IV */
4026 static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
4028 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
4029 CRYPTO_ocb128_cleanup(&octx->ocb);
4033 BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
4034 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4035 BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
4036 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4037 BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
4038 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
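/*-
 * Editorial note: the OCB ciphers above follow the same EVP AEAD calling
 * pattern as GCM, with a 1 to 15 byte nonce set via
 *
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL);
 *
 * and, as enforced by aes_ocb_ctrl() above, EVP_CTRL_AEAD_GET_TAG must be
 * called with exactly the configured tag length.
 */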
4039 #endif /* OPENSSL_NO_OCB */