2 * Copyright 2001-2019 The OpenSSL Project Authors. All Rights Reserved.
4 * Licensed under the Apache License 2.0 (the "License"). You may not use
5 * this file except in compliance with the License. You can obtain a copy
6 * in the file LICENSE in the source distribution or at
7 * https://www.openssl.org/source/license.html
10 #include <openssl/opensslconf.h>
11 #include <openssl/crypto.h>
12 #include <openssl/evp.h>
13 #include <openssl/err.h>
16 #include <openssl/aes.h>
17 #include "internal/evp_int.h"
18 #include "internal/cryptlib.h"
19 #include "modes_lcl.h"
20 #include <openssl/rand.h>
21 #include <openssl/cmac.h>
40 } ks; /* AES key schedule to use */
41 int key_set; /* Set if key initialised */
42 int iv_set; /* Set if an iv is set */
44 unsigned char *iv; /* Temporary IV store */
45 int ivlen; /* IV length */
47 int iv_gen; /* It is OK to generate IVs */
48 int iv_gen_rand; /* No IV was specified, so generate a random IV */
49 int tls_aad_len; /* TLS AAD length */
50 uint64_t tls_enc_records; /* Number of TLS records encrypted */
58 } ks1, ks2; /* AES key schedules to use */
60 void (*stream) (const unsigned char *in,
61 unsigned char *out, size_t length,
62 const AES_KEY *key1, const AES_KEY *key2,
63 const unsigned char iv[16]);
70 } ks; /* AES key schedule to use */
71 int key_set; /* Set if key initialised */
72 int iv_set; /* Set if an iv is set */
73 int tag_set; /* Set if tag is valid */
74 int len_set; /* Set if message length set */
75 int L, M; /* L and M parameters from RFC3610 */
76 int tls_aad_len; /* TLS AAD length */
81 #ifndef OPENSSL_NO_OCB
86 } ksenc; /* AES key schedule to use for encryption */
90 } ksdec; /* AES key schedule to use for decryption */
91 int key_set; /* Set if key initialised */
92 int iv_set; /* Set if an iv is set */
94 unsigned char *iv; /* Temporary IV store */
95 unsigned char tag[16];
96 unsigned char data_buf[16]; /* Store partial data blocks */
97 unsigned char aad_buf[16]; /* Store partial AAD blocks */
100 int ivlen; /* IV length */
105 #define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))
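/*
 * With a 64-bit size_t this is 2^60, with a 32-bit one 2^28; chunking data
 * at this size keeps a length expressed in bits from overflowing a size_t
 * (presumably why the bit-oriented CFB1 path processes data in such chunks).
 */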
108 int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
110 int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
113 void vpaes_encrypt(const unsigned char *in, unsigned char *out,
115 void vpaes_decrypt(const unsigned char *in, unsigned char *out,
118 void vpaes_cbc_encrypt(const unsigned char *in,
121 const AES_KEY *key, unsigned char *ivec, int enc);
124 void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
125 size_t length, const AES_KEY *key,
126 unsigned char ivec[16], int enc);
127 void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
128 size_t len, const AES_KEY *key,
129 const unsigned char ivec[16]);
130 void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
131 size_t len, const AES_KEY *key1,
132 const AES_KEY *key2, const unsigned char iv[16]);
133 void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
134 size_t len, const AES_KEY *key1,
135 const AES_KEY *key2, const unsigned char iv[16]);
138 void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
139 size_t blocks, const AES_KEY *key,
140 const unsigned char ivec[AES_BLOCK_SIZE]);
143 void AES_xts_encrypt(const unsigned char *inp, unsigned char *out, size_t len,
144 const AES_KEY *key1, const AES_KEY *key2,
145 const unsigned char iv[16]);
146 void AES_xts_decrypt(const unsigned char *inp, unsigned char *out, size_t len,
147 const AES_KEY *key1, const AES_KEY *key2,
148 const unsigned char iv[16]);
151 /* increment counter (64-bit int) by 1 */
152 static void ctr64_inc(unsigned char *counter)
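{
    /*
     * A minimal sketch of the usual big-endian increment: bump the last
     * byte and let the carry ripple through at most the trailing 8 bytes.
     */
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)
            return;
    } while (n);
}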
167 #if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
168 # include "ppc_arch.h"
170 # define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
172 # define HWAES_CAPABLE (OPENSSL_ppccap_P & PPC_CRYPTO207)
173 # define HWAES_set_encrypt_key aes_p8_set_encrypt_key
174 # define HWAES_set_decrypt_key aes_p8_set_decrypt_key
175 # define HWAES_encrypt aes_p8_encrypt
176 # define HWAES_decrypt aes_p8_decrypt
177 # define HWAES_cbc_encrypt aes_p8_cbc_encrypt
178 # define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
179 # define HWAES_xts_encrypt aes_p8_xts_encrypt
180 # define HWAES_xts_decrypt aes_p8_xts_decrypt
183 #if defined(AES_ASM) && !defined(I386_ONLY) && ( \
184 ((defined(__i386) || defined(__i386__) || \
185 defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2))|| \
186 defined(__x86_64) || defined(__x86_64__) || \
187 defined(_M_AMD64) || defined(_M_X64) )
189 extern unsigned int OPENSSL_ia32cap_P[];
192 # define VPAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
195 # define BSAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
200 # define AESNI_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(57-32)))
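/*
 * OPENSSL_ia32cap_P[1] mirrors ECX of CPUID leaf 1: bit 41 - 32 = 9 is the
 * SSSE3 flag (needed by the vector-permute and bit-sliced code paths) and
 * bit 57 - 32 = 25 is the AES-NI flag.
 */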
202 int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
204 int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
207 void aesni_encrypt(const unsigned char *in, unsigned char *out,
209 void aesni_decrypt(const unsigned char *in, unsigned char *out,
212 void aesni_ecb_encrypt(const unsigned char *in,
214 size_t length, const AES_KEY *key, int enc);
215 void aesni_cbc_encrypt(const unsigned char *in,
218 const AES_KEY *key, unsigned char *ivec, int enc);
220 void aesni_ctr32_encrypt_blocks(const unsigned char *in,
223 const void *key, const unsigned char *ivec);
225 void aesni_xts_encrypt(const unsigned char *in,
228 const AES_KEY *key1, const AES_KEY *key2,
229 const unsigned char iv[16]);
231 void aesni_xts_decrypt(const unsigned char *in,
234 const AES_KEY *key1, const AES_KEY *key2,
235 const unsigned char iv[16]);
237 void aesni_ccm64_encrypt_blocks(const unsigned char *in,
241 const unsigned char ivec[16],
242 unsigned char cmac[16]);
244 void aesni_ccm64_decrypt_blocks(const unsigned char *in,
248 const unsigned char ivec[16],
249 unsigned char cmac[16]);
251 # if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
252 size_t aesni_gcm_encrypt(const unsigned char *in,
255 const void *key, unsigned char ivec[16], u64 *Xi);
256 # define AES_gcm_encrypt aesni_gcm_encrypt
257 size_t aesni_gcm_decrypt(const unsigned char *in,
260 const void *key, unsigned char ivec[16], u64 *Xi);
261 # define AES_gcm_decrypt aesni_gcm_decrypt
262 void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in,
264 # define AES_GCM_ASM(gctx) (gctx->ctr==aesni_ctr32_encrypt_blocks && \
265 gctx->gcm.ghash==gcm_ghash_avx)
266 # define AES_GCM_ASM2(gctx) (gctx->gcm.block==(block128_f)aesni_encrypt && \
267 gctx->gcm.ghash==gcm_ghash_avx)
268 # undef AES_GCM_ASM2 /* minor size optimization */
271 static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
272 const unsigned char *iv, int enc)
275 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
277 mode = EVP_CIPHER_CTX_mode(ctx);
278 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
280 ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
282 dat->block = (block128_f) aesni_decrypt;
283 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
284 (cbc128_f) aesni_cbc_encrypt : NULL;
286 ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
288 dat->block = (block128_f) aesni_encrypt;
289 if (mode == EVP_CIPH_CBC_MODE)
290 dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
291 else if (mode == EVP_CIPH_CTR_MODE)
292 dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
294 dat->stream.cbc = NULL;
298 EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
305 static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
306 const unsigned char *in, size_t len)
308 aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
309 EVP_CIPHER_CTX_iv_noconst(ctx),
310 EVP_CIPHER_CTX_encrypting(ctx));
315 static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
316 const unsigned char *in, size_t len)
318 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
323 aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
324 EVP_CIPHER_CTX_encrypting(ctx));
329 # define aesni_ofb_cipher aes_ofb_cipher
330 static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
331 const unsigned char *in, size_t len);
333 # define aesni_cfb_cipher aes_cfb_cipher
334 static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
335 const unsigned char *in, size_t len);
337 # define aesni_cfb8_cipher aes_cfb8_cipher
338 static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
339 const unsigned char *in, size_t len);
341 # define aesni_cfb1_cipher aes_cfb1_cipher
342 static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
343 const unsigned char *in, size_t len);
345 # define aesni_ctr_cipher aes_ctr_cipher
346 static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
347 const unsigned char *in, size_t len);
349 static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
350 const unsigned char *iv, int enc)
352 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
356 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
358 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
359 gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
361 * If we have an IV, we can set it directly; otherwise use the saved IV.
363 if (iv == NULL && gctx->iv_set)
366 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
371 /* If the key is set, use the IV now; otherwise copy it for later */
373 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
375 memcpy(gctx->iv, iv, gctx->ivlen);
382 # define aesni_gcm_cipher aes_gcm_cipher
383 static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
384 const unsigned char *in, size_t len);
386 static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
387 const unsigned char *iv, int enc)
389 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
394 /* The key is really two half-length keys concatenated */
395 const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
396 const int bits = bytes * 8;
399 * Verify that the two keys are different.
401 * This addresses Rogaway's vulnerability.
402 * See comment in aes_xts_init_key() below.
404 if (memcmp(key, key + bytes, bytes) == 0) {
405 EVPerr(EVP_F_AESNI_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
409 /* key_len is two AES keys */
411 aesni_set_encrypt_key(key, bits, &xctx->ks1.ks);
412 xctx->xts.block1 = (block128_f) aesni_encrypt;
413 xctx->stream = aesni_xts_encrypt;
415 aesni_set_decrypt_key(key, bits, &xctx->ks1.ks);
416 xctx->xts.block1 = (block128_f) aesni_decrypt;
417 xctx->stream = aesni_xts_decrypt;
420 aesni_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
421 xctx->xts.block2 = (block128_f) aesni_encrypt;
423 xctx->xts.key1 = &xctx->ks1;
427 xctx->xts.key2 = &xctx->ks2;
428 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
434 # define aesni_xts_cipher aes_xts_cipher
435 static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
436 const unsigned char *in, size_t len);
438 static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
439 const unsigned char *iv, int enc)
441 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
445 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
447 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
448 &cctx->ks, (block128_f) aesni_encrypt);
449 cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
450 (ccm128_f) aesni_ccm64_decrypt_blocks;
454 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
460 # define aesni_ccm_cipher aes_ccm_cipher
461 static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
462 const unsigned char *in, size_t len);
464 # ifndef OPENSSL_NO_OCB
465 void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out,
466 size_t blocks, const void *key,
467 size_t start_block_num,
468 unsigned char offset_i[16],
469 const unsigned char L_[][16],
470 unsigned char checksum[16]);
471 void aesni_ocb_decrypt(const unsigned char *in, unsigned char *out,
472 size_t blocks, const void *key,
473 size_t start_block_num,
474 unsigned char offset_i[16],
475 const unsigned char L_[][16],
476 unsigned char checksum[16]);
478 static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
479 const unsigned char *iv, int enc)
481 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
487 * We set both the encrypt and decrypt key here because decryption
488 * needs both. We could possibly optimise by not setting the
489 * decrypt key for an encryption-only operation.
491 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
493 aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
495 if (!CRYPTO_ocb128_init(&octx->ocb,
496 &octx->ksenc.ks, &octx->ksdec.ks,
497 (block128_f) aesni_encrypt,
498 (block128_f) aesni_decrypt,
499 enc ? aesni_ocb_encrypt
500 : aesni_ocb_decrypt))
506 * If we have an IV, we can set it directly; otherwise use the saved IV.
508 if (iv == NULL && octx->iv_set)
511 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
518 /* If the key is set, use the IV now; otherwise copy it for later */
520 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
522 memcpy(octx->iv, iv, octx->ivlen);
528 # define aesni_ocb_cipher aes_ocb_cipher
529 static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
530 const unsigned char *in, size_t len);
531 # endif /* OPENSSL_NO_OCB */
533 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
534 static const EVP_CIPHER aesni_##keylen##_##mode = { \
535 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
536 flags|EVP_CIPH_##MODE##_MODE, \
538 aesni_##mode##_cipher, \
540 sizeof(EVP_AES_KEY), \
541 NULL,NULL,NULL,NULL }; \
542 static const EVP_CIPHER aes_##keylen##_##mode = { \
543 nid##_##keylen##_##nmode,blocksize, \
545 flags|EVP_CIPH_##MODE##_MODE, \
547 aes_##mode##_cipher, \
549 sizeof(EVP_AES_KEY), \
550 NULL,NULL,NULL,NULL }; \
551 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
552 { return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
554 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
555 static const EVP_CIPHER aesni_##keylen##_##mode = { \
556 nid##_##keylen##_##mode,blocksize, \
557 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
559 flags|EVP_CIPH_##MODE##_MODE, \
560 aesni_##mode##_init_key, \
561 aesni_##mode##_cipher, \
562 aes_##mode##_cleanup, \
563 sizeof(EVP_AES_##MODE##_CTX), \
564 NULL,NULL,aes_##mode##_ctrl,NULL }; \
565 static const EVP_CIPHER aes_##keylen##_##mode = { \
566 nid##_##keylen##_##mode,blocksize, \
567 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
569 flags|EVP_CIPH_##MODE##_MODE, \
570 aes_##mode##_init_key, \
571 aes_##mode##_cipher, \
572 aes_##mode##_cleanup, \
573 sizeof(EVP_AES_##MODE##_CTX), \
574 NULL,NULL,aes_##mode##_ctrl,NULL }; \
575 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
576 { return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
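/*
 * Illustrative use (the real invocations appear further down the file):
 * an expansion such as BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
 * flags) defines both an AES-NI-backed and a generic EVP_CIPHER table and
 * an EVP_aes_256_gcm() accessor that selects between them at run time via
 * AESNI_CAPABLE.
 */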
578 #elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))
580 # include "sparc_arch.h"
582 extern unsigned int OPENSSL_sparcv9cap_P[];
585 * Initial Fujitsu SPARC64 X support
587 # define HWAES_CAPABLE (OPENSSL_sparcv9cap_P[0] & SPARCV9_FJAESX)
588 # define HWAES_set_encrypt_key aes_fx_set_encrypt_key
589 # define HWAES_set_decrypt_key aes_fx_set_decrypt_key
590 # define HWAES_encrypt aes_fx_encrypt
591 # define HWAES_decrypt aes_fx_decrypt
592 # define HWAES_cbc_encrypt aes_fx_cbc_encrypt
593 # define HWAES_ctr32_encrypt_blocks aes_fx_ctr32_encrypt_blocks
595 # define SPARC_AES_CAPABLE (OPENSSL_sparcv9cap_P[1] & CFR_AES)
597 void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
598 void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
599 void aes_t4_encrypt(const unsigned char *in, unsigned char *out,
601 void aes_t4_decrypt(const unsigned char *in, unsigned char *out,
604 * Key-length-specific subroutines were chosen for the following reason.
605 * Each SPARC T4 core can execute up to 8 threads which share the core's
606 * resources. Loading as much key material as possible into registers
607 * minimizes references to the shared memory interface, as well as the
608 * number of instructions in inner loops [much needed on T4]. But having
609 * non-key-length-specific routines would require conditional branches
610 * either in inner loops or on subroutine entry. The former is hardly
611 * acceptable, while the latter means growing the code to roughly the size
612 * occupied by multiple key-length-specific subroutines, so why fight it?
614 void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
615 size_t len, const AES_KEY *key,
616 unsigned char *ivec);
617 void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
618 size_t len, const AES_KEY *key,
619 unsigned char *ivec);
620 void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
621 size_t len, const AES_KEY *key,
622 unsigned char *ivec);
623 void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
624 size_t len, const AES_KEY *key,
625 unsigned char *ivec);
626 void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
627 size_t len, const AES_KEY *key,
628 unsigned char *ivec);
629 void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
630 size_t len, const AES_KEY *key,
631 unsigned char *ivec);
632 void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
633 size_t blocks, const AES_KEY *key,
634 unsigned char *ivec);
635 void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
636 size_t blocks, const AES_KEY *key,
637 unsigned char *ivec);
638 void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
639 size_t blocks, const AES_KEY *key,
640 unsigned char *ivec);
641 void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
642 size_t blocks, const AES_KEY *key1,
643 const AES_KEY *key2, const unsigned char *ivec);
644 void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
645 size_t blocks, const AES_KEY *key1,
646 const AES_KEY *key2, const unsigned char *ivec);
647 void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
648 size_t blocks, const AES_KEY *key1,
649 const AES_KEY *key2, const unsigned char *ivec);
650 void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
651 size_t blocks, const AES_KEY *key1,
652 const AES_KEY *key2, const unsigned char *ivec);
654 static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
655 const unsigned char *iv, int enc)
658 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
660 mode = EVP_CIPHER_CTX_mode(ctx);
661 bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
662 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
665 aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
666 dat->block = (block128_f) aes_t4_decrypt;
669 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
670 (cbc128_f) aes128_t4_cbc_decrypt : NULL;
673 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
674 (cbc128_f) aes192_t4_cbc_decrypt : NULL;
677 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
678 (cbc128_f) aes256_t4_cbc_decrypt : NULL;
685 aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
686 dat->block = (block128_f) aes_t4_encrypt;
689 if (mode == EVP_CIPH_CBC_MODE)
690 dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
691 else if (mode == EVP_CIPH_CTR_MODE)
692 dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
694 dat->stream.cbc = NULL;
697 if (mode == EVP_CIPH_CBC_MODE)
698 dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
699 else if (mode == EVP_CIPH_CTR_MODE)
700 dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
702 dat->stream.cbc = NULL;
705 if (mode == EVP_CIPH_CBC_MODE)
706 dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
707 else if (mode == EVP_CIPH_CTR_MODE)
708 dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
710 dat->stream.cbc = NULL;
718 EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
725 # define aes_t4_cbc_cipher aes_cbc_cipher
726 static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
727 const unsigned char *in, size_t len);
729 # define aes_t4_ecb_cipher aes_ecb_cipher
730 static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
731 const unsigned char *in, size_t len);
733 # define aes_t4_ofb_cipher aes_ofb_cipher
734 static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
735 const unsigned char *in, size_t len);
737 # define aes_t4_cfb_cipher aes_cfb_cipher
738 static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
739 const unsigned char *in, size_t len);
741 # define aes_t4_cfb8_cipher aes_cfb8_cipher
742 static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
743 const unsigned char *in, size_t len);
745 # define aes_t4_cfb1_cipher aes_cfb1_cipher
746 static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
747 const unsigned char *in, size_t len);
749 # define aes_t4_ctr_cipher aes_ctr_cipher
750 static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
751 const unsigned char *in, size_t len);
753 static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
754 const unsigned char *iv, int enc)
756 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
760 int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
761 aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
762 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
763 (block128_f) aes_t4_encrypt);
766 gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
769 gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
772 gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
778 * If we have an IV, we can set it directly; otherwise use the saved IV.
780 if (iv == NULL && gctx->iv_set)
783 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
788 /* If the key is set, use the IV now; otherwise copy it for later */
790 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
792 memcpy(gctx->iv, iv, gctx->ivlen);
799 # define aes_t4_gcm_cipher aes_gcm_cipher
800 static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
801 const unsigned char *in, size_t len);
803 static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
804 const unsigned char *iv, int enc)
806 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
811 /* The key is really two half-length keys concatenated */
812 const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
813 const int bits = bytes * 8;
816 * Verify that the two keys are different.
818 * This addresses Rogaway's vulnerability.
819 * See comment in aes_xts_init_key() below.
821 if (memcmp(key, key + bytes, bytes) == 0) {
822 EVPerr(EVP_F_AES_T4_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
827 /* key_len is two AES keys */
829 aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
830 xctx->xts.block1 = (block128_f) aes_t4_encrypt;
833 xctx->stream = aes128_t4_xts_encrypt;
836 xctx->stream = aes256_t4_xts_encrypt;
842 aes_t4_set_decrypt_key(key, bits, &xctx->ks1.ks);
843 xctx->xts.block1 = (block128_f) aes_t4_decrypt;
846 xctx->stream = aes128_t4_xts_decrypt;
849 xctx->stream = aes256_t4_xts_decrypt;
856 aes_t4_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
857 xctx->xts.block2 = (block128_f) aes_t4_encrypt;
859 xctx->xts.key1 = &xctx->ks1;
863 xctx->xts.key2 = &xctx->ks2;
864 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
870 # define aes_t4_xts_cipher aes_xts_cipher
871 static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
872 const unsigned char *in, size_t len);
874 static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
875 const unsigned char *iv, int enc)
877 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
881 int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
882 aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
883 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
884 &cctx->ks, (block128_f) aes_t4_encrypt);
889 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
895 # define aes_t4_ccm_cipher aes_ccm_cipher
896 static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
897 const unsigned char *in, size_t len);
899 # ifndef OPENSSL_NO_OCB
900 static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
901 const unsigned char *iv, int enc)
903 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
909 * We set both the encrypt and decrypt key here because decryption
910 * needs both. We could possibly optimise by not setting the
911 * decrypt key for an encryption-only operation.
913 aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
915 aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
917 if (!CRYPTO_ocb128_init(&octx->ocb,
918 &octx->ksenc.ks, &octx->ksdec.ks,
919 (block128_f) aes_t4_encrypt,
920 (block128_f) aes_t4_decrypt,
927 * If we have an IV, we can set it directly; otherwise use the saved IV.
929 if (iv == NULL && octx->iv_set)
932 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
939 /* If the key is set, use the IV now; otherwise copy it for later */
941 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
943 memcpy(octx->iv, iv, octx->ivlen);
949 # define aes_t4_ocb_cipher aes_ocb_cipher
950 static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
951 const unsigned char *in, size_t len);
952 # endif /* OPENSSL_NO_OCB */
954 # ifndef OPENSSL_NO_SIV
955 # define aes_t4_siv_init_key aes_siv_init_key
956 # define aes_t4_siv_cipher aes_siv_cipher
957 # endif /* OPENSSL_NO_SIV */
959 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
960 static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
961 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
962 flags|EVP_CIPH_##MODE##_MODE, \
964 aes_t4_##mode##_cipher, \
966 sizeof(EVP_AES_KEY), \
967 NULL,NULL,NULL,NULL }; \
968 static const EVP_CIPHER aes_##keylen##_##mode = { \
969 nid##_##keylen##_##nmode,blocksize, \
971 flags|EVP_CIPH_##MODE##_MODE, \
973 aes_##mode##_cipher, \
975 sizeof(EVP_AES_KEY), \
976 NULL,NULL,NULL,NULL }; \
977 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
978 { return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
980 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
981 static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
982 nid##_##keylen##_##mode,blocksize, \
983 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
985 flags|EVP_CIPH_##MODE##_MODE, \
986 aes_t4_##mode##_init_key, \
987 aes_t4_##mode##_cipher, \
988 aes_##mode##_cleanup, \
989 sizeof(EVP_AES_##MODE##_CTX), \
990 NULL,NULL,aes_##mode##_ctrl,NULL }; \
991 static const EVP_CIPHER aes_##keylen##_##mode = { \
992 nid##_##keylen##_##mode,blocksize, \
993 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
995 flags|EVP_CIPH_##MODE##_MODE, \
996 aes_##mode##_init_key, \
997 aes_##mode##_cipher, \
998 aes_##mode##_cleanup, \
999 sizeof(EVP_AES_##MODE##_CTX), \
1000 NULL,NULL,aes_##mode##_ctrl,NULL }; \
1001 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
1002 { return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
1004 #elif defined(OPENSSL_CPUID_OBJ) && defined(__s390__)
1008 # include "s390x_arch.h"
1014 * KM-AES parameter block - begin
1015 * (see z/Architecture Principles of Operation >= SA22-7832-06)
1018 unsigned char k[32];
1020 /* KM-AES parameter block - end */
1023 } S390X_AES_ECB_CTX;
1029 * KMO-AES parameter block - begin
1030 * (see z/Architecture Principles of Operation >= SA22-7832-08)
1033 unsigned char cv[16];
1034 unsigned char k[32];
1036 /* KMO-AES parameter block - end */
1041 } S390X_AES_OFB_CTX;
1047 * KMF-AES parameter block - begin
1048 * (see z/Architecture Principles of Operation >= SA22-7832-08)
1051 unsigned char cv[16];
1052 unsigned char k[32];
1054 /* KMF-AES parameter block - end */
1059 } S390X_AES_CFB_CTX;
1065 * KMA-GCM-AES parameter block - begin
1066 * (see z/Architecture Principles of Operation >= SA22-7832-11)
1069 unsigned char reserved[12];
1075 unsigned long long g[2];
1076 unsigned char b[16];
1078 unsigned char h[16];
1079 unsigned long long taadl;
1080 unsigned long long tpcl;
1082 unsigned long long g[2];
1085 unsigned char k[32];
1087 /* KMA-GCM-AES parameter block - end */
1099 unsigned char ares[16];
1100 unsigned char mres[16];
1101 unsigned char kres[16];
1107 uint64_t tls_enc_records; /* Number of TLS records encrypted */
1108 } S390X_AES_GCM_CTX;
1114 * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
1115 * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
1116 * rounds field is used to store the function code and that the key
1117 * schedule is not stored (if aes hardware support is detected).
1120 unsigned char pad[16];
1126 * KMAC-AES parameter block - begin
1127 * (see z/Architecture Principles of Operation >= SA22-7832-08)
1131 unsigned long long g[2];
1132 unsigned char b[16];
1134 unsigned char k[32];
1136 /* KMAC-AES parameter block - end */
1139 unsigned long long g[2];
1140 unsigned char b[16];
1143 unsigned long long g[2];
1144 unsigned char b[16];
1147 unsigned long long blocks;
1156 unsigned char pad[140];
1160 } S390X_AES_CCM_CTX;
1162 /* Convert key size to function code: [16,24,32] -> [18,19,20]. */
1163 # define S390X_AES_FC(keylen) (S390X_AES_128 + ((((keylen) << 3) - 128) >> 6))
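/*
 * Worked example: keylen 16 gives ((16 << 3) - 128) >> 6 = 0, keylen 24
 * gives 1 and keylen 32 gives 2, i.e. S390X_AES_128 + {0,1,2} or function
 * codes 18, 19 and 20 as noted above.
 */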
1165 /* Most modes of operation need km for partial block processing. */
1166 # define S390X_aes_128_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
1167 S390X_CAPBIT(S390X_AES_128))
1168 # define S390X_aes_192_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
1169 S390X_CAPBIT(S390X_AES_192))
1170 # define S390X_aes_256_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
1171 S390X_CAPBIT(S390X_AES_256))
1173 # define s390x_aes_init_key aes_init_key
1174 static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
1175 const unsigned char *iv, int enc);
1177 # define S390X_aes_128_cbc_CAPABLE 1 /* checked by callee */
1178 # define S390X_aes_192_cbc_CAPABLE 1
1179 # define S390X_aes_256_cbc_CAPABLE 1
1180 # define S390X_AES_CBC_CTX EVP_AES_KEY
1182 # define s390x_aes_cbc_init_key aes_init_key
1184 # define s390x_aes_cbc_cipher aes_cbc_cipher
1185 static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1186 const unsigned char *in, size_t len);
1188 # define S390X_aes_128_ecb_CAPABLE S390X_aes_128_CAPABLE
1189 # define S390X_aes_192_ecb_CAPABLE S390X_aes_192_CAPABLE
1190 # define S390X_aes_256_ecb_CAPABLE S390X_aes_256_CAPABLE
1192 static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
1193 const unsigned char *key,
1194 const unsigned char *iv, int enc)
1196 S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
1197 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1199 cctx->fc = S390X_AES_FC(keylen);
1201 cctx->fc |= S390X_DECRYPT;
1203 memcpy(cctx->km.param.k, key, keylen);
1207 static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1208 const unsigned char *in, size_t len)
1210 S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
1212 s390x_km(in, len, out, cctx->fc, &cctx->km.param);
1216 # define S390X_aes_128_ofb_CAPABLE (S390X_aes_128_CAPABLE && \
1217 (OPENSSL_s390xcap_P.kmo[0] & \
1218 S390X_CAPBIT(S390X_AES_128)))
1219 # define S390X_aes_192_ofb_CAPABLE (S390X_aes_192_CAPABLE && \
1220 (OPENSSL_s390xcap_P.kmo[0] & \
1221 S390X_CAPBIT(S390X_AES_192)))
1222 # define S390X_aes_256_ofb_CAPABLE (S390X_aes_256_CAPABLE && \
1223 (OPENSSL_s390xcap_P.kmo[0] & \
1224 S390X_CAPBIT(S390X_AES_256)))
1226 static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
1227 const unsigned char *key,
1228 const unsigned char *ivec, int enc)
1230 S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
1231 const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
1232 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1233 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
1235 memcpy(cctx->kmo.param.cv, iv, ivlen);
1236 memcpy(cctx->kmo.param.k, key, keylen);
1237 cctx->fc = S390X_AES_FC(keylen);
1242 static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1243 const unsigned char *in, size_t len)
1245 S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
1250 *out = *in ^ cctx->kmo.param.cv[n];
1259 len &= ~(size_t)0xf;
1261 s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);
1268 s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
1272 out[n] = in[n] ^ cctx->kmo.param.cv[n];
1281 # define S390X_aes_128_cfb_CAPABLE (S390X_aes_128_CAPABLE && \
1282 (OPENSSL_s390xcap_P.kmf[0] & \
1283 S390X_CAPBIT(S390X_AES_128)))
1284 # define S390X_aes_192_cfb_CAPABLE (S390X_aes_192_CAPABLE && \
1285 (OPENSSL_s390xcap_P.kmf[0] & \
1286 S390X_CAPBIT(S390X_AES_192)))
1287 # define S390X_aes_256_cfb_CAPABLE (S390X_aes_256_CAPABLE && \
1288 (OPENSSL_s390xcap_P.kmf[0] & \
1289 S390X_CAPBIT(S390X_AES_256)))
1291 static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
1292 const unsigned char *key,
1293 const unsigned char *ivec, int enc)
1295 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1296 const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
1297 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1298 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
1300 cctx->fc = S390X_AES_FC(keylen);
1301 cctx->fc |= 16 << 24; /* 16 bytes cipher feedback */
1303 cctx->fc |= S390X_DECRYPT;
1306 memcpy(cctx->kmf.param.cv, iv, ivlen);
1307 memcpy(cctx->kmf.param.k, key, keylen);
1311 static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1312 const unsigned char *in, size_t len)
1314 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1315 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1316 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1323 *out = cctx->kmf.param.cv[n] ^ tmp;
1324 cctx->kmf.param.cv[n] = enc ? *out : tmp;
1333 len &= ~(size_t)0xf;
1335 s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
1342 s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
1343 S390X_AES_FC(keylen), cctx->kmf.param.k);
1347 out[n] = cctx->kmf.param.cv[n] ^ tmp;
1348 cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
1357 # define S390X_aes_128_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
1358 S390X_CAPBIT(S390X_AES_128))
1359 # define S390X_aes_192_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
1360 S390X_CAPBIT(S390X_AES_192))
1361 # define S390X_aes_256_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
1362 S390X_CAPBIT(S390X_AES_256))
1364 static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
1365 const unsigned char *key,
1366 const unsigned char *ivec, int enc)
1368 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1369 const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
1370 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1371 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
1373 cctx->fc = S390X_AES_FC(keylen);
1374 cctx->fc |= 1 << 24; /* 1 byte cipher feedback */
1376 cctx->fc |= S390X_DECRYPT;
1378 memcpy(cctx->kmf.param.cv, iv, ivlen);
1379 memcpy(cctx->kmf.param.k, key, keylen);
1383 static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1384 const unsigned char *in, size_t len)
1386 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1388 s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
1392 # define S390X_aes_128_cfb1_CAPABLE 0
1393 # define S390X_aes_192_cfb1_CAPABLE 0
1394 # define S390X_aes_256_cfb1_CAPABLE 0
1396 # define s390x_aes_cfb1_init_key aes_init_key
1398 # define s390x_aes_cfb1_cipher aes_cfb1_cipher
1399 static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1400 const unsigned char *in, size_t len);
1402 # define S390X_aes_128_ctr_CAPABLE 1 /* checked by callee */
1403 # define S390X_aes_192_ctr_CAPABLE 1
1404 # define S390X_aes_256_ctr_CAPABLE 1
1405 # define S390X_AES_CTR_CTX EVP_AES_KEY
1407 # define s390x_aes_ctr_init_key aes_init_key
1409 # define s390x_aes_ctr_cipher aes_ctr_cipher
1410 static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1411 const unsigned char *in, size_t len);
1413 # define S390X_aes_128_gcm_CAPABLE (S390X_aes_128_CAPABLE && \
1414 (OPENSSL_s390xcap_P.kma[0] & \
1415 S390X_CAPBIT(S390X_AES_128)))
1416 # define S390X_aes_192_gcm_CAPABLE (S390X_aes_192_CAPABLE && \
1417 (OPENSSL_s390xcap_P.kma[0] & \
1418 S390X_CAPBIT(S390X_AES_192)))
1419 # define S390X_aes_256_gcm_CAPABLE (S390X_aes_256_CAPABLE && \
1420 (OPENSSL_s390xcap_P.kma[0] & \
1421 S390X_CAPBIT(S390X_AES_256)))
1423 /* iv + padding length for iv lengths != 12 */
1424 # define S390X_gcm_ivpadlen(i) ((((i) + 15) >> 4 << 4) + 16)
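/*
 * Worked example: an iv length of 13 gives ((13 + 15) >> 4 << 4) + 16 = 32,
 * i.e. the iv rounded up to a whole 16-byte block plus one extra block for
 * the zero padding and the 64-bit bit-length used when hashing the iv.
 */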
1427 * Process additional authenticated data. Returns 0 on success. Code is big-endian.
1430 static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
1433 unsigned long long alen;
1436 if (ctx->kma.param.tpcl)
1439 alen = ctx->kma.param.taadl + len;
1440 if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
1442 ctx->kma.param.taadl = alen;
1447 ctx->ares[n] = *aad;
1452 /* ctx->ares contains a complete block if offset has wrapped around */
1454 s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1455 ctx->fc |= S390X_KMA_HS;
1462 len &= ~(size_t)0xf;
1464 s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1466 ctx->fc |= S390X_KMA_HS;
1474 ctx->ares[rem] = aad[rem];
1481 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
1482 * success. Code is big-endian.
1484 static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
1485 unsigned char *out, size_t len)
1487 const unsigned char *inptr;
1488 unsigned long long mlen;
1491 unsigned char b[16];
1496 mlen = ctx->kma.param.tpcl + len;
1497 if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
1499 ctx->kma.param.tpcl = mlen;
1505 while (n && inlen) {
1506 ctx->mres[n] = *inptr;
1511 /* ctx->mres contains a complete block if offset has wrapped around */
1513 s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
1514 ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1515 ctx->fc |= S390X_KMA_HS;
1518 /* previous call already encrypted/decrypted its remainder,
1519 * see comment below */
1534 len &= ~(size_t)0xf;
1536 s390x_kma(ctx->ares, ctx->areslen, in, len, out,
1537 ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1540 ctx->fc |= S390X_KMA_HS;
1545 * If there is a remainder, it has to be saved so that it can be
1546 * processed by kma later. However, we also have to do the (for now
1547 * unauthenticated) encryption/decryption part here and now...
1550 if (!ctx->mreslen) {
1551 buf.w[0] = ctx->kma.param.j0.w[0];
1552 buf.w[1] = ctx->kma.param.j0.w[1];
1553 buf.w[2] = ctx->kma.param.j0.w[2];
1554 buf.w[3] = ctx->kma.param.cv.w + 1;
1555 s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
1559 for (i = 0; i < rem; i++) {
1560 ctx->mres[n + i] = in[i];
1561 out[i] = in[i] ^ ctx->kres[n + i];
1564 ctx->mreslen += rem;
1570 * Initialize context structure. Code is big-endian.
1572 static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
1573 const unsigned char *iv)
1575 ctx->kma.param.t.g[0] = 0;
1576 ctx->kma.param.t.g[1] = 0;
1577 ctx->kma.param.tpcl = 0;
1578 ctx->kma.param.taadl = 0;
1583 if (ctx->ivlen == 12) {
1584 memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
1585 ctx->kma.param.j0.w[3] = 1;
1586 ctx->kma.param.cv.w = 1;
1588 /* ctx->iv has the right size and is already padded. */
1589 memcpy(ctx->iv, iv, ctx->ivlen);
1590 s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
1591 ctx->fc, &ctx->kma.param);
1592 ctx->fc |= S390X_KMA_HS;
1594 ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
1595 ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
1596 ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
1597 ctx->kma.param.t.g[0] = 0;
1598 ctx->kma.param.t.g[1] = 0;
1603 * Performs various operations on the context structure depending on control
1604 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
1605 * Code is big-endian.
1607 static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
1609 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1610 S390X_AES_GCM_CTX *gctx_out;
1611 EVP_CIPHER_CTX *out;
1612 unsigned char *buf, *iv;
1613 int ivlen, enc, len;
1617 ivlen = EVP_CIPHER_CTX_iv_length(c);
1618 iv = EVP_CIPHER_CTX_iv_noconst(c);
1621 gctx->ivlen = ivlen;
1625 gctx->tls_aad_len = -1;
1628 case EVP_CTRL_AEAD_SET_IVLEN:
1633 iv = EVP_CIPHER_CTX_iv_noconst(c);
1634 len = S390X_gcm_ivpadlen(arg);
1636 /* Allocate memory for iv if needed. */
1637 if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
1639 OPENSSL_free(gctx->iv);
1641 if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
1642 EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
1647 memset(gctx->iv + arg, 0, len - arg - 8);
1648 *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
1653 case EVP_CTRL_AEAD_SET_TAG:
1654 buf = EVP_CIPHER_CTX_buf_noconst(c);
1655 enc = EVP_CIPHER_CTX_encrypting(c);
1656 if (arg <= 0 || arg > 16 || enc)
1659 memcpy(buf, ptr, arg);
1663 case EVP_CTRL_AEAD_GET_TAG:
1664 enc = EVP_CIPHER_CTX_encrypting(c);
1665 if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
1668 memcpy(ptr, gctx->kma.param.t.b, arg);
1671 case EVP_CTRL_GCM_SET_IV_FIXED:
1672 /* Special case: -1 length restores whole iv */
1674 memcpy(gctx->iv, ptr, gctx->ivlen);
1680 * Fixed field must be at least 4 bytes and invocation field at least 8 bytes.
1682 if ((arg < 4) || (gctx->ivlen - arg) < 8)
1686 memcpy(gctx->iv, ptr, arg);
1688 enc = EVP_CIPHER_CTX_encrypting(c);
1689 if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
1695 case EVP_CTRL_GCM_IV_GEN:
1696 if (gctx->iv_gen == 0 || gctx->key_set == 0)
1699 s390x_aes_gcm_setiv(gctx, gctx->iv);
1701 if (arg <= 0 || arg > gctx->ivlen)
1704 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
1706 * The invocation field will be at least 8 bytes in size, so there is no
1707 * need to check for wraparound or to increment more than the last 8 bytes.
1709 ctr64_inc(gctx->iv + gctx->ivlen - 8);
1713 case EVP_CTRL_GCM_SET_IV_INV:
1714 enc = EVP_CIPHER_CTX_encrypting(c);
1715 if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
1718 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
1719 s390x_aes_gcm_setiv(gctx, gctx->iv);
1723 case EVP_CTRL_AEAD_TLS1_AAD:
1724 /* Save the aad for later use. */
1725 if (arg != EVP_AEAD_TLS1_AAD_LEN)
1728 buf = EVP_CIPHER_CTX_buf_noconst(c);
1729 memcpy(buf, ptr, arg);
1730 gctx->tls_aad_len = arg;
1731 gctx->tls_enc_records = 0;
1733 len = buf[arg - 2] << 8 | buf[arg - 1];
1734 /* Correct length for explicit iv. */
1735 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
1737 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
1739 /* If decrypting, correct for the tag too. */
1740 enc = EVP_CIPHER_CTX_encrypting(c);
1742 if (len < EVP_GCM_TLS_TAG_LEN)
1744 len -= EVP_GCM_TLS_TAG_LEN;
1746 buf[arg - 2] = len >> 8;
1747 buf[arg - 1] = len & 0xff;
1748 /* Extra padding: tag appended to record. */
1749 return EVP_GCM_TLS_TAG_LEN;
1753 gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
1754 iv = EVP_CIPHER_CTX_iv_noconst(c);
1756 if (gctx->iv == iv) {
1757 gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
1759 len = S390X_gcm_ivpadlen(gctx->ivlen);
1761 if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
1762 EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
1766 memcpy(gctx_out->iv, gctx->iv, len);
1776 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
1778 static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
1779 const unsigned char *key,
1780 const unsigned char *iv, int enc)
1782 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1785 if (iv == NULL && key == NULL)
1789 keylen = EVP_CIPHER_CTX_key_length(ctx);
1790 memcpy(&gctx->kma.param.k, key, keylen);
1792 gctx->fc = S390X_AES_FC(keylen);
1794 gctx->fc |= S390X_DECRYPT;
1796 if (iv == NULL && gctx->iv_set)
1800 s390x_aes_gcm_setiv(gctx, iv);
1806 s390x_aes_gcm_setiv(gctx, iv);
1808 memcpy(gctx->iv, iv, gctx->ivlen);
1817 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1818 * if successful. Otherwise -1 is returned. Code is big-endian.
1820 static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1821 const unsigned char *in, size_t len)
1823 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1824 const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1825 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1828 if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
1832 * Check for too many records as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
1833 * Requirements from SP 800-38D". The requirement is for one party to the
1834 * communication to fail after 2^64 - 1 records. We do this on the
1835 * encrypting side only.
1837 if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
1838 EVPerr(EVP_F_S390X_AES_GCM_TLS_CIPHER, EVP_R_TOO_MANY_RECORDS);
1842 if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
1843 : EVP_CTRL_GCM_SET_IV_INV,
1844 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
1847 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1848 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1849 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1851 gctx->kma.param.taadl = gctx->tls_aad_len << 3;
1852 gctx->kma.param.tpcl = len << 3;
1853 s390x_kma(buf, gctx->tls_aad_len, in, len, out,
1854 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1857 memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
1858 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1860 if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
1861 EVP_GCM_TLS_TAG_LEN)) {
1862 OPENSSL_cleanse(out, len);
1869 gctx->tls_aad_len = -1;
1874 * Called from EVP layer to initialize context, process additional
1875 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1876 * ciphertext or process a TLS packet, depending on context. Returns bytes
1877 * written on success. Otherwise -1 is returned. Code is big-endian.
1879 static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1880 const unsigned char *in, size_t len)
1882 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1883 unsigned char *buf, tmp[16];
1889 if (gctx->tls_aad_len >= 0)
1890 return s390x_aes_gcm_tls_cipher(ctx, out, in, len);
1897 if (s390x_aes_gcm_aad(gctx, in, len))
1900 if (s390x_aes_gcm(gctx, in, out, len))
1905 gctx->kma.param.taadl <<= 3;
1906 gctx->kma.param.tpcl <<= 3;
1907 s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
1908 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1909 /* recall that we already did en-/decrypt gctx->mres
1910 * and returned it to caller... */
1911 OPENSSL_cleanse(tmp, gctx->mreslen);
1914 enc = EVP_CIPHER_CTX_encrypting(ctx);
1918 if (gctx->taglen < 0)
1921 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1922 if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
1929 static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
1931 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1932 const unsigned char *iv;
1937 iv = EVP_CIPHER_CTX_iv(c);
1939 OPENSSL_free(gctx->iv);
1941 OPENSSL_cleanse(gctx, sizeof(*gctx));
1945 # define S390X_AES_XTS_CTX EVP_AES_XTS_CTX
1946 # define S390X_aes_128_xts_CAPABLE 1 /* checked by callee */
1947 # define S390X_aes_256_xts_CAPABLE 1
1949 # define s390x_aes_xts_init_key aes_xts_init_key
1950 static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
1951 const unsigned char *key,
1952 const unsigned char *iv, int enc);
1953 # define s390x_aes_xts_cipher aes_xts_cipher
1954 static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1955 const unsigned char *in, size_t len);
1956 # define s390x_aes_xts_ctrl aes_xts_ctrl
1957 static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
1958 # define s390x_aes_xts_cleanup aes_xts_cleanup
1960 # define S390X_aes_128_ccm_CAPABLE (S390X_aes_128_CAPABLE && \
1961 (OPENSSL_s390xcap_P.kmac[0] & \
1962 S390X_CAPBIT(S390X_AES_128)))
1963 # define S390X_aes_192_ccm_CAPABLE (S390X_aes_192_CAPABLE && \
1964 (OPENSSL_s390xcap_P.kmac[0] & \
1965 S390X_CAPBIT(S390X_AES_192)))
1966 # define S390X_aes_256_ccm_CAPABLE (S390X_aes_256_CAPABLE && \
1967 (OPENSSL_s390xcap_P.kmac[0] & \
1968 S390X_CAPBIT(S390X_AES_256)))
1970 # define S390X_CCM_AAD_FLAG 0x40
1973 * Set nonce and length fields. Code is big-endian.
1975 static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
1976 const unsigned char *nonce,
1979 ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
1980 ctx->aes.ccm.nonce.g[1] = mlen;
1981 memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
1985 * Process additional authenticated data. Code is big-endian.
1987 static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
1996 ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;
1998 /* Suppress 'type-punned pointer dereference' warning. */
1999 ptr = ctx->aes.ccm.buf.b;
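/*
 * Encode the AAD length as in RFC 3610: lengths below 0xff00 take two
 * bytes, lengths of at least 2^32 take 0xffff followed by eight bytes,
 * anything in between takes 0xfffe followed by four bytes.
 */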
2001 if (alen < ((1 << 16) - (1 << 8))) {
2002 *(uint16_t *)ptr = alen;
2004 } else if (sizeof(alen) == 8
2005 && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
2006 *(uint16_t *)ptr = 0xffff;
2007 *(uint64_t *)(ptr + 2) = alen;
2010 *(uint16_t *)ptr = 0xfffe;
2011 *(uint32_t *)(ptr + 2) = alen;
2015 while (i < 16 && alen) {
2016 ctx->aes.ccm.buf.b[i] = *aad;
2022 ctx->aes.ccm.buf.b[i] = 0;
2026 ctx->aes.ccm.kmac_param.icv.g[0] = 0;
2027 ctx->aes.ccm.kmac_param.icv.g[1] = 0;
2028 s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
2029 &ctx->aes.ccm.kmac_param);
2030 ctx->aes.ccm.blocks += 2;
2033 alen &= ~(size_t)0xf;
2035 s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
2036 ctx->aes.ccm.blocks += alen >> 4;
2040 for (i = 0; i < rem; i++)
2041 ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];
2043 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
2044 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
2045 ctx->aes.ccm.kmac_param.k);
2046 ctx->aes.ccm.blocks++;
2051 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for success.
2054 static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
2055 unsigned char *out, size_t len, int enc)
2058 unsigned int i, l, num;
2059 unsigned char flags;
2061 flags = ctx->aes.ccm.nonce.b[0];
2062 if (!(flags & S390X_CCM_AAD_FLAG)) {
2063 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
2064 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
2065 ctx->aes.ccm.blocks++;
2068 ctx->aes.ccm.nonce.b[0] = l;
2071 * Reconstruct the message length from the encoded length field and
2072 * then reinitialize that field with the initial counter value.
2075 for (i = 15 - l; i < 15; i++) {
2076 n |= ctx->aes.ccm.nonce.b[i];
2077 ctx->aes.ccm.nonce.b[i] = 0;
2080 n |= ctx->aes.ccm.nonce.b[15];
2081 ctx->aes.ccm.nonce.b[15] = 1;
2084 return -1; /* length mismatch */
2087 /* Two operations per block plus one for tag encryption */
2088 ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
2089 if (ctx->aes.ccm.blocks > (1ULL << 61))
2090 return -2; /* too much data */
2095 len &= ~(size_t)0xf;
2098 /* mac-then-encrypt */
2100 s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
2102 for (i = 0; i < rem; i++)
2103 ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];
2105 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
2106 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
2107 ctx->aes.ccm.kmac_param.k);
2110 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
2111 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
2112 &num, (ctr128_f)AES_ctr32_encrypt);
2114 /* decrypt-then-mac */
2115 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
2116 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
2117 &num, (ctr128_f)AES_ctr32_encrypt);
2120 s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
2122 for (i = 0; i < rem; i++)
2123 ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];
2125 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
2126 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
2127 ctx->aes.ccm.kmac_param.k);
2131 for (i = 15 - l; i < 16; i++)
2132 ctx->aes.ccm.nonce.b[i] = 0;
2134 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
2135 ctx->aes.ccm.kmac_param.k);
2136 ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
2137 ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];
2139 ctx->aes.ccm.nonce.b[0] = flags; /* restore flags field */
2144 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
2145 * if successful. Otherwise -1 is returned.
2147 static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2148 const unsigned char *in, size_t len)
2150 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
2151 unsigned char *ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2152 unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
2153 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
2156 || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
2160 /* Set explicit iv (sequence number). */
2161 memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
2164 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
2166 * Get explicit iv (sequence number). We already have fixed iv
2167 * (server/client_write_iv) here.
2169 memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
2170 s390x_aes_ccm_setiv(cctx, ivec, len);
2172 /* Process aad (sequence number|type|version|length) */
2173 s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);
2175 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
2176 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
2179 if (s390x_aes_ccm(cctx, in, out, len, enc))
2182 memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2183 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
2185 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
2186 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
2191 OPENSSL_cleanse(out, len);
2197 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is returned.
2200 static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
2201 const unsigned char *key,
2202 const unsigned char *iv, int enc)
2204 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
2205 unsigned char *ivec;
2208 if (iv == NULL && key == NULL)
2212 keylen = EVP_CIPHER_CTX_key_length(ctx);
2213 cctx->aes.ccm.fc = S390X_AES_FC(keylen);
2214 memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);
2216 /* Store encoded m and l. */
2217 cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
2218 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
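/*
 * With the default l = 8 and m = 12 this is ((8 - 1) & 0x7)
 * | (((12 - 2) >> 1) & 0x7) << 3 = 0x07 | 0x28 = 0x2f.
 */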
2219 memset(cctx->aes.ccm.nonce.b + 1, 0,
2220 sizeof(cctx->aes.ccm.nonce.b));
2221 cctx->aes.ccm.blocks = 0;
2223 cctx->aes.ccm.key_set = 1;
2227 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2228 memcpy(ivec, iv, 15 - cctx->aes.ccm.l);
2230 cctx->aes.ccm.iv_set = 1;
2237 * Called from EVP layer to initialize context, process additional
2238 * authenticated data, en/de-crypt plain/cipher-text and authenticate
2239 * plaintext or process a TLS packet, depending on context. Returns bytes
2240 * written on success. Otherwise -1 is returned.
2242 static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2243 const unsigned char *in, size_t len)
2245 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
2246 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
2248 unsigned char *buf, *ivec;
2250 if (!cctx->aes.ccm.key_set)
2253 if (cctx->aes.ccm.tls_aad_len >= 0)
2254 return s390x_aes_ccm_tls_cipher(ctx, out, in, len);
2257 * Final(): does not return any data. Recall that CCM is mac-then-encrypt,
2258 * so integrity must already have been checked at Update(), i.e. before
2259 * potentially corrupted data is output.
2261 if (in == NULL && out != NULL)
2264 if (!cctx->aes.ccm.iv_set)
2268 /* Update(): Pass message length. */
2270 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2271 s390x_aes_ccm_setiv(cctx, ivec, len);
2273 cctx->aes.ccm.len_set = 1;
2277 /* Update(): Process aad. */
2278 if (!cctx->aes.ccm.len_set && len)
2281 s390x_aes_ccm_aad(cctx, in, len);
2285 /* The tag must be set before actually decrypting data */
2286 if (!enc && !cctx->aes.ccm.tag_set)
2289 /* Update(): Process message. */
2291 if (!cctx->aes.ccm.len_set) {
2293 * In case message length was not previously set explicitly via
2294 * Update(), set it now.
2296 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2297 s390x_aes_ccm_setiv(cctx, ivec, len);
2299 cctx->aes.ccm.len_set = 1;
2303 if (s390x_aes_ccm(cctx, in, out, len, enc))
2306 cctx->aes.ccm.tag_set = 1;
2311 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
2312 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
2313 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
2319 OPENSSL_cleanse(out, len);
2321 cctx->aes.ccm.iv_set = 0;
2322 cctx->aes.ccm.tag_set = 0;
2323 cctx->aes.ccm.len_set = 0;
2329 * Performs various operations on the context structure depending on control
2330 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
2331 * Code is big-endian.
2333 static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2335 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
2336 unsigned char *buf, *iv;
2341 cctx->aes.ccm.key_set = 0;
2342 cctx->aes.ccm.iv_set = 0;
2343 cctx->aes.ccm.l = 8;
2344 cctx->aes.ccm.m = 12;
2345 cctx->aes.ccm.tag_set = 0;
2346 cctx->aes.ccm.len_set = 0;
2347 cctx->aes.ccm.tls_aad_len = -1;
2350 case EVP_CTRL_AEAD_TLS1_AAD:
2351 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2354 /* Save the aad for later use. */
2355 buf = EVP_CIPHER_CTX_buf_noconst(c);
2356 memcpy(buf, ptr, arg);
2357 cctx->aes.ccm.tls_aad_len = arg;
2359 len = buf[arg - 2] << 8 | buf[arg - 1];
2360 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
2363 /* Correct length for explicit iv. */
2364 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
2366 enc = EVP_CIPHER_CTX_encrypting(c);
2368 if (len < cctx->aes.ccm.m)
2371 /* Correct length for tag. */
2372 len -= cctx->aes.ccm.m;
2375 buf[arg - 2] = len >> 8;
2376 buf[arg - 1] = len & 0xff;
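/*
 * Illustrative note: ptr holds the 13-byte TLS AAD
 * (8-byte sequence number | type | version | 2-byte length).  When
 * decrypting, e.g., a 133-byte record carrying a 16-byte tag, the
 * length written back above is 133 - 8 (explicit IV) - 16 (tag) = 109;
 * when encrypting only the explicit IV is subtracted.
 */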
2378 /* Extra padding: tag appended to record. */
2379 return cctx->aes.ccm.m;
2381 case EVP_CTRL_CCM_SET_IV_FIXED:
2382 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
2385 /* Copy to first part of the iv. */
2386 iv = EVP_CIPHER_CTX_iv_noconst(c);
2387 memcpy(iv, ptr, arg);
2390 case EVP_CTRL_AEAD_SET_IVLEN:
2394 case EVP_CTRL_CCM_SET_L:
2395 if (arg < 2 || arg > 8)
2398 cctx->aes.ccm.l = arg;
2401 case EVP_CTRL_AEAD_SET_TAG:
2402 if ((arg & 1) || arg < 4 || arg > 16)
2405 enc = EVP_CIPHER_CTX_encrypting(c);
2410 cctx->aes.ccm.tag_set = 1;
2411 buf = EVP_CIPHER_CTX_buf_noconst(c);
2412 memcpy(buf, ptr, arg);
2415 cctx->aes.ccm.m = arg;
2418 case EVP_CTRL_AEAD_GET_TAG:
2419 enc = EVP_CIPHER_CTX_encrypting(c);
2420 if (!enc || !cctx->aes.ccm.tag_set)
2423 if (arg < cctx->aes.ccm.m)
2426 memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2427 cctx->aes.ccm.tag_set = 0;
2428 cctx->aes.ccm.iv_set = 0;
2429 cctx->aes.ccm.len_set = 0;
2440 # define s390x_aes_ccm_cleanup aes_ccm_cleanup
2442 # ifndef OPENSSL_NO_OCB
2443 # define S390X_AES_OCB_CTX EVP_AES_OCB_CTX
2444 # define S390X_aes_128_ocb_CAPABLE 0
2445 # define S390X_aes_192_ocb_CAPABLE 0
2446 # define S390X_aes_256_ocb_CAPABLE 0
2448 # define s390x_aes_ocb_init_key aes_ocb_init_key
2449 static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2450 const unsigned char *iv, int enc);
2451 # define s390x_aes_ocb_cipher aes_ocb_cipher
2452 static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2453 const unsigned char *in, size_t len);
2454 # define s390x_aes_ocb_cleanup aes_ocb_cleanup
2455 static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
2456 # define s390x_aes_ocb_ctrl aes_ocb_ctrl
2457 static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
2460 # ifndef OPENSSL_NO_SIV
2461 # define S390X_AES_SIV_CTX EVP_AES_SIV_CTX
2462 # define S390X_aes_128_siv_CAPABLE 0
2463 # define S390X_aes_192_siv_CAPABLE 0
2464 # define S390X_aes_256_siv_CAPABLE 0
2466 # define s390x_aes_siv_init_key aes_siv_init_key
2467 # define s390x_aes_siv_cipher aes_siv_cipher
2468 # define s390x_aes_siv_cleanup aes_siv_cleanup
2469 # define s390x_aes_siv_ctrl aes_siv_ctrl
2472 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \
2474 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2475 nid##_##keylen##_##nmode,blocksize, \
2478 flags | EVP_CIPH_##MODE##_MODE, \
2479 s390x_aes_##mode##_init_key, \
2480 s390x_aes_##mode##_cipher, \
2482 sizeof(S390X_AES_##MODE##_CTX), \
2488 static const EVP_CIPHER aes_##keylen##_##mode = { \
2489 nid##_##keylen##_##nmode, \
2493 flags | EVP_CIPH_##MODE##_MODE, \
2495 aes_##mode##_cipher, \
2497 sizeof(EVP_AES_KEY), \
2503 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2505 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2506 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2509 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
2510 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2511 nid##_##keylen##_##mode, \
2513 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2515 flags | EVP_CIPH_##MODE##_MODE, \
2516 s390x_aes_##mode##_init_key, \
2517 s390x_aes_##mode##_cipher, \
2518 s390x_aes_##mode##_cleanup, \
2519 sizeof(S390X_AES_##MODE##_CTX), \
2522 s390x_aes_##mode##_ctrl, \
2525 static const EVP_CIPHER aes_##keylen##_##mode = { \
2526 nid##_##keylen##_##mode,blocksize, \
2527 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2529 flags | EVP_CIPH_##MODE##_MODE, \
2530 aes_##mode##_init_key, \
2531 aes_##mode##_cipher, \
2532 aes_##mode##_cleanup, \
2533 sizeof(EVP_AES_##MODE##_CTX), \
2536 aes_##mode##_ctrl, \
2539 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2541 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2542 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2547 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
2548 static const EVP_CIPHER aes_##keylen##_##mode = { \
2549 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
2550 flags|EVP_CIPH_##MODE##_MODE, \
2552 aes_##mode##_cipher, \
2554 sizeof(EVP_AES_KEY), \
2555 NULL,NULL,NULL,NULL }; \
2556 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2557 { return &aes_##keylen##_##mode; }
2559 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
2560 static const EVP_CIPHER aes_##keylen##_##mode = { \
2561 nid##_##keylen##_##mode,blocksize, \
2562 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
2564 flags|EVP_CIPH_##MODE##_MODE, \
2565 aes_##mode##_init_key, \
2566 aes_##mode##_cipher, \
2567 aes_##mode##_cleanup, \
2568 sizeof(EVP_AES_##MODE##_CTX), \
2569 NULL,NULL,aes_##mode##_ctrl,NULL }; \
2570 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2571 { return &aes_##keylen##_##mode; }
2575 #if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__))
2576 # include "arm_arch.h"
2577 # if __ARM_MAX_ARCH__>=7
2578 # if defined(BSAES_ASM)
2579 # define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
2581 # if defined(VPAES_ASM)
2582 # define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
2584 # define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
2585 # define HWAES_set_encrypt_key aes_v8_set_encrypt_key
2586 # define HWAES_set_decrypt_key aes_v8_set_decrypt_key
2587 # define HWAES_encrypt aes_v8_encrypt
2588 # define HWAES_decrypt aes_v8_decrypt
2589 # define HWAES_cbc_encrypt aes_v8_cbc_encrypt
2590 # define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
2594 #if defined(HWAES_CAPABLE)
2595 int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits,
2597 int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits,
2599 void HWAES_encrypt(const unsigned char *in, unsigned char *out,
2600 const AES_KEY *key);
2601 void HWAES_decrypt(const unsigned char *in, unsigned char *out,
2602 const AES_KEY *key);
2603 void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out,
2604 size_t length, const AES_KEY *key,
2605 unsigned char *ivec, const int enc);
2606 void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
2607 size_t len, const AES_KEY *key,
2608 const unsigned char ivec[16]);
2609 void HWAES_xts_encrypt(const unsigned char *inp, unsigned char *out,
2610 size_t len, const AES_KEY *key1,
2611 const AES_KEY *key2, const unsigned char iv[16]);
2612 void HWAES_xts_decrypt(const unsigned char *inp, unsigned char *out,
2613 size_t len, const AES_KEY *key1,
2614 const AES_KEY *key2, const unsigned char iv[16]);
2617 #define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
2618 BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2619 BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2620 BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2621 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2622 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
2623 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
2624 BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
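/*
 * Illustrative note: aes_init_key() below picks a backend at runtime:
 * hardware AES (HWAES_*) where the CPU supports it, otherwise the
 * bit-sliced assembly (bsaes, only used for CBC decryption and CTR) or
 * the vector-permutation assembly (vpaes) where available, and finally
 * the generic AES_* implementation.
 */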
2626 static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2627 const unsigned char *iv, int enc)
2630 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2632 mode = EVP_CIPHER_CTX_mode(ctx);
2633 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
2635 #ifdef HWAES_CAPABLE
2636 if (HWAES_CAPABLE) {
2637 ret = HWAES_set_decrypt_key(key,
2638 EVP_CIPHER_CTX_key_length(ctx) * 8,
2640 dat->block = (block128_f) HWAES_decrypt;
2641 dat->stream.cbc = NULL;
2642 # ifdef HWAES_cbc_encrypt
2643 if (mode == EVP_CIPH_CBC_MODE)
2644 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2648 #ifdef BSAES_CAPABLE
2649 if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
2650 ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2652 dat->block = (block128_f) AES_decrypt;
2653 dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
2656 #ifdef VPAES_CAPABLE
2657 if (VPAES_CAPABLE) {
2658 ret = vpaes_set_decrypt_key(key,
2659 EVP_CIPHER_CTX_key_length(ctx) * 8,
2661 dat->block = (block128_f) vpaes_decrypt;
2662 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2663 (cbc128_f) vpaes_cbc_encrypt : NULL;
2667 ret = AES_set_decrypt_key(key,
2668 EVP_CIPHER_CTX_key_length(ctx) * 8,
2670 dat->block = (block128_f) AES_decrypt;
2671 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2672 (cbc128_f) AES_cbc_encrypt : NULL;
2675 #ifdef HWAES_CAPABLE
2676 if (HWAES_CAPABLE) {
2677 ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2679 dat->block = (block128_f) HWAES_encrypt;
2680 dat->stream.cbc = NULL;
2681 # ifdef HWAES_cbc_encrypt
2682 if (mode == EVP_CIPH_CBC_MODE)
2683 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2686 # ifdef HWAES_ctr32_encrypt_blocks
2687 if (mode == EVP_CIPH_CTR_MODE)
2688 dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2691 (void)0; /* terminate potentially open 'else' */
2694 #ifdef BSAES_CAPABLE
2695 if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
2696 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2698 dat->block = (block128_f) AES_encrypt;
2699 dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
2702 #ifdef VPAES_CAPABLE
2703 if (VPAES_CAPABLE) {
2704 ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2706 dat->block = (block128_f) vpaes_encrypt;
2707 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2708 (cbc128_f) vpaes_cbc_encrypt : NULL;
2712 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2714 dat->block = (block128_f) AES_encrypt;
2715 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2716 (cbc128_f) AES_cbc_encrypt : NULL;
2718 if (mode == EVP_CIPH_CTR_MODE)
2719 dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
2724 EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
2731 static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2732 const unsigned char *in, size_t len)
2734 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2736 if (dat->stream.cbc)
2737 (*dat->stream.cbc) (in, out, len, &dat->ks,
2738 EVP_CIPHER_CTX_iv_noconst(ctx),
2739 EVP_CIPHER_CTX_encrypting(ctx));
2740 else if (EVP_CIPHER_CTX_encrypting(ctx))
2741 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks,
2742 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2744 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
2745 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2750 static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2751 const unsigned char *in, size_t len)
2753 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
2755 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2760 for (i = 0, len -= bl; i <= len; i += bl)
2761 (*dat->block) (in + i, out + i, &dat->ks);
2766 static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2767 const unsigned char *in, size_t len)
2769 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2771 int num = EVP_CIPHER_CTX_num(ctx);
2772 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
2773 EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block);
2774 EVP_CIPHER_CTX_set_num(ctx, num);
2778 static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2779 const unsigned char *in, size_t len)
2781 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2783 int num = EVP_CIPHER_CTX_num(ctx);
2784 CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
2785 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2786 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2787 EVP_CIPHER_CTX_set_num(ctx, num);
2791 static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2792 const unsigned char *in, size_t len)
2794 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2796 int num = EVP_CIPHER_CTX_num(ctx);
2797 CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
2798 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2799 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2800 EVP_CIPHER_CTX_set_num(ctx, num);
2804 static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2805 const unsigned char *in, size_t len)
2807 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2809 if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
2810 int num = EVP_CIPHER_CTX_num(ctx);
2811 CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
2812 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2813 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2814 EVP_CIPHER_CTX_set_num(ctx, num);
2818 while (len >= MAXBITCHUNK) {
2819 int num = EVP_CIPHER_CTX_num(ctx);
2820 CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
2821 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2822 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2823 EVP_CIPHER_CTX_set_num(ctx, num);
2829 int num = EVP_CIPHER_CTX_num(ctx);
2830 CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
2831 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2832 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2833 EVP_CIPHER_CTX_set_num(ctx, num);
2839 static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2840 const unsigned char *in, size_t len)
2842 unsigned int num = EVP_CIPHER_CTX_num(ctx);
2843 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2845 if (dat->stream.ctr)
2846 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
2847 EVP_CIPHER_CTX_iv_noconst(ctx),
2848 EVP_CIPHER_CTX_buf_noconst(ctx),
2849 &num, dat->stream.ctr);
2851 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
2852 EVP_CIPHER_CTX_iv_noconst(ctx),
2853 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
2855 EVP_CIPHER_CTX_set_num(ctx, num);
2859 BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
2860 BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
2861 BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
2863 static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
2865 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2868 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
2869 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
2870 OPENSSL_free(gctx->iv);
2874 static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2876 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2881 gctx->ivlen = c->cipher->iv_len;
2885 gctx->tls_aad_len = -1;
2888 case EVP_CTRL_AEAD_SET_IVLEN:
2891 /* Allocate memory for IV if needed */
2892 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
2893 if (gctx->iv != c->iv)
2894 OPENSSL_free(gctx->iv);
2895 if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
2896 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
2903 case EVP_CTRL_AEAD_SET_TAG:
2904 if (arg <= 0 || arg > 16 || c->encrypt)
2906 memcpy(c->buf, ptr, arg);
2910 case EVP_CTRL_AEAD_GET_TAG:
2911 if (arg <= 0 || arg > 16 || !c->encrypt
2912 || gctx->taglen < 0)
2914 memcpy(ptr, c->buf, arg);
2917 case EVP_CTRL_GET_IV:
2918 if (gctx->iv_gen != 1 && gctx->iv_gen_rand != 1)
2920 if (gctx->ivlen != arg)
2922 memcpy(ptr, gctx->iv, arg);
2925 case EVP_CTRL_GCM_SET_IV_FIXED:
2926 /* Special case: -1 length restores whole IV */
2928 memcpy(gctx->iv, ptr, gctx->ivlen);
2933 * Fixed field must be at least 4 bytes and invocation field at least 8 bytes.
2936 if ((arg < 4) || (gctx->ivlen - arg) < 8)
2939 memcpy(gctx->iv, ptr, arg);
2940 if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
2945 case EVP_CTRL_GCM_IV_GEN:
2946 if (gctx->iv_gen == 0 || gctx->key_set == 0)
2948 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2949 if (arg <= 0 || arg > gctx->ivlen)
2951 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
2953 * The invocation field will be at least 8 bytes in size, so there is no
2954 * need to check for wraparound or to increment more than the last 8 bytes.
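 * Illustrative note: with the usual 12-byte GCM IV and an 8-byte
 * explicit field, each call copies bytes 4..11 of the current IV into
 * the record and then increments those 8 bytes, so successive records
 * carry consecutive counter values over the same 4-byte fixed field.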
2956 ctr64_inc(gctx->iv + gctx->ivlen - 8);
2960 case EVP_CTRL_GCM_SET_IV_INV:
2961 if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
2963 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
2964 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2968 case EVP_CTRL_AEAD_TLS1_AAD:
2969 /* Save the AAD for later use */
2970 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2972 memcpy(c->buf, ptr, arg);
2973 gctx->tls_aad_len = arg;
2974 gctx->tls_enc_records = 0;
2976 unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
2977 /* Correct length for explicit IV */
2978 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
2980 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
2981 /* If decrypting correct for tag too */
2983 if (len < EVP_GCM_TLS_TAG_LEN)
2985 len -= EVP_GCM_TLS_TAG_LEN;
2987 c->buf[arg - 2] = len >> 8;
2988 c->buf[arg - 1] = len & 0xff;
2990 /* Extra padding: tag appended to record */
2991 return EVP_GCM_TLS_TAG_LEN;
2995 EVP_CIPHER_CTX *out = ptr;
2996 EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
2997 if (gctx->gcm.key) {
2998 if (gctx->gcm.key != &gctx->ks)
3000 gctx_out->gcm.key = &gctx_out->ks;
3002 if (gctx->iv == c->iv)
3003 gctx_out->iv = out->iv;
3005 if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
3006 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
3009 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
3020 static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3021 const unsigned char *iv, int enc)
3023 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
3028 #ifdef HWAES_CAPABLE
3029 if (HWAES_CAPABLE) {
3030 HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3031 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3032 (block128_f) HWAES_encrypt);
3033 # ifdef HWAES_ctr32_encrypt_blocks
3034 gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
3041 #ifdef BSAES_CAPABLE
3042 if (BSAES_CAPABLE) {
3043 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3044 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3045 (block128_f) AES_encrypt);
3046 gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
3050 #ifdef VPAES_CAPABLE
3051 if (VPAES_CAPABLE) {
3052 vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3053 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3054 (block128_f) vpaes_encrypt);
3059 (void)0; /* terminate potentially open 'else' */
3061 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3062 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3063 (block128_f) AES_encrypt);
3065 gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
3072 * If we have an iv we can set it directly, otherwise use the saved IV.
3074 if (iv == NULL && gctx->iv_set)
3077 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
3082 /* If key set use IV, otherwise copy */
3084 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
3086 memcpy(gctx->iv, iv, gctx->ivlen);
3094 * Handle TLS GCM packet format. This consists of the last portion of the IV
3095 * followed by the payload and finally the tag. On encrypt generate IV,
3096 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload and verify the tag.
3100 static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3101 const unsigned char *in, size_t len)
3103 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
3105 /* Encrypt/decrypt must be performed in place */
3107 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
3111 * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
3112 * Requirements from SP 800-38D". The requirement is for one party to the
3113 * communication to fail after 2^64 - 1 records. We do this on the encrypting side only.
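 * Implementation note: tls_enc_records is a uint64_t, so the
 * "++... == 0" test below only fires once the per-key record counter
 * wraps around, i.e. on the 2^64-th record encrypted under one key.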
3116 if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
3117 EVPerr(EVP_F_AES_GCM_TLS_CIPHER, EVP_R_TOO_MANY_RECORDS);
3123 * Set IV from start of buffer or generate IV and write to start of the buffer.
3125 if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN
3126 : EVP_CTRL_GCM_SET_IV_INV,
3127 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
3130 if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
3132 /* Fix buffer and length to point to payload */
3133 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
3134 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
3135 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
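/*
 * Illustrative note: at this point the record layout is
 *   | 8-byte explicit IV | len bytes of payload | 16-byte tag |
 * and the value returned on success is len + 8 + 16, the full record
 * length.
 */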
3137 /* Encrypt payload */
3140 #if defined(AES_GCM_ASM)
3141 if (len >= 32 && AES_GCM_ASM(gctx)) {
3142 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
3145 bulk = AES_gcm_encrypt(in, out, len,
3147 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3148 gctx->gcm.len.u[1] += bulk;
3151 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
3154 len - bulk, gctx->ctr))
3158 #if defined(AES_GCM_ASM2)
3159 if (len >= 32 && AES_GCM_ASM2(gctx)) {
3160 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
3163 bulk = AES_gcm_encrypt(in, out, len,
3165 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3166 gctx->gcm.len.u[1] += bulk;
3169 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
3170 in + bulk, out + bulk, len - bulk))
3174 /* Finally write tag */
3175 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
3176 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
3181 #if defined(AES_GCM_ASM)
3182 if (len >= 16 && AES_GCM_ASM(gctx)) {
3183 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
3186 bulk = AES_gcm_decrypt(in, out, len,
3188 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3189 gctx->gcm.len.u[1] += bulk;
3192 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3195 len - bulk, gctx->ctr))
3199 #if defined(AES_GCM_ASM2)
3200 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3201 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
3204 bulk = AES_gcm_decrypt(in, out, len,
3206 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3207 gctx->gcm.len.u[1] += bulk;
3210 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3211 in + bulk, out + bulk, len - bulk))
3215 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
3216 /* If tag mismatch wipe buffer */
3217 if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
3218 OPENSSL_cleanse(out, len);
3226 gctx->tls_aad_len = -1;
3232 * See SP800-38D (GCM) Section 8 "Uniqueness requirement on IVs and keys"
3234 * See also 8.2.2 RBG-based construction.
3235 * Random construction consists of a free field (which can be NULL) and a
3236 * random field which will use a DRBG that can return at least 96 bits of
3237 * entropy strength. (The DRBG must be seeded by the FIPS module).
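 * Illustrative note: aes_gcm_cipher() below calls
 * aes_gcm_iv_generate(gctx, 0), so with the default 12-byte GCM IV the
 * free field is empty and all 96 bits of the IV come from the DRBG via
 * RAND_bytes().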
3239 static int aes_gcm_iv_generate(EVP_AES_GCM_CTX *gctx, int offset)
3241 int sz = gctx->ivlen - offset;
3243 /* Must be at least 96 bits */
3244 if (sz <= 0 || gctx->ivlen < 12)
3247 /* Use DRBG to generate random iv */
3248 if (RAND_bytes(gctx->iv + offset, sz) <= 0)
3252 #endif /* FIPS_MODE */
3254 static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3255 const unsigned char *in, size_t len)
3257 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
3259 /* If not set up, return error */
3263 if (gctx->tls_aad_len >= 0)
3264 return aes_gcm_tls_cipher(ctx, out, in, len);
3268 * FIPS requires generation of AES-GCM IVs inside the FIPS module.
3269 * The IV can still be set externally (the security policy will state that
3270 * this is not FIPS compliant). There are some applications
3271 * where setting the IV externally is the only option available.
3273 if (!gctx->iv_set) {
3274 if (!ctx->encrypt || !aes_gcm_iv_generate(gctx, 0))
3276 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
3278 gctx->iv_gen_rand = 1;
3283 #endif /* FIPS_MODE */
3287 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
3289 } else if (ctx->encrypt) {
3292 #if defined(AES_GCM_ASM)
3293 if (len >= 32 && AES_GCM_ASM(gctx)) {
3294 size_t res = (16 - gctx->gcm.mres) % 16;
3296 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3299 bulk = AES_gcm_encrypt(in + res,
3300 out + res, len - res,
3301 gctx->gcm.key, gctx->gcm.Yi.c,
3303 gctx->gcm.len.u[1] += bulk;
3307 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
3310 len - bulk, gctx->ctr))
3314 #if defined(AES_GCM_ASM2)
3315 if (len >= 32 && AES_GCM_ASM2(gctx)) {
3316 size_t res = (16 - gctx->gcm.mres) % 16;
3318 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3321 bulk = AES_gcm_encrypt(in + res,
3322 out + res, len - res,
3323 gctx->gcm.key, gctx->gcm.Yi.c,
3325 gctx->gcm.len.u[1] += bulk;
3329 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
3330 in + bulk, out + bulk, len - bulk))
3336 #if defined(AES_GCM_ASM)
3337 if (len >= 16 && AES_GCM_ASM(gctx)) {
3338 size_t res = (16 - gctx->gcm.mres) % 16;
3340 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3343 bulk = AES_gcm_decrypt(in + res,
3344 out + res, len - res,
3346 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3347 gctx->gcm.len.u[1] += bulk;
3351 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3354 len - bulk, gctx->ctr))
3358 #if defined(AES_GCM_ASM2)
3359 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3360 size_t res = (16 - gctx->gcm.mres) % 16;
3362 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3365 bulk = AES_gcm_decrypt(in + res,
3366 out + res, len - res,
3368 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3369 gctx->gcm.len.u[1] += bulk;
3373 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3374 in + bulk, out + bulk, len - bulk))
3380 if (!ctx->encrypt) {
3381 if (gctx->taglen < 0)
3383 if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
3388 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
3390 /* Don't reuse the IV */
3397 #define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
3398 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3399 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3400 | EVP_CIPH_CUSTOM_COPY)
3402 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
3403 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3404 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
3405 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3406 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
3407 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
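/*
 * Illustrative usage sketch (not part of the library, declarations
 * omitted): the GCM ctrl handlers above are normally driven through the
 * public EVP interface, e.g.
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL);
 *     EVP_EncryptInit_ex(c, NULL, NULL, key, iv);
 *     EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen);   (AAD pass)
 *     EVP_EncryptUpdate(c, out, &outl, in, inlen);
 *     EVP_EncryptFinal_ex(c, out + outl, &tmplen);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 *     EVP_CIPHER_CTX_free(c);
 */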
3409 static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3411 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,c);
3412 if (type == EVP_CTRL_COPY) {
3413 EVP_CIPHER_CTX *out = ptr;
3414 EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);
3415 if (xctx->xts.key1) {
3416 if (xctx->xts.key1 != &xctx->ks1)
3418 xctx_out->xts.key1 = &xctx_out->ks1;
3420 if (xctx->xts.key2) {
3421 if (xctx->xts.key2 != &xctx->ks2)
3423 xctx_out->xts.key2 = &xctx_out->ks2;
3426 } else if (type != EVP_CTRL_INIT)
3428 /* key1 and key2 are used as an indicator that both key and IV are set */
3429 xctx->xts.key1 = NULL;
3430 xctx->xts.key2 = NULL;
3434 static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3435 const unsigned char *iv, int enc)
3437 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3443 /* The key is really two half-length keys */
3444 const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
3445 const int bits = bytes * 8;
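/*
 * Illustrative note: EVP_aes_256_xts() reports a 64-byte key (the
 * BLOCK_CIPHER_custom macro doubles keylen for XTS), so bytes = 32 and
 * each half keys an independent AES-256 schedule.
 */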
3448 * Verify that the two keys are different.
3450 * This addresses the vulnerability described in Rogaway's
3451 * September 2004 paper:
3453 * "Efficient Instantiations of Tweakable Blockciphers and
3454 * Refinements to Modes OCB and PMAC".
3455 * (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf)
3457 * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states
3459 * "The check for Key_1 != Key_2 shall be done at any place
3460 * BEFORE using the keys in the XTS-AES algorithm to process data with them."
3463 if (memcmp(key, key + bytes, bytes) == 0) {
3464 EVPerr(EVP_F_AES_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
3469 xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
3471 xctx->stream = NULL;
3473 /* key_len is two AES keys */
3474 #ifdef HWAES_CAPABLE
3475 if (HWAES_CAPABLE) {
3477 HWAES_set_encrypt_key(key, bits, &xctx->ks1.ks);
3478 xctx->xts.block1 = (block128_f) HWAES_encrypt;
3479 # ifdef HWAES_xts_encrypt
3480 xctx->stream = HWAES_xts_encrypt;
3483 HWAES_set_decrypt_key(key, bits, &xctx->ks1.ks);
3484 xctx->xts.block1 = (block128_f) HWAES_decrypt;
3485 # ifdef HWAES_xts_decrypt
3486 xctx->stream = HWAES_xts_decrypt;
3490 HWAES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
3491 xctx->xts.block2 = (block128_f) HWAES_encrypt;
3493 xctx->xts.key1 = &xctx->ks1;
3497 #ifdef BSAES_CAPABLE
3499 xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
3502 #ifdef VPAES_CAPABLE
3503 if (VPAES_CAPABLE) {
3505 vpaes_set_encrypt_key(key, bits, &xctx->ks1.ks);
3506 xctx->xts.block1 = (block128_f) vpaes_encrypt;
3508 vpaes_set_decrypt_key(key, bits, &xctx->ks1.ks);
3509 xctx->xts.block1 = (block128_f) vpaes_decrypt;
3512 vpaes_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
3513 xctx->xts.block2 = (block128_f) vpaes_encrypt;
3515 xctx->xts.key1 = &xctx->ks1;
3519 (void)0; /* terminate potentially open 'else' */
3522 AES_set_encrypt_key(key, bits, &xctx->ks1.ks);
3523 xctx->xts.block1 = (block128_f) AES_encrypt;
3525 AES_set_decrypt_key(key, bits, &xctx->ks1.ks);
3526 xctx->xts.block1 = (block128_f) AES_decrypt;
3529 AES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
3530 xctx->xts.block2 = (block128_f) AES_encrypt;
3532 xctx->xts.key1 = &xctx->ks1;
3537 xctx->xts.key2 = &xctx->ks2;
3538 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
3544 static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3545 const unsigned char *in, size_t len)
3547 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3549 if (xctx->xts.key1 == NULL
3550 || xctx->xts.key2 == NULL
3553 || len < AES_BLOCK_SIZE)
3557 * Impose a limit of 2^20 blocks per data unit as specified by
3558 * IEEE Std 1619-2018. The earlier and obsolete IEEE Std 1619-2007
3559 * indicated that this was a SHOULD NOT rather than a MUST NOT.
3560 * NIST SP 800-38E mandates the same limit.
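 * Illustrative note: with 16-byte AES blocks this caps a single data
 * unit at 2^20 * 16 bytes, i.e. 16 MiB processed under one tweak.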
3562 if (len > XTS_MAX_BLOCKS_PER_DATA_UNIT * AES_BLOCK_SIZE) {
3563 EVPerr(EVP_F_AES_XTS_CIPHER, EVP_R_XTS_DATA_UNIT_IS_TOO_LARGE);
3568 (*xctx->stream) (in, out, len,
3569 xctx->xts.key1, xctx->xts.key2,
3570 EVP_CIPHER_CTX_iv_noconst(ctx));
3571 else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx),
3573 EVP_CIPHER_CTX_encrypting(ctx)))
3578 #define aes_xts_cleanup NULL
3580 #define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
3581 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3582 | EVP_CIPH_CUSTOM_COPY)
3584 BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
3585 BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
3587 static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3589 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
3598 cctx->tls_aad_len = -1;
3601 case EVP_CTRL_AEAD_TLS1_AAD:
3602 /* Save the AAD for later use */
3603 if (arg != EVP_AEAD_TLS1_AAD_LEN)
3605 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3606 cctx->tls_aad_len = arg;
3609 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
3610 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
3611 /* Correct length for explicit IV */
3612 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
3614 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
3615 /* If decrypting correct for tag too */
3616 if (!EVP_CIPHER_CTX_encrypting(c)) {
3621 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
3622 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
3624 /* Extra padding: tag appended to record */
3627 case EVP_CTRL_CCM_SET_IV_FIXED:
3628 /* Sanity check length */
3629 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
3631 /* Just copy to first part of IV */
3632 memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
3635 case EVP_CTRL_AEAD_SET_IVLEN:
3638 case EVP_CTRL_CCM_SET_L:
3639 if (arg < 2 || arg > 8)
3644 case EVP_CTRL_AEAD_SET_TAG:
3645 if ((arg & 1) || arg < 4 || arg > 16)
3647 if (EVP_CIPHER_CTX_encrypting(c) && ptr)
3651 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3656 case EVP_CTRL_AEAD_GET_TAG:
3657 if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
3659 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
3668 EVP_CIPHER_CTX *out = ptr;
3669 EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
3670 if (cctx->ccm.key) {
3671 if (cctx->ccm.key != &cctx->ks)
3673 cctx_out->ccm.key = &cctx_out->ks;
3684 static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3685 const unsigned char *iv, int enc)
3687 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3692 #ifdef HWAES_CAPABLE
3693 if (HWAES_CAPABLE) {
3694 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3697 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3698 &cctx->ks, (block128_f) HWAES_encrypt);
3704 #ifdef VPAES_CAPABLE
3705 if (VPAES_CAPABLE) {
3706 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3708 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3709 &cctx->ks, (block128_f) vpaes_encrypt);
3715 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3717 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3718 &cctx->ks, (block128_f) AES_encrypt);
3723 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
3729 static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3730 const unsigned char *in, size_t len)
3732 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3733 CCM128_CONTEXT *ccm = &cctx->ccm;
3734 /* Encrypt/decrypt must be performed in place */
3735 if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
3737 /* If encrypting set explicit IV from sequence number (start of AAD) */
3738 if (EVP_CIPHER_CTX_encrypting(ctx))
3739 memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
3740 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3741 /* Get rest of IV from explicit IV */
3742 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
3743 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3744 /* Correct length value */
3745 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
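/*
 * Illustrative note: as with GCM, the record layout here is
 *   | 8-byte explicit IV | len bytes of payload | M-byte tag |
 * where M is 16 for the TLS CCM ciphersuites and 8 for CCM_8.
 */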
3746 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
3750 CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
3751 /* Fix buffer to point to payload */
3752 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3753 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3754 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3755 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3757 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3759 if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
3761 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3763 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3765 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3766 unsigned char tag[16];
3767 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3768 if (!CRYPTO_memcmp(tag, in + len, cctx->M))
3772 OPENSSL_cleanse(out, len);
3777 static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3778 const unsigned char *in, size_t len)
3780 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3781 CCM128_CONTEXT *ccm = &cctx->ccm;
3782 /* If not set up, return error */
3786 if (cctx->tls_aad_len >= 0)
3787 return aes_ccm_tls_cipher(ctx, out, in, len);
3789 /* EVP_*Final() doesn't return any data */
3790 if (in == NULL && out != NULL)
3798 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3804 /* If we have AAD we need the message length */
3805 if (!cctx->len_set && len)
3807 CRYPTO_ccm128_aad(ccm, in, len);
3811 /* The tag must be set before actually decrypting data */
3812 if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
3815 /* If the length has not been set yet, do it now */
3816 if (!cctx->len_set) {
3817 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3822 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3823 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3825 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3831 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3833 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3834 unsigned char tag[16];
3835 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3836 if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
3842 OPENSSL_cleanse(out, len);
3850 #define aes_ccm_cleanup NULL
3852 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
3853 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3854 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
3855 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3856 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
3857 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3864 /* Indicates if IV has been set */
3868 static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3869 const unsigned char *iv, int enc)
3871 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3875 if (EVP_CIPHER_CTX_encrypting(ctx))
3876 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3879 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3885 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, EVP_CIPHER_CTX_iv_length(ctx));
3886 wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx);
3891 static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3892 const unsigned char *in, size_t inlen)
3894 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3896 /* AES wrap with padding has IV length of 4, without padding 8 */
3897 int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;
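/*
 * Illustrative note: the 8-byte IV case is RFC 3394 key wrap (default
 * ICV A6A6A6A6A6A6A6A6); the 4-byte case is RFC 5649 key wrap with
 * padding, whose fixed AIV prefix is A65959A6.
 */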
3898 /* No final operation so always return zero length */
3901 /* Input length must always be non-zero */
3904 /* If decrypting we need at least 16 bytes and a multiple of 8 */
3905 if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
3907 /* If not padding input must be multiple of 8 */
3908 if (!pad && inlen & 0x7)
3910 if (is_partially_overlapping(out, in, inlen)) {
3911 EVPerr(EVP_F_AES_WRAP_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
3915 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3916 /* If padding round up to multiple of 8 */
3918 inlen = (inlen + 7) / 8 * 8;
3923 * If not padding, the output will be exactly 8 bytes smaller than the
3924 * input. If padding, it will be at least 8 bytes smaller, but we don't
3925 * know by how much.
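 * Illustrative note: wrapping always adds one 8-byte block, so a
 * 24-byte wrapped blob unwraps to exactly 16 bytes without padding and
 * to anywhere between 9 and 16 bytes with padding (RFC 5649).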
3931 if (EVP_CIPHER_CTX_encrypting(ctx))
3932 rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
3934 (block128_f) AES_encrypt);
3936 rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
3938 (block128_f) AES_decrypt);
3940 if (EVP_CIPHER_CTX_encrypting(ctx))
3941 rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
3942 out, in, inlen, (block128_f) AES_encrypt);
3944 rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
3945 out, in, inlen, (block128_f) AES_decrypt);
3947 return rv ? (int)rv : -1;
3950 #define WRAP_FLAGS (EVP_CIPH_WRAP_MODE \
3951 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3952 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)
3954 static const EVP_CIPHER aes_128_wrap = {
3956 8, 16, 8, WRAP_FLAGS,
3957 aes_wrap_init_key, aes_wrap_cipher,
3959 sizeof(EVP_AES_WRAP_CTX),
3960 NULL, NULL, NULL, NULL
3963 const EVP_CIPHER *EVP_aes_128_wrap(void)
3965 return &aes_128_wrap;
3968 static const EVP_CIPHER aes_192_wrap = {
3970 8, 24, 8, WRAP_FLAGS,
3971 aes_wrap_init_key, aes_wrap_cipher,
3973 sizeof(EVP_AES_WRAP_CTX),
3974 NULL, NULL, NULL, NULL
3977 const EVP_CIPHER *EVP_aes_192_wrap(void)
3979 return &aes_192_wrap;
3982 static const EVP_CIPHER aes_256_wrap = {
3984 8, 32, 8, WRAP_FLAGS,
3985 aes_wrap_init_key, aes_wrap_cipher,
3987 sizeof(EVP_AES_WRAP_CTX),
3988 NULL, NULL, NULL, NULL
3991 const EVP_CIPHER *EVP_aes_256_wrap(void)
3993 return &aes_256_wrap;
3996 static const EVP_CIPHER aes_128_wrap_pad = {
3997 NID_id_aes128_wrap_pad,
3998 8, 16, 4, WRAP_FLAGS,
3999 aes_wrap_init_key, aes_wrap_cipher,
4001 sizeof(EVP_AES_WRAP_CTX),
4002 NULL, NULL, NULL, NULL
4005 const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
4007 return &aes_128_wrap_pad;
4010 static const EVP_CIPHER aes_192_wrap_pad = {
4011 NID_id_aes192_wrap_pad,
4012 8, 24, 4, WRAP_FLAGS,
4013 aes_wrap_init_key, aes_wrap_cipher,
4015 sizeof(EVP_AES_WRAP_CTX),
4016 NULL, NULL, NULL, NULL
4019 const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
4021 return &aes_192_wrap_pad;
4024 static const EVP_CIPHER aes_256_wrap_pad = {
4025 NID_id_aes256_wrap_pad,
4026 8, 32, 4, WRAP_FLAGS,
4027 aes_wrap_init_key, aes_wrap_cipher,
4029 sizeof(EVP_AES_WRAP_CTX),
4030 NULL, NULL, NULL, NULL
4033 const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
4035 return &aes_256_wrap_pad;
4038 #ifndef OPENSSL_NO_OCB
4039 static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
4041 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
4042 EVP_CIPHER_CTX *newc;
4043 EVP_AES_OCB_CTX *new_octx;
4049 octx->ivlen = EVP_CIPHER_CTX_iv_length(c);
4050 octx->iv = EVP_CIPHER_CTX_iv_noconst(c);
4052 octx->data_buf_len = 0;
4053 octx->aad_buf_len = 0;
4056 case EVP_CTRL_AEAD_SET_IVLEN:
4057 /* IV len must be 1 to 15 */
4058 if (arg <= 0 || arg > 15)
4064 case EVP_CTRL_AEAD_SET_TAG:
4066 /* Tag len must be 0 to 16 */
4067 if (arg < 0 || arg > 16)
4073 if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c))
4075 memcpy(octx->tag, ptr, arg);
4078 case EVP_CTRL_AEAD_GET_TAG:
4079 if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c))
4082 memcpy(ptr, octx->tag, arg);
4086 newc = (EVP_CIPHER_CTX *)ptr;
4087 new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
4088 return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
4089 &new_octx->ksenc.ks,
4090 &new_octx->ksdec.ks);
4098 # ifdef HWAES_CAPABLE
4099 # ifdef HWAES_ocb_encrypt
4100 void HWAES_ocb_encrypt(const unsigned char *in, unsigned char *out,
4101 size_t blocks, const void *key,
4102 size_t start_block_num,
4103 unsigned char offset_i[16],
4104 const unsigned char L_[][16],
4105 unsigned char checksum[16]);
4107 # define HWAES_ocb_encrypt ((ocb128_f)NULL)
4109 # ifdef HWAES_ocb_decrypt
4110 void HWAES_ocb_decrypt(const unsigned char *in, unsigned char *out,
4111 size_t blocks, const void *key,
4112 size_t start_block_num,
4113 unsigned char offset_i[16],
4114 const unsigned char L_[][16],
4115 unsigned char checksum[16]);
4117 # define HWAES_ocb_decrypt ((ocb128_f)NULL)
4121 static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
4122 const unsigned char *iv, int enc)
4124 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
4130 * We set both the encrypt and decrypt key here because decrypt
4131 * needs both. We could possibly optimise to remove setting the
4132 * decrypt key for an encryption-only operation.
4134 # ifdef HWAES_CAPABLE
4135 if (HWAES_CAPABLE) {
4136 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4138 HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4140 if (!CRYPTO_ocb128_init(&octx->ocb,
4141 &octx->ksenc.ks, &octx->ksdec.ks,
4142 (block128_f) HWAES_encrypt,
4143 (block128_f) HWAES_decrypt,
4144 enc ? HWAES_ocb_encrypt
4145 : HWAES_ocb_decrypt))
4150 # ifdef VPAES_CAPABLE
4151 if (VPAES_CAPABLE) {
4152 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4154 vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4156 if (!CRYPTO_ocb128_init(&octx->ocb,
4157 &octx->ksenc.ks, &octx->ksdec.ks,
4158 (block128_f) vpaes_encrypt,
4159 (block128_f) vpaes_decrypt,
4165 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4167 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4169 if (!CRYPTO_ocb128_init(&octx->ocb,
4170 &octx->ksenc.ks, &octx->ksdec.ks,
4171 (block128_f) AES_encrypt,
4172 (block128_f) AES_decrypt,
4179 * If we have an iv we can set it directly, otherwise use saved IV.
4181 if (iv == NULL && octx->iv_set)
4184 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
4191 /* If key set use IV, otherwise copy */
4193 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
4195 memcpy(octx->iv, iv, octx->ivlen);
4201 static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
4202 const unsigned char *in, size_t len)
4206 int written_len = 0;
4207 size_t trailing_len;
4208 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
4210 /* If IV or Key not set then return error */
4219 * Need to ensure we are only passing full blocks to low-level OCB
4220 * routines. We do it here rather than in EVP_EncryptUpdate/
4221 * EVP_DecryptUpdate because we need to pass full blocks of AAD too
4222 * and those routines don't support that.
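 * Illustrative note: if a previous call left 10 bytes in data_buf and
 * the next Update() supplies 30 bytes, the first 6 bytes complete a
 * 16-byte block, one further full block is processed directly, and the
 * trailing 8 bytes are stashed in data_buf for the next call.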
4225 /* Are we dealing with AAD or normal data here? */
4227 buf = octx->aad_buf;
4228 buf_len = &(octx->aad_buf_len);
4230 buf = octx->data_buf;
4231 buf_len = &(octx->data_buf_len);
4233 if (is_partially_overlapping(out + *buf_len, in, len)) {
4234 EVPerr(EVP_F_AES_OCB_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
4240 * If we've got a partially filled buffer from a previous call then
4241 * use that data first
4244 unsigned int remaining;
4246 remaining = AES_BLOCK_SIZE - (*buf_len);
4247 if (remaining > len) {
4248 memcpy(buf + (*buf_len), in, len);
4252 memcpy(buf + (*buf_len), in, remaining);
4255 * If we get here we've filled the buffer, so process it
4260 if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
4262 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
4263 if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
4267 if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
4271 written_len = AES_BLOCK_SIZE;
4274 out += AES_BLOCK_SIZE;
4277 /* Do we have a partial block to handle at the end? */
4278 trailing_len = len % AES_BLOCK_SIZE;
4281 * If we've got some full blocks to handle, then process these first
4283 if (len != trailing_len) {
4285 if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
4287 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
4288 if (!CRYPTO_ocb128_encrypt
4289 (&octx->ocb, in, out, len - trailing_len))
4292 if (!CRYPTO_ocb128_decrypt
4293 (&octx->ocb, in, out, len - trailing_len))
4296 written_len += len - trailing_len;
4297 in += len - trailing_len;
4300 /* Handle any trailing partial block */
4301 if (trailing_len > 0) {
4302 memcpy(buf, in, trailing_len);
4303 *buf_len = trailing_len;
4309 * First of all empty the buffer of any partial block that we might
4310 * have been provided - both for data and AAD
4312 if (octx->data_buf_len > 0) {
4313 if (EVP_CIPHER_CTX_encrypting(ctx)) {
4314 if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
4315 octx->data_buf_len))
4318 if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
4319 octx->data_buf_len))
4322 written_len = octx->data_buf_len;
4323 octx->data_buf_len = 0;
4325 if (octx->aad_buf_len > 0) {
4326 if (!CRYPTO_ocb128_aad
4327 (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
4329 octx->aad_buf_len = 0;
4331 /* If decrypting then verify */
4332 if (!EVP_CIPHER_CTX_encrypting(ctx)) {
4333 if (octx->taglen < 0)
4335 if (CRYPTO_ocb128_finish(&octx->ocb,
4336 octx->tag, octx->taglen) != 0)
4341 /* If encrypting then just get the tag */
4342 if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
4344 /* Don't reuse the IV */
4350 static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
4352 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
4353 CRYPTO_ocb128_cleanup(&octx->ocb);
4357 BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
4358 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4359 BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
4360 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4361 BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
4362 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4363 #endif /* OPENSSL_NO_OCB */
4366 #ifndef OPENSSL_NO_SIV
4368 typedef SIV128_CONTEXT EVP_AES_SIV_CTX;
4370 #define aesni_siv_init_key aes_siv_init_key
4371 static int aes_siv_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
4372 const unsigned char *iv, int enc)
4374 const EVP_CIPHER *ctr;
4375 const EVP_CIPHER *cbc;
4376 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, ctx);
4377 int klen = EVP_CIPHER_CTX_key_length(ctx) / 2;
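/*
 * Illustrative note: EVP_aes_128_siv() reports a 32-byte key, so
 * klen = 16; per RFC 5297 the first half keys the S2V (CMAC) component,
 * built on the CBC cipher chosen below, and the second half keys the
 * CTR component.
 */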
4384 cbc = EVP_aes_128_cbc();
4385 ctr = EVP_aes_128_ctr();
4388 cbc = EVP_aes_192_cbc();
4389 ctr = EVP_aes_192_ctr();
4392 cbc = EVP_aes_256_cbc();
4393 ctr = EVP_aes_256_ctr();
4399 /* klen is the key length of the underlying cipher, not that of the
4400 input key, which is twice as long */
4401 return CRYPTO_siv128_init(sctx, key, klen, cbc, ctr);
4404 #define aesni_siv_cipher aes_siv_cipher
4405 static int aes_siv_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
4406 const unsigned char *in, size_t len)
4408 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, ctx);
4410 /* EncryptFinal or DecryptFinal */
4412 return CRYPTO_siv128_finish(sctx);
4414 /* Deal with associated data */
4416 return CRYPTO_siv128_aad(sctx, in, len);
4418 if (EVP_CIPHER_CTX_encrypting(ctx))
4419 return CRYPTO_siv128_encrypt(sctx, in, out, len);
4421 return CRYPTO_siv128_decrypt(sctx, in, out, len);
4424 #define aesni_siv_cleanup aes_siv_cleanup
4425 static int aes_siv_cleanup(EVP_CIPHER_CTX *c)
4427 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, c);
4429 return CRYPTO_siv128_cleanup(sctx);
4433 #define aesni_siv_ctrl aes_siv_ctrl
4434 static int aes_siv_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
4436 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, c);
4437 SIV128_CONTEXT *sctx_out;
4441 return CRYPTO_siv128_cleanup(sctx);
4443 case EVP_CTRL_SET_SPEED:
4444 return CRYPTO_siv128_speed(sctx, arg);
4446 case EVP_CTRL_AEAD_SET_TAG:
4447 if (!EVP_CIPHER_CTX_encrypting(c))
4448 return CRYPTO_siv128_set_tag(sctx, ptr, arg);
4451 case EVP_CTRL_AEAD_GET_TAG:
4452 if (!EVP_CIPHER_CTX_encrypting(c))
4454 return CRYPTO_siv128_get_tag(sctx, ptr, arg);
4457 sctx_out = EVP_C_DATA(SIV128_CONTEXT, (EVP_CIPHER_CTX*)ptr);
4458 return CRYPTO_siv128_copy_ctx(sctx_out, sctx);
4466 #define SIV_FLAGS (EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_DEFAULT_ASN1 \
4467 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
4468 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CUSTOM_COPY \
4469 | EVP_CIPH_CTRL_INIT)
4471 BLOCK_CIPHER_custom(NID_aes, 128, 1, 0, siv, SIV, SIV_FLAGS)
4472 BLOCK_CIPHER_custom(NID_aes, 192, 1, 0, siv, SIV, SIV_FLAGS)
4473 BLOCK_CIPHER_custom(NID_aes, 256, 1, 0, siv, SIV, SIV_FLAGS)