/*
 * Copyright 2001-2019 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */
#include <openssl/opensslconf.h>
#include <openssl/crypto.h>
#include <openssl/evp.h>
#include <openssl/err.h>
#include <string.h>
#include <openssl/aes.h>
#include "internal/evp_int.h"
#include "modes_lcl.h"
#include <openssl/rand.h>
#include <openssl/cmac.h>
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int iv_gen;                 /* It is OK to generate IVs */
    int iv_gen_rand;            /* No IV was specified, so generate a rand IV */
    int tls_aad_len;            /* TLS AAD length */
    uint64_t tls_enc_records;   /* Number of TLS records encrypted */
    } ks1, ks2;                 /* AES key schedules to use */
    void (*stream) (const unsigned char *in,
                    unsigned char *out, size_t length,
                    const AES_KEY *key1, const AES_KEY *key2,
                    const unsigned char iv[16]);
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    int tls_aad_len;            /* TLS AAD length */
#ifndef OPENSSL_NO_OCB
    } ksenc;                    /* AES key schedule to use for encryption */
    } ksdec;                    /* AES key schedule to use for decryption */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    unsigned char *iv;          /* Temporary IV store */
    unsigned char tag[16];
    unsigned char data_buf[16]; /* Store partial data blocks */
    unsigned char aad_buf[16];  /* Store partial AAD blocks */
    int ivlen;                  /* IV length */
#define MAXBITCHUNK     ((size_t)1 << (sizeof(size_t) * 8 - 4))
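/*
 * On an LP64 platform the above evaluates to 2^60. CFB1 input is processed
 * in chunks of at most MAXBITCHUNK bytes, presumably so that a chunk length
 * expressed in bits (len * 8) can never overflow a size_t.
 */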
int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void vpaes_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void vpaes_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void vpaes_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);
void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const AES_KEY *key,
                       const unsigned char ivec[AES_BLOCK_SIZE]);
void AES_xts_encrypt(const unsigned char *inp, unsigned char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
void AES_xts_decrypt(const unsigned char *inp, unsigned char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
/* increment counter (64-bit int) by 1 */
static void ctr64_inc(unsigned char *counter)
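{
    /*
     * A sketch of the conventional big-endian increment over the trailing
     * eight bytes: bump the low-order byte and let the carry ripple upward,
     * stopping at the first byte that does not wrap to zero.
     */
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)
            return;
    } while (n);
}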
#if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
# include "ppc_arch.h"
# define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
# define HWAES_CAPABLE (OPENSSL_ppccap_P & PPC_CRYPTO207)
# define HWAES_set_encrypt_key aes_p8_set_encrypt_key
# define HWAES_set_decrypt_key aes_p8_set_decrypt_key
# define HWAES_encrypt aes_p8_encrypt
# define HWAES_decrypt aes_p8_decrypt
# define HWAES_cbc_encrypt aes_p8_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
# define HWAES_xts_encrypt aes_p8_xts_encrypt
# define HWAES_xts_decrypt aes_p8_xts_decrypt
#endif
#if defined(AES_ASM) && !defined(I386_ONLY) && ( \
        ((defined(__i386) || defined(__i386__) || \
          defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2)) || \
        defined(__x86_64) || defined(__x86_64__) || \
        defined(_M_AMD64) || defined(_M_X64) )

extern unsigned int OPENSSL_ia32cap_P[];

# define VPAES_CAPABLE (OPENSSL_ia32cap_P[1] & (1 << (41 - 32)))
# define BSAES_CAPABLE (OPENSSL_ia32cap_P[1] & (1 << (41 - 32)))
# define AESNI_CAPABLE (OPENSSL_ia32cap_P[1] & (1 << (57 - 32)))
int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void aesni_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void aesni_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void aesni_ecb_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length, const AES_KEY *key, int enc);
void aesni_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);

void aesni_ctr32_encrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key, const unsigned char *ivec);

void aesni_xts_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_xts_decrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_ccm64_encrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);

void aesni_ccm64_decrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);
# if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
size_t aesni_gcm_encrypt(const unsigned char *in,
                         unsigned char *out,
                         size_t len,
                         const void *key, unsigned char ivec[16], u64 *Xi);
#  define AES_gcm_encrypt aesni_gcm_encrypt
size_t aesni_gcm_decrypt(const unsigned char *in,
                         unsigned char *out,
                         size_t len,
                         const void *key, unsigned char ivec[16], u64 *Xi);
#  define AES_gcm_decrypt aesni_gcm_decrypt
void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in,
                   size_t len);
#  define AES_GCM_ASM(gctx)  (gctx->ctr == aesni_ctr32_encrypt_blocks && \
                              gctx->gcm.ghash == gcm_ghash_avx)
#  define AES_GCM_ASM2(gctx) (gctx->gcm.block == (block128_f)aesni_encrypt && \
                              gctx->gcm.ghash == gcm_ghash_avx)
#  undef AES_GCM_ASM2          /* minor size optimization */
# endif
static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                          const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) aesni_decrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) aesni_cbc_encrypt : NULL;
    } else {
        ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) aesni_encrypt;
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
        else if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        else
            dat->stream.cbc = NULL;
    }

    if (ret < 0) {
        EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY, ctx)->ks.ks,
                      EVP_CIPHER_CTX_iv_noconst(ctx),
                      EVP_CIPHER_CTX_encrypting(ctx));
    return 1;
}
static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    size_t bl = EVP_CIPHER_CTX_block_size(ctx);
    if (len < bl)
        return 1;
    aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY, ctx)->ks.ks,
                      EVP_CIPHER_CTX_encrypting(ctx));
    return 1;
}
# define aesni_ofb_cipher aes_ofb_cipher
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb_cipher aes_cfb_cipher
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb8_cipher aes_cfb8_cipher
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_cfb1_cipher aes_cfb1_cipher
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_ctr_cipher aes_ctr_cipher
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, ctx);
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
        gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        /* If key set use IV, otherwise copy */
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            memcpy(gctx->iv, iv, gctx->ivlen);
# define aesni_gcm_cipher aes_gcm_cipher
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, ctx);
        /* The key is two half length keys in reality */
        const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
        const int bits = bytes * 8;

        /*
         * Verify that the two keys are different.
         *
         * This addresses Rogaway's vulnerability.
         * See comment in aes_xts_init_key() below.
         */
        if (memcmp(key, key + bytes, bytes) == 0) {
            EVPerr(EVP_F_AESNI_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
            return 0;
        }

        /* key_len is two AES keys */
            aesni_set_encrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_encrypt;
            xctx->stream = aesni_xts_encrypt;
            aesni_set_decrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_decrypt;
            xctx->stream = aesni_xts_decrypt;
        aesni_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aesni_encrypt;
        xctx->xts.key1 = &xctx->ks1;
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
# define aesni_xts_cipher aes_xts_cipher
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX, ctx);
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aesni_encrypt);
        cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
            (ccm128_f) aesni_ccm64_decrypt_blocks;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);

# define aesni_ccm_cipher aes_ccm_cipher
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
# ifndef OPENSSL_NO_OCB
void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
void aesni_ocb_decrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX, ctx);
        /*
         * We set both the encrypt and decrypt key here because decrypt
         * needs both. We could possibly optimise to remove setting the
         * decrypt for an encryption operation.
         */
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &octx->ksenc.ks);
        aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &octx->ksdec.ks);
        if (!CRYPTO_ocb128_init(&octx->ocb,
                                &octx->ksenc.ks, &octx->ksdec.ks,
                                (block128_f) aesni_encrypt,
                                (block128_f) aesni_decrypt,
                                enc ? aesni_ocb_encrypt
                                    : aesni_ocb_decrypt))
            return 0;
        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
            != 1)
            return 0;
        /* If key set use IV, otherwise copy */
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
            memcpy(octx->iv, iv, octx->ivlen);

# define aesni_ocb_cipher aes_ocb_cipher
static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
# endif                         /* OPENSSL_NO_OCB */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aesni_init_key, \
        aesni_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE ? &aesni_##keylen##_##mode : &aes_##keylen##_##mode; }
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aesni_##mode##_init_key, \
        aesni_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE ? &aesni_##keylen##_##mode : &aes_##keylen##_##mode; }
#elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))

# include "sparc_arch.h"

extern unsigned int OPENSSL_sparcv9cap_P[];

/*
 * Initial Fujitsu SPARC64 X support
 */
# define HWAES_CAPABLE (OPENSSL_sparcv9cap_P[0] & SPARCV9_FJAESX)
# define HWAES_set_encrypt_key aes_fx_set_encrypt_key
# define HWAES_set_decrypt_key aes_fx_set_decrypt_key
# define HWAES_encrypt aes_fx_encrypt
# define HWAES_decrypt aes_fx_decrypt
# define HWAES_cbc_encrypt aes_fx_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_fx_ctr32_encrypt_blocks

# define SPARC_AES_CAPABLE (OPENSSL_sparcv9cap_P[1] & CFR_AES)
void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_encrypt(const unsigned char *in, unsigned char *out,
                    const AES_KEY *key);
void aes_t4_decrypt(const unsigned char *in, unsigned char *out,
                    const AES_KEY *key);
/*
 * Key-length-specific subroutines were chosen for the following reason.
 * Each SPARC T4 core can execute up to 8 threads which share the core's
 * resources. Loading as much key material as possible into registers
 * minimizes references to the shared memory interface, as well as the
 * number of instructions in inner loops [much needed on T4]. But then
 * non-key-length-specific routines would require conditional branches,
 * either in inner loops or on subroutine entry. The former is hardly
 * acceptable, while the latter grows the code to roughly the size of
 * multiple key-length-specific subroutines, so why fight?
 */
void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                           const unsigned char *iv, int enc)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes128_t4_cbc_decrypt : NULL;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes192_t4_cbc_decrypt : NULL;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes256_t4_cbc_decrypt : NULL;
        aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_encrypt;
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
        EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
# define aes_t4_cbc_cipher aes_cbc_cipher
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ecb_cipher aes_ecb_cipher
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ofb_cipher aes_ofb_cipher
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb_cipher aes_cfb_cipher
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb8_cipher aes_cfb8_cipher
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_cfb1_cipher aes_cfb1_cipher
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_ctr_cipher aes_ctr_cipher
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, ctx);
        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
        aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                           (block128_f) aes_t4_encrypt);
            gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        /* If key set use IV, otherwise copy */
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            memcpy(gctx->iv, iv, gctx->ivlen);
# define aes_t4_gcm_cipher aes_gcm_cipher
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, ctx);
        /* The key is two half length keys in reality */
        const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
        const int bits = bytes * 8;

        /*
         * Verify that the two keys are different.
         *
         * This addresses Rogaway's vulnerability.
         * See comment in aes_xts_init_key() below.
         */
        if (memcmp(key, key + bytes, bytes) == 0) {
            EVPerr(EVP_F_AES_T4_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
            return 0;
        }

        /* key_len is two AES keys */
            aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_encrypt;
                xctx->stream = aes128_t4_xts_encrypt;
                xctx->stream = aes256_t4_xts_encrypt;
            aes_t4_set_decrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_decrypt;
                xctx->stream = aes128_t4_xts_decrypt;
                xctx->stream = aes256_t4_xts_decrypt;
        aes_t4_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aes_t4_encrypt;
        xctx->xts.key1 = &xctx->ks1;
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);

# define aes_t4_xts_cipher aes_xts_cipher
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX, ctx);
        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
        aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aes_t4_encrypt);
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);

# define aes_t4_ccm_cipher aes_ccm_cipher
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# ifndef OPENSSL_NO_OCB
static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX, ctx);
        /*
         * We set both the encrypt and decrypt key here because decrypt
         * needs both. We could possibly optimise to remove setting the
         * decrypt for an encryption operation.
         */
        aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                               &octx->ksenc.ks);
        aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                               &octx->ksdec.ks);
        if (!CRYPTO_ocb128_init(&octx->ocb,
                                &octx->ksenc.ks, &octx->ksdec.ks,
                                (block128_f) aes_t4_encrypt,
                                (block128_f) aes_t4_decrypt,
                                NULL))
            return 0;
        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
            != 1)
            return 0;
        /* If key set use IV, otherwise copy */
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
            memcpy(octx->iv, iv, octx->ivlen);

# define aes_t4_ocb_cipher aes_ocb_cipher
static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# endif                         /* OPENSSL_NO_OCB */
# ifndef OPENSSL_NO_SIV
#  define aes_t4_siv_init_key aes_siv_init_key
#  define aes_t4_siv_cipher aes_siv_cipher
# endif                         /* OPENSSL_NO_SIV */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_t4_init_key, \
        aes_t4_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE ? &aes_t4_##keylen##_##mode : &aes_##keylen##_##mode; }
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_t4_##mode##_init_key, \
        aes_t4_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE ? &aes_t4_##keylen##_##mode : &aes_##keylen##_##mode; }
#elif defined(OPENSSL_CPUID_OBJ) && defined(__s390__)

# include "s390x_arch.h"
        /*-
         * KM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-06)
         */
            unsigned char k[32];
        /* KM-AES parameter block - end */
} S390X_AES_ECB_CTX;
        /*-
         * KMO-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
            unsigned char cv[16];
            unsigned char k[32];
        /* KMO-AES parameter block - end */
} S390X_AES_OFB_CTX;
        /*-
         * KMF-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
            unsigned char cv[16];
            unsigned char k[32];
        /* KMF-AES parameter block - end */
} S390X_AES_CFB_CTX;
        /*-
         * KMA-GCM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-11)
         */
            unsigned char reserved[12];
                unsigned long long g[2];
                unsigned char b[16];
            unsigned char h[16];
            unsigned long long taadl;
            unsigned long long tpcl;
                unsigned long long g[2];
            unsigned char k[32];
        /* KMA-GCM-AES parameter block - end */
    unsigned char ares[16];
    unsigned char mres[16];
    unsigned char kres[16];
    uint64_t tls_enc_records;   /* Number of TLS records encrypted */
} S390X_AES_GCM_CTX;
    /*-
     * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
     * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
     * rounds field is used to store the function code and that the key
     * schedule is not stored (if aes hardware support is detected).
     */
        unsigned char pad[16];
        /*-
         * KMAC-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
                unsigned long long g[2];
                unsigned char b[16];
            unsigned char k[32];
        /* KMAC-AES parameter block - end */
            unsigned long long g[2];
            unsigned char b[16];
            unsigned long long g[2];
            unsigned char b[16];
        unsigned long long blocks;
        unsigned char pad[140];
} S390X_AES_CCM_CTX;
/* Convert key size to function code: [16,24,32] -> [18,19,20]. */
# define S390X_AES_FC(keylen)  (S390X_AES_128 + ((((keylen) << 3) - 128) >> 6))
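/*
 * Worked example: for keylen = 16, ((16 << 3) - 128) >> 6 == 0, so the macro
 * yields S390X_AES_128 itself; keylen = 24 and 32 add 1 and 2 respectively,
 * selecting the 192- and 256-bit function codes.
 */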
/* Most modes of operation need km for partial block processing. */
# define S390X_aes_128_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
                                S390X_CAPBIT(S390X_AES_128))
# define S390X_aes_192_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
                                S390X_CAPBIT(S390X_AES_192))
# define S390X_aes_256_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
                                S390X_CAPBIT(S390X_AES_256))
# define s390x_aes_init_key aes_init_key
static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc);

# define S390X_aes_128_cbc_CAPABLE 1    /* checked by callee */
# define S390X_aes_192_cbc_CAPABLE 1
# define S390X_aes_256_cbc_CAPABLE 1
# define S390X_AES_CBC_CTX EVP_AES_KEY

# define s390x_aes_cbc_init_key aes_init_key

# define s390x_aes_cbc_cipher aes_cbc_cipher
static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

# define S390X_aes_128_ecb_CAPABLE S390X_aes_128_CAPABLE
# define S390X_aes_192_ecb_CAPABLE S390X_aes_192_CAPABLE
# define S390X_aes_256_ecb_CAPABLE S390X_aes_256_CAPABLE
static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);

    cctx->fc = S390X_AES_FC(keylen);
    if (!enc)
        cctx->fc |= S390X_DECRYPT;

    memcpy(cctx->km.param.k, key, keylen);
    return 1;
}

static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);

    s390x_km(in, len, out, cctx->fc, &cctx->km.param);
    return 1;
}
# define S390X_aes_128_ofb_CAPABLE (S390X_aes_128_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmo[0] & \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_ofb_CAPABLE (S390X_aes_192_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmo[0] & \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_ofb_CAPABLE (S390X_aes_256_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmo[0] & \
                                     S390X_CAPBIT(S390X_AES_256)))
static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
{
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
    const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    memcpy(cctx->kmo.param.cv, iv, ivlen);
    memcpy(cctx->kmo.param.k, key, keylen);
    cctx->fc = S390X_AES_FC(keylen);
    cctx->res = 0;
    return 1;
}
static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
        *out = *in ^ cctx->kmo.param.cv[n];
    len &= ~(size_t)0xf;
    s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);
        s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
                 cctx->kmo.param.k);
            out[n] = in[n] ^ cctx->kmo.param.cv[n];
# define S390X_aes_128_cfb_CAPABLE (S390X_aes_128_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_cfb_CAPABLE (S390X_aes_192_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_cfb_CAPABLE (S390X_aes_256_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_256)))
static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    cctx->fc = S390X_AES_FC(keylen);
    cctx->fc |= 16 << 24;   /* 16 bytes cipher feedback */
    if (!enc)
        cctx->fc |= S390X_DECRYPT;

    memcpy(cctx->kmf.param.cv, iv, ivlen);
    memcpy(cctx->kmf.param.k, key, keylen);
    return 1;
}
static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
        *out = cctx->kmf.param.cv[n] ^ tmp;
        cctx->kmf.param.cv[n] = enc ? *out : tmp;
    len &= ~(size_t)0xf;
    s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
        s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
                 S390X_AES_FC(keylen), cctx->kmf.param.k);
            out[n] = cctx->kmf.param.cv[n] ^ tmp;
            cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
# define S390X_aes_128_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_128))
# define S390X_aes_192_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_192))
# define S390X_aes_256_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_256))
static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
                                   const unsigned char *key,
                                   const unsigned char *ivec, int enc)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    cctx->fc = S390X_AES_FC(keylen);
    cctx->fc |= 1 << 24;   /* 1 byte cipher feedback */
    if (!enc)
        cctx->fc |= S390X_DECRYPT;

    memcpy(cctx->kmf.param.cv, iv, ivlen);
    memcpy(cctx->kmf.param.k, key, keylen);
    return 1;
}

static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);

    s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
    return 1;
}
# define S390X_aes_128_cfb1_CAPABLE 0
# define S390X_aes_192_cfb1_CAPABLE 0
# define S390X_aes_256_cfb1_CAPABLE 0

# define s390x_aes_cfb1_init_key aes_init_key

# define s390x_aes_cfb1_cipher aes_cfb1_cipher
static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len);

# define S390X_aes_128_ctr_CAPABLE 1    /* checked by callee */
# define S390X_aes_192_ctr_CAPABLE 1
# define S390X_aes_256_ctr_CAPABLE 1
# define S390X_AES_CTR_CTX EVP_AES_KEY

# define s390x_aes_ctr_init_key aes_init_key

# define s390x_aes_ctr_cipher aes_ctr_cipher
static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define S390X_aes_128_gcm_CAPABLE (S390X_aes_128_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kma[0] & \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_gcm_CAPABLE (S390X_aes_192_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kma[0] & \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_gcm_CAPABLE (S390X_aes_256_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kma[0] & \
                                     S390X_CAPBIT(S390X_AES_256)))
/* iv + padding length for iv lengths != 12 */
# define S390X_gcm_ivpadlen(i)  ((((i) + 15) >> 4 << 4) + 16)
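/*
 * Worked example: a 13-byte iv is first rounded up to the next multiple of
 * 16 (here 16 bytes), then 16 more bytes are reserved for the final block
 * carrying the iv bit-length, so S390X_gcm_ivpadlen(13) == 32.
 */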
/*-
 * Process additional authenticated data. Returns 0 on success. Code is
 * big-endian.
 */
static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
                             size_t len)
{
    unsigned long long alen;

    if (ctx->kma.param.tpcl)
        return -2;

    alen = ctx->kma.param.taadl + len;
    if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
        return -1;
    ctx->kma.param.taadl = alen;
            ctx->ares[n] = *aad;
        /* ctx->ares contains a complete block if offset has wrapped around */
            s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;
    len &= ~(size_t)0xf;
        s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;
        ctx->ares[rem] = aad[rem];
/*-
 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
 * success. Code is big-endian.
 */
static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len)
{
    const unsigned char *inptr;
    unsigned long long mlen;
        unsigned char b[16];

    mlen = ctx->kma.param.tpcl + len;
    if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
        return -1;
    ctx->kma.param.tpcl = mlen;
        while (n && inlen) {
            ctx->mres[n] = *inptr;
        /* ctx->mres contains a complete block if offset has wrapped around */
            s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
                      ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;
            /*
             * previous call already encrypted/decrypted its remainder,
             * see comment below
             */
    len &= ~(size_t)0xf;
        s390x_kma(ctx->ares, ctx->areslen, in, len, out,
                  ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;
        /*
         * If there is a remainder, it has to be saved such that it can be
         * processed by kma later. However, we also have to do the for-now
         * unauthenticated encryption/decryption part here and now...
         */
        if (!ctx->mreslen) {
            buf.w[0] = ctx->kma.param.j0.w[0];
            buf.w[1] = ctx->kma.param.j0.w[1];
            buf.w[2] = ctx->kma.param.j0.w[2];
            buf.w[3] = ctx->kma.param.cv.w + 1;
            s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
        for (i = 0; i < rem; i++) {
            ctx->mres[n + i] = in[i];
            out[i] = in[i] ^ ctx->kres[n + i];
        ctx->mreslen += rem;
/*
 * Initialize context structure. Code is big-endian.
 */
static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
                                const unsigned char *iv)
{
    ctx->kma.param.t.g[0] = 0;
    ctx->kma.param.t.g[1] = 0;
    ctx->kma.param.tpcl = 0;
    ctx->kma.param.taadl = 0;

    if (ctx->ivlen == 12) {
        memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
        ctx->kma.param.j0.w[3] = 1;
        ctx->kma.param.cv.w = 1;
    } else {
        /* ctx->iv has the right size and is already padded. */
        memcpy(ctx->iv, iv, ctx->ivlen);
        s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
                  ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;

        ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
        ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
        ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
        ctx->kma.param.t.g[0] = 0;
        ctx->kma.param.t.g[1] = 0;
    }
}
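/*
 * For reference, the branches above match the J0 construction of NIST SP
 * 800-38D: with a 96-bit iv, J0 = IV || 0^31 || 1; otherwise J0 is the GHASH
 * of the padded iv followed by a block holding len(IV) in bits, which is
 * what the kma invocation computes via the parameter block's hash state.
 */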
/*-
 * Performs various operations on the context structure depending on control
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
 * Code is big-endian.
 */
static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    S390X_AES_GCM_CTX *gctx_out;
    EVP_CIPHER_CTX *out;
    unsigned char *buf, *iv;
    int ivlen, enc, len;

        ivlen = EVP_CIPHER_CTX_iv_length(c);
        iv = EVP_CIPHER_CTX_iv_noconst(c);
        gctx->ivlen = ivlen;
        gctx->tls_aad_len = -1;
    case EVP_CTRL_AEAD_SET_IVLEN:
        iv = EVP_CIPHER_CTX_iv_noconst(c);
        len = S390X_gcm_ivpadlen(arg);

        /* Allocate memory for iv if needed. */
        if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
            if (gctx->iv != iv)
                OPENSSL_free(gctx->iv);

            if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
                EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                return 0;
            }
        }
        /* Add padding. */
        memset(gctx->iv + arg, 0, len - arg - 8);
        *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
    case EVP_CTRL_AEAD_SET_TAG:
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (arg <= 0 || arg > 16 || enc)
            return 0;

        memcpy(buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
            return 0;

        memcpy(ptr, gctx->kma.param.t.b, arg);
        return 1;
    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole iv */
            memcpy(gctx->iv, ptr, gctx->ivlen);
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8 bytes.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;

        memcpy(gctx->iv, ptr, arg);

        enc = EVP_CIPHER_CTX_encrypting(c);
        if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;

        s390x_aes_gcm_setiv(gctx, gctx->iv);

        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;

        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
    case EVP_CTRL_GCM_SET_IV_INV:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
            return 0;

        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        s390x_aes_gcm_setiv(gctx, gctx->iv);
        return 1;
    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the aad for later use. */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;

        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        gctx->tls_aad_len = arg;
        gctx->tls_enc_records = 0;

        len = buf[arg - 2] << 8 | buf[arg - 1];
        /* Correct length for explicit iv. */
        if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
            return 0;
        len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;

        /* If decrypting correct for tag too. */
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (!enc) {
            if (len < EVP_GCM_TLS_TAG_LEN)
                return 0;
            len -= EVP_GCM_TLS_TAG_LEN;
        }
        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;
        /* Extra padding: tag appended to record. */
        return EVP_GCM_TLS_TAG_LEN;
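    /*
     * Worked example for the length fix-up above: the 13-byte TLS AAD carries
     * the record length in its last two bytes. On decrypt, a received length
     * of, say, 1024 bytes still covers the 8-byte explicit iv and 16-byte
     * tag, so the AAD that gets authenticated must advertise
     * 1024 - 8 - 16 = 1000 bytes of plaintext.
     */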
    case EVP_CTRL_COPY:
        out = ptr;
        gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
        iv = EVP_CIPHER_CTX_iv_noconst(c);

        if (gctx->iv == iv) {
            gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
        } else {
            len = S390X_gcm_ivpadlen(gctx->ivlen);
            if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
                EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                return 0;
            }
            memcpy(gctx_out->iv, gctx->iv, len);
        }
        return 1;
/*
 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
 */
static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    int keylen;

    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
        keylen = EVP_CIPHER_CTX_key_length(ctx);
        memcpy(&gctx->kma.param.k, key, keylen);

        gctx->fc = S390X_AES_FC(keylen);
        if (!enc)
            gctx->fc |= S390X_DECRYPT;

        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;

        if (iv != NULL)
            s390x_aes_gcm_setiv(gctx, iv);
    } else {
        if (gctx->key_set)
            s390x_aes_gcm_setiv(gctx, iv);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
    }
    return 1;
}
/*-
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
 * if successful. Otherwise -1 is returned. Code is big-endian.
 */
static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
    int rv = -1;

    if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;

    /*
     * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
     * Requirements from SP 800-38D". The requirement is for one party to the
     * communication to fail after 2^64 - 1 keys. We do this on the encrypting
     * side only.
     */
    if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
        EVPerr(EVP_F_S390X_AES_GCM_TLS_CIPHER, EVP_R_TOO_MANY_RECORDS);
        return -1;
    }

    if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
                                     : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;

    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;

    gctx->kma.param.taadl = gctx->tls_aad_len << 3;
    gctx->kma.param.tpcl = len << 3;
    s390x_kma(buf, gctx->tls_aad_len, in, len, out,
              gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);

    if (enc) {
        memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
                          EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }
err:
    gctx->tls_aad_len = -1;
    return rv;
}
/*-
 * Called from EVP layer to initialize context, process additional
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
 * ciphertext or process a TLS packet, depending on context. Returns bytes
 * written on success. Otherwise -1 is returned. Code is big-endian.
 */
static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    unsigned char *buf, tmp[16];
    int enc;

    if (gctx->tls_aad_len >= 0)
        return s390x_aes_gcm_tls_cipher(ctx, out, in, len);

    if (in != NULL) {
        if (out == NULL) {
            if (s390x_aes_gcm_aad(gctx, in, len))
                return -1;
        } else {
            if (s390x_aes_gcm(gctx, in, out, len))
                return -1;
        }
        return len;
    } else {
        gctx->kma.param.taadl <<= 3;
        gctx->kma.param.tpcl <<= 3;
        s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
                  gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
        /*-
         * recall that we already did en-/decrypt gctx->mres
         * and returned it to caller...
         */
        OPENSSL_cleanse(tmp, gctx->mreslen);

        enc = EVP_CIPHER_CTX_encrypting(ctx);
        if (!enc) {
            if (gctx->taglen < 0)
                return -1;

            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
            if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
                return -1;
        }
        return 0;
    }
}
static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    const unsigned char *iv;

    iv = EVP_CIPHER_CTX_iv(c);
    if (iv != gctx->iv)
        OPENSSL_free(gctx->iv);
    OPENSSL_cleanse(gctx, sizeof(*gctx));
    return 1;
}
# define S390X_AES_XTS_CTX EVP_AES_XTS_CTX
# define S390X_aes_128_xts_CAPABLE 1    /* checked by callee */
# define S390X_aes_256_xts_CAPABLE 1

# define s390x_aes_xts_init_key aes_xts_init_key
static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc);
# define s390x_aes_xts_cipher aes_xts_cipher
static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define s390x_aes_xts_ctrl aes_xts_ctrl
static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
# define s390x_aes_xts_cleanup aes_xts_cleanup
# define S390X_aes_128_ccm_CAPABLE (S390X_aes_128_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmac[0] & \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_ccm_CAPABLE (S390X_aes_192_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmac[0] & \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_ccm_CAPABLE (S390X_aes_256_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmac[0] & \
                                     S390X_CAPBIT(S390X_AES_256)))

# define S390X_CCM_AAD_FLAG 0x40
/*-
 * Set nonce and length fields. Code is big-endian.
 */
static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
                                       const unsigned char *nonce,
                                       size_t mlen)
{
    ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
    ctx->aes.ccm.nonce.g[1] = mlen;
    memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
}
/*-
 * Process additional authenticated data. Code is big-endian.
 */
static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
                              size_t alen)
{
    unsigned char *ptr;
    int i, rem;

    ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;

    /* Suppress 'type-punned pointer dereference' warning. */
    ptr = ctx->aes.ccm.buf.b;

    if (alen < ((1 << 16) - (1 << 8))) {
        *(uint16_t *)ptr = alen;
        i = 2;
    } else if (sizeof(alen) == 8
               && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
        *(uint16_t *)ptr = 0xffff;
        *(uint64_t *)(ptr + 2) = alen;
        i = 10;
    } else {
        *(uint16_t *)ptr = 0xfffe;
        *(uint32_t *)(ptr + 2) = alen;
        i = 6;
    }

    while (i < 16 && alen) {
        ctx->aes.ccm.buf.b[i] = *aad;
        ++aad;
        --alen;
        ++i;
    }
    while (i < 16) {
        ctx->aes.ccm.buf.b[i] = 0;
        ++i;
    }

    ctx->aes.ccm.kmac_param.icv.g[0] = 0;
    ctx->aes.ccm.kmac_param.icv.g[1] = 0;
    s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
               &ctx->aes.ccm.kmac_param);
    ctx->aes.ccm.blocks += 2;

    rem = alen & 0xf;
    alen &= ~(size_t)0xf;
    if (alen) {
        s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        ctx->aes.ccm.blocks += alen >> 4;
        aad += alen;
    }
    if (rem) {
        for (i = 0; i < rem; i++)
            ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];

        s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                 ctx->aes.ccm.kmac_param.k);
        ctx->aes.ccm.blocks++;
    }
}
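/*
 * The three encoding branches in s390x_aes_ccm_aad() follow the CCM a-length
 * encoding of RFC 3610, section 2.2: lengths below 2^16 - 2^8 use a plain
 * 2-byte field, lengths up to 2^32 are prefixed with 0xff 0xfe and use 4
 * bytes, and anything larger is prefixed with 0xff 0xff and uses 8 bytes.
 */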
/*-
 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for
 * success.
 */
static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len, int enc)
{
    size_t n, rem;
    unsigned int i, l, num;
    unsigned char flags;

    flags = ctx->aes.ccm.nonce.b[0];
    if (!(flags & S390X_CCM_AAD_FLAG)) {
        s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
                 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
        ctx->aes.ccm.blocks++;
    }
    l = flags & 0x7;
    ctx->aes.ccm.nonce.b[0] = l;

    /*-
     * Reconstruct length from encoded length field
     * and initialize it with counter value.
     */
    n = 0;
    for (i = 15 - l; i < 15; i++) {
        n |= ctx->aes.ccm.nonce.b[i];
        ctx->aes.ccm.nonce.b[i] = 0;
        n <<= 8;
    }
    n |= ctx->aes.ccm.nonce.b[15];
    ctx->aes.ccm.nonce.b[15] = 1;

    if (n != len)
        return -1;              /* length mismatch */

    /* Two operations per block plus one for tag encryption */
    ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
    if (ctx->aes.ccm.blocks > (1ULL << 61))
        return -2;              /* too much data */

    num = 0;
    rem = len & 0xf;
    len &= ~(size_t)0xf;

    if (enc) {
        /* mac-then-encrypt */
        if (len)
            s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        if (rem) {
            for (i = 0; i < rem; i++)
                ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];

            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                     ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                     ctx->aes.ccm.kmac_param.k);
        }

        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
                                    ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
                                    &num, (ctr128_f)AES_ctr32_encrypt);
    } else {
        /* decrypt-then-mac */
        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
                                    ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
                                    &num, (ctr128_f)AES_ctr32_encrypt);

        if (len)
            s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        if (rem) {
            for (i = 0; i < rem; i++)
                ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];

            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                     ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                     ctx->aes.ccm.kmac_param.k);
        }
    }

    /* encrypt tag */
    for (i = 15 - l; i < 16; i++)
        ctx->aes.ccm.nonce.b[i] = 0;

    s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
             ctx->aes.ccm.kmac_param.k);
    ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
    ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];

    ctx->aes.ccm.nonce.b[0] = flags;    /* restore flags field */
    return 0;
}
/*-
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
 * if successful. Otherwise -1 is returned.
 */
static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    unsigned char *ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
    unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);

    if (out != in
            || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
        return -1;

    if (enc) {
        /* Set explicit iv (sequence number). */
        memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
    }

    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;

    /*-
     * Get explicit iv (sequence number). We already have fixed iv
     * (server/client_write_iv) here.
     */
    memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
    s390x_aes_ccm_setiv(cctx, ivec, len);

    /* Process aad (sequence number|type|version|length) */
    s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);

    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;

    if (enc) {
        if (s390x_aes_ccm(cctx, in, out, len, enc))
            return -1;

        memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
    } else {
        if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
            if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
                               cctx->aes.ccm.m))
                return len;
        }

        OPENSSL_cleanse(out, len);
        return -1;
    }
}
/*-
 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is
 * returned.
 */
static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    unsigned char *ivec;
    int keylen;

    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
        keylen = EVP_CIPHER_CTX_key_length(ctx);
        cctx->aes.ccm.fc = S390X_AES_FC(keylen);
        memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);

        /* Store encoded m and l. */
        cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
                                 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
        memset(cctx->aes.ccm.nonce.b + 1, 0,
               sizeof(cctx->aes.ccm.nonce.b));
        cctx->aes.ccm.blocks = 0;

        cctx->aes.ccm.key_set = 1;
    }

    if (iv != NULL) {
        ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
        memcpy(ivec, iv, 15 - cctx->aes.ccm.l);

        cctx->aes.ccm.iv_set = 1;
    }

    return 1;
}
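/*
 * Worked example for the flags byte above: with the defaults l = 8 and
 * m = 12 set at EVP_CTRL_INIT time, ((8 - 1) & 0x7) = 7 and
 * (((12 - 2) >> 1) & 0x7) << 3 = 0x28, so nonce.b[0] starts out as 0x2f;
 * s390x_aes_ccm_aad() may later OR in S390X_CCM_AAD_FLAG (0x40).
 */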
/*-
 * Called from EVP layer to initialize context, process additional
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
 * plaintext or process a TLS packet, depending on context. Returns bytes
 * written on success. Otherwise -1 is returned.
 */
static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
    int rv;
    unsigned char *buf, *ivec;

    if (!cctx->aes.ccm.key_set)
        return -1;

    if (cctx->aes.ccm.tls_aad_len >= 0)
        return s390x_aes_ccm_tls_cipher(ctx, out, in, len);

    /*-
     * Final(): Does not return any data. Recall that CCM is mac-then-encrypt
     * so integrity must be checked already at Update() i.e., before
     * potentially corrupted data is output.
     */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->aes.ccm.iv_set)
        return -1;

    if (!enc && !cctx->aes.ccm.tag_set)
        return -1;

    if (out == NULL) {
        /* Update(): Pass message length. */
        if (in == NULL) {
            ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
            s390x_aes_ccm_setiv(cctx, ivec, len);
            cctx->aes.ccm.len_set = 1;
            return len;
        }
        /* Update(): Process aad. */
        if (!cctx->aes.ccm.len_set && len)
            return -1;
        s390x_aes_ccm_aad(cctx, in, len);
        return len;
    }

    /* Update(): Process message. */
    if (!cctx->aes.ccm.len_set) {
        /*-
         * In case message length was not previously set explicitly via
         * Update(), set it now.
         */
        ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
        s390x_aes_ccm_setiv(cctx, ivec, len);
        cctx->aes.ccm.len_set = 1;
    }

    if (enc) {
        if (s390x_aes_ccm(cctx, in, out, len, enc))
            return -1;
        cctx->aes.ccm.tag_set = 1;
        return len;
    } else {
        rv = -1;
        if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
            if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
                               cctx->aes.ccm.m))
                rv = len;
        }
        if (rv == -1)
            OPENSSL_cleanse(out, len);
        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.len_set = 0;
        return rv;
    }
}
/*-
 * Performs various operations on the context structure depending on control
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
 * Code is big-endian.
 */
static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
    unsigned char *buf, *iv;
    int enc, len;

    switch (type) {
    case EVP_CTRL_INIT:
        cctx->aes.ccm.key_set = 0;
        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.l = 8;
        cctx->aes.ccm.m = 12;
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.len_set = 0;
        cctx->aes.ccm.tls_aad_len = -1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;

        /* Save the aad for later use. */
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        cctx->aes.ccm.tls_aad_len = arg;

        len = buf[arg - 2] << 8 | buf[arg - 1];
        if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
            return 0;

        /* Correct length for explicit iv. */
        len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;

        enc = EVP_CIPHER_CTX_encrypting(c);
        if (!enc) {
            if (len < cctx->aes.ccm.m)
                return 0;

            /* Correct length for tag. */
            len -= cctx->aes.ccm.m;
        }

        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;

        /* Extra padding: tag appended to record. */
        return cctx->aes.ccm.m;
2379 case EVP_CTRL_CCM_SET_IV_FIXED:
2380 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
2383 /* Copy to first part of the iv. */
2384 iv = EVP_CIPHER_CTX_iv_noconst(c);
2385 memcpy(iv, ptr, arg);
2388 case EVP_CTRL_AEAD_SET_IVLEN:
2392 case EVP_CTRL_CCM_SET_L:
2393 if (arg < 2 || arg > 8)
2396 cctx->aes.ccm.l = arg;
2399 case EVP_CTRL_AEAD_SET_TAG:
2400 if ((arg & 1) || arg < 4 || arg > 16)
2403 enc = EVP_CIPHER_CTX_encrypting(c);
2408 cctx->aes.ccm.tag_set = 1;
2409 buf = EVP_CIPHER_CTX_buf_noconst(c);
2410 memcpy(buf, ptr, arg);
2413 cctx->aes.ccm.m = arg;
2416 case EVP_CTRL_AEAD_GET_TAG:
2417 enc = EVP_CIPHER_CTX_encrypting(c);
2418 if (!enc || !cctx->aes.ccm.tag_set)
2421 if (arg < cctx->aes.ccm.m)
2424 memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2425 cctx->aes.ccm.tag_set = 0;
2426 cctx->aes.ccm.iv_set = 0;
2427 cctx->aes.ccm.len_set = 0;
2438 # define s390x_aes_ccm_cleanup aes_ccm_cleanup
2440 # ifndef OPENSSL_NO_OCB
2441 # define S390X_AES_OCB_CTX EVP_AES_OCB_CTX
2442 # define S390X_aes_128_ocb_CAPABLE 0
2443 # define S390X_aes_192_ocb_CAPABLE 0
2444 # define S390X_aes_256_ocb_CAPABLE 0
2446 # define s390x_aes_ocb_init_key aes_ocb_init_key
2447 static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2448 const unsigned char *iv, int enc);
2449 # define s390x_aes_ocb_cipher aes_ocb_cipher
2450 static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2451 const unsigned char *in, size_t len);
2452 # define s390x_aes_ocb_cleanup aes_ocb_cleanup
2453 static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
2454 # define s390x_aes_ocb_ctrl aes_ocb_ctrl
2455 static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
2458 # ifndef OPENSSL_NO_SIV
2459 # define S390X_AES_SIV_CTX EVP_AES_SIV_CTX
2460 # define S390X_aes_128_siv_CAPABLE 0
2461 # define S390X_aes_192_siv_CAPABLE 0
2462 # define S390X_aes_256_siv_CAPABLE 0
2464 # define s390x_aes_siv_init_key aes_siv_init_key
2465 # define s390x_aes_siv_cipher aes_siv_cipher
2466 # define s390x_aes_siv_cleanup aes_siv_cleanup
2467 # define s390x_aes_siv_ctrl aes_siv_ctrl
2470 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \
2472 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2473 nid##_##keylen##_##nmode,blocksize, \
2476 flags | EVP_CIPH_##MODE##_MODE, \
2477 s390x_aes_##mode##_init_key, \
2478 s390x_aes_##mode##_cipher, \
2480 sizeof(S390X_AES_##MODE##_CTX), \
2486 static const EVP_CIPHER aes_##keylen##_##mode = { \
2487 nid##_##keylen##_##nmode, \
2491 flags | EVP_CIPH_##MODE##_MODE, \
2493 aes_##mode##_cipher, \
2495 sizeof(EVP_AES_KEY), \
2501 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2503 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2504 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2507 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
2508 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2509 nid##_##keylen##_##mode, \
2511 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2513 flags | EVP_CIPH_##MODE##_MODE, \
2514 s390x_aes_##mode##_init_key, \
2515 s390x_aes_##mode##_cipher, \
2516 s390x_aes_##mode##_cleanup, \
2517 sizeof(S390X_AES_##MODE##_CTX), \
2520 s390x_aes_##mode##_ctrl, \
2523 static const EVP_CIPHER aes_##keylen##_##mode = { \
2524 nid##_##keylen##_##mode,blocksize, \
2525 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2527 flags | EVP_CIPH_##MODE##_MODE, \
2528 aes_##mode##_init_key, \
2529 aes_##mode##_cipher, \
2530 aes_##mode##_cleanup, \
2531 sizeof(EVP_AES_##MODE##_CTX), \
2534 aes_##mode##_ctrl, \
2537 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2539 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2540 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2545 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
2546 static const EVP_CIPHER aes_##keylen##_##mode = { \
2547 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
2548 flags|EVP_CIPH_##MODE##_MODE, \
2550 aes_##mode##_cipher, \
2552 sizeof(EVP_AES_KEY), \
2553 NULL,NULL,NULL,NULL }; \
2554 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2555 { return &aes_##keylen##_##mode; }
2557 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
2558 static const EVP_CIPHER aes_##keylen##_##mode = { \
2559 nid##_##keylen##_##mode,blocksize, \
2560 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
2562 flags|EVP_CIPH_##MODE##_MODE, \
2563 aes_##mode##_init_key, \
2564 aes_##mode##_cipher, \
2565 aes_##mode##_cleanup, \
2566 sizeof(EVP_AES_##MODE##_CTX), \
2567 NULL,NULL,aes_##mode##_ctrl,NULL }; \
2568 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2569 { return &aes_##keylen##_##mode; }
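/*
 * For illustration: BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM, flags)
 * defines a static EVP_CIPHER aes_128_gcm (block size 1, 16-byte key, 12-byte
 * IV) plus the accessor EVP_aes_128_gcm(); the XTS/SIV special case in the
 * key-length expression doubles the key for those two modes.
 */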
2573 #if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__))
2574 # include "arm_arch.h"
2575 # if __ARM_MAX_ARCH__>=7
2576 # if defined(BSAES_ASM)
2577 # define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
2579 # if defined(VPAES_ASM)
2580 # define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
2582 # define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
2583 # define HWAES_set_encrypt_key aes_v8_set_encrypt_key
2584 # define HWAES_set_decrypt_key aes_v8_set_decrypt_key
2585 # define HWAES_encrypt aes_v8_encrypt
2586 # define HWAES_decrypt aes_v8_decrypt
2587 # define HWAES_cbc_encrypt aes_v8_cbc_encrypt
2588 # define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
2592 #if defined(HWAES_CAPABLE)
2593 int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits,
2595 int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits,
2597 void HWAES_encrypt(const unsigned char *in, unsigned char *out,
2598 const AES_KEY *key);
2599 void HWAES_decrypt(const unsigned char *in, unsigned char *out,
2600 const AES_KEY *key);
2601 void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out,
2602 size_t length, const AES_KEY *key,
2603 unsigned char *ivec, const int enc);
2604 void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
2605 size_t len, const AES_KEY *key,
2606 const unsigned char ivec[16]);
2607 void HWAES_xts_encrypt(const unsigned char *inp, unsigned char *out,
2608 size_t len, const AES_KEY *key1,
2609 const AES_KEY *key2, const unsigned char iv[16]);
2610 void HWAES_xts_decrypt(const unsigned char *inp, unsigned char *out,
2611 size_t len, const AES_KEY *key1,
2612 const AES_KEY *key2, const unsigned char iv[16]);
2615 #define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
2616 BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2617 BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2618 BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2619 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2620 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
2621 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
2622 BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
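/*
 * Each pack invocation therefore provides the seven classic modes for one
 * key size: EVP_aes_<keylen>_cbc/ecb/ofb/cfb/cfb1/cfb8/ctr.
 */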
2624 static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2625 const unsigned char *iv, int enc)
2628 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2630 mode = EVP_CIPHER_CTX_mode(ctx);
2631 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
2633 #ifdef HWAES_CAPABLE
2634 if (HWAES_CAPABLE) {
2635 ret = HWAES_set_decrypt_key(key,
2636 EVP_CIPHER_CTX_key_length(ctx) * 8,
2638 dat->block = (block128_f) HWAES_decrypt;
2639 dat->stream.cbc = NULL;
2640 # ifdef HWAES_cbc_encrypt
2641 if (mode == EVP_CIPH_CBC_MODE)
2642 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2646 #ifdef BSAES_CAPABLE
2647 if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
2648 ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2650 dat->block = (block128_f) AES_decrypt;
2651 dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
2654 #ifdef VPAES_CAPABLE
2655 if (VPAES_CAPABLE) {
2656 ret = vpaes_set_decrypt_key(key,
2657 EVP_CIPHER_CTX_key_length(ctx) * 8,
2659 dat->block = (block128_f) vpaes_decrypt;
2660 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2661 (cbc128_f) vpaes_cbc_encrypt : NULL;
2665 ret = AES_set_decrypt_key(key,
2666 EVP_CIPHER_CTX_key_length(ctx) * 8,
2668 dat->block = (block128_f) AES_decrypt;
2669 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2670 (cbc128_f) AES_cbc_encrypt : NULL;
2673 #ifdef HWAES_CAPABLE
2674 if (HWAES_CAPABLE) {
2675 ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2677 dat->block = (block128_f) HWAES_encrypt;
2678 dat->stream.cbc = NULL;
2679 # ifdef HWAES_cbc_encrypt
2680 if (mode == EVP_CIPH_CBC_MODE)
2681 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2684 # ifdef HWAES_ctr32_encrypt_blocks
2685 if (mode == EVP_CIPH_CTR_MODE)
2686 dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2689 (void)0; /* terminate potentially open 'else' */
2692 #ifdef BSAES_CAPABLE
2693 if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
2694 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2696 dat->block = (block128_f) AES_encrypt;
2697 dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
2700 #ifdef VPAES_CAPABLE
2701 if (VPAES_CAPABLE) {
2702 ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2704 dat->block = (block128_f) vpaes_encrypt;
2705 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2706 (cbc128_f) vpaes_cbc_encrypt : NULL;
2710 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2712 dat->block = (block128_f) AES_encrypt;
2713 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2714 (cbc128_f) AES_cbc_encrypt : NULL;
2716 if (mode == EVP_CIPH_CTR_MODE)
2717 dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
2722 EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
2729 static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2730 const unsigned char *in, size_t len)
2732 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2734 if (dat->stream.cbc)
2735 (*dat->stream.cbc) (in, out, len, &dat->ks,
2736 EVP_CIPHER_CTX_iv_noconst(ctx),
2737 EVP_CIPHER_CTX_encrypting(ctx));
2738 else if (EVP_CIPHER_CTX_encrypting(ctx))
2739 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks,
2740 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2742 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
2743 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2748 static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2749 const unsigned char *in, size_t len)
2751 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
2753 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2758 for (i = 0, len -= bl; i <= len; i += bl)
2759 (*dat->block) (in + i, out + i, &dat->ks);
2764 static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2765 const unsigned char *in, size_t len)
2767 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2769 int num = EVP_CIPHER_CTX_num(ctx);
2770 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
2771 EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block);
2772 EVP_CIPHER_CTX_set_num(ctx, num);
2776 static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2777 const unsigned char *in, size_t len)
2779 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2781 int num = EVP_CIPHER_CTX_num(ctx);
2782 CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
2783 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2784 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2785 EVP_CIPHER_CTX_set_num(ctx, num);
2789 static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2790 const unsigned char *in, size_t len)
2792 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2794 int num = EVP_CIPHER_CTX_num(ctx);
2795 CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
2796 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2797 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2798 EVP_CIPHER_CTX_set_num(ctx, num);
2802 static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2803 const unsigned char *in, size_t len)
2805 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2807 if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
2808 int num = EVP_CIPHER_CTX_num(ctx);
2809 CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
2810 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2811 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2812 EVP_CIPHER_CTX_set_num(ctx, num);
2816 while (len >= MAXBITCHUNK) {
2817 int num = EVP_CIPHER_CTX_num(ctx);
2818 CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
2819 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2820 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2821 EVP_CIPHER_CTX_set_num(ctx, num);
2827 int num = EVP_CIPHER_CTX_num(ctx);
2828 CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
2829 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2830 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2831 EVP_CIPHER_CTX_set_num(ctx, num);
2837 static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2838 const unsigned char *in, size_t len)
2840 unsigned int num = EVP_CIPHER_CTX_num(ctx);
2841 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2843 if (dat->stream.ctr)
2844 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
2845 EVP_CIPHER_CTX_iv_noconst(ctx),
2846 EVP_CIPHER_CTX_buf_noconst(ctx),
2847 &num, dat->stream.ctr);
2849 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
2850 EVP_CIPHER_CTX_iv_noconst(ctx),
2851 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
2853 EVP_CIPHER_CTX_set_num(ctx, num);
2857 BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
2858 BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
2859 BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
2861 static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
2863 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2866 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
2867 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
2868 OPENSSL_free(gctx->iv);
2872 static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2874 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2879 gctx->ivlen = c->cipher->iv_len;
2883 gctx->tls_aad_len = -1;
2886 case EVP_CTRL_AEAD_SET_IVLEN:
2889 /* Allocate memory for IV if needed */
2890 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
2891 if (gctx->iv != c->iv)
2892 OPENSSL_free(gctx->iv);
2893 if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
2894 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
2901 case EVP_CTRL_AEAD_SET_TAG:
2902 if (arg <= 0 || arg > 16 || c->encrypt)
2904 memcpy(c->buf, ptr, arg);
2908 case EVP_CTRL_AEAD_GET_TAG:
2909 if (arg <= 0 || arg > 16 || !c->encrypt
2910 || gctx->taglen < 0)
2912 memcpy(ptr, c->buf, arg);
2915 case EVP_CTRL_GET_IV:
2916 if (gctx->iv_gen != 1 && gctx->iv_gen_rand != 1)
2918 if (gctx->ivlen != arg)
2920 memcpy(ptr, gctx->iv, arg);
2923 case EVP_CTRL_GCM_SET_IV_FIXED:
2924 /* Special case: -1 length restores whole IV */
2926 memcpy(gctx->iv, ptr, gctx->ivlen);
2931 * Fixed field must be at least 4 bytes and invocation field at least 8.
 */
2934 if ((arg < 4) || (gctx->ivlen - arg) < 8)
2937 memcpy(gctx->iv, ptr, arg);
2938 if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
2943 case EVP_CTRL_GCM_IV_GEN:
2944 if (gctx->iv_gen == 0 || gctx->key_set == 0)
2946 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2947 if (arg <= 0 || arg > gctx->ivlen)
2949 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
2951 * Invocation field will be at least 8 bytes in size, so there is no need
2952 * to check for wraparound or to increment more than the last 8 bytes.
 */
2954 ctr64_inc(gctx->iv + gctx->ivlen - 8);
2958 case EVP_CTRL_GCM_SET_IV_INV:
2959 if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
2961 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
2962 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2966 case EVP_CTRL_AEAD_TLS1_AAD:
2967 /* Save the AAD for later use */
2968 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2970 memcpy(c->buf, ptr, arg);
2971 gctx->tls_aad_len = arg;
2972 gctx->tls_enc_records = 0;
2974 unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
2975 /* Correct length for explicit IV */
2976 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
2978 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
2979 /* If decrypting, correct for the tag too */
2981 if (len < EVP_GCM_TLS_TAG_LEN)
2983 len -= EVP_GCM_TLS_TAG_LEN;
2985 c->buf[arg - 2] = len >> 8;
2986 c->buf[arg - 1] = len & 0xff;
2988 /* Extra padding: tag appended to record */
2989 return EVP_GCM_TLS_TAG_LEN;
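/*
 * For reference, the 13-byte TLS AAD handled above is laid out as
 * seq_num(8) || type(1) || version(2) || length(2); the length field is
 * rewritten so that it covers the plaintext only.
 */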
2993 EVP_CIPHER_CTX *out = ptr;
2994 EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
2995 if (gctx->gcm.key) {
2996 if (gctx->gcm.key != &gctx->ks)
2998 gctx_out->gcm.key = &gctx_out->ks;
3000 if (gctx->iv == c->iv)
3001 gctx_out->iv = out->iv;
3003 if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
3004 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
3007 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
3018 static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3019 const unsigned char *iv, int enc)
3021 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
3026 #ifdef HWAES_CAPABLE
3027 if (HWAES_CAPABLE) {
3028 HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3029 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3030 (block128_f) HWAES_encrypt);
3031 # ifdef HWAES_ctr32_encrypt_blocks
3032 gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
3039 #ifdef BSAES_CAPABLE
3040 if (BSAES_CAPABLE) {
3041 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3042 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3043 (block128_f) AES_encrypt);
3044 gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
3048 #ifdef VPAES_CAPABLE
3049 if (VPAES_CAPABLE) {
3050 vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3051 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3052 (block128_f) vpaes_encrypt);
3057 (void)0; /* terminate potentially open 'else' */
3059 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3060 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3061 (block128_f) AES_encrypt);
3063 gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
3070 * If we have an IV we can set it directly, otherwise use the saved IV.
 */
3072 if (iv == NULL && gctx->iv_set)
3075 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
3080 /* If the key is set use the IV, otherwise copy it */
3082 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
3084 memcpy(gctx->iv, iv, gctx->ivlen);
3092 * Handle TLS GCM packet format. This consists of the last portion of the IV
3093 * followed by the payload and finally the tag. On encrypt generate IV,
3094 * encrypt payload and write the tag. On verify retrieve IV, decrypt the
 * payload and verify the tag.
 */
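/*
 * Record layout handled here:
 *
 *   explicit IV (8 bytes) || payload || tag (16 bytes)
 */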
3098 static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3099 const unsigned char *in, size_t len)
3101 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
3103 /* Encrypt/decrypt must be performed in place */
3105 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
3109 * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
3110 * Requirements from SP 800-38D". The requirement is for one party to the
3111 * communication to fail after 2^64 - 1 records under a single key. We do
 * this on the encrypting side only.
 */
3114 if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
3115 EVPerr(EVP_F_AES_GCM_TLS_CIPHER, EVP_R_TOO_MANY_RECORDS);
3120 * Set IV from start of buffer or generate IV and write to start of
3123 if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN
3124 : EVP_CTRL_GCM_SET_IV_INV,
3125 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
3128 if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
3130 /* Fix buffer and length to point to payload */
3131 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
3132 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
3133 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
3135 /* Encrypt payload */
3138 #if defined(AES_GCM_ASM)
3139 if (len >= 32 && AES_GCM_ASM(gctx)) {
3140 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
3143 bulk = AES_gcm_encrypt(in, out, len,
3145 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3146 gctx->gcm.len.u[1] += bulk;
3149 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
3152 len - bulk, gctx->ctr))
3156 #if defined(AES_GCM_ASM2)
3157 if (len >= 32 && AES_GCM_ASM2(gctx)) {
3158 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
3161 bulk = AES_gcm_encrypt(in, out, len,
3163 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3164 gctx->gcm.len.u[1] += bulk;
3167 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
3168 in + bulk, out + bulk, len - bulk))
3172 /* Finally write tag */
3173 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
3174 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
3179 #if defined(AES_GCM_ASM)
3180 if (len >= 16 && AES_GCM_ASM(gctx)) {
3181 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
3184 bulk = AES_gcm_decrypt(in, out, len,
3186 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3187 gctx->gcm.len.u[1] += bulk;
3190 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3193 len - bulk, gctx->ctr))
3197 #if defined(AES_GCM_ASM2)
3198 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3199 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
3202 bulk = AES_gcm_decrypt(in, out, len,
3204 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3205 gctx->gcm.len.u[1] += bulk;
3208 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3209 in + bulk, out + bulk, len - bulk))
3213 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
3214 /* If the tag mismatches, wipe the buffer */
3215 if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
3216 OPENSSL_cleanse(out, len);
3224 gctx->tls_aad_len = -1;
3230 * See SP800-38D (GCM) Section 8 "Uniqueness requirement on IVs and keys".
3232 * See also 8.2.2, RBG-based construction.
3233 * The random construction consists of a free field (which may be empty)
3234 * and a random field generated by a DRBG that can return at least 96 bits
3235 * of entropy strength. (The DRBG must be seeded by the FIPS module.)
 */
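/*
 * E.g. with the default 12-byte IV and offset 0, all 96 bits come from the
 * DRBG; a nonzero offset leaves iv[0..offset-1] as the caller-set free field
 * and fills only the remaining ivlen - offset bytes.
 */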
3237 static int aes_gcm_iv_generate(EVP_AES_GCM_CTX *gctx, int offset)
3239 int sz = gctx->ivlen - offset;
3241 /* Must be at least 96 bits */
3242 if (sz <= 0 || gctx->ivlen < 12)
3245 /* Use DRBG to generate random iv */
3246 if (RAND_bytes(gctx->iv + offset, sz) <= 0)
3250 #endif /* FIPS_MODE */
3252 static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3253 const unsigned char *in, size_t len)
3255 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
3257 /* If not set up, return error */
3261 if (gctx->tls_aad_len >= 0)
3262 return aes_gcm_tls_cipher(ctx, out, in, len);
3266 * FIPS requires generation of AES-GCM IV's inside the FIPS module.
3267 * The IV can still be set externally (the security policy will state that
3268 * this is not FIPS compliant). There are some applications
3269 * where setting the IV externally is the only option available.
3271 if (!gctx->iv_set) {
3272 if (!ctx->encrypt || !aes_gcm_iv_generate(gctx, 0))
3274 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
3276 gctx->iv_gen_rand = 1;
3281 #endif /* FIPS_MODE */
3285 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
3287 } else if (ctx->encrypt) {
3290 #if defined(AES_GCM_ASM)
3291 if (len >= 32 && AES_GCM_ASM(gctx)) {
3292 size_t res = (16 - gctx->gcm.mres) % 16;
3294 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3297 bulk = AES_gcm_encrypt(in + res,
3298 out + res, len - res,
3299 gctx->gcm.key, gctx->gcm.Yi.c,
3301 gctx->gcm.len.u[1] += bulk;
3305 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
3308 len - bulk, gctx->ctr))
3312 #if defined(AES_GCM_ASM2)
3313 if (len >= 32 && AES_GCM_ASM2(gctx)) {
3314 size_t res = (16 - gctx->gcm.mres) % 16;
3316 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3319 bulk = AES_gcm_encrypt(in + res,
3320 out + res, len - res,
3321 gctx->gcm.key, gctx->gcm.Yi.c,
3323 gctx->gcm.len.u[1] += bulk;
3327 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
3328 in + bulk, out + bulk, len - bulk))
3334 #if defined(AES_GCM_ASM)
3335 if (len >= 16 && AES_GCM_ASM(gctx)) {
3336 size_t res = (16 - gctx->gcm.mres) % 16;
3338 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3341 bulk = AES_gcm_decrypt(in + res,
3342 out + res, len - res,
3344 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3345 gctx->gcm.len.u[1] += bulk;
3349 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3352 len - bulk, gctx->ctr))
3356 #if defined(AES_GCM_ASM2)
3357 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3358 size_t res = (16 - gctx->gcm.mres) % 16;
3360 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3363 bulk = AES_gcm_decrypt(in + res,
3364 out + res, len - res,
3366 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3367 gctx->gcm.len.u[1] += bulk;
3371 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3372 in + bulk, out + bulk, len - bulk))
3378 if (!ctx->encrypt) {
3379 if (gctx->taglen < 0)
3381 if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
3386 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
3388 /* Don't reuse the IV */
3395 #define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
3396 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3397 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3398 | EVP_CIPH_CUSTOM_COPY)
3400 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
3401 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3402 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
3403 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3404 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
3405 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
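/*
 * Usage sketch (illustrative only, not compiled here): AES-256-GCM
 * encryption at the EVP layer, error handling omitted. key, iv, aad, pt,
 * ct, tag and the length variables are caller-supplied placeholders. The
 * AAD is passed through an Update() call with a NULL output buffer.
 *
 *   EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *   EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, NULL, NULL);
 *   EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL);
 *   EVP_EncryptInit_ex(c, NULL, NULL, key, iv);
 *   EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen);
 *   EVP_EncryptUpdate(c, ct, &outl, pt, ptlen);
 *   EVP_EncryptFinal_ex(c, ct + outl, &tmplen);
 *   EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 *   EVP_CIPHER_CTX_free(c);
 */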
3407 static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3409 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,c);
3410 if (type == EVP_CTRL_COPY) {
3411 EVP_CIPHER_CTX *out = ptr;
3412 EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);
3413 if (xctx->xts.key1) {
3414 if (xctx->xts.key1 != &xctx->ks1)
3416 xctx_out->xts.key1 = &xctx_out->ks1;
3418 if (xctx->xts.key2) {
3419 if (xctx->xts.key2 != &xctx->ks2)
3421 xctx_out->xts.key2 = &xctx_out->ks2;
3424 } else if (type != EVP_CTRL_INIT)
3426 /* key1 and key2 are used as an indicator that both key and IV are set */
3427 xctx->xts.key1 = NULL;
3428 xctx->xts.key2 = NULL;
3432 static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3433 const unsigned char *iv, int enc)
3435 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3441 /* The key is in fact two half-length keys concatenated */
3442 const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
3443 const int bits = bytes * 8;
3446 * Verify that the two keys are different.
3448 * This addresses the vulnerability described in Rogaway's
3449 * September 2004 paper:
3451 * "Efficient Instantiations of Tweakable Blockciphers and
3452 * Refinements to Modes OCB and PMAC".
3453 * (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf)
3455 * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states
3457 * "The check for Key_1 != Key_2 shall be done at any place
3458 * BEFORE using the keys in the XTS-AES algorithm to process data
 * with them."
 */
3461 if (memcmp(key, key + bytes, bytes) == 0) {
3462 EVPerr(EVP_F_AES_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
3467 xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
3469 xctx->stream = NULL;
3471 /* key_len is two AES keys */
3472 #ifdef HWAES_CAPABLE
3473 if (HWAES_CAPABLE) {
3475 HWAES_set_encrypt_key(key, bits, &xctx->ks1.ks);
3476 xctx->xts.block1 = (block128_f) HWAES_encrypt;
3477 # ifdef HWAES_xts_encrypt
3478 xctx->stream = HWAES_xts_encrypt;
3481 HWAES_set_decrypt_key(key, bits, &xctx->ks1.ks);
3482 xctx->xts.block1 = (block128_f) HWAES_decrypt;
3483 # ifdef HWAES_xts_decrypt
3484 xctx->stream = HWAES_xts_decrypt;
3488 HWAES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
3489 xctx->xts.block2 = (block128_f) HWAES_encrypt;
3491 xctx->xts.key1 = &xctx->ks1;
3495 #ifdef BSAES_CAPABLE
3497 xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
3500 #ifdef VPAES_CAPABLE
3501 if (VPAES_CAPABLE) {
3503 vpaes_set_encrypt_key(key, bits, &xctx->ks1.ks);
3504 xctx->xts.block1 = (block128_f) vpaes_encrypt;
3506 vpaes_set_decrypt_key(key, bits, &xctx->ks1.ks);
3507 xctx->xts.block1 = (block128_f) vpaes_decrypt;
3510 vpaes_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
3511 xctx->xts.block2 = (block128_f) vpaes_encrypt;
3513 xctx->xts.key1 = &xctx->ks1;
3517 (void)0; /* terminate potentially open 'else' */
3520 AES_set_encrypt_key(key, bits, &xctx->ks1.ks);
3521 xctx->xts.block1 = (block128_f) AES_encrypt;
3523 AES_set_decrypt_key(key, bits, &xctx->ks1.ks);
3524 xctx->xts.block1 = (block128_f) AES_decrypt;
3527 AES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
3528 xctx->xts.block2 = (block128_f) AES_encrypt;
3530 xctx->xts.key1 = &xctx->ks1;
3535 xctx->xts.key2 = &xctx->ks2;
3536 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
3542 static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3543 const unsigned char *in, size_t len)
3545 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3547 if (xctx->xts.key1 == NULL
3548 || xctx->xts.key2 == NULL
3551 || len < AES_BLOCK_SIZE)
3555 * Impose a limit of 2^20 blocks per data unit as specified by
3556 * IEEE Std 1619-2018. The earlier and obsolete IEEE Std 1619-2007
3557 * indicated that this was a SHOULD NOT rather than a MUST NOT.
3558 * NIST SP 800-38E mandates the same limit.
 */
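/*
 * At 16-byte blocks this caps a data unit at 2^20 * 16 = 16 MiB.
 */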
3560 if (len > XTS_MAX_BLOCKS_PER_DATA_UNIT * AES_BLOCK_SIZE) {
3561 EVPerr(EVP_F_AES_XTS_CIPHER, EVP_R_XTS_DATA_UNIT_IS_TOO_LARGE);
3566 (*xctx->stream) (in, out, len,
3567 xctx->xts.key1, xctx->xts.key2,
3568 EVP_CIPHER_CTX_iv_noconst(ctx));
3569 else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx),
3571 EVP_CIPHER_CTX_encrypting(ctx)))
3576 #define aes_xts_cleanup NULL
3578 #define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
3579 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3580 | EVP_CIPH_CUSTOM_COPY)
3582 BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
3583 BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
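/*
 * Usage sketch (illustrative only, not compiled here): XTS takes a
 * double-length key whose halves must differ, and the 16-byte tweak is
 * passed as the IV; key, tweak, in/out and the lengths are placeholders.
 * XTS is unauthenticated and intended for block-device style encryption
 * of data units of at least one cipher block.
 *
 *   EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *   EVP_EncryptInit_ex(c, EVP_aes_256_xts(), NULL, key, tweak);
 *   EVP_EncryptUpdate(c, out, &outl, in, inlen);
 *   EVP_CIPHER_CTX_free(c);
 */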
3585 static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3587 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
3596 cctx->tls_aad_len = -1;
3599 case EVP_CTRL_AEAD_TLS1_AAD:
3600 /* Save the AAD for later use */
3601 if (arg != EVP_AEAD_TLS1_AAD_LEN)
3603 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3604 cctx->tls_aad_len = arg;
3607 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
3608 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
3609 /* Correct length for explicit IV */
3610 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
3612 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
3613 /* If decrypting, correct for the tag too */
3614 if (!EVP_CIPHER_CTX_encrypting(c)) {
3619 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
3620 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
3622 /* Extra padding: tag appended to record */
3625 case EVP_CTRL_CCM_SET_IV_FIXED:
3626 /* Sanity check length */
3627 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
3629 /* Just copy to first part of IV */
3630 memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
3633 case EVP_CTRL_AEAD_SET_IVLEN:
3636 case EVP_CTRL_CCM_SET_L:
3637 if (arg < 2 || arg > 8)
3642 case EVP_CTRL_AEAD_SET_TAG:
3643 if ((arg & 1) || arg < 4 || arg > 16)
3645 if (EVP_CIPHER_CTX_encrypting(c) && ptr)
3649 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3654 case EVP_CTRL_AEAD_GET_TAG:
3655 if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
3657 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
3666 EVP_CIPHER_CTX *out = ptr;
3667 EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
3668 if (cctx->ccm.key) {
3669 if (cctx->ccm.key != &cctx->ks)
3671 cctx_out->ccm.key = &cctx_out->ks;
3682 static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3683 const unsigned char *iv, int enc)
3685 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3690 #ifdef HWAES_CAPABLE
3691 if (HWAES_CAPABLE) {
3692 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3695 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3696 &cctx->ks, (block128_f) HWAES_encrypt);
3702 #ifdef VPAES_CAPABLE
3703 if (VPAES_CAPABLE) {
3704 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3706 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3707 &cctx->ks, (block128_f) vpaes_encrypt);
3713 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3715 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3716 &cctx->ks, (block128_f) AES_encrypt);
3721 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
3727 static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3728 const unsigned char *in, size_t len)
3730 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3731 CCM128_CONTEXT *ccm = &cctx->ccm;
3732 /* Encrypt/decrypt must be performed in place */
3733 if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
3735 /* If encrypting set explicit IV from sequence number (start of AAD) */
3736 if (EVP_CIPHER_CTX_encrypting(ctx))
3737 memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
3738 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3739 /* Get rest of IV from explicit IV */
3740 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
3741 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3742 /* Correct length value */
3743 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3744 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
3748 CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
3749 /* Fix buffer to point to payload */
3750 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3751 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3752 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3753 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3755 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3757 if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
3759 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3761 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3763 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3764 unsigned char tag[16];
3765 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3766 if (!CRYPTO_memcmp(tag, in + len, cctx->M))
3770 OPENSSL_cleanse(out, len);
3775 static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3776 const unsigned char *in, size_t len)
3778 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3779 CCM128_CONTEXT *ccm = &cctx->ccm;
3780 /* If not set up, return error */
3784 if (cctx->tls_aad_len >= 0)
3785 return aes_ccm_tls_cipher(ctx, out, in, len);
3787 /* EVP_*Final() doesn't return any data */
3788 if (in == NULL && out != NULL)
3794 if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
3798 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3804 /* If we have AAD we need the message length */
3805 if (!cctx->len_set && len)
3807 CRYPTO_ccm128_aad(ccm, in, len);
3810 /* If the length is not set yet, set it now */
3811 if (!cctx->len_set) {
3812 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3817 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3818 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3820 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3826 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3828 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3829 unsigned char tag[16];
3830 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3831 if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
3837 OPENSSL_cleanse(out, len);
3845 #define aes_ccm_cleanup NULL
3847 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
3848 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3849 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
3850 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3851 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
3852 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
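/*
 * Usage sketch (illustrative only, not compiled here): CCM decryption.
 * key, nonce, tag, aad, ct, pt and the lengths are placeholders. The
 * nonce length (here 7, i.e. L = 8) and the expected tag must be set
 * first, and the total ciphertext length must be passed in a NULL-input
 * Update() call before any AAD, as the code above requires.
 *
 *   EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *   EVP_DecryptInit_ex(c, EVP_aes_128_ccm(), NULL, NULL, NULL);
 *   EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 7, NULL);
 *   EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_TAG, 16, tag);
 *   EVP_DecryptInit_ex(c, NULL, NULL, key, nonce);
 *   EVP_DecryptUpdate(c, NULL, &outl, NULL, ctlen);
 *   EVP_DecryptUpdate(c, NULL, &outl, aad, aadlen);
 *   rv = EVP_DecryptUpdate(c, pt, &outl, ct, ctlen);
 *   EVP_CIPHER_CTX_free(c);
 */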
3859 /* Indicates if IV has been set */
3863 static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3864 const unsigned char *iv, int enc)
3866 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3870 if (EVP_CIPHER_CTX_encrypting(ctx))
3871 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3874 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3880 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, EVP_CIPHER_CTX_iv_length(ctx));
3881 wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx);
3886 static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3887 const unsigned char *in, size_t inlen)
3889 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3891 /* AES wrap with padding has IV length of 4, without padding 8 */
3892 int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;
3893 /* No final operation so always return zero length */
3896 /* Input length must always be non-zero */
3899 /* If decrypting, we need at least 16 bytes and a multiple of 8 */
3900 if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
3902 /* If not padding, the input must be a multiple of 8 */
3903 if (!pad && inlen & 0x7)
3905 if (is_partially_overlapping(out, in, inlen)) {
3906 EVPerr(EVP_F_AES_WRAP_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
3910 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3911 /* If padding, round up to a multiple of 8 */
3913 inlen = (inlen + 7) / 8 * 8;
3918 * If not padding, the output will be exactly 8 bytes smaller than the
3919 * input. If padding, it will be at least 8 bytes smaller, but we don't
3920 * know by how much.
 */
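/*
 * Concretely: an RFC 3394 unwrap always yields inlen - 8 bytes, while an
 * RFC 5649 (padded) unwrap yields between inlen - 15 and inlen - 8 bytes,
 * depending on how much padding was added.
 */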
3926 if (EVP_CIPHER_CTX_encrypting(ctx))
3927 rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
3929 (block128_f) AES_encrypt);
3931 rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
3933 (block128_f) AES_decrypt);
3935 if (EVP_CIPHER_CTX_encrypting(ctx))
3936 rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
3937 out, in, inlen, (block128_f) AES_encrypt);
3939 rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
3940 out, in, inlen, (block128_f) AES_decrypt);
3942 return rv ? (int)rv : -1;
3945 #define WRAP_FLAGS (EVP_CIPH_WRAP_MODE \
3946 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3947 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)
3949 static const EVP_CIPHER aes_128_wrap = {
3951 8, 16, 8, WRAP_FLAGS,
3952 aes_wrap_init_key, aes_wrap_cipher,
3954 sizeof(EVP_AES_WRAP_CTX),
3955 NULL, NULL, NULL, NULL
3958 const EVP_CIPHER *EVP_aes_128_wrap(void)
3960 return &aes_128_wrap;
3963 static const EVP_CIPHER aes_192_wrap = {
3965 8, 24, 8, WRAP_FLAGS,
3966 aes_wrap_init_key, aes_wrap_cipher,
3968 sizeof(EVP_AES_WRAP_CTX),
3969 NULL, NULL, NULL, NULL
3972 const EVP_CIPHER *EVP_aes_192_wrap(void)
3974 return &aes_192_wrap;
3977 static const EVP_CIPHER aes_256_wrap = {
3979 8, 32, 8, WRAP_FLAGS,
3980 aes_wrap_init_key, aes_wrap_cipher,
3982 sizeof(EVP_AES_WRAP_CTX),
3983 NULL, NULL, NULL, NULL
3986 const EVP_CIPHER *EVP_aes_256_wrap(void)
3988 return &aes_256_wrap;
3991 static const EVP_CIPHER aes_128_wrap_pad = {
3992 NID_id_aes128_wrap_pad,
3993 8, 16, 4, WRAP_FLAGS,
3994 aes_wrap_init_key, aes_wrap_cipher,
3996 sizeof(EVP_AES_WRAP_CTX),
3997 NULL, NULL, NULL, NULL
4000 const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
4002 return &aes_128_wrap_pad;
4005 static const EVP_CIPHER aes_192_wrap_pad = {
4006 NID_id_aes192_wrap_pad,
4007 8, 24, 4, WRAP_FLAGS,
4008 aes_wrap_init_key, aes_wrap_cipher,
4010 sizeof(EVP_AES_WRAP_CTX),
4011 NULL, NULL, NULL, NULL
4014 const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
4016 return &aes_192_wrap_pad;
4019 static const EVP_CIPHER aes_256_wrap_pad = {
4020 NID_id_aes256_wrap_pad,
4021 8, 32, 4, WRAP_FLAGS,
4022 aes_wrap_init_key, aes_wrap_cipher,
4024 sizeof(EVP_AES_WRAP_CTX),
4025 NULL, NULL, NULL, NULL
4028 const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
4030 return &aes_256_wrap_pad;
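/*
 * Usage sketch (illustrative only, not compiled here): the wrap ciphers
 * refuse to operate unless the caller opts in with
 * EVP_CIPHER_CTX_FLAG_WRAP_ALLOW; kek, keydata and wrapped are
 * placeholders. Wrapping 32 bytes under the default IV yields 40 bytes.
 *
 *   EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *   EVP_CIPHER_CTX_set_flags(c, EVP_CIPHER_CTX_FLAG_WRAP_ALLOW);
 *   EVP_EncryptInit_ex(c, EVP_aes_256_wrap(), NULL, kek, NULL);
 *   EVP_EncryptUpdate(c, wrapped, &outl, keydata, 32);
 *   EVP_CIPHER_CTX_free(c);
 */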
4033 #ifndef OPENSSL_NO_OCB
4034 static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
4036 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
4037 EVP_CIPHER_CTX *newc;
4038 EVP_AES_OCB_CTX *new_octx;
4044 octx->ivlen = EVP_CIPHER_CTX_iv_length(c);
4045 octx->iv = EVP_CIPHER_CTX_iv_noconst(c);
4047 octx->data_buf_len = 0;
4048 octx->aad_buf_len = 0;
4051 case EVP_CTRL_AEAD_SET_IVLEN:
4052 /* IV len must be 1 to 15 */
4053 if (arg <= 0 || arg > 15)
4059 case EVP_CTRL_AEAD_SET_TAG:
4061 /* Tag len must be 0 to 16 */
4062 if (arg < 0 || arg > 16)
4068 if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c))
4070 memcpy(octx->tag, ptr, arg);
4073 case EVP_CTRL_AEAD_GET_TAG:
4074 if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c))
4077 memcpy(ptr, octx->tag, arg);
4081 newc = (EVP_CIPHER_CTX *)ptr;
4082 new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
4083 return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
4084 &new_octx->ksenc.ks,
4085 &new_octx->ksdec.ks);
4093 # ifdef HWAES_CAPABLE
4094 # ifdef HWAES_ocb_encrypt
4095 void HWAES_ocb_encrypt(const unsigned char *in, unsigned char *out,
4096 size_t blocks, const void *key,
4097 size_t start_block_num,
4098 unsigned char offset_i[16],
4099 const unsigned char L_[][16],
4100 unsigned char checksum[16]);
4102 # define HWAES_ocb_encrypt ((ocb128_f)NULL)
4104 # ifdef HWAES_ocb_decrypt
4105 void HWAES_ocb_decrypt(const unsigned char *in, unsigned char *out,
4106 size_t blocks, const void *key,
4107 size_t start_block_num,
4108 unsigned char offset_i[16],
4109 const unsigned char L_[][16],
4110 unsigned char checksum[16]);
4112 # define HWAES_ocb_decrypt ((ocb128_f)NULL)
4116 static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
4117 const unsigned char *iv, int enc)
4119 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
4125 * We set both the encrypt and decrypt key here because decrypt
4126 * needs both. We could possibly optimise by not setting the decrypt key
4127 * for an encryption-only operation.
 */
4129 # ifdef HWAES_CAPABLE
4130 if (HWAES_CAPABLE) {
4131 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4133 HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4135 if (!CRYPTO_ocb128_init(&octx->ocb,
4136 &octx->ksenc.ks, &octx->ksdec.ks,
4137 (block128_f) HWAES_encrypt,
4138 (block128_f) HWAES_decrypt,
4139 enc ? HWAES_ocb_encrypt
4140 : HWAES_ocb_decrypt))
4145 # ifdef VPAES_CAPABLE
4146 if (VPAES_CAPABLE) {
4147 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4149 vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4151 if (!CRYPTO_ocb128_init(&octx->ocb,
4152 &octx->ksenc.ks, &octx->ksdec.ks,
4153 (block128_f) vpaes_encrypt,
4154 (block128_f) vpaes_decrypt,
4160 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4162 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4164 if (!CRYPTO_ocb128_init(&octx->ocb,
4165 &octx->ksenc.ks, &octx->ksdec.ks,
4166 (block128_f) AES_encrypt,
4167 (block128_f) AES_decrypt,
4174 * If we have an IV we can set it directly, otherwise use the saved IV.
 */
4176 if (iv == NULL && octx->iv_set)
4179 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
4186 /* If the key is set use the IV, otherwise copy it */
4188 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
4190 memcpy(octx->iv, iv, octx->ivlen);
4196 static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
4197 const unsigned char *in, size_t len)
4201 int written_len = 0;
4202 size_t trailing_len;
4203 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
4205 /* If IV or Key not set then return error */
4214 * Need to ensure we are only passing full blocks to the low-level OCB
4215 * routines. We do it here rather than in EVP_EncryptUpdate/
4216 * EVP_DecryptUpdate because we need to pass full blocks of AAD too and
4217 * those routines don't support that.
 */
4220 /* Are we dealing with AAD or normal data here? */
4222 buf = octx->aad_buf;
4223 buf_len = &(octx->aad_buf_len);
4225 buf = octx->data_buf;
4226 buf_len = &(octx->data_buf_len);
4228 if (is_partially_overlapping(out + *buf_len, in, len)) {
4229 EVPerr(EVP_F_AES_OCB_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
4235 * If we've got a partially filled buffer from a previous call then
4236 * use that data first
4239 unsigned int remaining;
4241 remaining = AES_BLOCK_SIZE - (*buf_len);
4242 if (remaining > len) {
4243 memcpy(buf + (*buf_len), in, len);
4247 memcpy(buf + (*buf_len), in, remaining);
4250 * If we get here we've filled the buffer, so process it
4255 if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
4257 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
4258 if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
4262 if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
4266 written_len = AES_BLOCK_SIZE;
4269 out += AES_BLOCK_SIZE;
4272 /* Do we have a partial block to handle at the end? */
4273 trailing_len = len % AES_BLOCK_SIZE;
4276 * If we've got some full blocks to handle, then process these first
4278 if (len != trailing_len) {
4280 if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
4282 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
4283 if (!CRYPTO_ocb128_encrypt
4284 (&octx->ocb, in, out, len - trailing_len))
4287 if (!CRYPTO_ocb128_decrypt
4288 (&octx->ocb, in, out, len - trailing_len))
4291 written_len += len - trailing_len;
4292 in += len - trailing_len;
4295 /* Handle any trailing partial block */
4296 if (trailing_len > 0) {
4297 memcpy(buf, in, trailing_len);
4298 *buf_len = trailing_len;
4304 * First of all empty the buffer of any partial block that we might
4305 * have been provided - both for data and AAD
4307 if (octx->data_buf_len > 0) {
4308 if (EVP_CIPHER_CTX_encrypting(ctx)) {
4309 if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
4310 octx->data_buf_len))
4313 if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
4314 octx->data_buf_len))
4317 written_len = octx->data_buf_len;
4318 octx->data_buf_len = 0;
4320 if (octx->aad_buf_len > 0) {
4321 if (!CRYPTO_ocb128_aad
4322 (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
4324 octx->aad_buf_len = 0;
4326 /* If decrypting then verify */
4327 if (!EVP_CIPHER_CTX_encrypting(ctx)) {
4328 if (octx->taglen < 0)
4330 if (CRYPTO_ocb128_finish(&octx->ocb,
4331 octx->tag, octx->taglen) != 0)
4336 /* If encrypting then just get the tag */
4337 if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
4339 /* Don't reuse the IV */
4345 static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
4347 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
4348 CRYPTO_ocb128_cleanup(&octx->ocb);
4352 BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
4353 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4354 BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
4355 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4356 BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
4357 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
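/*
 * A note on the OCB controls above: EVP_CTRL_AEAD_SET_TAG with a NULL ptr
 * selects the tag length, while a non-NULL ptr supplies the expected tag
 * for decryption and must match the configured length exactly.
 */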
4358 #endif /* OPENSSL_NO_OCB */
4361 #ifndef OPENSSL_NO_SIV
4363 typedef SIV128_CONTEXT EVP_AES_SIV_CTX;
4365 #define aesni_siv_init_key aes_siv_init_key
4366 static int aes_siv_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
4367 const unsigned char *iv, int enc)
4369 const EVP_CIPHER *ctr;
4370 const EVP_CIPHER *cbc;
4371 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, ctx);
4372 int klen = EVP_CIPHER_CTX_key_length(ctx) / 2;
4379 cbc = EVP_aes_128_cbc();
4380 ctr = EVP_aes_128_ctr();
4383 cbc = EVP_aes_192_cbc();
4384 ctr = EVP_aes_192_ctr();
4387 cbc = EVP_aes_256_cbc();
4388 ctr = EVP_aes_256_ctr();
4394 /* klen is the key length of the underlying cipher, not of the input
4395 key, which is twice as long */
4396 return CRYPTO_siv128_init(sctx, key, klen, cbc, ctr);
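/*
 * Per RFC 5297 the double-length input key is split in half: the first
 * half keys the S2V (CMAC) component and the second half keys the CTR
 * component, which is why e.g. EVP_aes_128_siv() takes a 256-bit key.
 */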
4399 #define aesni_siv_cipher aes_siv_cipher
4400 static int aes_siv_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
4401 const unsigned char *in, size_t len)
4403 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, ctx);
4405 /* EncryptFinal or DecryptFinal */
4407 return CRYPTO_siv128_finish(sctx);
4409 /* Deal with associated data */
4411 return CRYPTO_siv128_aad(sctx, in, len);
4413 if (EVP_CIPHER_CTX_encrypting(ctx))
4414 return CRYPTO_siv128_encrypt(sctx, in, out, len);
4416 return CRYPTO_siv128_decrypt(sctx, in, out, len);
4419 #define aesni_siv_cleanup aes_siv_cleanup
4420 static int aes_siv_cleanup(EVP_CIPHER_CTX *c)
4422 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, c);
4424 return CRYPTO_siv128_cleanup(sctx);
4428 #define aesni_siv_ctrl aes_siv_ctrl
4429 static int aes_siv_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
4431 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, c);
4432 SIV128_CONTEXT *sctx_out;
4436 return CRYPTO_siv128_cleanup(sctx);
4438 case EVP_CTRL_SET_SPEED:
4439 return CRYPTO_siv128_speed(sctx, arg);
4441 case EVP_CTRL_AEAD_SET_TAG:
4442 if (!EVP_CIPHER_CTX_encrypting(c))
4443 return CRYPTO_siv128_set_tag(sctx, ptr, arg);
4446 case EVP_CTRL_AEAD_GET_TAG:
4447 if (!EVP_CIPHER_CTX_encrypting(c))
4449 return CRYPTO_siv128_get_tag(sctx, ptr, arg);
4452 sctx_out = EVP_C_DATA(SIV128_CONTEXT, (EVP_CIPHER_CTX*)ptr);
4453 return CRYPTO_siv128_copy_ctx(sctx_out, sctx);
4461 #define SIV_FLAGS (EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_DEFAULT_ASN1 \
4462 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
4463 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CUSTOM_COPY \
4464 | EVP_CIPH_CTRL_INIT)
4466 BLOCK_CIPHER_custom(NID_aes, 128, 1, 0, siv, SIV, SIV_FLAGS)
4467 BLOCK_CIPHER_custom(NID_aes, 192, 1, 0, siv, SIV, SIV_FLAGS)
4468 BLOCK_CIPHER_custom(NID_aes, 256, 1, 0, siv, SIV, SIV_FLAGS)
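/*
 * Note that the SIV ciphers are registered with an IV length of 0: SIV is
 * deterministic, and RFC 5297 obtains nonce-based (non-deterministic)
 * operation by feeding the nonce in as the last associated-data element.
 */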