2 * Copyright 2001-2018 The OpenSSL Project Authors. All Rights Reserved.
4 * Licensed under the Apache License 2.0 (the "License"). You may not use
5 * this file except in compliance with the License. You can obtain a copy
6 * in the file LICENSE in the source distribution or at
7 * https://www.openssl.org/source/license.html
10 #include <openssl/opensslconf.h>
11 #include <openssl/crypto.h>
12 #include <openssl/evp.h>
13 #include <openssl/err.h>
16 #include <openssl/aes.h>
17 #include "internal/evp_int.h"
18 #include "modes_lcl.h"
19 #include <openssl/rand.h>
20 #include <openssl/cmac.h>
39 } ks; /* AES key schedule to use */
40 int key_set; /* Set if key initialised */
41 int iv_set; /* Set if an iv is set */
43 unsigned char *iv; /* Temporary IV store */
44 int ivlen; /* IV length */
46 int iv_gen; /* It is OK to generate IVs */
47 int tls_aad_len; /* TLS AAD length */
48 uint64_t tls_enc_records; /* Number of TLS records encrypted */
56 } ks1, ks2; /* AES key schedules to use */
58 void (*stream) (const unsigned char *in,
59 unsigned char *out, size_t length,
60 const AES_KEY *key1, const AES_KEY *key2,
61 const unsigned char iv[16]);
68 } ks; /* AES key schedule to use */
69 int key_set; /* Set if key initialised */
70 int iv_set; /* Set if an iv is set */
71 int tag_set; /* Set if tag is valid */
72 int len_set; /* Set if message length set */
73 int L, M; /* L and M parameters from RFC3610 */
74 int tls_aad_len; /* TLS AAD length */
79 #ifndef OPENSSL_NO_OCB
84 } ksenc; /* AES key schedule to use for encryption */
88 } ksdec; /* AES key schedule to use for decryption */
89 int key_set; /* Set if key initialised */
90 int iv_set; /* Set if an iv is set */
92 unsigned char *iv; /* Temporary IV store */
93 unsigned char tag[16];
94 unsigned char data_buf[16]; /* Store partial data blocks */
95 unsigned char aad_buf[16]; /* Store partial AAD blocks */
98 int ivlen; /* IV length */
103 #define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))
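/*
 * Illustrative note (an assumption based on how this macro is typically
 * used for bit-oriented CFB1 chunking): processing at most MAXBITCHUNK
 * bytes per call keeps the bit count (len * 8) well within the range of
 * size_t. On a 64-bit platform sizeof(size_t) * 8 == 64, so
 * MAXBITCHUNK == (size_t)1 << 60; on a 32-bit platform it is 1 << 28.
 */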
106 int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
108 int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
111 void vpaes_encrypt(const unsigned char *in, unsigned char *out,
113 void vpaes_decrypt(const unsigned char *in, unsigned char *out,
116 void vpaes_cbc_encrypt(const unsigned char *in,
119 const AES_KEY *key, unsigned char *ivec, int enc);
122 void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
123 size_t length, const AES_KEY *key,
124 unsigned char ivec[16], int enc);
125 void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
126 size_t len, const AES_KEY *key,
127 const unsigned char ivec[16]);
128 void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
129 size_t len, const AES_KEY *key1,
130 const AES_KEY *key2, const unsigned char iv[16]);
131 void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
132 size_t len, const AES_KEY *key1,
133 const AES_KEY *key2, const unsigned char iv[16]);
136 void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
137 size_t blocks, const AES_KEY *key,
138 const unsigned char ivec[AES_BLOCK_SIZE]);
141 void AES_xts_encrypt(const unsigned char *inp, unsigned char *out, size_t len,
142 const AES_KEY *key1, const AES_KEY *key2,
143 const unsigned char iv[16]);
144 void AES_xts_decrypt(const unsigned char *inp, unsigned char *out, size_t len,
145 const AES_KEY *key1, const AES_KEY *key2,
146 const unsigned char iv[16]);
149 /* increment counter (64-bit int) by 1 */
150 static void ctr64_inc(unsigned char *counter)
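/*
 * The body is elided here; a minimal sketch of a big-endian 64-bit
 * increment over the 8 bytes pointed to by `counter` (an illustrative
 * reimplementation, not necessarily the exact original code) would be:
 *
 *     int n = 8;
 *     unsigned char c;
 *
 *     do {
 *         --n;
 *         c = counter[n];
 *         ++c;
 *         counter[n] = c;
 *         if (c != 0)
 *             return;
 *     } while (n > 0);
 */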
165 #if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
166 # include "ppc_arch.h"
168 # define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
170 # define HWAES_CAPABLE (OPENSSL_ppccap_P & PPC_CRYPTO207)
171 # define HWAES_set_encrypt_key aes_p8_set_encrypt_key
172 # define HWAES_set_decrypt_key aes_p8_set_decrypt_key
173 # define HWAES_encrypt aes_p8_encrypt
174 # define HWAES_decrypt aes_p8_decrypt
175 # define HWAES_cbc_encrypt aes_p8_cbc_encrypt
176 # define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
177 # define HWAES_xts_encrypt aes_p8_xts_encrypt
178 # define HWAES_xts_decrypt aes_p8_xts_decrypt
181 #if defined(AES_ASM) && !defined(I386_ONLY) && ( \
182 ((defined(__i386) || defined(__i386__) || \
183 defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2))|| \
184 defined(__x86_64) || defined(__x86_64__) || \
185 defined(_M_AMD64) || defined(_M_X64) )
187 extern unsigned int OPENSSL_ia32cap_P[];
190 # define VPAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
193 # define BSAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
198 # define AESNI_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(57-32)))
200 int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
202 int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
205 void aesni_encrypt(const unsigned char *in, unsigned char *out,
207 void aesni_decrypt(const unsigned char *in, unsigned char *out,
210 void aesni_ecb_encrypt(const unsigned char *in,
212 size_t length, const AES_KEY *key, int enc);
213 void aesni_cbc_encrypt(const unsigned char *in,
216 const AES_KEY *key, unsigned char *ivec, int enc);
218 void aesni_ctr32_encrypt_blocks(const unsigned char *in,
221 const void *key, const unsigned char *ivec);
223 void aesni_xts_encrypt(const unsigned char *in,
226 const AES_KEY *key1, const AES_KEY *key2,
227 const unsigned char iv[16]);
229 void aesni_xts_decrypt(const unsigned char *in,
232 const AES_KEY *key1, const AES_KEY *key2,
233 const unsigned char iv[16]);
235 void aesni_ccm64_encrypt_blocks(const unsigned char *in,
239 const unsigned char ivec[16],
240 unsigned char cmac[16]);
242 void aesni_ccm64_decrypt_blocks(const unsigned char *in,
246 const unsigned char ivec[16],
247 unsigned char cmac[16]);
249 # if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
250 size_t aesni_gcm_encrypt(const unsigned char *in,
253 const void *key, unsigned char ivec[16], u64 *Xi);
254 # define AES_gcm_encrypt aesni_gcm_encrypt
255 size_t aesni_gcm_decrypt(const unsigned char *in,
258 const void *key, unsigned char ivec[16], u64 *Xi);
259 # define AES_gcm_decrypt aesni_gcm_decrypt
260 void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in,
262 # define AES_GCM_ASM(gctx) (gctx->ctr==aesni_ctr32_encrypt_blocks && \
263 gctx->gcm.ghash==gcm_ghash_avx)
264 # define AES_GCM_ASM2(gctx) (gctx->gcm.block==(block128_f)aesni_encrypt && \
265 gctx->gcm.ghash==gcm_ghash_avx)
266 # undef AES_GCM_ASM2 /* minor size optimization */
269 static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
270 const unsigned char *iv, int enc)
273 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
275 mode = EVP_CIPHER_CTX_mode(ctx);
276 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
278 ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
280 dat->block = (block128_f) aesni_decrypt;
281 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
282 (cbc128_f) aesni_cbc_encrypt : NULL;
284 ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
286 dat->block = (block128_f) aesni_encrypt;
287 if (mode == EVP_CIPH_CBC_MODE)
288 dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
289 else if (mode == EVP_CIPH_CTR_MODE)
290 dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
292 dat->stream.cbc = NULL;
296 EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
303 static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
304 const unsigned char *in, size_t len)
306 aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
307 EVP_CIPHER_CTX_iv_noconst(ctx),
308 EVP_CIPHER_CTX_encrypting(ctx));
313 static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
314 const unsigned char *in, size_t len)
316 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
321 aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
322 EVP_CIPHER_CTX_encrypting(ctx));
327 # define aesni_ofb_cipher aes_ofb_cipher
328 static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
329 const unsigned char *in, size_t len);
331 # define aesni_cfb_cipher aes_cfb_cipher
332 static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
333 const unsigned char *in, size_t len);
335 # define aesni_cfb8_cipher aes_cfb8_cipher
336 static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
337 const unsigned char *in, size_t len);
339 # define aesni_cfb1_cipher aes_cfb1_cipher
340 static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
341 const unsigned char *in, size_t len);
343 # define aesni_ctr_cipher aes_ctr_cipher
344 static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
345 const unsigned char *in, size_t len);
347 static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
348 const unsigned char *iv, int enc)
350 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
354 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
356 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
357 gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
359 * If we have an IV, we can set it directly; otherwise use the saved IV.
361 if (iv == NULL && gctx->iv_set)
364 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
369 /* If the key is set, use the IV now; otherwise just save a copy. */
371 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
373 memcpy(gctx->iv, iv, gctx->ivlen);
380 # define aesni_gcm_cipher aes_gcm_cipher
381 static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
382 const unsigned char *in, size_t len);
384 static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
385 const unsigned char *iv, int enc)
387 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
392 /* key_len is the size of two AES keys */
394 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
396 xctx->xts.block1 = (block128_f) aesni_encrypt;
397 xctx->stream = aesni_xts_encrypt;
399 aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
401 xctx->xts.block1 = (block128_f) aesni_decrypt;
402 xctx->stream = aesni_xts_decrypt;
405 aesni_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
406 EVP_CIPHER_CTX_key_length(ctx) * 4,
408 xctx->xts.block2 = (block128_f) aesni_encrypt;
410 xctx->xts.key1 = &xctx->ks1;
414 xctx->xts.key2 = &xctx->ks2;
415 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
421 # define aesni_xts_cipher aes_xts_cipher
422 static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
423 const unsigned char *in, size_t len);
425 static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
426 const unsigned char *iv, int enc)
428 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
432 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
434 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
435 &cctx->ks, (block128_f) aesni_encrypt);
436 cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
437 (ccm128_f) aesni_ccm64_decrypt_blocks;
441 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
447 # define aesni_ccm_cipher aes_ccm_cipher
448 static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
449 const unsigned char *in, size_t len);
451 # ifndef OPENSSL_NO_OCB
452 void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out,
453 size_t blocks, const void *key,
454 size_t start_block_num,
455 unsigned char offset_i[16],
456 const unsigned char L_[][16],
457 unsigned char checksum[16]);
458 void aesni_ocb_decrypt(const unsigned char *in, unsigned char *out,
459 size_t blocks, const void *key,
460 size_t start_block_num,
461 unsigned char offset_i[16],
462 const unsigned char L_[][16],
463 unsigned char checksum[16]);
465 static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
466 const unsigned char *iv, int enc)
468 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
474 * We set both the encrypt and decrypt key here because decryption
475 * needs both. We could possibly optimise by not setting the
476 * decrypt key for an encryption-only operation.
478 aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
480 aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
482 if (!CRYPTO_ocb128_init(&octx->ocb,
483 &octx->ksenc.ks, &octx->ksdec.ks,
484 (block128_f) aesni_encrypt,
485 (block128_f) aesni_decrypt,
486 enc ? aesni_ocb_encrypt
487 : aesni_ocb_decrypt))
493 * If we have an IV, we can set it directly; otherwise use the saved IV.
495 if (iv == NULL && octx->iv_set)
498 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
505 /* If the key is set, use the IV now; otherwise just save a copy. */
507 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
509 memcpy(octx->iv, iv, octx->ivlen);
515 # define aesni_ocb_cipher aes_ocb_cipher
516 static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
517 const unsigned char *in, size_t len);
518 # endif /* OPENSSL_NO_OCB */
520 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
521 static const EVP_CIPHER aesni_##keylen##_##mode = { \
522 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
523 flags|EVP_CIPH_##MODE##_MODE, \
525 aesni_##mode##_cipher, \
527 sizeof(EVP_AES_KEY), \
528 NULL,NULL,NULL,NULL }; \
529 static const EVP_CIPHER aes_##keylen##_##mode = { \
530 nid##_##keylen##_##nmode,blocksize, \
532 flags|EVP_CIPH_##MODE##_MODE, \
534 aes_##mode##_cipher, \
536 sizeof(EVP_AES_KEY), \
537 NULL,NULL,NULL,NULL }; \
538 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
539 { return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
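/*
 * For illustration only: with an assumed invocation such as
 *
 *     BLOCK_CIPHER_generic(NID_aes, 128, 16, 16, cbc, cbc, CBC, 0)
 *
 * this macro defines the static EVP_CIPHER tables aesni_128_cbc and
 * aes_128_cbc (NID_aes_128_cbc, 16-byte block, 16-byte key, 16-byte IV)
 * together with the public accessor EVP_aes_128_cbc(), which returns the
 * AES-NI variant when AESNI_CAPABLE is set and the generic variant
 * otherwise.
 */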
541 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
542 static const EVP_CIPHER aesni_##keylen##_##mode = { \
543 nid##_##keylen##_##mode,blocksize, \
544 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
546 flags|EVP_CIPH_##MODE##_MODE, \
547 aesni_##mode##_init_key, \
548 aesni_##mode##_cipher, \
549 aes_##mode##_cleanup, \
550 sizeof(EVP_AES_##MODE##_CTX), \
551 NULL,NULL,aes_##mode##_ctrl,NULL }; \
552 static const EVP_CIPHER aes_##keylen##_##mode = { \
553 nid##_##keylen##_##mode,blocksize, \
554 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
556 flags|EVP_CIPH_##MODE##_MODE, \
557 aes_##mode##_init_key, \
558 aes_##mode##_cipher, \
559 aes_##mode##_cleanup, \
560 sizeof(EVP_AES_##MODE##_CTX), \
561 NULL,NULL,aes_##mode##_ctrl,NULL }; \
562 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
563 { return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
565 #elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))
567 # include "sparc_arch.h"
569 extern unsigned int OPENSSL_sparcv9cap_P[];
572 * Initial Fujitsu SPARC64 X support
574 # define HWAES_CAPABLE (OPENSSL_sparcv9cap_P[0] & SPARCV9_FJAESX)
575 # define HWAES_set_encrypt_key aes_fx_set_encrypt_key
576 # define HWAES_set_decrypt_key aes_fx_set_decrypt_key
577 # define HWAES_encrypt aes_fx_encrypt
578 # define HWAES_decrypt aes_fx_decrypt
579 # define HWAES_cbc_encrypt aes_fx_cbc_encrypt
580 # define HWAES_ctr32_encrypt_blocks aes_fx_ctr32_encrypt_blocks
582 # define SPARC_AES_CAPABLE (OPENSSL_sparcv9cap_P[1] & CFR_AES)
584 void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
585 void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
586 void aes_t4_encrypt(const unsigned char *in, unsigned char *out,
588 void aes_t4_decrypt(const unsigned char *in, unsigned char *out,
591 * Key-length-specific subroutines were chosen for the following reason.
592 * Each SPARC T4 core can execute up to 8 threads which share the core's
593 * resources. Loading as much key material as possible into registers
594 * minimizes references to the shared memory interface, as well as the
595 * number of instructions in inner loops [much needed on T4]. But then
596 * non-key-length-specific routines would require conditional branches
597 * either in inner loops or on subroutine entry. The former is hardly
598 * acceptable, while the latter means a code size increase comparable to
599 * that of multiple key-length-specific subroutines, so why fight?
601 void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
602 size_t len, const AES_KEY *key,
603 unsigned char *ivec);
604 void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
605 size_t len, const AES_KEY *key,
606 unsigned char *ivec);
607 void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
608 size_t len, const AES_KEY *key,
609 unsigned char *ivec);
610 void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
611 size_t len, const AES_KEY *key,
612 unsigned char *ivec);
613 void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
614 size_t len, const AES_KEY *key,
615 unsigned char *ivec);
616 void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
617 size_t len, const AES_KEY *key,
618 unsigned char *ivec);
619 void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
620 size_t blocks, const AES_KEY *key,
621 unsigned char *ivec);
622 void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
623 size_t blocks, const AES_KEY *key,
624 unsigned char *ivec);
625 void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
626 size_t blocks, const AES_KEY *key,
627 unsigned char *ivec);
628 void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
629 size_t blocks, const AES_KEY *key1,
630 const AES_KEY *key2, const unsigned char *ivec);
631 void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
632 size_t blocks, const AES_KEY *key1,
633 const AES_KEY *key2, const unsigned char *ivec);
634 void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
635 size_t blocks, const AES_KEY *key1,
636 const AES_KEY *key2, const unsigned char *ivec);
637 void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
638 size_t blocks, const AES_KEY *key1,
639 const AES_KEY *key2, const unsigned char *ivec);
641 static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
642 const unsigned char *iv, int enc)
645 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
647 mode = EVP_CIPHER_CTX_mode(ctx);
648 bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
649 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
652 aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
653 dat->block = (block128_f) aes_t4_decrypt;
656 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
657 (cbc128_f) aes128_t4_cbc_decrypt : NULL;
660 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
661 (cbc128_f) aes192_t4_cbc_decrypt : NULL;
664 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
665 (cbc128_f) aes256_t4_cbc_decrypt : NULL;
672 aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
673 dat->block = (block128_f) aes_t4_encrypt;
676 if (mode == EVP_CIPH_CBC_MODE)
677 dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
678 else if (mode == EVP_CIPH_CTR_MODE)
679 dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
681 dat->stream.cbc = NULL;
684 if (mode == EVP_CIPH_CBC_MODE)
685 dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
686 else if (mode == EVP_CIPH_CTR_MODE)
687 dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
689 dat->stream.cbc = NULL;
692 if (mode == EVP_CIPH_CBC_MODE)
693 dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
694 else if (mode == EVP_CIPH_CTR_MODE)
695 dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
697 dat->stream.cbc = NULL;
705 EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
712 # define aes_t4_cbc_cipher aes_cbc_cipher
713 static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
714 const unsigned char *in, size_t len);
716 # define aes_t4_ecb_cipher aes_ecb_cipher
717 static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
718 const unsigned char *in, size_t len);
720 # define aes_t4_ofb_cipher aes_ofb_cipher
721 static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
722 const unsigned char *in, size_t len);
724 # define aes_t4_cfb_cipher aes_cfb_cipher
725 static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
726 const unsigned char *in, size_t len);
728 # define aes_t4_cfb8_cipher aes_cfb8_cipher
729 static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
730 const unsigned char *in, size_t len);
732 # define aes_t4_cfb1_cipher aes_cfb1_cipher
733 static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
734 const unsigned char *in, size_t len);
736 # define aes_t4_ctr_cipher aes_ctr_cipher
737 static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
738 const unsigned char *in, size_t len);
740 static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
741 const unsigned char *iv, int enc)
743 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
747 int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
748 aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
749 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
750 (block128_f) aes_t4_encrypt);
753 gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
756 gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
759 gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
765 * If we have an IV, we can set it directly; otherwise use the saved IV.
767 if (iv == NULL && gctx->iv_set)
770 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
775 /* If the key is set, use the IV now; otherwise just save a copy. */
777 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
779 memcpy(gctx->iv, iv, gctx->ivlen);
786 # define aes_t4_gcm_cipher aes_gcm_cipher
787 static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
788 const unsigned char *in, size_t len);
790 static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
791 const unsigned char *iv, int enc)
793 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
798 int bits = EVP_CIPHER_CTX_key_length(ctx) * 4;
800 /* key_len is the size of two AES keys */
802 aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
803 xctx->xts.block1 = (block128_f) aes_t4_encrypt;
806 xctx->stream = aes128_t4_xts_encrypt;
809 xctx->stream = aes256_t4_xts_encrypt;
815 aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
817 xctx->xts.block1 = (block128_f) aes_t4_decrypt;
820 xctx->stream = aes128_t4_xts_decrypt;
823 xctx->stream = aes256_t4_xts_decrypt;
830 aes_t4_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
831 EVP_CIPHER_CTX_key_length(ctx) * 4,
833 xctx->xts.block2 = (block128_f) aes_t4_encrypt;
835 xctx->xts.key1 = &xctx->ks1;
839 xctx->xts.key2 = &xctx->ks2;
840 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
846 # define aes_t4_xts_cipher aes_xts_cipher
847 static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
848 const unsigned char *in, size_t len);
850 static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
851 const unsigned char *iv, int enc)
853 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
857 int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
858 aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
859 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
860 &cctx->ks, (block128_f) aes_t4_encrypt);
865 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
871 # define aes_t4_ccm_cipher aes_ccm_cipher
872 static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
873 const unsigned char *in, size_t len);
875 # ifndef OPENSSL_NO_OCB
876 static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
877 const unsigned char *iv, int enc)
879 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
885 * We set both the encrypt and decrypt key here because decryption
886 * needs both. We could possibly optimise by not setting the
887 * decrypt key for an encryption-only operation.
889 aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
891 aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
893 if (!CRYPTO_ocb128_init(&octx->ocb,
894 &octx->ksenc.ks, &octx->ksdec.ks,
895 (block128_f) aes_t4_encrypt,
896 (block128_f) aes_t4_decrypt,
903 * If we have an IV, we can set it directly; otherwise use the saved IV.
905 if (iv == NULL && octx->iv_set)
908 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
915 /* If the key is set, use the IV now; otherwise just save a copy. */
917 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
919 memcpy(octx->iv, iv, octx->ivlen);
925 # define aes_t4_ocb_cipher aes_ocb_cipher
926 static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
927 const unsigned char *in, size_t len);
928 # endif /* OPENSSL_NO_OCB */
930 # ifndef OPENSSL_NO_SIV
931 # define aes_t4_siv_init_key aes_siv_init_key
932 # define aes_t4_siv_cipher aes_siv_cipher
933 # endif /* OPENSSL_NO_SIV */
935 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
936 static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
937 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
938 flags|EVP_CIPH_##MODE##_MODE, \
940 aes_t4_##mode##_cipher, \
942 sizeof(EVP_AES_KEY), \
943 NULL,NULL,NULL,NULL }; \
944 static const EVP_CIPHER aes_##keylen##_##mode = { \
945 nid##_##keylen##_##nmode,blocksize, \
947 flags|EVP_CIPH_##MODE##_MODE, \
949 aes_##mode##_cipher, \
951 sizeof(EVP_AES_KEY), \
952 NULL,NULL,NULL,NULL }; \
953 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
954 { return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
956 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
957 static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
958 nid##_##keylen##_##mode,blocksize, \
959 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
961 flags|EVP_CIPH_##MODE##_MODE, \
962 aes_t4_##mode##_init_key, \
963 aes_t4_##mode##_cipher, \
964 aes_##mode##_cleanup, \
965 sizeof(EVP_AES_##MODE##_CTX), \
966 NULL,NULL,aes_##mode##_ctrl,NULL }; \
967 static const EVP_CIPHER aes_##keylen##_##mode = { \
968 nid##_##keylen##_##mode,blocksize, \
969 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
971 flags|EVP_CIPH_##MODE##_MODE, \
972 aes_##mode##_init_key, \
973 aes_##mode##_cipher, \
974 aes_##mode##_cleanup, \
975 sizeof(EVP_AES_##MODE##_CTX), \
976 NULL,NULL,aes_##mode##_ctrl,NULL }; \
977 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
978 { return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
980 #elif defined(OPENSSL_CPUID_OBJ) && defined(__s390__)
984 # include "s390x_arch.h"
990 * KM-AES parameter block - begin
991 * (see z/Architecture Principles of Operation >= SA22-7832-06)
996 /* KM-AES parameter block - end */
1005 * KMO-AES parameter block - begin
1006 * (see z/Architecture Principles of Operation >= SA22-7832-08)
1009 unsigned char cv[16];
1010 unsigned char k[32];
1012 /* KMO-AES parameter block - end */
1017 } S390X_AES_OFB_CTX;
1023 * KMF-AES parameter block - begin
1024 * (see z/Architecture Principles of Operation >= SA22-7832-08)
1027 unsigned char cv[16];
1028 unsigned char k[32];
1030 /* KMF-AES parameter block - end */
1035 } S390X_AES_CFB_CTX;
1041 * KMA-GCM-AES parameter block - begin
1042 * (see z/Architecture Principles of Operation >= SA22-7832-11)
1045 unsigned char reserved[12];
1051 unsigned long long g[2];
1052 unsigned char b[16];
1054 unsigned char h[16];
1055 unsigned long long taadl;
1056 unsigned long long tpcl;
1058 unsigned long long g[2];
1061 unsigned char k[32];
1063 /* KMA-GCM-AES parameter block - end */
1075 unsigned char ares[16];
1076 unsigned char mres[16];
1077 unsigned char kres[16];
1083 uint64_t tls_enc_records; /* Number of TLS records encrypted */
1084 } S390X_AES_GCM_CTX;
1090 * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
1091 * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
1092 * rounds field is used to store the function code and that the key
1093 * schedule is not stored (if AES hardware support is detected).
1096 unsigned char pad[16];
1102 * KMAC-AES parameter block - begin
1103 * (see z/Architecture Principles of Operation >= SA22-7832-08)
1107 unsigned long long g[2];
1108 unsigned char b[16];
1110 unsigned char k[32];
1112 /* KMAC-AES parameter block - end */
1115 unsigned long long g[2];
1116 unsigned char b[16];
1119 unsigned long long g[2];
1120 unsigned char b[16];
1123 unsigned long long blocks;
1132 unsigned char pad[140];
1136 } S390X_AES_CCM_CTX;
1138 /* Convert key size to function code: [16,24,32] -> [18,19,20]. */
1139 # define S390X_AES_FC(keylen) (S390X_AES_128 + ((((keylen) << 3) - 128) >> 6))
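/*
 * Worked example: for keylen == 16, ((16 << 3) - 128) >> 6 == 0, giving
 * S390X_AES_128 + 0 == 18; keylen == 24 gives 19 and keylen == 32 gives
 * 20, matching the [18,19,20] function codes noted above.
 */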
1141 /* Most modes of operation need km for partial block processing. */
1142 # define S390X_aes_128_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
1143 S390X_CAPBIT(S390X_AES_128))
1144 # define S390X_aes_192_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
1145 S390X_CAPBIT(S390X_AES_192))
1146 # define S390X_aes_256_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
1147 S390X_CAPBIT(S390X_AES_256))
1149 # define s390x_aes_init_key aes_init_key
1150 static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
1151 const unsigned char *iv, int enc);
1153 # define S390X_aes_128_cbc_CAPABLE 1 /* checked by callee */
1154 # define S390X_aes_192_cbc_CAPABLE 1
1155 # define S390X_aes_256_cbc_CAPABLE 1
1156 # define S390X_AES_CBC_CTX EVP_AES_KEY
1158 # define s390x_aes_cbc_init_key aes_init_key
1160 # define s390x_aes_cbc_cipher aes_cbc_cipher
1161 static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1162 const unsigned char *in, size_t len);
1164 # define S390X_aes_128_ecb_CAPABLE S390X_aes_128_CAPABLE
1165 # define S390X_aes_192_ecb_CAPABLE S390X_aes_192_CAPABLE
1166 # define S390X_aes_256_ecb_CAPABLE S390X_aes_256_CAPABLE
1168 static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
1169 const unsigned char *key,
1170 const unsigned char *iv, int enc)
1172 S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
1173 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1175 cctx->fc = S390X_AES_FC(keylen);
1177 cctx->fc |= S390X_DECRYPT;
1179 memcpy(cctx->km.param.k, key, keylen);
1183 static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1184 const unsigned char *in, size_t len)
1186 S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
1188 s390x_km(in, len, out, cctx->fc, &cctx->km.param);
1192 # define S390X_aes_128_ofb_CAPABLE (S390X_aes_128_CAPABLE && \
1193 (OPENSSL_s390xcap_P.kmo[0] & \
1194 S390X_CAPBIT(S390X_AES_128)))
1195 # define S390X_aes_192_ofb_CAPABLE (S390X_aes_192_CAPABLE && \
1196 (OPENSSL_s390xcap_P.kmo[0] & \
1197 S390X_CAPBIT(S390X_AES_192)))
1198 # define S390X_aes_256_ofb_CAPABLE (S390X_aes_256_CAPABLE && \
1199 (OPENSSL_s390xcap_P.kmo[0] & \
1200 S390X_CAPBIT(S390X_AES_256)))
1202 static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
1203 const unsigned char *key,
1204 const unsigned char *ivec, int enc)
1206 S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
1207 const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
1208 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1209 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
1211 memcpy(cctx->kmo.param.cv, iv, ivlen);
1212 memcpy(cctx->kmo.param.k, key, keylen);
1213 cctx->fc = S390X_AES_FC(keylen);
1218 static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1219 const unsigned char *in, size_t len)
1221 S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
1226 *out = *in ^ cctx->kmo.param.cv[n];
1235 len &= ~(size_t)0xf;
1237 s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);
1244 s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
1248 out[n] = in[n] ^ cctx->kmo.param.cv[n];
1257 # define S390X_aes_128_cfb_CAPABLE (S390X_aes_128_CAPABLE && \
1258 (OPENSSL_s390xcap_P.kmf[0] & \
1259 S390X_CAPBIT(S390X_AES_128)))
1260 # define S390X_aes_192_cfb_CAPABLE (S390X_aes_192_CAPABLE && \
1261 (OPENSSL_s390xcap_P.kmf[0] & \
1262 S390X_CAPBIT(S390X_AES_192)))
1263 # define S390X_aes_256_cfb_CAPABLE (S390X_aes_256_CAPABLE && \
1264 (OPENSSL_s390xcap_P.kmf[0] & \
1265 S390X_CAPBIT(S390X_AES_256)))
1267 static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
1268 const unsigned char *key,
1269 const unsigned char *ivec, int enc)
1271 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1272 const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
1273 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1274 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
1276 cctx->fc = S390X_AES_FC(keylen);
1277 cctx->fc |= 16 << 24; /* 16 bytes cipher feedback */
1279 cctx->fc |= S390X_DECRYPT;
1282 memcpy(cctx->kmf.param.cv, iv, ivlen);
1283 memcpy(cctx->kmf.param.k, key, keylen);
1287 static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1288 const unsigned char *in, size_t len)
1290 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1291 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1292 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1299 *out = cctx->kmf.param.cv[n] ^ tmp;
1300 cctx->kmf.param.cv[n] = enc ? *out : tmp;
1309 len &= ~(size_t)0xf;
1311 s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
1318 s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
1319 S390X_AES_FC(keylen), cctx->kmf.param.k);
1323 out[n] = cctx->kmf.param.cv[n] ^ tmp;
1324 cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
1333 # define S390X_aes_128_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
1334 S390X_CAPBIT(S390X_AES_128))
1335 # define S390X_aes_192_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
1336 S390X_CAPBIT(S390X_AES_192))
1337 # define S390X_aes_256_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
1338 S390X_CAPBIT(S390X_AES_256))
1340 static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
1341 const unsigned char *key,
1342 const unsigned char *ivec, int enc)
1344 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1345 const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
1346 const int keylen = EVP_CIPHER_CTX_key_length(ctx);
1347 const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);
1349 cctx->fc = S390X_AES_FC(keylen);
1350 cctx->fc |= 1 << 24; /* 1 byte cipher feedback */
1352 cctx->fc |= S390X_DECRYPT;
1354 memcpy(cctx->kmf.param.cv, iv, ivlen);
1355 memcpy(cctx->kmf.param.k, key, keylen);
1359 static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1360 const unsigned char *in, size_t len)
1362 S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1364 s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
1368 # define S390X_aes_128_cfb1_CAPABLE 0
1369 # define S390X_aes_192_cfb1_CAPABLE 0
1370 # define S390X_aes_256_cfb1_CAPABLE 0
1372 # define s390x_aes_cfb1_init_key aes_init_key
1374 # define s390x_aes_cfb1_cipher aes_cfb1_cipher
1375 static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1376 const unsigned char *in, size_t len);
1378 # define S390X_aes_128_ctr_CAPABLE 1 /* checked by callee */
1379 # define S390X_aes_192_ctr_CAPABLE 1
1380 # define S390X_aes_256_ctr_CAPABLE 1
1381 # define S390X_AES_CTR_CTX EVP_AES_KEY
1383 # define s390x_aes_ctr_init_key aes_init_key
1385 # define s390x_aes_ctr_cipher aes_ctr_cipher
1386 static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1387 const unsigned char *in, size_t len);
1389 # define S390X_aes_128_gcm_CAPABLE (S390X_aes_128_CAPABLE && \
1390 (OPENSSL_s390xcap_P.kma[0] & \
1391 S390X_CAPBIT(S390X_AES_128)))
1392 # define S390X_aes_192_gcm_CAPABLE (S390X_aes_192_CAPABLE && \
1393 (OPENSSL_s390xcap_P.kma[0] & \
1394 S390X_CAPBIT(S390X_AES_192)))
1395 # define S390X_aes_256_gcm_CAPABLE (S390X_aes_256_CAPABLE && \
1396 (OPENSSL_s390xcap_P.kma[0] & \
1397 S390X_CAPBIT(S390X_AES_256)))
1399 /* iv + padding length for iv lengths != 12 */
1400 # define S390X_gcm_ivpadlen(i) ((((i) + 15) >> 4 << 4) + 16)
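/*
 * Illustrative arithmetic: the IV is padded up to a 16-byte boundary and
 * an extra 16-byte block is reserved, whose last 8 bytes later receive the
 * IV length in bits (see EVP_CTRL_AEAD_SET_IVLEN below). For example,
 * S390X_gcm_ivpadlen(13) == 32, S390X_gcm_ivpadlen(16) == 32 and
 * S390X_gcm_ivpadlen(17) == 48.
 */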
1403 * Process additional authenticated data. Returns 0 on success. Code is big-endian.
1406 static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
1409 unsigned long long alen;
1412 if (ctx->kma.param.tpcl)
1415 alen = ctx->kma.param.taadl + len;
1416 if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
1418 ctx->kma.param.taadl = alen;
1423 ctx->ares[n] = *aad;
1428 /* ctx->ares contains a complete block if offset has wrapped around */
1430 s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1431 ctx->fc |= S390X_KMA_HS;
1438 len &= ~(size_t)0xf;
1440 s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1442 ctx->fc |= S390X_KMA_HS;
1450 ctx->ares[rem] = aad[rem];
1457 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
1458 * success. Code is big-endian.
1460 static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
1461 unsigned char *out, size_t len)
1463 const unsigned char *inptr;
1464 unsigned long long mlen;
1467 unsigned char b[16];
1472 mlen = ctx->kma.param.tpcl + len;
1473 if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
1475 ctx->kma.param.tpcl = mlen;
1481 while (n && inlen) {
1482 ctx->mres[n] = *inptr;
1487 /* ctx->mres contains a complete block if offset has wrapped around */
1489 s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
1490 ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1491 ctx->fc |= S390X_KMA_HS;
1494 /* previous call already encrypted/decrypted its remainder,
1495 * see comment below */
1510 len &= ~(size_t)0xf;
1512 s390x_kma(ctx->ares, ctx->areslen, in, len, out,
1513 ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1516 ctx->fc |= S390X_KMA_HS;
1521 * If there is a remainder, it has to be saved such that it can be
1522 * processed by kma later. However, we also have to do the for-now
1523 * unauthenticated encryption/decryption part here and now...
1526 if (!ctx->mreslen) {
1527 buf.w[0] = ctx->kma.param.j0.w[0];
1528 buf.w[1] = ctx->kma.param.j0.w[1];
1529 buf.w[2] = ctx->kma.param.j0.w[2];
1530 buf.w[3] = ctx->kma.param.cv.w + 1;
1531 s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
1535 for (i = 0; i < rem; i++) {
1536 ctx->mres[n + i] = in[i];
1537 out[i] = in[i] ^ ctx->kres[n + i];
1540 ctx->mreslen += rem;
1546 * Initialize context structure. Code is big-endian.
1548 static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
1549 const unsigned char *iv)
1551 ctx->kma.param.t.g[0] = 0;
1552 ctx->kma.param.t.g[1] = 0;
1553 ctx->kma.param.tpcl = 0;
1554 ctx->kma.param.taadl = 0;
1559 if (ctx->ivlen == 12) {
1560 memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
1561 ctx->kma.param.j0.w[3] = 1;
1562 ctx->kma.param.cv.w = 1;
1564 /* ctx->iv has the right size and is already padded. */
1565 memcpy(ctx->iv, iv, ctx->ivlen);
1566 s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
1567 ctx->fc, &ctx->kma.param);
1568 ctx->fc |= S390X_KMA_HS;
1570 ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
1571 ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
1572 ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
1573 ctx->kma.param.t.g[0] = 0;
1574 ctx->kma.param.t.g[1] = 0;
1579 * Performs various operations on the context structure depending on control
1580 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
1581 * Code is big-endian.
1583 static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
1585 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1586 S390X_AES_GCM_CTX *gctx_out;
1587 EVP_CIPHER_CTX *out;
1588 unsigned char *buf, *iv;
1589 int ivlen, enc, len;
1593 ivlen = EVP_CIPHER_CTX_iv_length(c);
1594 iv = EVP_CIPHER_CTX_iv_noconst(c);
1597 gctx->ivlen = ivlen;
1601 gctx->tls_aad_len = -1;
1604 case EVP_CTRL_AEAD_SET_IVLEN:
1609 iv = EVP_CIPHER_CTX_iv_noconst(c);
1610 len = S390X_gcm_ivpadlen(arg);
1612 /* Allocate memory for iv if needed. */
1613 if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
1615 OPENSSL_free(gctx->iv);
1617 if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
1618 EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
1623 memset(gctx->iv + arg, 0, len - arg - 8);
1624 *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
1629 case EVP_CTRL_AEAD_SET_TAG:
1630 buf = EVP_CIPHER_CTX_buf_noconst(c);
1631 enc = EVP_CIPHER_CTX_encrypting(c);
1632 if (arg <= 0 || arg > 16 || enc)
1635 memcpy(buf, ptr, arg);
1639 case EVP_CTRL_AEAD_GET_TAG:
1640 enc = EVP_CIPHER_CTX_encrypting(c);
1641 if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
1644 memcpy(ptr, gctx->kma.param.t.b, arg);
1647 case EVP_CTRL_GCM_SET_IV_FIXED:
1648 /* Special case: -1 length restores whole iv */
1650 memcpy(gctx->iv, ptr, gctx->ivlen);
1655 * Fixed field must be at least 4 bytes and invocation field at least 8 bytes.
1658 if ((arg < 4) || (gctx->ivlen - arg) < 8)
1662 memcpy(gctx->iv, ptr, arg);
1664 enc = EVP_CIPHER_CTX_encrypting(c);
1665 if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
1671 case EVP_CTRL_GCM_IV_GEN:
1672 if (gctx->iv_gen == 0 || gctx->key_set == 0)
1675 s390x_aes_gcm_setiv(gctx, gctx->iv);
1677 if (arg <= 0 || arg > gctx->ivlen)
1680 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
1682 * The invocation field will be at least 8 bytes in size, so there is no need
1683 * to check for wraparound or to increment more than the last 8 bytes.
1685 ctr64_inc(gctx->iv + gctx->ivlen - 8);
1689 case EVP_CTRL_GCM_SET_IV_INV:
1690 enc = EVP_CIPHER_CTX_encrypting(c);
1691 if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
1694 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
1695 s390x_aes_gcm_setiv(gctx, gctx->iv);
1699 case EVP_CTRL_AEAD_TLS1_AAD:
1700 /* Save the aad for later use. */
1701 if (arg != EVP_AEAD_TLS1_AAD_LEN)
1704 buf = EVP_CIPHER_CTX_buf_noconst(c);
1705 memcpy(buf, ptr, arg);
1706 gctx->tls_aad_len = arg;
1707 gctx->tls_enc_records = 0;
1709 len = buf[arg - 2] << 8 | buf[arg - 1];
1710 /* Correct length for explicit iv. */
1711 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
1713 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
1715 /* If decrypting, correct for the tag too. */
1716 enc = EVP_CIPHER_CTX_encrypting(c);
1718 if (len < EVP_GCM_TLS_TAG_LEN)
1720 len -= EVP_GCM_TLS_TAG_LEN;
1722 buf[arg - 2] = len >> 8;
1723 buf[arg - 1] = len & 0xff;
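/*
 * Worked example (assuming standard TLS 1.2 GCM record framing): if the
 * length field in the AAD is 1000, the value written back above is
 * 1000 - 8 (explicit IV) == 992 when encrypting, or
 * 1000 - 8 - 16 (explicit IV and tag) == 976 when decrypting.
 */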
1724 /* Extra padding: tag appended to record. */
1725 return EVP_GCM_TLS_TAG_LEN;
1729 gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
1730 iv = EVP_CIPHER_CTX_iv_noconst(c);
1732 if (gctx->iv == iv) {
1733 gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
1735 len = S390X_gcm_ivpadlen(gctx->ivlen);
1737 if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
1738 EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
1742 memcpy(gctx_out->iv, gctx->iv, len);
1752 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
1754 static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
1755 const unsigned char *key,
1756 const unsigned char *iv, int enc)
1758 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1761 if (iv == NULL && key == NULL)
1765 keylen = EVP_CIPHER_CTX_key_length(ctx);
1766 memcpy(&gctx->kma.param.k, key, keylen);
1768 gctx->fc = S390X_AES_FC(keylen);
1770 gctx->fc |= S390X_DECRYPT;
1772 if (iv == NULL && gctx->iv_set)
1776 s390x_aes_gcm_setiv(gctx, iv);
1782 s390x_aes_gcm_setiv(gctx, iv);
1784 memcpy(gctx->iv, iv, gctx->ivlen);
1793 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1794 * if successful. Otherwise -1 is returned. Code is big-endian.
1796 static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1797 const unsigned char *in, size_t len)
1799 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1800 const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1801 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1804 if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
1808 * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
1809 * Requirements from SP 800-38D". The requirement is for one party to the
1810 * communication to fail after 2^64 - 1 keys. We do this on the encrypting side only.
1813 if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
1814 EVPerr(EVP_F_S390X_AES_GCM_TLS_CIPHER, EVP_R_TOO_MANY_RECORDS);
1818 if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
1819 : EVP_CTRL_GCM_SET_IV_INV,
1820 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
1823 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1824 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1825 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1827 gctx->kma.param.taadl = gctx->tls_aad_len << 3;
1828 gctx->kma.param.tpcl = len << 3;
1829 s390x_kma(buf, gctx->tls_aad_len, in, len, out,
1830 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1833 memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
1834 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1836 if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
1837 EVP_GCM_TLS_TAG_LEN)) {
1838 OPENSSL_cleanse(out, len);
1845 gctx->tls_aad_len = -1;
1850 * Called from EVP layer to initialize context, process additional
1851 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1852 * ciphertext or process a TLS packet, depending on context. Returns bytes
1853 * written on success. Otherwise -1 is returned. Code is big-endian.
1855 static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1856 const unsigned char *in, size_t len)
1858 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1859 unsigned char *buf, tmp[16];
1865 if (gctx->tls_aad_len >= 0)
1866 return s390x_aes_gcm_tls_cipher(ctx, out, in, len);
1873 if (s390x_aes_gcm_aad(gctx, in, len))
1876 if (s390x_aes_gcm(gctx, in, out, len))
1881 gctx->kma.param.taadl <<= 3;
1882 gctx->kma.param.tpcl <<= 3;
1883 s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
1884 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1885 /* Recall that we already en-/decrypted gctx->mres
1886 * and returned it to the caller... */
1887 OPENSSL_cleanse(tmp, gctx->mreslen);
1890 enc = EVP_CIPHER_CTX_encrypting(ctx);
1894 if (gctx->taglen < 0)
1897 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1898 if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
1905 static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
1907 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1908 const unsigned char *iv;
1913 iv = EVP_CIPHER_CTX_iv(c);
1915 OPENSSL_free(gctx->iv);
1917 OPENSSL_cleanse(gctx, sizeof(*gctx));
1921 # define S390X_AES_XTS_CTX EVP_AES_XTS_CTX
1922 # define S390X_aes_128_xts_CAPABLE 1 /* checked by callee */
1923 # define S390X_aes_256_xts_CAPABLE 1
1925 # define s390x_aes_xts_init_key aes_xts_init_key
1926 static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
1927 const unsigned char *key,
1928 const unsigned char *iv, int enc);
1929 # define s390x_aes_xts_cipher aes_xts_cipher
1930 static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1931 const unsigned char *in, size_t len);
1932 # define s390x_aes_xts_ctrl aes_xts_ctrl
1933 static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
1934 # define s390x_aes_xts_cleanup aes_xts_cleanup
1936 # define S390X_aes_128_ccm_CAPABLE (S390X_aes_128_CAPABLE && \
1937 (OPENSSL_s390xcap_P.kmac[0] & \
1938 S390X_CAPBIT(S390X_AES_128)))
1939 # define S390X_aes_192_ccm_CAPABLE (S390X_aes_192_CAPABLE && \
1940 (OPENSSL_s390xcap_P.kmac[0] & \
1941 S390X_CAPBIT(S390X_AES_192)))
1942 # define S390X_aes_256_ccm_CAPABLE (S390X_aes_256_CAPABLE && \
1943 (OPENSSL_s390xcap_P.kmac[0] & \
1944 S390X_CAPBIT(S390X_AES_256)))
1946 # define S390X_CCM_AAD_FLAG 0x40
1949 * Set nonce and length fields. Code is big-endian.
1951 static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
1952 const unsigned char *nonce,
1955 ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
1956 ctx->aes.ccm.nonce.g[1] = mlen;
1957 memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
1961 * Process additional authenticated data. Code is big-endian.
1963 static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
1972 ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;
1974 /* Suppress 'type-punned pointer dereference' warning. */
1975 ptr = ctx->aes.ccm.buf.b;
1977 if (alen < ((1 << 16) - (1 << 8))) {
1978 *(uint16_t *)ptr = alen;
1980 } else if (sizeof(alen) == 8
1981 && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
1982 *(uint16_t *)ptr = 0xffff;
1983 *(uint64_t *)(ptr + 2) = alen;
1986 *(uint16_t *)ptr = 0xfffe;
1987 *(uint32_t *)(ptr + 2) = alen;
1991 while (i < 16 && alen) {
1992 ctx->aes.ccm.buf.b[i] = *aad;
1998 ctx->aes.ccm.buf.b[i] = 0;
2002 ctx->aes.ccm.kmac_param.icv.g[0] = 0;
2003 ctx->aes.ccm.kmac_param.icv.g[1] = 0;
2004 s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
2005 &ctx->aes.ccm.kmac_param);
2006 ctx->aes.ccm.blocks += 2;
2009 alen &= ~(size_t)0xf;
2011 s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
2012 ctx->aes.ccm.blocks += alen >> 4;
2016 for (i = 0; i < rem; i++)
2017 ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];
2019 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
2020 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
2021 ctx->aes.ccm.kmac_param.k);
2022 ctx->aes.ccm.blocks++;
2027 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for success.
2030 static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
2031 unsigned char *out, size_t len, int enc)
2034 unsigned int i, l, num;
2035 unsigned char flags;
2037 flags = ctx->aes.ccm.nonce.b[0];
2038 if (!(flags & S390X_CCM_AAD_FLAG)) {
2039 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
2040 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
2041 ctx->aes.ccm.blocks++;
2044 ctx->aes.ccm.nonce.b[0] = l;
2047 * Reconstruct the message length from the encoded length field
2048 * and reinitialize that field with the initial counter value.
2051 for (i = 15 - l; i < 15; i++) {
2052 n |= ctx->aes.ccm.nonce.b[i];
2053 ctx->aes.ccm.nonce.b[i] = 0;
2056 n |= ctx->aes.ccm.nonce.b[15];
2057 ctx->aes.ccm.nonce.b[15] = 1;
2060 return -1; /* length mismatch */
2063 /* Two operations per block plus one for tag encryption */
2064 ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
2065 if (ctx->aes.ccm.blocks > (1ULL << 61))
2066 return -2; /* too much data */
2071 len &= ~(size_t)0xf;
2074 /* mac-then-encrypt */
2076 s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
2078 for (i = 0; i < rem; i++)
2079 ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];
2081 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
2082 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
2083 ctx->aes.ccm.kmac_param.k);
2086 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
2087 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
2088 &num, (ctr128_f)AES_ctr32_encrypt);
2090 /* decrypt-then-mac */
2091 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
2092 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
2093 &num, (ctr128_f)AES_ctr32_encrypt);
2096 s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
2098 for (i = 0; i < rem; i++)
2099 ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];
2101 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
2102 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
2103 ctx->aes.ccm.kmac_param.k);
2107 for (i = 15 - l; i < 16; i++)
2108 ctx->aes.ccm.nonce.b[i] = 0;
2110 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
2111 ctx->aes.ccm.kmac_param.k);
2112 ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
2113 ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];
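/*
 * Illustrative note: per RFC 3610, the CBC-MAC value computed above is
 * XORed with S_0 = E(K, A_0), the encryption of the counter block with the
 * counter bytes zeroed, to produce the tag that is appended to the message.
 */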
2115 ctx->aes.ccm.nonce.b[0] = flags; /* restore flags field */
2120 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
2121 * if successful. Otherwise -1 is returned.
2123 static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2124 const unsigned char *in, size_t len)
2126 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
2127 unsigned char *ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2128 unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
2129 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
2132 || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
2136 /* Set explicit iv (sequence number). */
2137 memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
2140 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
2142 * Get explicit iv (sequence number). We already have fixed iv
2143 * (server/client_write_iv) here.
2145 memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
2146 s390x_aes_ccm_setiv(cctx, ivec, len);
2148 /* Process aad (sequence number|type|version|length) */
2149 s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);
2151 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
2152 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
2155 if (s390x_aes_ccm(cctx, in, out, len, enc))
2158 memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2159 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
2161 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
2162 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
2167 OPENSSL_cleanse(out, len);
2173 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is returned.
2176 static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
2177 const unsigned char *key,
2178 const unsigned char *iv, int enc)
2180 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
2181 unsigned char *ivec;
2184 if (iv == NULL && key == NULL)
2188 keylen = EVP_CIPHER_CTX_key_length(ctx);
2189 cctx->aes.ccm.fc = S390X_AES_FC(keylen);
2190 memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);
2192 /* Store encoded m and l. */
2193 cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
2194 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
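/*
 * Worked example: with the defaults set in s390x_aes_ccm_ctrl(), l == 8
 * and m == 12, so the flags octet is ((8 - 1) & 0x7)
 * | (((12 - 2) >> 1) & 0x7) << 3 == 0x07 | 0x28 == 0x2f; the AAD bit
 * (S390X_CCM_AAD_FLAG, 0x40) is ORed in later when additional
 * authenticated data is present.
 */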
2195 memset(cctx->aes.ccm.nonce.b + 1, 0,
2196 sizeof(cctx->aes.ccm.nonce.b) - 1);
2197 cctx->aes.ccm.blocks = 0;
2199 cctx->aes.ccm.key_set = 1;
2203 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2204 memcpy(ivec, iv, 15 - cctx->aes.ccm.l);
2206 cctx->aes.ccm.iv_set = 1;
2213 * Called from EVP layer to initialize context, process additional
2214 * authenticated data, en/de-crypt plain/cipher-text and authenticate
2215 * plaintext or process a TLS packet, depending on context. Returns bytes
2216 * written on success. Otherwise -1 is returned.
2218 static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2219 const unsigned char *in, size_t len)
2221 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
2222 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
2224 unsigned char *buf, *ivec;
2226 if (!cctx->aes.ccm.key_set)
2229 if (cctx->aes.ccm.tls_aad_len >= 0)
2230 return s390x_aes_ccm_tls_cipher(ctx, out, in, len);
2233 * Final(): Does not return any data. Recall that CCM is MAC-then-encrypt,
2234 * so integrity must already have been checked at Update(), i.e. before
2235 * potentially corrupted data is output.
2237 if (in == NULL && out != NULL)
2240 if (!cctx->aes.ccm.iv_set)
2243 if (!enc && !cctx->aes.ccm.tag_set)
2247 /* Update(): Pass message length. */
2249 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2250 s390x_aes_ccm_setiv(cctx, ivec, len);
2252 cctx->aes.ccm.len_set = 1;
2256 /* Update(): Process aad. */
2257 if (!cctx->aes.ccm.len_set && len)
2260 s390x_aes_ccm_aad(cctx, in, len);
2264 /* Update(): Process message. */
2266 if (!cctx->aes.ccm.len_set) {
2268 * If the message length was not previously set explicitly via
2269 * Update(), set it now.
2271 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2272 s390x_aes_ccm_setiv(cctx, ivec, len);
2274 cctx->aes.ccm.len_set = 1;
2278 if (s390x_aes_ccm(cctx, in, out, len, enc))
2281 cctx->aes.ccm.tag_set = 1;
2286 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
2287 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
2288 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
2294 OPENSSL_cleanse(out, len);
2296 cctx->aes.ccm.iv_set = 0;
2297 cctx->aes.ccm.tag_set = 0;
2298 cctx->aes.ccm.len_set = 0;
2304 * Performs various operations on the context structure depending on control
2305 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
2306 * Code is big-endian.
2308 static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2310 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
2311 unsigned char *buf, *iv;
2316 cctx->aes.ccm.key_set = 0;
2317 cctx->aes.ccm.iv_set = 0;
2318 cctx->aes.ccm.l = 8;
2319 cctx->aes.ccm.m = 12;
2320 cctx->aes.ccm.tag_set = 0;
2321 cctx->aes.ccm.len_set = 0;
2322 cctx->aes.ccm.tls_aad_len = -1;
2325 case EVP_CTRL_AEAD_TLS1_AAD:
2326 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2329 /* Save the aad for later use. */
2330 buf = EVP_CIPHER_CTX_buf_noconst(c);
2331 memcpy(buf, ptr, arg);
2332 cctx->aes.ccm.tls_aad_len = arg;
2334 len = buf[arg - 2] << 8 | buf[arg - 1];
2335 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
2338 /* Correct length for explicit iv. */
2339 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
2341 enc = EVP_CIPHER_CTX_encrypting(c);
2343 if (len < cctx->aes.ccm.m)
2346 /* Correct length for tag. */
2347 len -= cctx->aes.ccm.m;
2350 buf[arg - 2] = len >> 8;
2351 buf[arg - 1] = len & 0xff;
2353 /* Extra padding: tag appended to record. */
2354 return cctx->aes.ccm.m;
2356 case EVP_CTRL_CCM_SET_IV_FIXED:
2357 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
2360 /* Copy to first part of the iv. */
2361 iv = EVP_CIPHER_CTX_iv_noconst(c);
2362 memcpy(iv, ptr, arg);
2365 case EVP_CTRL_AEAD_SET_IVLEN:
2369 case EVP_CTRL_CCM_SET_L:
2370 if (arg < 2 || arg > 8)
2373 cctx->aes.ccm.l = arg;
2376 case EVP_CTRL_AEAD_SET_TAG:
2377 if ((arg & 1) || arg < 4 || arg > 16)
2380 enc = EVP_CIPHER_CTX_encrypting(c);
2385 cctx->aes.ccm.tag_set = 1;
2386 buf = EVP_CIPHER_CTX_buf_noconst(c);
2387 memcpy(buf, ptr, arg);
2390 cctx->aes.ccm.m = arg;
2393 case EVP_CTRL_AEAD_GET_TAG:
2394 enc = EVP_CIPHER_CTX_encrypting(c);
2395 if (!enc || !cctx->aes.ccm.tag_set)
2398 if (arg < cctx->aes.ccm.m)
2401 memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2402 cctx->aes.ccm.tag_set = 0;
2403 cctx->aes.ccm.iv_set = 0;
2404 cctx->aes.ccm.len_set = 0;
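/*-
 * Added note (not in the original source): the EVP_CTRL_AEAD_TLS1_AAD case
 * above receives the 13-byte TLS AAD (EVP_AEAD_TLS1_AAD_LEN), whose last two
 * bytes carry the record length that the code rewrites after subtracting the
 * explicit IV and the tag:
 *
 *     bytes  0..7   sequence number
 *     byte   8      record type
 *     bytes  9..10  protocol version
 *     bytes 11..12  record length, big-endian, patched in place
 */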
2415 # define s390x_aes_ccm_cleanup aes_ccm_cleanup
2417 # ifndef OPENSSL_NO_OCB
2418 # define S390X_AES_OCB_CTX EVP_AES_OCB_CTX
2419 # define S390X_aes_128_ocb_CAPABLE 0
2420 # define S390X_aes_192_ocb_CAPABLE 0
2421 # define S390X_aes_256_ocb_CAPABLE 0
2423 # define s390x_aes_ocb_init_key aes_ocb_init_key
2424 static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2425 const unsigned char *iv, int enc);
2426 # define s390x_aes_ocb_cipher aes_ocb_cipher
2427 static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2428 const unsigned char *in, size_t len);
2429 # define s390x_aes_ocb_cleanup aes_ocb_cleanup
2430 static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
2431 # define s390x_aes_ocb_ctrl aes_ocb_ctrl
2432 static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
2435 # ifndef OPENSSL_NO_SIV
2436 # define S390X_AES_SIV_CTX EVP_AES_SIV_CTX
2437 # define S390X_aes_128_siv_CAPABLE 0
2438 # define S390X_aes_192_siv_CAPABLE 0
2439 # define S390X_aes_256_siv_CAPABLE 0
2441 # define s390x_aes_siv_init_key aes_siv_init_key
2442 # define s390x_aes_siv_cipher aes_siv_cipher
2443 # define s390x_aes_siv_cleanup aes_siv_cleanup
2444 # define s390x_aes_siv_ctrl aes_siv_ctrl
2447 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \
2449 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2450 nid##_##keylen##_##nmode,blocksize, \
2453 flags | EVP_CIPH_##MODE##_MODE, \
2454 s390x_aes_##mode##_init_key, \
2455 s390x_aes_##mode##_cipher, \
2457 sizeof(S390X_AES_##MODE##_CTX), \
2463 static const EVP_CIPHER aes_##keylen##_##mode = { \
2464 nid##_##keylen##_##nmode, \
2468 flags | EVP_CIPH_##MODE##_MODE, \
2470 aes_##mode##_cipher, \
2472 sizeof(EVP_AES_KEY), \
2478 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2480 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2481 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2484 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
2485 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2486 nid##_##keylen##_##mode, \
2488 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2490 flags | EVP_CIPH_##MODE##_MODE, \
2491 s390x_aes_##mode##_init_key, \
2492 s390x_aes_##mode##_cipher, \
2493 s390x_aes_##mode##_cleanup, \
2494 sizeof(S390X_AES_##MODE##_CTX), \
2497 s390x_aes_##mode##_ctrl, \
2500 static const EVP_CIPHER aes_##keylen##_##mode = { \
2501 nid##_##keylen##_##mode,blocksize, \
2502 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2504 flags | EVP_CIPH_##MODE##_MODE, \
2505 aes_##mode##_init_key, \
2506 aes_##mode##_cipher, \
2507 aes_##mode##_cleanup, \
2508 sizeof(EVP_AES_##MODE##_CTX), \
2511 aes_##mode##_ctrl, \
2514 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2516 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2517 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2522 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
2523 static const EVP_CIPHER aes_##keylen##_##mode = { \
2524 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
2525 flags|EVP_CIPH_##MODE##_MODE, \
2527 aes_##mode##_cipher, \
2529 sizeof(EVP_AES_KEY), \
2530 NULL,NULL,NULL,NULL }; \
2531 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2532 { return &aes_##keylen##_##mode; }
2534 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
2535 static const EVP_CIPHER aes_##keylen##_##mode = { \
2536 nid##_##keylen##_##mode,blocksize, \
2537 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
2539 flags|EVP_CIPH_##MODE##_MODE, \
2540 aes_##mode##_init_key, \
2541 aes_##mode##_cipher, \
2542 aes_##mode##_cleanup, \
2543 sizeof(EVP_AES_##MODE##_CTX), \
2544 NULL,NULL,aes_##mode##_ctrl,NULL }; \
2545 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2546 { return &aes_##keylen##_##mode; }
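/*-
 * Illustrative expansion (added for clarity): with the non-S390X definition
 * directly above, an invocation such as
 * BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM, flags) generates,
 * roughly,
 *
 *     static const EVP_CIPHER aes_128_gcm = {
 *         NID_aes_128_gcm, 1, 16, 12,
 *         flags | EVP_CIPH_GCM_MODE,
 *         aes_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
 *         sizeof(EVP_AES_GCM_CTX),
 *         NULL, NULL, aes_gcm_ctrl, NULL
 *     };
 *     const EVP_CIPHER *EVP_aes_128_gcm(void) { return &aes_128_gcm; }
 *
 * i.e. one static cipher table plus the public accessor that applications
 * obtain via EVP_aes_128_gcm() or by name lookup.
 */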
2550 #if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__))
2551 # include "arm_arch.h"
2552 # if __ARM_MAX_ARCH__>=7
2553 # if defined(BSAES_ASM)
2554 # define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
2556 # if defined(VPAES_ASM)
2557 # define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
2559 # define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
2560 # define HWAES_set_encrypt_key aes_v8_set_encrypt_key
2561 # define HWAES_set_decrypt_key aes_v8_set_decrypt_key
2562 # define HWAES_encrypt aes_v8_encrypt
2563 # define HWAES_decrypt aes_v8_decrypt
2564 # define HWAES_cbc_encrypt aes_v8_cbc_encrypt
2565 # define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
2569 #if defined(HWAES_CAPABLE)
2570 int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits,
2572 int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits,
2574 void HWAES_encrypt(const unsigned char *in, unsigned char *out,
2575 const AES_KEY *key);
2576 void HWAES_decrypt(const unsigned char *in, unsigned char *out,
2577 const AES_KEY *key);
2578 void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out,
2579 size_t length, const AES_KEY *key,
2580 unsigned char *ivec, const int enc);
2581 void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
2582 size_t len, const AES_KEY *key,
2583 const unsigned char ivec[16]);
2584 void HWAES_xts_encrypt(const unsigned char *inp, unsigned char *out,
2585 size_t len, const AES_KEY *key1,
2586 const AES_KEY *key2, const unsigned char iv[16]);
2587 void HWAES_xts_decrypt(const unsigned char *inp, unsigned char *out,
2588 size_t len, const AES_KEY *key1,
2589 const AES_KEY *key2, const unsigned char iv[16]);
2592 #define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
2593 BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2594 BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2595 BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2596 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2597 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
2598 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
2599 BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
2601 static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2602 const unsigned char *iv, int enc)
2605 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2607 mode = EVP_CIPHER_CTX_mode(ctx);
2608 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
2610 #ifdef HWAES_CAPABLE
2611 if (HWAES_CAPABLE) {
2612 ret = HWAES_set_decrypt_key(key,
2613 EVP_CIPHER_CTX_key_length(ctx) * 8,
2615 dat->block = (block128_f) HWAES_decrypt;
2616 dat->stream.cbc = NULL;
2617 # ifdef HWAES_cbc_encrypt
2618 if (mode == EVP_CIPH_CBC_MODE)
2619 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2623 #ifdef BSAES_CAPABLE
2624 if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
2625 ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2627 dat->block = (block128_f) AES_decrypt;
2628 dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
2631 #ifdef VPAES_CAPABLE
2632 if (VPAES_CAPABLE) {
2633 ret = vpaes_set_decrypt_key(key,
2634 EVP_CIPHER_CTX_key_length(ctx) * 8,
2636 dat->block = (block128_f) vpaes_decrypt;
2637 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2638 (cbc128_f) vpaes_cbc_encrypt : NULL;
2642 ret = AES_set_decrypt_key(key,
2643 EVP_CIPHER_CTX_key_length(ctx) * 8,
2645 dat->block = (block128_f) AES_decrypt;
2646 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2647 (cbc128_f) AES_cbc_encrypt : NULL;
2650 #ifdef HWAES_CAPABLE
2651 if (HWAES_CAPABLE) {
2652 ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2654 dat->block = (block128_f) HWAES_encrypt;
2655 dat->stream.cbc = NULL;
2656 # ifdef HWAES_cbc_encrypt
2657 if (mode == EVP_CIPH_CBC_MODE)
2658 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2661 # ifdef HWAES_ctr32_encrypt_blocks
2662 if (mode == EVP_CIPH_CTR_MODE)
2663 dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2666 (void)0; /* terminate potentially open 'else' */
2669 #ifdef BSAES_CAPABLE
2670 if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
2671 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2673 dat->block = (block128_f) AES_encrypt;
2674 dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
2677 #ifdef VPAES_CAPABLE
2678 if (VPAES_CAPABLE) {
2679 ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2681 dat->block = (block128_f) vpaes_encrypt;
2682 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2683 (cbc128_f) vpaes_cbc_encrypt : NULL;
2687 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2689 dat->block = (block128_f) AES_encrypt;
2690 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2691 (cbc128_f) AES_cbc_encrypt : NULL;
2693 if (mode == EVP_CIPH_CTR_MODE)
2694 dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
2699 EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
2706 static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2707 const unsigned char *in, size_t len)
2709 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2711 if (dat->stream.cbc)
2712 (*dat->stream.cbc) (in, out, len, &dat->ks,
2713 EVP_CIPHER_CTX_iv_noconst(ctx),
2714 EVP_CIPHER_CTX_encrypting(ctx));
2715 else if (EVP_CIPHER_CTX_encrypting(ctx))
2716 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks,
2717 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2719 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
2720 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2725 static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2726 const unsigned char *in, size_t len)
2728 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
2730 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2735 for (i = 0, len -= bl; i <= len; i += bl)
2736 (*dat->block) (in + i, out + i, &dat->ks);
2741 static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2742 const unsigned char *in, size_t len)
2744 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2746 int num = EVP_CIPHER_CTX_num(ctx);
2747 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
2748 EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block);
2749 EVP_CIPHER_CTX_set_num(ctx, num);
2753 static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2754 const unsigned char *in, size_t len)
2756 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2758 int num = EVP_CIPHER_CTX_num(ctx);
2759 CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
2760 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2761 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2762 EVP_CIPHER_CTX_set_num(ctx, num);
2766 static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2767 const unsigned char *in, size_t len)
2769 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2771 int num = EVP_CIPHER_CTX_num(ctx);
2772 CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
2773 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2774 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2775 EVP_CIPHER_CTX_set_num(ctx, num);
2779 static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2780 const unsigned char *in, size_t len)
2782 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2784 if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
2785 int num = EVP_CIPHER_CTX_num(ctx);
2786 CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
2787 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2788 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2789 EVP_CIPHER_CTX_set_num(ctx, num);
2793 while (len >= MAXBITCHUNK) {
2794 int num = EVP_CIPHER_CTX_num(ctx);
2795 CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
2796 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2797 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2798 EVP_CIPHER_CTX_set_num(ctx, num);
2804 int num = EVP_CIPHER_CTX_num(ctx);
2805 CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
2806 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2807 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2808 EVP_CIPHER_CTX_set_num(ctx, num);
2814 static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2815 const unsigned char *in, size_t len)
2817 unsigned int num = EVP_CIPHER_CTX_num(ctx);
2818 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2820 if (dat->stream.ctr)
2821 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
2822 EVP_CIPHER_CTX_iv_noconst(ctx),
2823 EVP_CIPHER_CTX_buf_noconst(ctx),
2824 &num, dat->stream.ctr);
2826 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
2827 EVP_CIPHER_CTX_iv_noconst(ctx),
2828 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
2830 EVP_CIPHER_CTX_set_num(ctx, num);
2834 BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
2835 BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
2836 BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
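/*-
 * Usage sketch (illustrative only, error handling omitted): the generic
 * ciphers generated by the pack above are driven through the standard EVP
 * calls.  key, iv, pt, out and the lengths are hypothetical caller data.
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     int outl, tmpl;
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_128_cbc(), NULL, key, iv);
 *     EVP_EncryptUpdate(c, out, &outl, pt, pt_len);
 *     EVP_EncryptFinal_ex(c, out + outl, &tmpl);  // flushes the padded block
 *     EVP_CIPHER_CTX_free(c);
 */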
2838 static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
2840 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2843 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
2844 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
2845 OPENSSL_free(gctx->iv);
2849 static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2851 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2856 gctx->ivlen = c->cipher->iv_len;
2860 gctx->tls_aad_len = -1;
2863 case EVP_CTRL_AEAD_SET_IVLEN:
2866 /* Allocate memory for IV if needed */
2867 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
2868 if (gctx->iv != c->iv)
2869 OPENSSL_free(gctx->iv);
2870 if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
2871 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
2878 case EVP_CTRL_AEAD_SET_TAG:
2879 if (arg <= 0 || arg > 16 || c->encrypt)
2881 memcpy(c->buf, ptr, arg);
2885 case EVP_CTRL_AEAD_GET_TAG:
2886 if (arg <= 0 || arg > 16 || !c->encrypt
2887 || gctx->taglen < 0)
2889 memcpy(ptr, c->buf, arg);
2892 case EVP_CTRL_GET_IV:
2893 if (gctx->iv_gen != 1)
2895 if (gctx->ivlen != arg)
2897 memcpy(ptr, gctx->iv, arg);
2900 case EVP_CTRL_GCM_SET_IV_FIXED:
2901 /* Special case: -1 length restores whole IV */
2903 memcpy(gctx->iv, ptr, gctx->ivlen);
2908 * Fixed field must be at least 4 bytes and invocation field at least 8.
2911 if ((arg < 4) || (gctx->ivlen - arg) < 8)
2914 memcpy(gctx->iv, ptr, arg);
2915 if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
2920 case EVP_CTRL_GCM_IV_GEN:
2921 if (gctx->iv_gen == 0 || gctx->key_set == 0)
2923 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2924 if (arg <= 0 || arg > gctx->ivlen)
2926 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
2928 * Invocation field will be at least 8 bytes in size and so no need
2929 * to check wrap around or increment more than last 8 bytes.
2931 ctr64_inc(gctx->iv + gctx->ivlen - 8);
2935 case EVP_CTRL_GCM_SET_IV_INV:
2936 if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
2938 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
2939 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2943 case EVP_CTRL_AEAD_TLS1_AAD:
2944 /* Save the AAD for later use */
2945 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2947 memcpy(c->buf, ptr, arg);
2948 gctx->tls_aad_len = arg;
2949 gctx->tls_enc_records = 0;
2951 unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
2952 /* Correct length for explicit IV */
2953 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
2955 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
2956 /* If decrypting correct for tag too */
2958 if (len < EVP_GCM_TLS_TAG_LEN)
2960 len -= EVP_GCM_TLS_TAG_LEN;
2962 c->buf[arg - 2] = len >> 8;
2963 c->buf[arg - 1] = len & 0xff;
2965 /* Extra padding: tag appended to record */
2966 return EVP_GCM_TLS_TAG_LEN;
2970 EVP_CIPHER_CTX *out = ptr;
2971 EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
2972 if (gctx->gcm.key) {
2973 if (gctx->gcm.key != &gctx->ks)
2975 gctx_out->gcm.key = &gctx_out->ks;
2977 if (gctx->iv == c->iv)
2978 gctx_out->iv = out->iv;
2980 if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
2981 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
2984 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
2995 static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2996 const unsigned char *iv, int enc)
2998 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
3003 #ifdef HWAES_CAPABLE
3004 if (HWAES_CAPABLE) {
3005 HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3006 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3007 (block128_f) HWAES_encrypt);
3008 # ifdef HWAES_ctr32_encrypt_blocks
3009 gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
3016 #ifdef BSAES_CAPABLE
3017 if (BSAES_CAPABLE) {
3018 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3019 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3020 (block128_f) AES_encrypt);
3021 gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
3025 #ifdef VPAES_CAPABLE
3026 if (VPAES_CAPABLE) {
3027 vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3028 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3029 (block128_f) vpaes_encrypt);
3034 (void)0; /* terminate potentially open 'else' */
3036 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3037 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3038 (block128_f) AES_encrypt);
3040 gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
3047 * If we have an IV we can set it directly, otherwise use the saved IV.
3049 if (iv == NULL && gctx->iv_set)
3052 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
3057 /* If key set use IV, otherwise copy */
3059 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
3061 memcpy(gctx->iv, iv, gctx->ivlen);
3069 * Handle TLS GCM packet format. This consists of the last portion of the IV
3070 * followed by the payload and finally the tag. On encrypt generate IV,
3071 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload and verify tag.
3075 static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3076 const unsigned char *in, size_t len)
3078 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
3080 /* Encrypt/decrypt must be performed in place */
3082 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
3086 * Check for too many records as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
3087 * Requirements from SP 800-38D". The requirement is for one party to the
3088 * communication to fail after 2^64 - 1 records under a single key; we do this on the encrypting side only.
3091 if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
3092 EVPerr(EVP_F_AES_GCM_TLS_CIPHER, EVP_R_TOO_MANY_RECORDS);
3097 * Set IV from start of buffer or generate IV and write to start of
3100 if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN
3101 : EVP_CTRL_GCM_SET_IV_INV,
3102 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
3105 if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
3107 /* Fix buffer and length to point to payload */
3108 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
3109 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
3110 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
3112 /* Encrypt payload */
3115 #if defined(AES_GCM_ASM)
3116 if (len >= 32 && AES_GCM_ASM(gctx)) {
3117 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
3120 bulk = AES_gcm_encrypt(in, out, len,
3122 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3123 gctx->gcm.len.u[1] += bulk;
3126 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
3129 len - bulk, gctx->ctr))
3133 #if defined(AES_GCM_ASM2)
3134 if (len >= 32 && AES_GCM_ASM2(gctx)) {
3135 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
3138 bulk = AES_gcm_encrypt(in, out, len,
3140 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3141 gctx->gcm.len.u[1] += bulk;
3144 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
3145 in + bulk, out + bulk, len - bulk))
3149 /* Finally write tag */
3150 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
3151 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
3156 #if defined(AES_GCM_ASM)
3157 if (len >= 16 && AES_GCM_ASM(gctx)) {
3158 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
3161 bulk = AES_gcm_decrypt(in, out, len,
3163 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3164 gctx->gcm.len.u[1] += bulk;
3167 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3170 len - bulk, gctx->ctr))
3174 #if defined(AES_GCM_ASM2)
3175 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3176 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
3179 bulk = AES_gcm_decrypt(in, out, len,
3181 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3182 gctx->gcm.len.u[1] += bulk;
3185 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3186 in + bulk, out + bulk, len - bulk))
3190 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
3191 /* If tag mismatch wipe buffer */
3192 if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
3193 OPENSSL_cleanse(out, len);
3201 gctx->tls_aad_len = -1;
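/*-
 * Added note: the TLS record handled by aes_gcm_tls_cipher() above is laid
 * out on the wire as
 *
 *     explicit IV (EVP_GCM_TLS_EXPLICIT_IV_LEN = 8 bytes) || payload ||
 *     tag (EVP_GCM_TLS_TAG_LEN = 16 bytes)
 *
 * so a successful encrypt returns the payload length plus 8 plus 16, i.e.
 * the full record length, as computed above.
 */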
3205 static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3206 const unsigned char *in, size_t len)
3208 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
3209 /* If not set up, return error */
3213 if (gctx->tls_aad_len >= 0)
3214 return aes_gcm_tls_cipher(ctx, out, in, len);
3220 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
3222 } else if (ctx->encrypt) {
3225 #if defined(AES_GCM_ASM)
3226 if (len >= 32 && AES_GCM_ASM(gctx)) {
3227 size_t res = (16 - gctx->gcm.mres) % 16;
3229 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3232 bulk = AES_gcm_encrypt(in + res,
3233 out + res, len - res,
3234 gctx->gcm.key, gctx->gcm.Yi.c,
3236 gctx->gcm.len.u[1] += bulk;
3240 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
3243 len - bulk, gctx->ctr))
3247 #if defined(AES_GCM_ASM2)
3248 if (len >= 32 && AES_GCM_ASM2(gctx)) {
3249 size_t res = (16 - gctx->gcm.mres) % 16;
3251 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3254 bulk = AES_gcm_encrypt(in + res,
3255 out + res, len - res,
3256 gctx->gcm.key, gctx->gcm.Yi.c,
3258 gctx->gcm.len.u[1] += bulk;
3262 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
3263 in + bulk, out + bulk, len - bulk))
3269 #if defined(AES_GCM_ASM)
3270 if (len >= 16 && AES_GCM_ASM(gctx)) {
3271 size_t res = (16 - gctx->gcm.mres) % 16;
3273 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3276 bulk = AES_gcm_decrypt(in + res,
3277 out + res, len - res,
3279 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3280 gctx->gcm.len.u[1] += bulk;
3284 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3287 len - bulk, gctx->ctr))
3291 #if defined(AES_GCM_ASM2)
3292 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3293 size_t res = (16 - gctx->gcm.mres) % 16;
3295 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3298 bulk = AES_gcm_decrypt(in + res,
3299 out + res, len - res,
3301 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3302 gctx->gcm.len.u[1] += bulk;
3306 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3307 in + bulk, out + bulk, len - bulk))
3313 if (!ctx->encrypt) {
3314 if (gctx->taglen < 0)
3316 if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
3321 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
3323 /* Don't reuse the IV */
3330 #define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
3331 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3332 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3333 | EVP_CIPH_CUSTOM_COPY)
3335 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
3336 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3337 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
3338 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3339 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
3340 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
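/*-
 * Usage sketch (illustrative only, error handling omitted): a typical AEAD
 * encryption with the GCM ciphers defined above.  key, iv, aad, pt, ct and
 * the lengths are hypothetical caller data.
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     int outl;
 *     unsigned char tag[16];
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL);
 *     EVP_EncryptInit_ex(c, NULL, NULL, key, iv);
 *     EVP_EncryptUpdate(c, NULL, &outl, aad, aad_len);  // AAD only, no output
 *     EVP_EncryptUpdate(c, ct, &outl, pt, pt_len);
 *     EVP_EncryptFinal_ex(c, ct + outl, &outl);         // GCM writes no data here
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, sizeof(tag), tag);
 *     EVP_CIPHER_CTX_free(c);
 *
 * Decryption mirrors this, supplying the expected tag with
 * EVP_CTRL_AEAD_SET_TAG before EVP_DecryptFinal_ex() and treating a zero
 * return from the final call as an authentication failure.
 */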
3342 static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3344 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,c);
3345 if (type == EVP_CTRL_COPY) {
3346 EVP_CIPHER_CTX *out = ptr;
3347 EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);
3348 if (xctx->xts.key1) {
3349 if (xctx->xts.key1 != &xctx->ks1)
3351 xctx_out->xts.key1 = &xctx_out->ks1;
3353 if (xctx->xts.key2) {
3354 if (xctx->xts.key2 != &xctx->ks2)
3356 xctx_out->xts.key2 = &xctx_out->ks2;
3359 } else if (type != EVP_CTRL_INIT)
3361 /* key1 and key2 are used as an indicator that both key and IV are set */
3362 xctx->xts.key1 = NULL;
3363 xctx->xts.key2 = NULL;
3367 static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3368 const unsigned char *iv, int enc)
3370 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3377 xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
3379 xctx->stream = NULL;
3381 /* key_len is two AES keys */
3382 #ifdef HWAES_CAPABLE
3383 if (HWAES_CAPABLE) {
3385 HWAES_set_encrypt_key(key,
3386 EVP_CIPHER_CTX_key_length(ctx) * 4,
3388 xctx->xts.block1 = (block128_f) HWAES_encrypt;
3389 # ifdef HWAES_xts_encrypt
3390 xctx->stream = HWAES_xts_encrypt;
3393 HWAES_set_decrypt_key(key,
3394 EVP_CIPHER_CTX_key_length(ctx) * 4,
3396 xctx->xts.block1 = (block128_f) HWAES_decrypt;
3397 # ifdef HWAES_xts_decrypt
3398 xctx->stream = HWAES_xts_decrypt;
3402 HWAES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3403 EVP_CIPHER_CTX_key_length(ctx) * 4,
3405 xctx->xts.block2 = (block128_f) HWAES_encrypt;
3407 xctx->xts.key1 = &xctx->ks1;
3411 #ifdef BSAES_CAPABLE
3413 xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
3416 #ifdef VPAES_CAPABLE
3417 if (VPAES_CAPABLE) {
3419 vpaes_set_encrypt_key(key,
3420 EVP_CIPHER_CTX_key_length(ctx) * 4,
3422 xctx->xts.block1 = (block128_f) vpaes_encrypt;
3424 vpaes_set_decrypt_key(key,
3425 EVP_CIPHER_CTX_key_length(ctx) * 4,
3427 xctx->xts.block1 = (block128_f) vpaes_decrypt;
3430 vpaes_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3431 EVP_CIPHER_CTX_key_length(ctx) * 4,
3433 xctx->xts.block2 = (block128_f) vpaes_encrypt;
3435 xctx->xts.key1 = &xctx->ks1;
3439 (void)0; /* terminate potentially open 'else' */
3442 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
3444 xctx->xts.block1 = (block128_f) AES_encrypt;
3446 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
3448 xctx->xts.block1 = (block128_f) AES_decrypt;
3451 AES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3452 EVP_CIPHER_CTX_key_length(ctx) * 4,
3454 xctx->xts.block2 = (block128_f) AES_encrypt;
3456 xctx->xts.key1 = &xctx->ks1;
3460 xctx->xts.key2 = &xctx->ks2;
3461 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
3467 static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3468 const unsigned char *in, size_t len)
3470 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3472 if (xctx->xts.key1 == NULL
3473 || xctx->xts.key2 == NULL
3476 || len < AES_BLOCK_SIZE)
3480 * Verify that the two keys are different.
3482 * This addresses the vulnerability described in Rogaway's September 2004
3483 * paper (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf):
3484 * "Efficient Instantiations of Tweakable Blockciphers and Refinements
3485 * to Modes OCB and PMAC".
3487 * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states that:
3488 * "The check for Key_1 != Key_2 shall be done at any place BEFORE
3489 * using the keys in the XTS-AES algorithm to process data with them."
3491 if (CRYPTO_memcmp(xctx->xts.key1, xctx->xts.key2,
3492 EVP_CIPHER_CTX_key_length(ctx) / 2) == 0)
3496 (*xctx->stream) (in, out, len,
3497 xctx->xts.key1, xctx->xts.key2,
3498 EVP_CIPHER_CTX_iv_noconst(ctx));
3499 else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx),
3501 EVP_CIPHER_CTX_encrypting(ctx)))
3506 #define aes_xts_cleanup NULL
3508 #define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
3509 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3510 | EVP_CIPH_CUSTOM_COPY)
3512 BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
3513 BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
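/*-
 * Usage sketch (illustrative only, error handling omitted): XTS takes a
 * double length key, i.e. two AES keys that must differ (checked above), and
 * a 16-byte tweak passed through the iv parameter; the input must be at
 * least one block.  key, tweak, in and out are hypothetical caller data.
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     int outl;
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_256_xts(), NULL, key, tweak);  // 64-byte key
 *     EVP_EncryptUpdate(c, out, &outl, in, in_len);
 *     EVP_CIPHER_CTX_free(c);
 */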
3515 static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3517 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
3526 cctx->tls_aad_len = -1;
3529 case EVP_CTRL_AEAD_TLS1_AAD:
3530 /* Save the AAD for later use */
3531 if (arg != EVP_AEAD_TLS1_AAD_LEN)
3533 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3534 cctx->tls_aad_len = arg;
3537 len = EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
3538 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
3539 /* Correct length for explicit IV */
3540 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
3542 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
3543 /* If decrypting correct for tag too */
3544 if (!EVP_CIPHER_CTX_encrypting(c)) {
3549 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
3550 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
3552 /* Extra padding: tag appended to record */
3555 case EVP_CTRL_CCM_SET_IV_FIXED:
3556 /* Sanity check length */
3557 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
3559 /* Just copy to first part of IV */
3560 memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
3563 case EVP_CTRL_AEAD_SET_IVLEN:
3566 case EVP_CTRL_CCM_SET_L:
3567 if (arg < 2 || arg > 8)
3572 case EVP_CTRL_AEAD_SET_TAG:
3573 if ((arg & 1) || arg < 4 || arg > 16)
3575 if (EVP_CIPHER_CTX_encrypting(c) && ptr)
3579 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3584 case EVP_CTRL_AEAD_GET_TAG:
3585 if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
3587 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
3596 EVP_CIPHER_CTX *out = ptr;
3597 EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
3598 if (cctx->ccm.key) {
3599 if (cctx->ccm.key != &cctx->ks)
3601 cctx_out->ccm.key = &cctx_out->ks;
3612 static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3613 const unsigned char *iv, int enc)
3615 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3620 #ifdef HWAES_CAPABLE
3621 if (HWAES_CAPABLE) {
3622 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3625 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3626 &cctx->ks, (block128_f) HWAES_encrypt);
3632 #ifdef VPAES_CAPABLE
3633 if (VPAES_CAPABLE) {
3634 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3636 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3637 &cctx->ks, (block128_f) vpaes_encrypt);
3643 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3645 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3646 &cctx->ks, (block128_f) AES_encrypt);
3651 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
3657 static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3658 const unsigned char *in, size_t len)
3660 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3661 CCM128_CONTEXT *ccm = &cctx->ccm;
3662 /* Encrypt/decrypt must be performed in place */
3663 if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
3665 /* If encrypting set explicit IV from sequence number (start of AAD) */
3666 if (EVP_CIPHER_CTX_encrypting(ctx))
3667 memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
3668 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3669 /* Get rest of IV from explicit IV */
3670 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
3671 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3672 /* Correct length value */
3673 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3674 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
3678 CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
3679 /* Fix buffer to point to payload */
3680 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3681 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3682 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3683 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3685 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3687 if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
3689 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3691 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3693 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3694 unsigned char tag[16];
3695 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3696 if (!CRYPTO_memcmp(tag, in + len, cctx->M))
3700 OPENSSL_cleanse(out, len);
3705 static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3706 const unsigned char *in, size_t len)
3708 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3709 CCM128_CONTEXT *ccm = &cctx->ccm;
3710 /* If not set up, return error */
3714 if (cctx->tls_aad_len >= 0)
3715 return aes_ccm_tls_cipher(ctx, out, in, len);
3717 /* EVP_*Final() doesn't return any data */
3718 if (in == NULL && out != NULL)
3724 if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
3728 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3734 /* If we have AAD we need the message length to have been set first */
3735 if (!cctx->len_set && len)
3737 CRYPTO_ccm128_aad(ccm, in, len);
3740 /* If the length has not been set yet, do it now */
3741 if (!cctx->len_set) {
3742 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
3747 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3748 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3750 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3756 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3758 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3759 unsigned char tag[16];
3760 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3761 if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
3767 OPENSSL_cleanse(out, len);
3775 #define aes_ccm_cleanup NULL
3777 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
3778 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3779 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
3780 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3781 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
3782 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
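/*-
 * Usage sketch (illustrative only, error handling omitted): CCM wants the
 * nonce and tag lengths configured before the key, and the total plaintext
 * length supplied before any AAD.  key, nonce, aad, pt, ct and the lengths
 * are hypothetical caller data.
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     int outl;
 *     unsigned char tag[16];
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_128_ccm(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_TAG, 16, NULL);  // tag length only
 *     EVP_EncryptInit_ex(c, NULL, NULL, key, nonce);
 *     EVP_EncryptUpdate(c, NULL, &outl, NULL, pt_len);          // total length
 *     EVP_EncryptUpdate(c, NULL, &outl, aad, aad_len);          // AAD
 *     EVP_EncryptUpdate(c, ct, &outl, pt, pt_len);
 *     EVP_EncryptFinal_ex(c, ct + outl, &outl);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 *     EVP_CIPHER_CTX_free(c);
 */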
3789 /* Indicates if IV has been set */
3793 static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3794 const unsigned char *iv, int enc)
3796 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3800 if (EVP_CIPHER_CTX_encrypting(ctx))
3801 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3804 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3810 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, EVP_CIPHER_CTX_iv_length(ctx));
3811 wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx);
3816 static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3817 const unsigned char *in, size_t inlen)
3819 EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
3821 /* AES wrap with padding has IV length of 4, without padding 8 */
3822 int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;
3823 /* No final operation so always return zero length */
3826 /* Input length must always be non-zero */
3829 /* If decrypting need at least 16 bytes and multiple of 8 */
3830 if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
3832 /* If not padding input must be multiple of 8 */
3833 if (!pad && inlen & 0x7)
3835 if (is_partially_overlapping(out, in, inlen)) {
3836 EVPerr(EVP_F_AES_WRAP_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
3840 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3841 /* If padding round up to multiple of 8 */
3843 inlen = (inlen + 7) / 8 * 8;
3848 * If not padding output will be exactly 8 bytes smaller than
3849 * input. If padding it will be at least 8 bytes smaller but we
3850 * don't know how much.
3856 if (EVP_CIPHER_CTX_encrypting(ctx))
3857 rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
3859 (block128_f) AES_encrypt);
3861 rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
3863 (block128_f) AES_decrypt);
3865 if (EVP_CIPHER_CTX_encrypting(ctx))
3866 rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
3867 out, in, inlen, (block128_f) AES_encrypt);
3869 rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
3870 out, in, inlen, (block128_f) AES_decrypt);
3872 return rv ? (int)rv : -1;
3875 #define WRAP_FLAGS (EVP_CIPH_WRAP_MODE \
3876 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3877 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)
3879 static const EVP_CIPHER aes_128_wrap = {
3881 8, 16, 8, WRAP_FLAGS,
3882 aes_wrap_init_key, aes_wrap_cipher,
3884 sizeof(EVP_AES_WRAP_CTX),
3885 NULL, NULL, NULL, NULL
3888 const EVP_CIPHER *EVP_aes_128_wrap(void)
3890 return &aes_128_wrap;
3893 static const EVP_CIPHER aes_192_wrap = {
3895 8, 24, 8, WRAP_FLAGS,
3896 aes_wrap_init_key, aes_wrap_cipher,
3898 sizeof(EVP_AES_WRAP_CTX),
3899 NULL, NULL, NULL, NULL
3902 const EVP_CIPHER *EVP_aes_192_wrap(void)
3904 return &aes_192_wrap;
3907 static const EVP_CIPHER aes_256_wrap = {
3909 8, 32, 8, WRAP_FLAGS,
3910 aes_wrap_init_key, aes_wrap_cipher,
3912 sizeof(EVP_AES_WRAP_CTX),
3913 NULL, NULL, NULL, NULL
3916 const EVP_CIPHER *EVP_aes_256_wrap(void)
3918 return &aes_256_wrap;
3921 static const EVP_CIPHER aes_128_wrap_pad = {
3922 NID_id_aes128_wrap_pad,
3923 8, 16, 4, WRAP_FLAGS,
3924 aes_wrap_init_key, aes_wrap_cipher,
3926 sizeof(EVP_AES_WRAP_CTX),
3927 NULL, NULL, NULL, NULL
3930 const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
3932 return &aes_128_wrap_pad;
3935 static const EVP_CIPHER aes_192_wrap_pad = {
3936 NID_id_aes192_wrap_pad,
3937 8, 24, 4, WRAP_FLAGS,
3938 aes_wrap_init_key, aes_wrap_cipher,
3940 sizeof(EVP_AES_WRAP_CTX),
3941 NULL, NULL, NULL, NULL
3944 const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
3946 return &aes_192_wrap_pad;
3949 static const EVP_CIPHER aes_256_wrap_pad = {
3950 NID_id_aes256_wrap_pad,
3951 8, 32, 4, WRAP_FLAGS,
3952 aes_wrap_init_key, aes_wrap_cipher,
3954 sizeof(EVP_AES_WRAP_CTX),
3955 NULL, NULL, NULL, NULL
3958 const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
3960 return &aes_256_wrap_pad;
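/*-
 * Usage sketch (illustrative only, error handling omitted): the wrap ciphers
 * are refused unless EVP_CIPHER_CTX_FLAG_WRAP_ALLOW is set on the context
 * first.  kek, key_material and wrapped are hypothetical caller data.
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     int outl;
 *
 *     EVP_CIPHER_CTX_set_flags(c, EVP_CIPHER_CTX_FLAG_WRAP_ALLOW);
 *     EVP_EncryptInit_ex(c, EVP_aes_256_wrap(), NULL, kek, NULL);  // default IV
 *     EVP_EncryptUpdate(c, wrapped, &outl, key_material, key_material_len);
 *     EVP_CIPHER_CTX_free(c);
 *
 * Without padding the wrapped output is exactly 8 bytes longer than the
 * input, which must be a non-zero multiple of 8; the _wrap_pad variants
 * accept any non-zero length and round the payload up internally.
 */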
3963 #ifndef OPENSSL_NO_OCB
3964 static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3966 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
3967 EVP_CIPHER_CTX *newc;
3968 EVP_AES_OCB_CTX *new_octx;
3974 octx->ivlen = EVP_CIPHER_CTX_iv_length(c);
3975 octx->iv = EVP_CIPHER_CTX_iv_noconst(c);
3977 octx->data_buf_len = 0;
3978 octx->aad_buf_len = 0;
3981 case EVP_CTRL_AEAD_SET_IVLEN:
3982 /* IV len must be 1 to 15 */
3983 if (arg <= 0 || arg > 15)
3989 case EVP_CTRL_AEAD_SET_TAG:
3991 /* Tag len must be 0 to 16 */
3992 if (arg < 0 || arg > 16)
3998 if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c))
4000 memcpy(octx->tag, ptr, arg);
4003 case EVP_CTRL_AEAD_GET_TAG:
4004 if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c))
4007 memcpy(ptr, octx->tag, arg);
4011 newc = (EVP_CIPHER_CTX *)ptr;
4012 new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
4013 return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
4014 &new_octx->ksenc.ks,
4015 &new_octx->ksdec.ks);
4023 # ifdef HWAES_CAPABLE
4024 # ifdef HWAES_ocb_encrypt
4025 void HWAES_ocb_encrypt(const unsigned char *in, unsigned char *out,
4026 size_t blocks, const void *key,
4027 size_t start_block_num,
4028 unsigned char offset_i[16],
4029 const unsigned char L_[][16],
4030 unsigned char checksum[16]);
4032 # define HWAES_ocb_encrypt ((ocb128_f)NULL)
4034 # ifdef HWAES_ocb_decrypt
4035 void HWAES_ocb_decrypt(const unsigned char *in, unsigned char *out,
4036 size_t blocks, const void *key,
4037 size_t start_block_num,
4038 unsigned char offset_i[16],
4039 const unsigned char L_[][16],
4040 unsigned char checksum[16]);
4042 # define HWAES_ocb_decrypt ((ocb128_f)NULL)
4046 static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
4047 const unsigned char *iv, int enc)
4049 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
4055 * We set both the encrypt and decrypt key here because decrypt
4056 * needs both. We could possibly optimise to remove setting the
4057 * decrypt for an encryption operation.
4059 # ifdef HWAES_CAPABLE
4060 if (HWAES_CAPABLE) {
4061 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4063 HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4065 if (!CRYPTO_ocb128_init(&octx->ocb,
4066 &octx->ksenc.ks, &octx->ksdec.ks,
4067 (block128_f) HWAES_encrypt,
4068 (block128_f) HWAES_decrypt,
4069 enc ? HWAES_ocb_encrypt
4070 : HWAES_ocb_decrypt))
4075 # ifdef VPAES_CAPABLE
4076 if (VPAES_CAPABLE) {
4077 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4079 vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4081 if (!CRYPTO_ocb128_init(&octx->ocb,
4082 &octx->ksenc.ks, &octx->ksdec.ks,
4083 (block128_f) vpaes_encrypt,
4084 (block128_f) vpaes_decrypt,
4090 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4092 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
4094 if (!CRYPTO_ocb128_init(&octx->ocb,
4095 &octx->ksenc.ks, &octx->ksdec.ks,
4096 (block128_f) AES_encrypt,
4097 (block128_f) AES_decrypt,
4104 * If we have an iv we can set it directly, otherwise use saved IV.
4106 if (iv == NULL && octx->iv_set)
4109 if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
4116 /* If key set use IV, otherwise copy */
4118 CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
4120 memcpy(octx->iv, iv, octx->ivlen);
4126 static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
4127 const unsigned char *in, size_t len)
4131 int written_len = 0;
4132 size_t trailing_len;
4133 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
4135 /* If IV or Key not set then return error */
4144 * Need to ensure we are only passing full blocks to low level OCB
4145 * routines. We do it here rather than in EVP_EncryptUpdate/
4146 * EVP_DecryptUpdate because we need to pass full blocks of AAD too
4147 * and those routines don't support that
4150 /* Are we dealing with AAD or normal data here? */
4152 buf = octx->aad_buf;
4153 buf_len = &(octx->aad_buf_len);
4155 buf = octx->data_buf;
4156 buf_len = &(octx->data_buf_len);
4158 if (is_partially_overlapping(out + *buf_len, in, len)) {
4159 EVPerr(EVP_F_AES_OCB_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
4165 * If we've got a partially filled buffer from a previous call then
4166 * use that data first
4169 unsigned int remaining;
4171 remaining = AES_BLOCK_SIZE - (*buf_len);
4172 if (remaining > len) {
4173 memcpy(buf + (*buf_len), in, len);
4177 memcpy(buf + (*buf_len), in, remaining);
4180 * If we get here we've filled the buffer, so process it
4185 if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
4187 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
4188 if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
4192 if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
4196 written_len = AES_BLOCK_SIZE;
4199 out += AES_BLOCK_SIZE;
4202 /* Do we have a partial block to handle at the end? */
4203 trailing_len = len % AES_BLOCK_SIZE;
4206 * If we've got some full blocks to handle, then process these first
4208 if (len != trailing_len) {
4210 if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
4212 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
4213 if (!CRYPTO_ocb128_encrypt
4214 (&octx->ocb, in, out, len - trailing_len))
4217 if (!CRYPTO_ocb128_decrypt
4218 (&octx->ocb, in, out, len - trailing_len))
4221 written_len += len - trailing_len;
4222 in += len - trailing_len;
4225 /* Handle any trailing partial block */
4226 if (trailing_len > 0) {
4227 memcpy(buf, in, trailing_len);
4228 *buf_len = trailing_len;
4234 * First of all empty the buffer of any partial block that we might
4235 * have been provided - both for data and AAD
4237 if (octx->data_buf_len > 0) {
4238 if (EVP_CIPHER_CTX_encrypting(ctx)) {
4239 if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
4240 octx->data_buf_len))
4243 if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
4244 octx->data_buf_len))
4247 written_len = octx->data_buf_len;
4248 octx->data_buf_len = 0;
4250 if (octx->aad_buf_len > 0) {
4251 if (!CRYPTO_ocb128_aad
4252 (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
4254 octx->aad_buf_len = 0;
4256 /* If decrypting then verify */
4257 if (!EVP_CIPHER_CTX_encrypting(ctx)) {
4258 if (octx->taglen < 0)
4260 if (CRYPTO_ocb128_finish(&octx->ocb,
4261 octx->tag, octx->taglen) != 0)
4266 /* If encrypting then just get the tag */
4267 if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
4269 /* Don't reuse the IV */
4275 static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
4277 EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
4278 CRYPTO_ocb128_cleanup(&octx->ocb);
4282 BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
4283 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4284 BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
4285 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4286 BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
4287 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
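/*-
 * Usage note (illustrative): OCB is driven with the same EVP AEAD calling
 * sequence sketched for GCM earlier in this file; the differences are the
 * nonce, which may be 1 to 15 bytes (EVP_CTRL_AEAD_SET_IVLEN), and the tag,
 * whose length can be reduced below the 16-byte default by calling
 * EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_TAG, taglen, NULL) when
 * encrypting.
 */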
4288 #endif /* OPENSSL_NO_OCB */
4291 #ifndef OPENSSL_NO_SIV
4293 typedef SIV128_CONTEXT EVP_AES_SIV_CTX;
4295 #define aesni_siv_init_key aes_siv_init_key
4296 static int aes_siv_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
4297 const unsigned char *iv, int enc)
4299 const EVP_CIPHER *ctr;
4300 const EVP_CIPHER *cbc;
4301 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, ctx);
4302 int klen = EVP_CIPHER_CTX_key_length(ctx) / 2;
4309 cbc = EVP_aes_128_cbc();
4310 ctr = EVP_aes_128_ctr();
4313 cbc = EVP_aes_192_cbc();
4314 ctr = EVP_aes_192_ctr();
4317 cbc = EVP_aes_256_cbc();
4318 ctr = EVP_aes_256_ctr();
4324 /* klen is the key length of the underlying cipher, not of the input key,
4325 which is twice as long */
4326 return CRYPTO_siv128_init(sctx, key, klen, cbc, ctr);
4329 #define aesni_siv_cipher aes_siv_cipher
4330 static int aes_siv_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
4331 const unsigned char *in, size_t len)
4333 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, ctx);
4335 /* EncryptFinal or DecryptFinal */
4337 return CRYPTO_siv128_finish(sctx);
4339 /* Deal with associated data */
4341 return CRYPTO_siv128_aad(sctx, in, len);
4343 if (EVP_CIPHER_CTX_encrypting(ctx))
4344 return CRYPTO_siv128_encrypt(sctx, in, out, len);
4346 return CRYPTO_siv128_decrypt(sctx, in, out, len);
4349 #define aesni_siv_cleanup aes_siv_cleanup
4350 static int aes_siv_cleanup(EVP_CIPHER_CTX *c)
4352 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, c);
4354 return CRYPTO_siv128_cleanup(sctx);
4358 #define aesni_siv_ctrl aes_siv_ctrl
4359 static int aes_siv_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
4361 SIV128_CONTEXT *sctx = EVP_C_DATA(SIV128_CONTEXT, c);
4362 SIV128_CONTEXT *sctx_out;
4366 return CRYPTO_siv128_cleanup(sctx);
4368 case EVP_CTRL_SET_SPEED:
4369 return CRYPTO_siv128_speed(sctx, arg);
4371 case EVP_CTRL_AEAD_SET_TAG:
4372 if (!EVP_CIPHER_CTX_encrypting(c))
4373 return CRYPTO_siv128_set_tag(sctx, ptr, arg);
4376 case EVP_CTRL_AEAD_GET_TAG:
4377 if (!EVP_CIPHER_CTX_encrypting(c))
4379 return CRYPTO_siv128_get_tag(sctx, ptr, arg);
4382 sctx_out = EVP_C_DATA(SIV128_CONTEXT, (EVP_CIPHER_CTX*)ptr);
4383 return CRYPTO_siv128_copy_ctx(sctx_out, sctx);
4391 #define SIV_FLAGS (EVP_CIPH_FLAG_AEAD_CIPHER | EVP_CIPH_FLAG_DEFAULT_ASN1 \
4392 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
4393 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CUSTOM_COPY \
4394 | EVP_CIPH_CTRL_INIT)
4396 BLOCK_CIPHER_custom(NID_aes, 128, 1, 0, siv, SIV, SIV_FLAGS)
4397 BLOCK_CIPHER_custom(NID_aes, 192, 1, 0, siv, SIV, SIV_FLAGS)
4398 BLOCK_CIPHER_custom(NID_aes, 256, 1, 0, siv, SIV, SIV_FLAGS)
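/*-
 * Rough usage sketch (illustrative, based on the generic EVP AEAD flow; exact
 * details may differ): AES-SIV takes a double length key and is
 * deterministic, so no nonce is required; RFC 5297 allows one to be supplied
 * as the last AAD item.  The whole plaintext must be passed in a single
 * update.  key, aad, pt, ct and the lengths are hypothetical caller data.
 *
 *     EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *     int outl;
 *     unsigned char tag[16];
 *
 *     EVP_EncryptInit_ex(c, EVP_aes_128_siv(), NULL, key, NULL);  // 32-byte key
 *     EVP_EncryptUpdate(c, NULL, &outl, aad, aad_len);            // AAD
 *     EVP_EncryptUpdate(c, ct, &outl, pt, pt_len);                // single shot
 *     EVP_EncryptFinal_ex(c, ct + outl, &outl);
 *     EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, sizeof(tag), tag);
 *     EVP_CIPHER_CTX_free(c);
 */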