2 * Copyright 2017-2021 The OpenSSL Project Authors. All Rights Reserved.
3 * Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
5 * Licensed under the Apache License 2.0 (the "License"). You may not use
6 * this file except in compliance with the License. You can obtain a copy
7 * in the file LICENSE in the source distribution or at
8 * https://www.openssl.org/source/license.html
11 #include "internal/deprecated.h"
13 #include "internal/cryptlib.h"
14 #ifndef OPENSSL_NO_ARIA
15 # include <openssl/evp.h>
16 # include <openssl/modes.h>
17 # include <openssl/rand.h>
18 # include "crypto/aria.h"
19 # include "crypto/evp.h"
20 # include "crypto/modes.h"
21 # include "evp_local.h"
/*
 * State for the ARIA-GCM EVP cipher.
 * NOTE(review): the typedef/struct opening lines are not visible in this
 * excerpt, and members referenced later in the file (gctx->gcm, a
 * GCM128_CONTEXT, and gctx->taglen) do not appear here -- verify the
 * full declaration against the complete source before editing.
 */
23 /* ARIA subkey Structure */
28 /* ARIA GCM context */
33 } ks; /* ARIA subkey to use */
34 int key_set; /* Set if key initialised */
35 int iv_set; /* Set if an iv is set */
37 unsigned char *iv; /* Temporary IV store */
38 int ivlen; /* IV length */
40 int iv_gen; /* It is OK to generate IVs */
41 int tls_aad_len; /* TLS AAD length */
/*
 * State for the ARIA-CCM EVP cipher.  L and M are the CCM nonce-length
 * and tag-length parameters (RFC 3610); the IV length used elsewhere in
 * this file is derived as 15 - L.
 * NOTE(review): the struct opening and members referenced later
 * (cctx->ccm, a CCM128_CONTEXT, and cctx->str) are not visible in this
 * excerpt -- confirm against the full source.
 */
44 /* ARIA CCM context */
49 } ks; /* ARIA key schedule to use */
50 int key_set; /* Set if key initialised */
51 int iv_set; /* Set if an iv is set */
52 int tag_set; /* Set if tag is valid */
53 int len_set; /* Set if message length set */
54 int L, M; /* L and M parameters from RFC3610 */
55 int tls_aad_len; /* TLS AAD length */
/*
 * aria_init_key: build the ARIA key schedule inside the ctx cipher data.
 * The encrypt schedule is used for encryption and for every mode other
 * than ECB/CBC decryption (stream-like modes only ever run the block
 * cipher forwards); ECB/CBC decryption takes the decrypt schedule.
 * NOTE(review): this excerpt omits the declaration of 'ret', the 'else'
 * keyword, the 'ret < 0' error test and the return statements -- do not
 * restructure without the complete file.
 */
60 /* The subkey for ARIA is generated. */
61 static int aria_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
62 const unsigned char *iv, int enc)
65 int mode = EVP_CIPHER_CTX_get_mode(ctx);
67 if (enc || (mode != EVP_CIPH_ECB_MODE && mode != EVP_CIPH_CBC_MODE))
68 ret = ossl_aria_set_encrypt_key(key,
69 EVP_CIPHER_CTX_get_key_length(ctx) * 8,
70 EVP_CIPHER_CTX_get_cipher_data(ctx));
72 ret = ossl_aria_set_decrypt_key(key,
73 EVP_CIPHER_CTX_get_key_length(ctx) * 8,
74 EVP_CIPHER_CTX_get_cipher_data(ctx));
76 ERR_raise(ERR_LIB_EVP,EVP_R_ARIA_KEY_SETUP_FAILED);
/*
 * aria_cbc_encrypt: CBC-mode wrapper dispatching to the generic
 * CRYPTO_cbc128_encrypt/decrypt helpers.  Both directions pass
 * ossl_aria_encrypt as the block function; presumably the direction is
 * baked into the key schedule selected by aria_init_key -- verify.
 * NOTE(review): the if/else around the two calls is missing from this
 * excerpt.
 */
82 static void aria_cbc_encrypt(const unsigned char *in, unsigned char *out,
83 size_t len, const ARIA_KEY *key,
84 unsigned char *ivec, const int enc)
88 CRYPTO_cbc128_encrypt(in, out, len, key, ivec,
89 (block128_f) ossl_aria_encrypt);
91 CRYPTO_cbc128_decrypt(in, out, len, key, ivec,
92 (block128_f) ossl_aria_encrypt);
/* aria_cfb128_encrypt: CFB128 via the generic mode helper; 'enc' selects
 * direction inside CRYPTO_cfb128_encrypt, 'num' carries partial-block
 * state across calls. */
95 static void aria_cfb128_encrypt(const unsigned char *in, unsigned char *out,
96 size_t length, const ARIA_KEY *key,
97 unsigned char *ivec, int *num, const int enc)
100 CRYPTO_cfb128_encrypt(in, out, length, key, ivec, num, enc,
101 (block128_f) ossl_aria_encrypt);
/* aria_cfb1_encrypt: 1-bit CFB feedback via CRYPTO_cfb128_1_encrypt. */
104 static void aria_cfb1_encrypt(const unsigned char *in, unsigned char *out,
105 size_t length, const ARIA_KEY *key,
106 unsigned char *ivec, int *num, const int enc)
108 CRYPTO_cfb128_1_encrypt(in, out, length, key, ivec, num, enc,
109 (block128_f) ossl_aria_encrypt);
/* aria_cfb8_encrypt: 8-bit CFB feedback via CRYPTO_cfb128_8_encrypt. */
112 static void aria_cfb8_encrypt(const unsigned char *in, unsigned char *out,
113 size_t length, const ARIA_KEY *key,
114 unsigned char *ivec, int *num, const int enc)
116 CRYPTO_cfb128_8_encrypt(in, out, length, key, ivec, num, enc,
117 (block128_f) ossl_aria_encrypt);
/*
 * aria_ecb_encrypt: single-block ECB operation.  The 'enc' flag is not
 * used in the visible body -- direction comes from the key schedule set
 * up in aria_init_key (assumption; confirm against full file).
 */
120 static void aria_ecb_encrypt(const unsigned char *in, unsigned char *out,
121 const ARIA_KEY *key, const int enc)
123 ossl_aria_encrypt(in, out, key);
/* aria_ofb128_encrypt: OFB keystream mode; encrypt and decrypt are the
 * same operation, so no 'enc' flag is needed. */
126 static void aria_ofb128_encrypt(const unsigned char *in, unsigned char *out,
127 size_t length, const ARIA_KEY *key,
128 unsigned char *ivec, int *num)
130 CRYPTO_ofb128_encrypt(in, out, length, key, ivec, num,
131 (block128_f) ossl_aria_encrypt);
/*
 * Instantiate ECB/CBC/CFB128/OFB EVP_CIPHERs for 128/192/256-bit ARIA
 * (block size 16, IV length 16, 128-bit CFB width), plus CFB1/CFB8
 * variants via IMPLEMENT_ARIA_CFBR.
 * NOTE(review): the final argument line of each IMPLEMENT_BLOCK_CIPHER
 * invocation (after EVP_CIPHER_get_asn1_iv) is missing from this
 * excerpt -- the calls are unterminated as shown.
 */
134 IMPLEMENT_BLOCK_CIPHER(aria_128, ks, aria, EVP_ARIA_KEY,
135 NID_aria_128, 16, 16, 16, 128,
136 0, aria_init_key, NULL,
137 EVP_CIPHER_set_asn1_iv,
138 EVP_CIPHER_get_asn1_iv,
140 IMPLEMENT_BLOCK_CIPHER(aria_192, ks, aria, EVP_ARIA_KEY,
141 NID_aria_192, 16, 24, 16, 128,
142 0, aria_init_key, NULL,
143 EVP_CIPHER_set_asn1_iv,
144 EVP_CIPHER_get_asn1_iv,
146 IMPLEMENT_BLOCK_CIPHER(aria_256, ks, aria, EVP_ARIA_KEY,
147 NID_aria_256, 16, 32, 16, 128,
148 0, aria_init_key, NULL,
149 EVP_CIPHER_set_asn1_iv,
150 EVP_CIPHER_get_asn1_iv,
153 # define IMPLEMENT_ARIA_CFBR(ksize,cbits) \
154 IMPLEMENT_CFBR(aria,aria,EVP_ARIA_KEY,ks,ksize,cbits,16,0)
155 IMPLEMENT_ARIA_CFBR(128,1)
156 IMPLEMENT_ARIA_CFBR(192,1)
157 IMPLEMENT_ARIA_CFBR(256,1)
158 IMPLEMENT_ARIA_CFBR(128,8)
159 IMPLEMENT_ARIA_CFBR(192,8)
160 IMPLEMENT_ARIA_CFBR(256,8)
/*
 * BLOCK_CIPHER_generic: defines a static EVP_CIPHER table entry plus its
 * public EVP_aria_<keylen>_<mode>() accessor.  Used below for CTR.
 * NOTE(review): at least one continuation line of the initializer (the
 * init-key callback between the flags and the cipher callback) is
 * missing from this excerpt; comments are kept outside the macro so the
 * backslash continuations are untouched.
 */
162 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
163 static const EVP_CIPHER aria_##keylen##_##mode = { \
164 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
165 flags|EVP_CIPH_##MODE##_MODE, \
168 aria_##mode##_cipher, \
170 sizeof(EVP_ARIA_KEY), \
171 NULL,NULL,NULL,NULL }; \
172 const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
173 { return &aria_##keylen##_##mode; }
/*
 * aria_ctr_cipher: CTR mode; the EVP ctx buffer holds the partial
 * keystream block and 'num' (saved back into the ctx afterwards) tracks
 * the offset within it across calls.
 * NOTE(review): the return statement is not visible in this excerpt.
 */
175 static int aria_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
176 const unsigned char *in, size_t len)
178 unsigned int num = EVP_CIPHER_CTX_get_num(ctx);
179 EVP_ARIA_KEY *dat = EVP_C_DATA(EVP_ARIA_KEY, ctx);
181 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks, ctx->iv,
182 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
183 (block128_f) ossl_aria_encrypt);
184 EVP_CIPHER_CTX_set_num(ctx, num);
/* CTR instantiations: stream-style block size 1, 16-byte IV/counter. */
188 BLOCK_CIPHER_generic(NID_aria, 128, 1, 16, ctr, ctr, CTR, 0)
189 BLOCK_CIPHER_generic(NID_aria, 192, 1, 16, ctr, ctr, CTR, 0)
190 BLOCK_CIPHER_generic(NID_aria, 256, 1, 16, ctr, ctr, CTR, 0)
192 /* Authenticated cipher modes (GCM/CCM) */
194 /* increment counter (64-bit int) by 1 */
/*
 * ctr64_inc: per its comment, increments a 64-bit counter held in
 * 'counter'.  NOTE(review): the function body is entirely absent from
 * this excerpt (original lines 196-209) -- nothing beyond the comment's
 * claim can be confirmed here.
 */
195 static void ctr64_inc(unsigned char *counter)
/*
 * aria_gcm_init_key: if a key is supplied, build the encrypt schedule
 * and (re)initialise the GCM128 context with ARIA as the block cipher;
 * then apply the supplied IV, or fall back to a previously saved one.
 * GCM always uses the encrypt direction of the block cipher.
 * NOTE(review): the guard conditions, 'ret' declaration, else branches
 * and key_set/iv_set bookkeeping are missing from this excerpt.
 */
210 static int aria_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
211 const unsigned char *iv, int enc)
214 EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,ctx);
219 ret = ossl_aria_set_encrypt_key(key,
220 EVP_CIPHER_CTX_get_key_length(ctx) * 8,
222 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
223 (block128_f) ossl_aria_encrypt);
225 ERR_raise(ERR_LIB_EVP,EVP_R_ARIA_KEY_SETUP_FAILED);
230 * If we have an iv can set it directly, otherwise use saved IV.
232 if (iv == NULL && gctx->iv_set)
235 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
240 /* If key set use IV, otherwise copy */
242 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
244 memcpy(gctx->iv, iv, gctx->ivlen);
/*
 * aria_gcm_ctrl: control dispatcher for ARIA-GCM.  Visible operations:
 * IV length get/set (with heap allocation when the requested length
 * exceeds both EVP_MAX_IV_LENGTH and the current buffer), AEAD tag
 * get/set via the ctx scratch buffer, TLS fixed/invocation IV handling
 * (EVP_CTRL_GCM_SET_IV_FIXED / IV_GEN / SET_IV_INV), TLS AAD length
 * fix-up, and EVP_CTRL_COPY duplication of the IV buffer and gcm.key
 * pointer.
 * NOTE(review): the switch statement itself, several case labels
 * (e.g. EVP_CTRL_INIT), break/return statements and closing braces are
 * missing from this excerpt; the control flow must not be reshaped
 * without the full source.
 */
251 static int aria_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
253 EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,c);
259 gctx->ivlen = EVP_CIPHER_get_iv_length(c->cipher);
263 gctx->tls_aad_len = -1;
266 case EVP_CTRL_GET_IVLEN:
267 *(int *)ptr = gctx->ivlen;
270 case EVP_CTRL_AEAD_SET_IVLEN:
273 /* Allocate memory for IV if needed */
274 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
275 if (gctx->iv != c->iv)
276 OPENSSL_free(gctx->iv);
277 if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
278 ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
285 case EVP_CTRL_AEAD_SET_TAG:
286 if (arg <= 0 || arg > 16 || EVP_CIPHER_CTX_is_encrypting(c))
288 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
292 case EVP_CTRL_AEAD_GET_TAG:
293 if (arg <= 0 || arg > 16 || !EVP_CIPHER_CTX_is_encrypting(c)
296 memcpy(ptr, EVP_CIPHER_CTX_buf_noconst(c), arg);
299 case EVP_CTRL_GCM_SET_IV_FIXED:
300 /* Special case: -1 length restores whole IV */
302 memcpy(gctx->iv, ptr, gctx->ivlen);
307 * Fixed field must be at least 4 bytes and invocation field at least
310 if ((arg < 4) || (gctx->ivlen - arg) < 8)
313 memcpy(gctx->iv, ptr, arg);
314 if (EVP_CIPHER_CTX_is_encrypting(c)
315 && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
320 case EVP_CTRL_GCM_IV_GEN:
321 if (gctx->iv_gen == 0 || gctx->key_set == 0)
323 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
324 if (arg <= 0 || arg > gctx->ivlen)
326 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
328 * Invocation field will be at least 8 bytes in size and so no need
329 * to check wrap around or increment more than last 8 bytes.
331 ctr64_inc(gctx->iv + gctx->ivlen - 8);
335 case EVP_CTRL_GCM_SET_IV_INV:
336 if (gctx->iv_gen == 0 || gctx->key_set == 0
337 || EVP_CIPHER_CTX_is_encrypting(c))
339 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
340 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
344 case EVP_CTRL_AEAD_TLS1_AAD:
345 /* Save the AAD for later use */
346 if (arg != EVP_AEAD_TLS1_AAD_LEN)
348 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
349 gctx->tls_aad_len = arg;
352 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
353 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
354 /* Correct length for explicit IV */
355 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
357 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
358 /* If decrypting correct for tag too */
359 if (!EVP_CIPHER_CTX_is_encrypting(c)) {
360 if (len < EVP_GCM_TLS_TAG_LEN)
362 len -= EVP_GCM_TLS_TAG_LEN;
364 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
365 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
367 /* Extra padding: tag appended to record */
368 return EVP_GCM_TLS_TAG_LEN;
372 EVP_CIPHER_CTX *out = ptr;
373 EVP_ARIA_GCM_CTX *gctx_out = EVP_C_DATA(EVP_ARIA_GCM_CTX,out);
375 if (gctx->gcm.key != &gctx->ks)
377 gctx_out->gcm.key = &gctx_out->ks;
379 if (gctx->iv == c->iv)
380 gctx_out->iv = out->iv;
382 if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
383 ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
386 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
/*
 * aria_gcm_tls_cipher: one-shot TLS record path.  In-place only; the
 * record must hold at least explicit IV + tag.  The explicit IV is
 * generated (encrypt) or consumed (decrypt) via the ctrl calls above,
 * the saved AAD is fed to GHASH, then the payload is processed and the
 * tag is appended (encrypt) or verified with CRYPTO_memcmp and the
 * plaintext wiped on mismatch (decrypt).  tls_aad_len is reset so the
 * AAD cannot be reused for another record.
 * NOTE(review): the declaration of 'rv', several guards, else branches
 * and the final return are missing from this excerpt.
 */
397 static int aria_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
398 const unsigned char *in, size_t len)
400 EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,ctx);
403 /* Encrypt/decrypt must be performed in place */
405 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
408 * Set IV from start of buffer or generate IV and write to start of
411 if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CIPHER_CTX_is_encrypting(ctx) ?
412 EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
413 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
416 if (CRYPTO_gcm128_aad(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
419 /* Fix buffer and length to point to payload */
420 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
421 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
422 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
423 if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
424 /* Encrypt payload */
425 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
428 /* Finally write tag */
429 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
430 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
433 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
436 CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
437 EVP_GCM_TLS_TAG_LEN);
438 /* If tag mismatch wipe buffer */
439 if (CRYPTO_memcmp(EVP_CIPHER_CTX_buf_noconst(ctx), in + len,
440 EVP_GCM_TLS_TAG_LEN)) {
441 OPENSSL_cleanse(out, len);
449 gctx->tls_aad_len = -1;
/*
 * aria_gcm_cipher: general (non-TLS) GCM update/final entry point.
 * Delegates to aria_gcm_tls_cipher when TLS AAD has been set.  With
 * out == NULL the input is AAD; otherwise data is encrypted/decrypted.
 * On final: decrypt verifies the caller-supplied tag via
 * CRYPTO_gcm128_finish (taglen must have been set), encrypt emits a
 * 16-byte tag into the ctx buffer; the IV is then marked unusable.
 * NOTE(review): the key_set/iv_set guards, NULL-in final branch, return
 * statements and iv_set reset are missing from this excerpt.
 */
453 static int aria_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
454 const unsigned char *in, size_t len)
456 EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,ctx);
458 /* If not set up, return error */
462 if (gctx->tls_aad_len >= 0)
463 return aria_gcm_tls_cipher(ctx, out, in, len);
469 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
471 } else if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
472 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
475 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
480 if (!EVP_CIPHER_CTX_is_encrypting(ctx)) {
481 if (gctx->taglen < 0)
483 if (CRYPTO_gcm128_finish(&gctx->gcm,
484 EVP_CIPHER_CTX_buf_noconst(ctx),
490 CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx), 16);
492 /* Don't reuse the IV */
/*
 * aria_gcm_cleanup: release the separately heap-allocated IV buffer; if
 * gctx->iv still aliases the ctx's built-in IV it is not freed.
 * NOTE(review): the return statement is not visible in this excerpt.
 */
497 static int aria_gcm_cleanup(EVP_CIPHER_CTX *ctx)
499 EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX, ctx);
501 if (gctx->iv != ctx->iv)
502 OPENSSL_free(gctx->iv);
/*
 * aria_ccm_init_key: build the ARIA encrypt schedule (CCM only runs the
 * block cipher forwards) and initialise the CCM128 context with the
 * current M (tag length) and L (length-field size); a supplied IV is
 * copied into the ctx IV, using the 15 - L nonce length.
 * NOTE(review): guards, 'ret', error returns and key_set/iv_set updates
 * are missing from this excerpt.
 */
507 static int aria_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
508 const unsigned char *iv, int enc)
511 EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,ctx);
517 ret = ossl_aria_set_encrypt_key(key,
518 EVP_CIPHER_CTX_get_key_length(ctx) * 8,
520 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
521 &cctx->ks, (block128_f) ossl_aria_encrypt);
523 ERR_raise(ERR_LIB_EVP,EVP_R_ARIA_KEY_SETUP_FAILED);
530 memcpy(ctx->iv, iv, 15 - cctx->L);
/*
 * aria_ccm_ctrl: control dispatcher for ARIA-CCM.  Visible operations:
 * IV length query (15 - L), TLS AAD length fix-up, TLS fixed-IV copy,
 * L parameter set (2..8, which determines the nonce length), tag
 * set (even values 4..16; supplying a tag is a decrypt-only operation)
 * and tag get, plus EVP_CTRL_COPY fix-up of the ccm.key pointer.
 * NOTE(review): the switch itself, several case labels, break/return
 * statements, tag_set/M updates and closing braces are missing from
 * this excerpt -- do not restructure without the full source.
 */
536 static int aria_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
538 EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,c);
548 cctx->tls_aad_len = -1;
551 case EVP_CTRL_GET_IVLEN:
552 *(int *)ptr = 15 - cctx->L;
555 case EVP_CTRL_AEAD_TLS1_AAD:
556 /* Save the AAD for later use */
557 if (arg != EVP_AEAD_TLS1_AAD_LEN)
559 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
560 cctx->tls_aad_len = arg;
563 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
564 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
565 /* Correct length for explicit IV */
566 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
568 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
569 /* If decrypting correct for tag too */
570 if (!EVP_CIPHER_CTX_is_encrypting(c)) {
575 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
576 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
578 /* Extra padding: tag appended to record */
581 case EVP_CTRL_CCM_SET_IV_FIXED:
582 /* Sanity check length */
583 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
585 /* Just copy to first part of IV */
586 memcpy(c->iv, ptr, arg);
589 case EVP_CTRL_AEAD_SET_IVLEN:
592 case EVP_CTRL_CCM_SET_L:
593 if (arg < 2 || arg > 8)
597 case EVP_CTRL_AEAD_SET_TAG:
598 if ((arg & 1) || arg < 4 || arg > 16)
600 if (EVP_CIPHER_CTX_is_encrypting(c) && ptr)
604 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
609 case EVP_CTRL_AEAD_GET_TAG:
610 if (!EVP_CIPHER_CTX_is_encrypting(c) || !cctx->tag_set)
612 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
621 EVP_CIPHER_CTX *out = ptr;
622 EVP_ARIA_CCM_CTX *cctx_out = EVP_C_DATA(EVP_ARIA_CCM_CTX,out);
624 if (cctx->ccm.key != &cctx->ks)
626 cctx_out->ccm.key = &cctx_out->ks;
/*
 * aria_ccm_tls_cipher: one-shot TLS record path for CCM.  In-place
 * only; the record must hold explicit IV + tag (M bytes).  Assembles
 * the nonce from the fixed part in ctx->iv and the explicit IV in the
 * record, feeds the saved AAD, then encrypts and appends the tag, or
 * decrypts and verifies the tag (wiping the output on failure).  The
 * cctx->str path presumably selects an accelerated ccm64 stream
 * implementation when available -- confirm against the full file.
 * NOTE(review): guards' return statements, the setiv length argument
 * continuation and several braces are missing from this excerpt.
 */
636 static int aria_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
637 const unsigned char *in, size_t len)
639 EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,ctx);
640 CCM128_CONTEXT *ccm = &cctx->ccm;
642 /* Encrypt/decrypt must be performed in place */
643 if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
645 /* If encrypting set explicit IV from sequence number (start of AAD) */
646 if (EVP_CIPHER_CTX_is_encrypting(ctx))
647 memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
648 EVP_CCM_TLS_EXPLICIT_IV_LEN);
649 /* Get rest of IV from explicit IV */
650 memcpy(ctx->iv + EVP_CCM_TLS_FIXED_IV_LEN, in,
651 EVP_CCM_TLS_EXPLICIT_IV_LEN);
652 /* Correct length value */
653 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
654 if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L,
658 CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx),
660 /* Fix buffer to point to payload */
661 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
662 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
663 if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
664 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, cctx->str)
665 : CRYPTO_ccm128_encrypt(ccm, in, out, len))
667 if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
669 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
671 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len, cctx->str)
672 : !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
673 unsigned char tag[16];
674 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
675 if (!CRYPTO_memcmp(tag, in + len, cctx->M))
679 OPENSSL_cleanse(out, len);
/*
 * aria_ccm_cipher: general (non-TLS) CCM entry point.  Delegates to
 * aria_ccm_tls_cipher when TLS AAD is set.  Because CCM needs the total
 * message length up front, a NULL-out call with data pre-sets the
 * length via CRYPTO_ccm128_setiv; a NULL-out call otherwise feeds AAD.
 * Decryption requires the tag to have been set beforehand; the computed
 * tag is compared against the ctx buffer with CRYPTO_memcmp and the
 * plaintext wiped on mismatch.
 * NOTE(review): key_set/iv_set guards, len_set/tag_set bookkeeping,
 * else branches and returns are missing from this excerpt.
 */
684 static int aria_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
685 const unsigned char *in, size_t len)
687 EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,ctx);
688 CCM128_CONTEXT *ccm = &cctx->ccm;
690 /* If not set up, return error */
694 if (cctx->tls_aad_len >= 0)
695 return aria_ccm_tls_cipher(ctx, out, in, len);
697 /* EVP_*Final() doesn't return any data */
698 if (in == NULL && out != NULL)
706 if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
711 /* If have AAD need message length */
712 if (!cctx->len_set && len)
714 CRYPTO_ccm128_aad(ccm, in, len);
718 /* The tag must be set before actually decrypting data */
719 if (!EVP_CIPHER_CTX_is_encrypting(ctx) && !cctx->tag_set)
722 /* If not set length yet do it */
723 if (!cctx->len_set) {
724 if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
728 if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
729 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, cctx->str)
730 : CRYPTO_ccm128_encrypt(ccm, in, out, len))
736 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
738 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
739 unsigned char tag[16];
740 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
741 if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
747 OPENSSL_cleanse(out, len);
/*
 * AEAD cipher tables: ARIA-GCM needs a real cleanup (heap IV), ARIA-CCM
 * has none.  ARIA_AUTH_FLAGS marks these as fully custom AEAD ciphers
 * (custom IV/IV-length, custom cipher entry, custom copy, ctrl init,
 * always-call-init).  BLOCK_CIPHER_aead emits the static EVP_CIPHER and
 * its public EVP_aria_<keylen>_<gcm|ccm>() accessor; instantiated below
 * for 128/192/256-bit keys with stream-style block size 1 and a 12-byte
 * default IV.  Comments are kept outside the macro so the backslash
 * continuations are untouched.
 */
755 #define aria_ccm_cleanup NULL
757 #define ARIA_AUTH_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
758 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
759 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
760 | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_FLAG_AEAD_CIPHER \
761 | EVP_CIPH_CUSTOM_IV_LENGTH)
763 #define BLOCK_CIPHER_aead(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
764 static const EVP_CIPHER aria_##keylen##_##mode = { \
765 nid##_##keylen##_##nmode, \
766 blocksize, keylen/8, ivlen, \
767 ARIA_AUTH_FLAGS|EVP_CIPH_##MODE##_MODE, \
769 aria_##mode##_init_key, \
770 aria_##mode##_cipher, \
771 aria_##mode##_cleanup, \
772 sizeof(EVP_ARIA_##MODE##_CTX), \
773 NULL,NULL,aria_##mode##_ctrl,NULL }; \
774 const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
775 { return (EVP_CIPHER*)&aria_##keylen##_##mode; }
777 BLOCK_CIPHER_aead(NID_aria, 128, 1, 12, gcm, gcm, GCM, 0)
778 BLOCK_CIPHER_aead(NID_aria, 192, 1, 12, gcm, gcm, GCM, 0)
779 BLOCK_CIPHER_aead(NID_aria, 256, 1, 12, gcm, gcm, GCM, 0)
781 BLOCK_CIPHER_aead(NID_aria, 128, 1, 12, ccm, ccm, CCM, 0)
782 BLOCK_CIPHER_aead(NID_aria, 192, 1, 12, ccm, ccm, CCM, 0)
783 BLOCK_CIPHER_aead(NID_aria, 256, 1, 12, ccm, ccm, CCM, 0)