/*
 * Copyright 2017 The OpenSSL Project Authors. All Rights Reserved.
 * Copyright (c) 2017, Oracle and/or its affiliates.  All rights reserved.
 *
 * Licensed under the OpenSSL license (the "License").  You may not use
 * this file except in compliance with the License.  You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */
11 #include "internal/cryptlib.h"
12 #ifndef OPENSSL_NO_ARIA
13 # include <openssl/evp.h>
14 # include <openssl/modes.h>
15 # include <openssl/rand.h>
16 # include "internal/aria.h"
17 # include "internal/evp_int.h"
18 # include "modes_lcl.h"
19 # include "evp_locl.h"
21 /* ARIA subkey Structure */
26 /* ARIA GCM context */
31 } ks; /* ARIA subkey to use */
32 int key_set; /* Set if key initialised */
33 int iv_set; /* Set if an iv is set */
35 unsigned char *iv; /* Temporary IV store */
36 int ivlen; /* IV length */
38 int iv_gen; /* It is OK to generate IVs */
39 int tls_aad_len; /* TLS AAD length */
42 /* ARIA CCM context */
47 } ks; /* ARIA key schedule to use */
48 int key_set; /* Set if key initialised */
49 int iv_set; /* Set if an iv is set */
50 int tag_set; /* Set if tag is valid */
51 int len_set; /* Set if message length set */
52 int L, M; /* L and M parameters from RFC3610 */
53 int tls_aad_len; /* TLS AAD length */
58 /* The subkey for ARIA is generated. */
59 static int aria_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
60 const unsigned char *iv, int enc)
63 int mode = EVP_CIPHER_CTX_mode(ctx);
65 if (enc || (mode != EVP_CIPH_ECB_MODE && mode != EVP_CIPH_CBC_MODE))
66 ret = aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
67 EVP_CIPHER_CTX_get_cipher_data(ctx));
69 ret = aria_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
70 EVP_CIPHER_CTX_get_cipher_data(ctx));
72 EVPerr(EVP_F_ARIA_INIT_KEY,EVP_R_ARIA_KEY_SETUP_FAILED);
78 static void aria_cbc_encrypt(const unsigned char *in, unsigned char *out,
79 size_t len, const ARIA_KEY *key,
80 unsigned char *ivec, const int enc)
84 CRYPTO_cbc128_encrypt(in, out, len, key, ivec,
85 (block128_f) aria_encrypt);
87 CRYPTO_cbc128_decrypt(in, out, len, key, ivec,
88 (block128_f) aria_encrypt);
91 static void aria_cfb128_encrypt(const unsigned char *in, unsigned char *out,
92 size_t length, const ARIA_KEY *key,
93 unsigned char *ivec, int *num, const int enc)
96 CRYPTO_cfb128_encrypt(in, out, length, key, ivec, num, enc,
97 (block128_f) aria_encrypt);
100 static void aria_cfb1_encrypt(const unsigned char *in, unsigned char *out,
101 size_t length, const ARIA_KEY *key,
102 unsigned char *ivec, int *num, const int enc)
104 CRYPTO_cfb128_1_encrypt(in, out, length, key, ivec, num, enc,
105 (block128_f) aria_encrypt);
108 static void aria_cfb8_encrypt(const unsigned char *in, unsigned char *out,
109 size_t length, const ARIA_KEY *key,
110 unsigned char *ivec, int *num, const int enc)
112 CRYPTO_cfb128_8_encrypt(in, out, length, key, ivec, num, enc,
113 (block128_f) aria_encrypt);
116 static void aria_ecb_encrypt(const unsigned char *in, unsigned char *out,
117 const ARIA_KEY *key, const int enc)
119 aria_encrypt(in, out, key);
122 static void aria_ofb128_encrypt(const unsigned char *in, unsigned char *out,
123 size_t length, const ARIA_KEY *key,
124 unsigned char *ivec, int *num)
126 CRYPTO_ofb128_encrypt(in, out, length, key, ivec, num,
127 (block128_f) aria_encrypt);
130 IMPLEMENT_BLOCK_CIPHER(aria_128, ks, aria, EVP_ARIA_KEY,
131 NID_aria_128, 16, 16, 16, 128,
132 0, aria_init_key, NULL,
133 EVP_CIPHER_set_asn1_iv,
134 EVP_CIPHER_get_asn1_iv,
136 IMPLEMENT_BLOCK_CIPHER(aria_192, ks, aria, EVP_ARIA_KEY,
137 NID_aria_192, 16, 24, 16, 128,
138 0, aria_init_key, NULL,
139 EVP_CIPHER_set_asn1_iv,
140 EVP_CIPHER_get_asn1_iv,
142 IMPLEMENT_BLOCK_CIPHER(aria_256, ks, aria, EVP_ARIA_KEY,
143 NID_aria_256, 16, 32, 16, 128,
144 0, aria_init_key, NULL,
145 EVP_CIPHER_set_asn1_iv,
146 EVP_CIPHER_get_asn1_iv,
/*
 * Expand the generic CFB-r implementation macro for ARIA: one EVP_CIPHER
 * per (key size, feedback width) pair, 16-byte block, no extra flags.
 */
# define IMPLEMENT_ARIA_CFBR(ksize,cbits) \
IMPLEMENT_CFBR(aria,aria,EVP_ARIA_KEY,ks,ksize,cbits,16,0)
IMPLEMENT_ARIA_CFBR(128,1)
IMPLEMENT_ARIA_CFBR(192,1)
IMPLEMENT_ARIA_CFBR(256,1)
IMPLEMENT_ARIA_CFBR(128,8)
IMPLEMENT_ARIA_CFBR(192,8)
IMPLEMENT_ARIA_CFBR(256,8)
/*
 * Expand to a const EVP_CIPHER definition plus its public accessor for one
 * ARIA key length / mode combination (used below for CTR).
 */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aria_init_key,                  \
        aria_##mode##_cipher,           \
        NULL,                           \
        sizeof(EVP_ARIA_KEY),           \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return &aria_##keylen##_##mode; }
170 static int aria_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
171 const unsigned char *in, size_t len)
173 unsigned int num = EVP_CIPHER_CTX_num(ctx);
174 EVP_ARIA_KEY *dat = EVP_C_DATA(EVP_ARIA_KEY,ctx);
176 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
177 EVP_CIPHER_CTX_iv_noconst(ctx),
178 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
179 (block128_f) aria_encrypt);
180 EVP_CIPHER_CTX_set_num(ctx, num);
/* CTR-mode EVP_CIPHERs: block size 1 (stream), 16-byte IV/counter */
BLOCK_CIPHER_generic(NID_aria, 128, 1, 16, ctr, ctr, CTR, 0)
BLOCK_CIPHER_generic(NID_aria, 192, 1, 16, ctr, ctr, CTR, 0)
BLOCK_CIPHER_generic(NID_aria, 256, 1, 16, ctr, ctr, CTR, 0)
/* Authenticated cipher modes (GCM/CCM) */

/*
 * increment counter (64-bit int) by 1: big-endian increment of the 8-byte
 * invocation field, propagating the carry from the least significant byte.
 */
static void ctr64_inc(unsigned char *counter)
{
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)
            return;             /* no carry: done */
    } while (n);
}
206 static int aria_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
207 const unsigned char *iv, int enc)
210 EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,ctx);
215 ret = aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
217 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
218 (block128_f) aria_encrypt);
220 EVPerr(EVP_F_ARIA_GCM_INIT_KEY,EVP_R_ARIA_KEY_SETUP_FAILED);
225 * If we have an iv can set it directly, otherwise use saved IV.
227 if (iv == NULL && gctx->iv_set)
230 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
235 /* If key set use IV, otherwise copy */
237 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
239 memcpy(gctx->iv, iv, gctx->ivlen);
246 static int aria_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
248 EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,c);
254 gctx->ivlen = EVP_CIPHER_CTX_iv_length(c);
255 gctx->iv = EVP_CIPHER_CTX_iv_noconst(c);
258 gctx->tls_aad_len = -1;
261 case EVP_CTRL_AEAD_SET_IVLEN:
264 /* Allocate memory for IV if needed */
265 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
266 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
267 OPENSSL_free(gctx->iv);
268 gctx->iv = OPENSSL_malloc(arg);
269 if (gctx->iv == NULL)
275 case EVP_CTRL_AEAD_SET_TAG:
276 if (arg <= 0 || arg > 16 || EVP_CIPHER_CTX_encrypting(c))
278 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
282 case EVP_CTRL_AEAD_GET_TAG:
283 if (arg <= 0 || arg > 16 || !EVP_CIPHER_CTX_encrypting(c)
286 memcpy(ptr, EVP_CIPHER_CTX_buf_noconst(c), arg);
289 case EVP_CTRL_GCM_SET_IV_FIXED:
290 /* Special case: -1 length restores whole IV */
292 memcpy(gctx->iv, ptr, gctx->ivlen);
297 * Fixed field must be at least 4 bytes and invocation field at least
300 if ((arg < 4) || (gctx->ivlen - arg) < 8)
303 memcpy(gctx->iv, ptr, arg);
304 if (EVP_CIPHER_CTX_encrypting(c)
305 && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
310 case EVP_CTRL_GCM_IV_GEN:
311 if (gctx->iv_gen == 0 || gctx->key_set == 0)
313 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
314 if (arg <= 0 || arg > gctx->ivlen)
316 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
318 * Invocation field will be at least 8 bytes in size and so no need
319 * to check wrap around or increment more than last 8 bytes.
321 ctr64_inc(gctx->iv + gctx->ivlen - 8);
325 case EVP_CTRL_GCM_SET_IV_INV:
326 if (gctx->iv_gen == 0 || gctx->key_set == 0
327 || EVP_CIPHER_CTX_encrypting(c))
329 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
330 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
334 case EVP_CTRL_AEAD_TLS1_AAD:
335 /* Save the AAD for later use */
336 if (arg != EVP_AEAD_TLS1_AAD_LEN)
338 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
339 gctx->tls_aad_len = arg;
342 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
343 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
344 /* Correct length for explicit IV */
345 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
347 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
348 /* If decrypting correct for tag too */
349 if (!EVP_CIPHER_CTX_encrypting(c)) {
350 if (len < EVP_GCM_TLS_TAG_LEN)
352 len -= EVP_GCM_TLS_TAG_LEN;
354 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
355 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
357 /* Extra padding: tag appended to record */
358 return EVP_GCM_TLS_TAG_LEN;
362 EVP_CIPHER_CTX *out = ptr;
363 EVP_ARIA_GCM_CTX *gctx_out = EVP_C_DATA(EVP_ARIA_GCM_CTX,out);
365 if (gctx->gcm.key != &gctx->ks)
367 gctx_out->gcm.key = &gctx_out->ks;
369 if (gctx->iv == EVP_CIPHER_CTX_iv_noconst(c))
370 gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
372 gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
373 if (gctx_out->iv == NULL)
375 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
386 static int aria_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
387 const unsigned char *in, size_t len)
389 EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,ctx);
392 /* Encrypt/decrypt must be performed in place */
394 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
397 * Set IV from start of buffer or generate IV and write to start of
400 if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CIPHER_CTX_encrypting(ctx) ?
401 EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
402 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
405 if (CRYPTO_gcm128_aad(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
408 /* Fix buffer and length to point to payload */
409 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
410 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
411 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
412 if (EVP_CIPHER_CTX_encrypting(ctx)) {
413 /* Encrypt payload */
414 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
417 /* Finally write tag */
418 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
419 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
422 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
425 CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
426 EVP_GCM_TLS_TAG_LEN);
427 /* If tag mismatch wipe buffer */
428 if (CRYPTO_memcmp(EVP_CIPHER_CTX_buf_noconst(ctx), in + len,
429 EVP_GCM_TLS_TAG_LEN)) {
430 OPENSSL_cleanse(out, len);
438 gctx->tls_aad_len = -1;
442 static int aria_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
443 const unsigned char *in, size_t len)
445 EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX,ctx);
447 /* If not set up, return error */
451 if (gctx->tls_aad_len >= 0)
452 return aria_gcm_tls_cipher(ctx, out, in, len);
458 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
460 } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
461 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
464 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
469 if (!EVP_CIPHER_CTX_encrypting(ctx)) {
470 if (gctx->taglen < 0)
472 if (CRYPTO_gcm128_finish(&gctx->gcm,
473 EVP_CIPHER_CTX_buf_noconst(ctx),
479 CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx), 16);
481 /* Don't reuse the IV */
486 static int aria_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
487 const unsigned char *iv, int enc)
490 EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,ctx);
496 ret = aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
498 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
499 &cctx->ks, (block128_f) aria_encrypt);
501 EVPerr(EVP_F_ARIA_CCM_INIT_KEY,EVP_R_ARIA_KEY_SETUP_FAILED);
508 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
514 static int aria_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
516 EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,c);
526 cctx->tls_aad_len = -1;
529 case EVP_CTRL_AEAD_TLS1_AAD:
530 /* Save the AAD for later use */
531 if (arg != EVP_AEAD_TLS1_AAD_LEN)
533 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
534 cctx->tls_aad_len = arg;
537 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
538 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
539 /* Correct length for explicit IV */
540 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
542 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
543 /* If decrypting correct for tag too */
544 if (!EVP_CIPHER_CTX_encrypting(c)) {
549 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
550 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
552 /* Extra padding: tag appended to record */
555 case EVP_CTRL_CCM_SET_IV_FIXED:
556 /* Sanity check length */
557 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
559 /* Just copy to first part of IV */
560 memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
563 case EVP_CTRL_AEAD_SET_IVLEN:
566 case EVP_CTRL_CCM_SET_L:
567 if (arg < 2 || arg > 8)
571 case EVP_CTRL_AEAD_SET_TAG:
572 if ((arg & 1) || arg < 4 || arg > 16)
574 if (EVP_CIPHER_CTX_encrypting(c) && ptr)
578 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
583 case EVP_CTRL_AEAD_GET_TAG:
584 if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
586 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
595 EVP_CIPHER_CTX *out = ptr;
596 EVP_ARIA_CCM_CTX *cctx_out = EVP_C_DATA(EVP_ARIA_CCM_CTX,out);
598 if (cctx->ccm.key != &cctx->ks)
600 cctx_out->ccm.key = &cctx_out->ks;
610 static int aria_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
611 const unsigned char *in, size_t len)
613 EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,ctx);
614 CCM128_CONTEXT *ccm = &cctx->ccm;
616 /* Encrypt/decrypt must be performed in place */
617 if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
619 /* If encrypting set explicit IV from sequence number (start of AAD) */
620 if (EVP_CIPHER_CTX_encrypting(ctx))
621 memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
622 EVP_CCM_TLS_EXPLICIT_IV_LEN);
623 /* Get rest of IV from explicit IV */
624 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
625 EVP_CCM_TLS_EXPLICIT_IV_LEN);
626 /* Correct length value */
627 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
628 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
632 CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
633 /* Fix buffer to point to payload */
634 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
635 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
636 if (EVP_CIPHER_CTX_encrypting(ctx)) {
637 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, cctx->str)
638 : CRYPTO_ccm128_encrypt(ccm, in, out, len))
640 if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
642 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
644 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len, cctx->str)
645 : !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
646 unsigned char tag[16];
647 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
648 if (!CRYPTO_memcmp(tag, in + len, cctx->M))
652 OPENSSL_cleanse(out, len);
657 static int aria_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
658 const unsigned char *in, size_t len)
660 EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX,ctx);
661 CCM128_CONTEXT *ccm = &cctx->ccm;
663 /* If not set up, return error */
667 if (cctx->tls_aad_len >= 0)
668 return aria_ccm_tls_cipher(ctx, out, in, len);
670 /* EVP_*Final() doesn't return any data */
671 if (in == NULL && out != NULL)
677 if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
681 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
687 /* If have AAD need message length */
688 if (!cctx->len_set && len)
690 CRYPTO_ccm128_aad(ccm, in, len);
693 /* If not set length yet do it */
694 if (!cctx->len_set) {
695 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
700 if (EVP_CIPHER_CTX_encrypting(ctx)) {
701 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, cctx->str)
702 : CRYPTO_ccm128_encrypt(ccm, in, out, len))
708 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
710 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
711 unsigned char tag[16];
712 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
713 if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
719 OPENSSL_cleanse(out, len);
/* Common EVP flags shared by the AEAD (GCM/CCM) ARIA ciphers below */
#define ARIA_AUTH_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                         | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                         | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                         | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_FLAG_AEAD_CIPHER)
/*
 * Expand to a const EVP_CIPHER definition plus its public accessor for one
 * AEAD (GCM or CCM) ARIA key length / mode combination.
 */
#define BLOCK_CIPHER_aead(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,       \
        blocksize, keylen/8, ivlen,     \
        ARIA_AUTH_FLAGS|EVP_CIPH_##MODE##_MODE, \
        aria_##mode##_init_key,         \
        aria_##mode##_cipher,           \
        NULL,                           \
        sizeof(EVP_ARIA_##MODE##_CTX),  \
        NULL,NULL,aria_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return (EVP_CIPHER*)&aria_##keylen##_##mode; }
/* AEAD EVP_CIPHERs: block size 1 (stream-like), 12-byte default IV */
BLOCK_CIPHER_aead(NID_aria, 128, 1, 12, gcm, gcm, GCM, 0)
BLOCK_CIPHER_aead(NID_aria, 192, 1, 12, gcm, gcm, GCM, 0)
BLOCK_CIPHER_aead(NID_aria, 256, 1, 12, gcm, gcm, GCM, 0)
BLOCK_CIPHER_aead(NID_aria, 128, 1, 12, ccm, ccm, CCM, 0)
BLOCK_CIPHER_aead(NID_aria, 192, 1, 12, ccm, ccm, CCM, 0)
BLOCK_CIPHER_aead(NID_aria, 256, 1, 12, ccm, ccm, CCM, 0)