const unsigned char ivec[16],
unsigned char cmac[16]);
+#if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
+/*
+ * Stitched AES-NI + GHASH bulk routines (x86_64 assembly — presumably
+ * aesni-gcm-x86_64; confirm against the accompanying .pl module).  Each
+ * call processes as much of |in| as it can and returns the number of
+ * bytes actually handled; |ivec| is the running counter block and |Xi|
+ * the GHASH accumulator, both updated in place.  Callers below add the
+ * return value to gcm.len.u[1] and advance in/out/len by it.
+ */
+size_t aesni_gcm_encrypt(const unsigned char *in,
+			unsigned char *out,
+			size_t len,
+			const void *key,
+			unsigned char ivec[16],
+			u64 *Xi);
+#define AES_gcm_encrypt aesni_gcm_encrypt
+size_t aesni_gcm_decrypt(const unsigned char *in,
+			unsigned char *out,
+			size_t len,
+			const void *key,
+			unsigned char ivec[16],
+			u64 *Xi);
+#define AES_gcm_decrypt aesni_gcm_decrypt
+void gcm_ghash_avx(u64 Xi[2],const u128 Htable[16],const u8 *in,size_t len);
+/*
+ * The fast path is only usable when the context was keyed for the
+ * AES-NI ctr32 routine and the AVX GHASH implementation.
+ */
+#define AES_GCM_ASM(gctx)	(gctx->ctr==aesni_ctr32_encrypt_blocks && \
+				gctx->gcm.ghash==gcm_ghash_avx)
+#endif
+
static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
{
void aes256_t4_ctr32_encrypt (const unsigned char *in, unsigned char *out,
size_t blocks, const AES_KEY *key,
unsigned char *ivec);
+/*
+ * Bulk XTS entry points for the T4 assembly (key1 = data key, key2 =
+ * tweak key, |ivec| = initial tweak).  These are installed as
+ * xctx->stream in aes_t4_init_key below; note there is no 192-bit
+ * variant yet — the corresponding switch cases remain #if 0'd out.
+ */
+void aes128_t4_xts_encrypt (const unsigned char *in, unsigned char *out,
+			    size_t blocks, const AES_KEY *key1,
+			    const AES_KEY *key2, const unsigned char *ivec);
+void aes128_t4_xts_decrypt (const unsigned char *in, unsigned char *out,
+			    size_t blocks, const AES_KEY *key1,
+			    const AES_KEY *key2, const unsigned char *ivec);
+void aes256_t4_xts_encrypt (const unsigned char *in, unsigned char *out,
+			    size_t blocks, const AES_KEY *key1,
+			    const AES_KEY *key2, const unsigned char *ivec);
+void aes256_t4_xts_decrypt (const unsigned char *in, unsigned char *out,
+			    size_t blocks, const AES_KEY *key1,
+			    const AES_KEY *key2, const unsigned char *ivec);
static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc)
if(ret < 0)
{
- EVPerr(EVP_F_AES_INIT_KEY,EVP_R_AES_KEY_SETUP_FAILED);
+ EVPerr(EVP_F_AES_T4_INIT_KEY,EVP_R_AES_KEY_SETUP_FAILED);
return 0;
}
if (key)
{
int bits = ctx->key_len * 4;
+ xctx->stream = NULL;
/* key_len is two AES keys */
if (enc)
{
aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
xctx->xts.block1 = (block128_f)aes_t4_encrypt;
-#if 0 /* not yet */
switch (bits) {
case 128:
xctx->stream = aes128_t4_xts_encrypt;
break;
+#if 0 /* not yet */
case 192:
xctx->stream = aes192_t4_xts_encrypt;
break;
+#endif
case 256:
xctx->stream = aes256_t4_xts_encrypt;
break;
default:
return 0;
}
-#endif
}
else
{
aes_t4_set_decrypt_key(key, ctx->key_len * 4, &xctx->ks1.ks);
xctx->xts.block1 = (block128_f)aes_t4_decrypt;
-#if 0 /* not yet */
switch (bits) {
case 128:
xctx->stream = aes128_t4_xts_decrypt;
break;
+#if 0 /* not yet */
case 192:
xctx->stream = aes192_t4_xts_decrypt;
break;
+#endif
case 256:
xctx->stream = aes256_t4_xts_decrypt;
break;
default:
return 0;
}
-#endif
}
aes_t4_set_encrypt_key(key + ctx->key_len/2,
/* Encrypt payload */
if (gctx->ctr)
{
+ size_t bulk=0;
+#if defined(AES_GCM_ASM)
+ if (len>=32 && AES_GCM_ASM(gctx))
+ {
+ if (CRYPTO_gcm128_encrypt(&gctx->gcm,NULL,NULL,0))
+ return -1;
+
+ bulk = AES_gcm_encrypt(in,out,len,
+ gctx->gcm.key,
+ gctx->gcm.Yi.c,
+ gctx->gcm.Xi.u);
+ gctx->gcm.len.u[1] += bulk;
+ }
+#endif
if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
- in, out, len,
+ in +bulk,
+ out+bulk,
+ len-bulk,
gctx->ctr))
goto err;
}
/* Decrypt */
if (gctx->ctr)
{
+ size_t bulk=0;
+#if defined(AES_GCM_ASM)
+ if (len>=16 && AES_GCM_ASM(gctx))
+ {
+ if (CRYPTO_gcm128_decrypt(&gctx->gcm,NULL,NULL,0))
+ return -1;
+
+ bulk = AES_gcm_decrypt(in,out,len,
+ gctx->gcm.key,
+ gctx->gcm.Yi.c,
+ gctx->gcm.Xi.u);
+ gctx->gcm.len.u[1] += bulk;
+ }
+#endif
if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
- in, out, len,
+ in +bulk,
+ out+bulk,
+ len-bulk,
gctx->ctr))
goto err;
}
{
if (gctx->ctr)
{
+ size_t bulk=0;
+#if defined(AES_GCM_ASM)
+ if (len>=32 && AES_GCM_ASM(gctx))
+ {
+ size_t res = (16-gctx->gcm.mres)%16;
+
+ if (CRYPTO_gcm128_encrypt(&gctx->gcm,
+ in,out,res))
+ return -1;
+
+ bulk = AES_gcm_encrypt(in+res,
+ out+res,len-res, gctx->gcm.key,
+ gctx->gcm.Yi.c,
+ gctx->gcm.Xi.u);
+ gctx->gcm.len.u[1] += bulk;
+ bulk += res;
+ }
+#endif
if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
- in, out, len,
+ in +bulk,
+ out+bulk,
+ len-bulk,
gctx->ctr))
return -1;
}
{
if (gctx->ctr)
{
+ size_t bulk=0;
+#if defined(AES_GCM_ASM)
+ if (len>=16 && AES_GCM_ASM(gctx))
+ {
+ size_t res = (16-gctx->gcm.mres)%16;
+
+ if (CRYPTO_gcm128_decrypt(&gctx->gcm,
+ in,out,res))
+ return -1;
+
+ bulk = AES_gcm_decrypt(in+res,
+ out+res,len-res,
+ gctx->gcm.key,
+ gctx->gcm.Yi.c,
+ gctx->gcm.Xi.u);
+ gctx->gcm.len.u[1] += bulk;
+ bulk += res;
+ }
+#endif
if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
- in, out, len,
+ in +bulk,
+ out+bulk,
+ len-bulk,
gctx->ctr))
return -1;
}