diff --git a/engines/e_padlock.c b/engines/e_padlock.c
index fa323b15abc10de3b9fd8e4874c4e43afea7ba51..00732edb981b8aa8fcb90cc2aa60d6115270722d 100644
--- a/engines/e_padlock.c
+++ b/engines/e_padlock.c
@@ -112,8 +112,8 @@ static ENGINE *ENGINE_padlock(void);
 #  endif
 
 #  ifdef OPENSSL_NO_DYNAMIC_ENGINE
-
-void ENGINE_load_padlock(void)
+void engine_load_padlock_internal(void);
+void engine_load_padlock_internal(void)
 {
 /* On non-x86 CPUs it just returns. */
 #   ifdef COMPILE_HW_PADLOCK
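
Note: for context, a minimal application-side sketch of how the statically built engine is reached once the internal loader above has registered it. ENGINE_load_builtin_engines(), ENGINE_by_id() and the other ENGINE_* calls are the public API; use_padlock() is a hypothetical helper, and whether the engine actually registers still depends on COMPILE_HW_PADLOCK and on the CPU supporting PadLock.

    #include <openssl/engine.h>

    /* Hypothetical helper: look up the built-in padlock engine and make its
     * ciphers the process-wide defaults.  Returns 1 on success, 0 otherwise. */
    static int use_padlock(void)
    {
        ENGINE *e;
        int ok = 0;

        ENGINE_load_builtin_engines();          /* runs the internal loaders */
        if ((e = ENGINE_by_id("padlock")) == NULL)
            return 0;                           /* not compiled in, or no PadLock */
        if (ENGINE_init(e)) {                   /* take a functional reference */
            ok = ENGINE_set_default_ciphers(e); /* default table adds its own refs */
            ENGINE_finish(e);
        }
        ENGINE_free(e);                         /* release the structural reference */
        return ok;
    }
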
@@ -361,7 +361,7 @@ static int padlock_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
 #    define NEAREST_ALIGNED(ptr) ( (unsigned char *)(ptr) +         \
         ( (0x10 - ((size_t)(ptr) & 0x0F)) & 0x0F )      )
 #    define ALIGNED_CIPHER_DATA(ctx) ((struct padlock_cipher_data *)\
-        NEAREST_ALIGNED(ctx->cipher_data))
+        NEAREST_ALIGNED(EVP_CIPHER_CTX_cipher_data(ctx)))
 
 static int
 padlock_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
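
Note: a small standalone illustration of what NEAREST_ALIGNED() does and why this patch later reserves sizeof(struct padlock_cipher_data) + 16 as the implementation context size: the pointer is moved forward by at most 15 bytes to the next 16-byte boundary, which the PadLock unit requires for its control word. The nearest_aligned() helper below just restates the macro outside the engine.

    #include <stdio.h>
    #include <stddef.h>

    /* Same rounding as NEAREST_ALIGNED(): advance a pointer to the next
     * 16-byte boundary, or leave it untouched if it is already aligned. */
    static unsigned char *nearest_aligned(void *ptr)
    {
        return (unsigned char *)ptr
               + ((0x10 - ((size_t)ptr & 0x0F)) & 0x0F);
    }

    int main(void)
    {
        unsigned char buf[32];
        unsigned char *p = buf + 3;          /* deliberately misaligned */
        unsigned char *a = nearest_aligned(p);

        /* The adjustment is never more than 15 bytes, hence the "+ 16"
         * of slack in the cipher data allocation. */
        printf("moved by %td byte(s), aligned: %s\n",
               a - p, ((size_t)a & 0x0F) == 0 ? "yes" : "no");
        return 0;
    }
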
@@ -378,9 +378,9 @@ padlock_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
     struct padlock_cipher_data *cdata = ALIGNED_CIPHER_DATA(ctx);
     int ret;
 
-    memcpy(cdata->iv, ctx->iv, AES_BLOCK_SIZE);
+    memcpy(cdata->iv, EVP_CIPHER_CTX_iv(ctx), AES_BLOCK_SIZE);
     if ((ret = padlock_cbc_encrypt(out_arg, in_arg, cdata, nbytes)))
-        memcpy(ctx->iv, cdata->iv, AES_BLOCK_SIZE);
+        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), cdata->iv, AES_BLOCK_SIZE);
     return ret;
 }
 
@@ -391,13 +391,13 @@ padlock_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
     struct padlock_cipher_data *cdata = ALIGNED_CIPHER_DATA(ctx);
     size_t chunk;
 
-    if ((chunk = ctx->num)) {   /* borrow chunk variable */
-        unsigned char *ivp = ctx->iv;
+    if ((chunk = EVP_CIPHER_CTX_num(ctx))) {   /* borrow chunk variable */
+        unsigned char *ivp = EVP_CIPHER_CTX_iv_noconst(ctx);
 
         if (chunk >= AES_BLOCK_SIZE)
             return 0;           /* bogus value */
 
-        if (ctx->encrypt)
+        if (EVP_CIPHER_CTX_encrypting(ctx))
             while (chunk < AES_BLOCK_SIZE && nbytes != 0) {
                 ivp[chunk] = *(out_arg++) = *(in_arg++) ^ ivp[chunk];
                 chunk++, nbytes--;
@@ -408,13 +408,13 @@ padlock_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
                 ivp[chunk++] = c, nbytes--;
             }
 
-        ctx->num = chunk % AES_BLOCK_SIZE;
+        EVP_CIPHER_CTX_set_num(ctx, chunk % AES_BLOCK_SIZE);
     }
 
     if (nbytes == 0)
         return 1;
 
-    memcpy(cdata->iv, ctx->iv, AES_BLOCK_SIZE);
+    memcpy(cdata->iv, EVP_CIPHER_CTX_iv(ctx), AES_BLOCK_SIZE);
 
     if ((chunk = nbytes & ~(AES_BLOCK_SIZE - 1))) {
         if (!padlock_cfb_encrypt(out_arg, in_arg, cdata, chunk))
@@ -427,7 +427,7 @@ padlock_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
 
         out_arg += chunk;
         in_arg += chunk;
-        ctx->num = nbytes;
+        EVP_CIPHER_CTX_set_num(ctx, nbytes);
         if (cdata->cword.b.encdec) {
             cdata->cword.b.encdec = 0;
             padlock_reload_key();
@@ -450,7 +450,7 @@ padlock_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
         }
     }
 
-    memcpy(ctx->iv, cdata->iv, AES_BLOCK_SIZE);
+    memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), cdata->iv, AES_BLOCK_SIZE);
 
     return 1;
 }
@@ -465,8 +465,8 @@ padlock_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
     /*
      * ctx->num is maintained in byte-oriented modes, such as CFB and OFB...
      */
-    if ((chunk = ctx->num)) {   /* borrow chunk variable */
-        unsigned char *ivp = ctx->iv;
+    if ((chunk = EVP_CIPHER_CTX_num(ctx))) {   /* borrow chunk variable */
+        unsigned char *ivp = EVP_CIPHER_CTX_iv_noconst(ctx);
 
         if (chunk >= AES_BLOCK_SIZE)
             return 0;           /* bogus value */
@@ -476,13 +476,13 @@ padlock_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
             chunk++, nbytes--;
         }
 
-        ctx->num = chunk % AES_BLOCK_SIZE;
+        EVP_CIPHER_CTX_set_num(ctx, chunk % AES_BLOCK_SIZE);
     }
 
     if (nbytes == 0)
         return 1;
 
-    memcpy(cdata->iv, ctx->iv, AES_BLOCK_SIZE);
+    memcpy(cdata->iv, EVP_CIPHER_CTX_iv(ctx), AES_BLOCK_SIZE);
 
     if ((chunk = nbytes & ~(AES_BLOCK_SIZE - 1))) {
         if (!padlock_ofb_encrypt(out_arg, in_arg, cdata, chunk))
@@ -495,7 +495,7 @@ padlock_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
 
         out_arg += chunk;
         in_arg += chunk;
-        ctx->num = nbytes;
+        EVP_CIPHER_CTX_set_num(ctx, nbytes);
         padlock_reload_key();   /* empirically found */
         padlock_aes_block(ivp, ivp, cdata);
         padlock_reload_key();   /* empirically found */
@@ -505,7 +505,7 @@ padlock_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
         }
     }
 
-    memcpy(ctx->iv, cdata->iv, AES_BLOCK_SIZE);
+    memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), cdata->iv, AES_BLOCK_SIZE);
 
     return 1;
 }
@@ -524,13 +524,14 @@ padlock_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
                    const unsigned char *in_arg, size_t nbytes)
 {
     struct padlock_cipher_data *cdata = ALIGNED_CIPHER_DATA(ctx);
-    unsigned int num = ctx->num;
+    unsigned int num = EVP_CIPHER_CTX_num(ctx);
 
     CRYPTO_ctr128_encrypt_ctr32(in_arg, out_arg, nbytes,
-                                cdata, ctx->iv, ctx->buf, &num,
+                                cdata, EVP_CIPHER_CTX_iv_noconst(ctx),
+                                EVP_CIPHER_CTX_buf_noconst(ctx), &num,
                                 (ctr128_f) padlock_ctr32_encrypt_glue);
 
-    ctx->num = (size_t)num;
+    EVP_CIPHER_CTX_set_num(ctx, (size_t)num);
     return 1;
 }
 
@@ -545,39 +546,51 @@ padlock_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
  * of preprocessor magic :-)
  */
 #    define DECLARE_AES_EVP(ksize,lmode,umode)      \
-static const EVP_CIPHER padlock_aes_##ksize##_##lmode = {       \
-        NID_aes_##ksize##_##lmode,              \
-        EVP_CIPHER_block_size_##umode,  \
-        AES_KEY_SIZE_##ksize,           \
-        AES_BLOCK_SIZE,                 \
-        0 | EVP_CIPH_##umode##_MODE,    \
-        padlock_aes_init_key,           \
-        padlock_##lmode##_cipher,       \
-        NULL,                           \
-        sizeof(struct padlock_cipher_data) + 16,        \
-        EVP_CIPHER_set_asn1_iv,         \
-        EVP_CIPHER_get_asn1_iv,         \
-        NULL,                           \
-        NULL                            \
+static EVP_CIPHER *_hidden_aes_##ksize##_##lmode = NULL; \
+static const EVP_CIPHER *padlock_aes_##ksize##_##lmode(void) \
+{                                                                       \
+    if (_hidden_aes_##ksize##_##lmode == NULL                           \
+        && ((_hidden_aes_##ksize##_##lmode =                            \
+             EVP_CIPHER_meth_new(NID_aes_##ksize##_##lmode,             \
+                                 EVP_CIPHER_block_size_##umode,         \
+                                 AES_KEY_SIZE_##ksize)) == NULL         \
+            || !EVP_CIPHER_meth_set_iv_length(_hidden_aes_##ksize##_##lmode, \
+                                              AES_BLOCK_SIZE)           \
+            || !EVP_CIPHER_meth_set_flags(_hidden_aes_##ksize##_##lmode, \
+                                          0 | EVP_CIPH_##umode##_MODE)  \
+            || !EVP_CIPHER_meth_set_init(_hidden_aes_##ksize##_##lmode, \
+                                         padlock_aes_init_key)          \
+            || !EVP_CIPHER_meth_set_do_cipher(_hidden_aes_##ksize##_##lmode, \
+                                              padlock_##lmode##_cipher) \
+            || !EVP_CIPHER_meth_set_impl_ctx_size(_hidden_aes_##ksize##_##lmode, \
+                                                  sizeof(struct padlock_cipher_data) + 16) \
+            || !EVP_CIPHER_meth_set_set_asn1_params(_hidden_aes_##ksize##_##lmode, \
+                                                    EVP_CIPHER_set_asn1_iv) \
+            || !EVP_CIPHER_meth_set_get_asn1_params(_hidden_aes_##ksize##_##lmode, \
+                                                    EVP_CIPHER_get_asn1_iv))) { \
+        EVP_CIPHER_meth_free(_hidden_aes_##ksize##_##lmode);            \
+        _hidden_aes_##ksize##_##lmode = NULL;                           \
+    }                                                                   \
+    return _hidden_aes_##ksize##_##lmode;                               \
 }
 
-DECLARE_AES_EVP(128, ecb, ECB);
-DECLARE_AES_EVP(128, cbc, CBC);
-DECLARE_AES_EVP(128, cfb, CFB);
-DECLARE_AES_EVP(128, ofb, OFB);
-DECLARE_AES_EVP(128, ctr, CTR);
+DECLARE_AES_EVP(128, ecb, ECB)
+DECLARE_AES_EVP(128, cbc, CBC)
+DECLARE_AES_EVP(128, cfb, CFB)
+DECLARE_AES_EVP(128, ofb, OFB)
+DECLARE_AES_EVP(128, ctr, CTR)
 
-DECLARE_AES_EVP(192, ecb, ECB);
-DECLARE_AES_EVP(192, cbc, CBC);
-DECLARE_AES_EVP(192, cfb, CFB);
-DECLARE_AES_EVP(192, ofb, OFB);
-DECLARE_AES_EVP(192, ctr, CTR);
+DECLARE_AES_EVP(192, ecb, ECB)
+DECLARE_AES_EVP(192, cbc, CBC)
+DECLARE_AES_EVP(192, cfb, CFB)
+DECLARE_AES_EVP(192, ofb, OFB)
+DECLARE_AES_EVP(192, ctr, CTR)
 
-DECLARE_AES_EVP(256, ecb, ECB);
-DECLARE_AES_EVP(256, cbc, CBC);
-DECLARE_AES_EVP(256, cfb, CFB);
-DECLARE_AES_EVP(256, ofb, OFB);
-DECLARE_AES_EVP(256, ctr, CTR);
+DECLARE_AES_EVP(256, ecb, ECB)
+DECLARE_AES_EVP(256, cbc, CBC)
+DECLARE_AES_EVP(256, cfb, CFB)
+DECLARE_AES_EVP(256, ofb, OFB)
+DECLARE_AES_EVP(256, ctr, CTR)
 
 static int
 padlock_ciphers(ENGINE *e, const EVP_CIPHER **cipher, const int **nids,
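
Note: a minimal de-macro-ized sketch of the lazy EVP_CIPHER_meth_new() pattern that DECLARE_AES_EVP now expands to. The demo_* names and the identity do_cipher are placeholders for illustration, not PadLock code; the real macro additionally sets the implementation context size and the ASN.1 IV handlers.

    #include <string.h>
    #include <openssl/evp.h>
    #include <openssl/objects.h>

    /* Placeholder callbacks with the signatures that EVP_CIPHER_meth_set_init()
     * and EVP_CIPHER_meth_set_do_cipher() expect. */
    static int demo_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                             const unsigned char *iv, int enc)
    {
        return 1;                           /* nothing to set up in this sketch */
    }

    static int demo_do_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t inl)
    {
        memcpy(out, in, inl);               /* identity "cipher", illustration only */
        return 1;
    }

    /* Build the EVP_CIPHER on first use and cache it, freeing the half-built
     * object if any meth_set call fails -- the same shape as the macro above. */
    static EVP_CIPHER *demo_cipher = NULL;

    static const EVP_CIPHER *demo_aes_128_cbc(void)
    {
        if (demo_cipher == NULL
            && ((demo_cipher = EVP_CIPHER_meth_new(NID_aes_128_cbc, 16, 16)) == NULL
                || !EVP_CIPHER_meth_set_iv_length(demo_cipher, 16)
                || !EVP_CIPHER_meth_set_flags(demo_cipher, EVP_CIPH_CBC_MODE)
                || !EVP_CIPHER_meth_set_init(demo_cipher, demo_init_key)
                || !EVP_CIPHER_meth_set_do_cipher(demo_cipher, demo_do_cipher))) {
            EVP_CIPHER_meth_free(demo_cipher);
            demo_cipher = NULL;
        }
        return demo_cipher;
    }
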
@@ -592,51 +605,51 @@ padlock_ciphers(ENGINE *e, const EVP_CIPHER **cipher, const int **nids,
     /* ... or the requested "cipher" otherwise */
     switch (nid) {
     case NID_aes_128_ecb:
-        *cipher = &padlock_aes_128_ecb;
+        *cipher = padlock_aes_128_ecb();
         break;
     case NID_aes_128_cbc:
-        *cipher = &padlock_aes_128_cbc;
+        *cipher = padlock_aes_128_cbc();
         break;
     case NID_aes_128_cfb:
-        *cipher = &padlock_aes_128_cfb;
+        *cipher = padlock_aes_128_cfb();
         break;
     case NID_aes_128_ofb:
-        *cipher = &padlock_aes_128_ofb;
+        *cipher = padlock_aes_128_ofb();
         break;
     case NID_aes_128_ctr:
-        *cipher = &padlock_aes_128_ctr;
+        *cipher = padlock_aes_128_ctr();
         break;
 
     case NID_aes_192_ecb:
-        *cipher = &padlock_aes_192_ecb;
+        *cipher = padlock_aes_192_ecb();
         break;
     case NID_aes_192_cbc:
-        *cipher = &padlock_aes_192_cbc;
+        *cipher = padlock_aes_192_cbc();
         break;
     case NID_aes_192_cfb:
-        *cipher = &padlock_aes_192_cfb;
+        *cipher = padlock_aes_192_cfb();
         break;
     case NID_aes_192_ofb:
-        *cipher = &padlock_aes_192_ofb;
+        *cipher = padlock_aes_192_ofb();
         break;
     case NID_aes_192_ctr:
-        *cipher = &padlock_aes_192_ctr;
+        *cipher = padlock_aes_192_ctr();
         break;
 
     case NID_aes_256_ecb:
-        *cipher = &padlock_aes_256_ecb;
+        *cipher = padlock_aes_256_ecb();
         break;
     case NID_aes_256_cbc:
-        *cipher = &padlock_aes_256_cbc;
+        *cipher = padlock_aes_256_cbc();
         break;
     case NID_aes_256_cfb:
-        *cipher = &padlock_aes_256_cfb;
+        *cipher = padlock_aes_256_cfb();
         break;
     case NID_aes_256_ofb:
-        *cipher = &padlock_aes_256_ofb;
+        *cipher = padlock_aes_256_ofb();
         break;
     case NID_aes_256_ctr:
-        *cipher = &padlock_aes_256_ctr;
+        *cipher = padlock_aes_256_ctr();
         break;
 
     default:
@@ -667,7 +680,7 @@ padlock_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
     if (mode == EVP_CIPH_OFB_MODE || mode == EVP_CIPH_CTR_MODE)
         cdata->cword.b.encdec = 0;
     else
-        cdata->cword.b.encdec = (ctx->encrypt == 0);
+        cdata->cword.b.encdec = (EVP_CIPHER_CTX_encrypting(ctx) == 0);
     cdata->cword.b.rounds = 10 + (key_len - 128) / 32;
     cdata->cword.b.ksize = (key_len - 128) / 64;
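
Note: the two control-word fields set at the end of this hunk follow directly from the key length in bits; a quick standalone check of the arithmetic:

    #include <stdio.h>

    /* rounds/ksize exactly as computed in padlock_aes_init_key() above. */
    int main(void)
    {
        int key_len;

        for (key_len = 128; key_len <= 256; key_len += 64)
            printf("AES-%d: rounds = %d, ksize = %d\n",
                   key_len, 10 + (key_len - 128) / 32, (key_len - 128) / 64);
        return 0;   /* prints 10/0, 12/1 and 14/2 for AES-128/192/256 */
    }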