crypto/modes/modes_lcl.h: let STRICT_ALIGNMENT be on ARMv7.
diff --git a/crypto/modes/modes_lcl.h b/crypto/modes/modes_lcl.h
index 12368fb039ef46f6328cae733656af2d82058d52..9d83e128444e0bb583a8313753a0b5fd5e0ec388 100644
--- a/crypto/modes/modes_lcl.h
+++ b/crypto/modes/modes_lcl.h
@@ -33,23 +33,32 @@ typedef unsigned char u8;
 # undef STRICT_ALIGNMENT
 #endif
 
-#if !defined(PEDANTIC) && !defined(OPENSSL_NO_ASM) && !defined(OPNESSL_NO_INLINE_ASM)
+#if !defined(PEDANTIC) && !defined(OPENSSL_NO_ASM) && !defined(OPENSSL_NO_INLINE_ASM)
 #if defined(__GNUC__) && __GNUC__>=2
 # if defined(__x86_64) || defined(__x86_64__)
 #  define BSWAP8(x) ({ u64 ret=(x);                    \
-                       asm volatile ("bswapq %0"       \
+                       asm ("bswapq %0"                \
                        : "+r"(ret));   ret;            })
 #  define BSWAP4(x) ({ u32 ret=(x);                    \
-                       asm volatile ("bswapl %0"       \
+                       asm ("bswapl %0"                \
                        : "+r"(ret));   ret;            })
-# elif (defined(__i386) || defined(__i386__))
+# elif (defined(__i386) || defined(__i386__)) && !defined(I386_ONLY)
 #  define BSWAP8(x) ({ u32 lo=(u64)(x)>>32,hi=(x);     \
-                       asm volatile ("bswapl %0; bswapl %1"    \
+                       asm ("bswapl %0; bswapl %1"     \
                        : "+r"(hi),"+r"(lo));           \
                        (u64)hi<<32|lo;                 })
 #  define BSWAP4(x) ({ u32 ret=(x);                    \
-                       asm volatile ("bswapl %0"       \
+                       asm ("bswapl %0"                \
                        : "+r"(ret));   ret;            })
+# elif (defined(__arm__) || defined(__arm)) && !defined(STRICT_ALIGNMENT)
+#  define BSWAP8(x) ({ u32 lo=(u64)(x)>>32,hi=(x);     \
+                       asm ("rev %0,%0; rev %1,%1"     \
+                       : "+r"(hi),"+r"(lo));           \
+                       (u64)hi<<32|lo;                 })
+#  define BSWAP4(x) ({ u32 ret;                        \
+                       asm ("rev %0,%1"                \
+                       : "=r"(ret) : "r"((u32)(x)));   \
+                       ret;                            })
 # endif
 #elif defined(_MSC_VER)
 # if _MSC_VER>=1300
@@ -73,3 +82,47 @@ typedef unsigned char u8;
 #define GETU32(p)      ((u32)(p)[0]<<24|(u32)(p)[1]<<16|(u32)(p)[2]<<8|(u32)(p)[3])
 #define PUTU32(p,v)    ((p)[0]=(u8)((v)>>24),(p)[1]=(u8)((v)>>16),(p)[2]=(u8)((v)>>8),(p)[3]=(u8)(v))
 #endif
+
+/* GCM definitions */
+
+typedef struct { u64 hi,lo; } u128;
+
+#ifdef TABLE_BITS
+#undef TABLE_BITS
+#endif
+/*
+ * Even though permitted values for TABLE_BITS are 8, 4 and 1, it should
+ * never be set to 8 [or 1]. For further information see gcm128.c.
+ */
+#define        TABLE_BITS 4
+
+struct gcm128_context {
+       /* Following 6 names follow names in GCM specification */
+       union { u64 u[2]; u32 d[4]; u8 c[16]; size_t t[16/sizeof(size_t)]; }
+         Yi,EKi,EK0,len,Xi,H;
+       /* Relative position of Xi, H and pre-computed Htable is used
+        * in some assembler modules, i.e. don't change the order! */
+#if TABLE_BITS==8
+       u128 Htable[256];
+#else
+       u128 Htable[16];
+       void (*gmult)(u64 Xi[2],const u128 Htable[16]);
+       void (*ghash)(u64 Xi[2],const u128 Htable[16],const u8 *inp,size_t len);
+#endif
+       unsigned int mres, ares;
+       block128_f block;
+       void *key;
+};
+
+struct xts128_context {
+       void      *key1, *key2;
+       block128_f block1,block2;
+};
+
+struct ccm128_context {
+       union { u64 u[2]; u8 c[16]; } nonce, cmac;
+       u64 blocks;
+       block128_f block;
+       void *key;
+};
+
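For reference, a portable sketch of what the byte-order macros above compute (the names bswap4 and the test program below are illustrative, not part of the header): on a little-endian target a big-endian 32-bit load is a plain load followed by a byte swap, which is what the bswapl/rev inline-asm BSWAP4 paths implement; where no BSWAP macro is defined, the shift-and-or GETU32 form is used instead.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Same shape as the header's GETU32: big-endian load via shifts. */
#define GETU32(p) ((uint32_t)(p)[0]<<24|(uint32_t)(p)[1]<<16| \
                   (uint32_t)(p)[2]<<8|(uint32_t)(p)[3])

/* Portable equivalent of BSWAP4 (the asm versions use bswapl or rev). */
static uint32_t bswap4(uint32_t x)
{
    return (x >> 24) | ((x >> 8) & 0xff00) | ((x << 8) & 0xff0000) | (x << 24);
}

int main(void)
{
    const uint8_t in[4] = { 0x01, 0x02, 0x03, 0x04 };
    uint32_t v;

    memcpy(&v, in, sizeof(v));                   /* native-endian load */
    printf("GETU32:        %08x\n", (unsigned)GETU32(in));
    printf("load + bswap4: %08x\n", (unsigned)bswap4(v)); /* equal on little-endian */
    return 0;
}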
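The gcm128_context added above is normally reached through the CRYPTO_gcm128_* wrappers declared in crypto/modes/modes.h. The sketch below is not part of the patch; it assumes the CRYPTO_gcm128_new/setiv/encrypt/tag/release entry points and the low-level AES block API, and the function name gcm_encrypt_sketch is made up for illustration. It shows how the block and key members are populated: any 128-bit block cipher is supplied as a block128_f pointer, and Htable is precomputed from H during initialization.

#include <stddef.h>
#include <openssl/aes.h>
#include <openssl/modes.h>

/* Sketch: AES-128-GCM encryption of one buffer; returns 0 on success. */
int gcm_encrypt_sketch(const unsigned char key[16],
                       const unsigned char iv[12],
                       const unsigned char *in, unsigned char *out,
                       size_t len, unsigned char tag[16])
{
    AES_KEY ks;
    GCM128_CONTEXT *gcm;

    if (AES_set_encrypt_key(key, 128, &ks) != 0)
        return -1;
    /* block and key members of struct gcm128_context are set here;
     * H = E_K(0^128) is computed and Htable is derived from it. */
    gcm = CRYPTO_gcm128_new(&ks, (block128_f)AES_encrypt);
    if (gcm == NULL)
        return -1;
    CRYPTO_gcm128_setiv(gcm, iv, 12);
    if (CRYPTO_gcm128_encrypt(gcm, in, out, len)) {
        CRYPTO_gcm128_release(gcm);
        return -1;
    }
    CRYPTO_gcm128_tag(gcm, tag, 16);
    CRYPTO_gcm128_release(gcm);
    return 0;
}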