X-Git-Url: https://git.openssl.org/gitweb/?p=openssl.git;a=blobdiff_plain;f=crypto%2Fmodes%2Fmodes_lcl.h;h=35d4d9fa53a024839731e8e10891008ad765e5ad;hp=12368fb039ef46f6328cae733656af2d82058d52;hb=039081b80977e2a5de84e1f88f8b4d025b559956;hpb=f472ec8c2f354314d278e11be567b43630acf090

diff --git a/crypto/modes/modes_lcl.h b/crypto/modes/modes_lcl.h
index 12368fb039..35d4d9fa53 100644
--- a/crypto/modes/modes_lcl.h
+++ b/crypto/modes/modes_lcl.h
@@ -29,27 +29,44 @@ typedef unsigned char u8;
 #if defined(__i386)    || defined(__i386__)    || \
     defined(__x86_64)  || defined(__x86_64__)  || \
     defined(_M_IX86)   || defined(_M_AMD64)    || defined(_M_X64) || \
+    defined(__aarch64__)                       || \
     defined(__s390__)  || defined(__s390x__)
 # undef STRICT_ALIGNMENT
 #endif
 
-#if !defined(PEDANTIC) && !defined(OPENSSL_NO_ASM) && !defined(OPNESSL_NO_INLINE_ASM)
+#if !defined(PEDANTIC) && !defined(OPENSSL_NO_ASM) && !defined(OPENSSL_NO_INLINE_ASM)
 #if defined(__GNUC__) && __GNUC__>=2
 # if defined(__x86_64) || defined(__x86_64__)
 #  define BSWAP8(x) ({ u64 ret=(x);                    \
-                        asm volatile ("bswapq %0"      \
+                        asm ("bswapq %0"               \
                         : "+r"(ret));   ret;           })
 #  define BSWAP4(x) ({ u32 ret=(x);                    \
-                        asm volatile ("bswapl %0"      \
+                        asm ("bswapl %0"               \
                         : "+r"(ret));   ret;           })
-# elif (defined(__i386) || defined(__i386__))
+# elif (defined(__i386) || defined(__i386__)) && !defined(I386_ONLY)
 #  define BSWAP8(x) ({ u32 lo=(u64)(x)>>32,hi=(x);     \
-                        asm volatile ("bswapl %0; bswapl %1"    \
+                        asm ("bswapl %0; bswapl %1"    \
                         : "+r"(hi),"+r"(lo));          \
                         (u64)hi<<32|lo;                })
 #  define BSWAP4(x) ({ u32 ret=(x);                    \
-                        asm volatile ("bswapl %0"      \
+                        asm ("bswapl %0"               \
                         : "+r"(ret));   ret;           })
+# elif defined(__aarch64__)
+#  define BSWAP8(x) ({ u64 ret;                        \
+                        asm ("rev %0,%1"               \
+                        : "=r"(ret) : "r"(x)); ret;    })
+#  define BSWAP4(x) ({ u32 ret;                        \
+                        asm ("rev %w0,%w1"             \
+                        : "=r"(ret) : "r"(x)); ret;    })
+# elif (defined(__arm__) || defined(__arm)) && !defined(STRICT_ALIGNMENT)
+#  define BSWAP8(x) ({ u32 lo=(u64)(x)>>32,hi=(x);     \
+                        asm ("rev %0,%0; rev %1,%1"    \
+                        : "+r"(hi),"+r"(lo));          \
+                        (u64)hi<<32|lo;                })
+#  define BSWAP4(x) ({ u32 ret;                        \
+                        asm ("rev %0,%1"               \
+                        : "=r"(ret) : "r"((u32)(x)));  \
+                        ret;                           })
 # endif
 #elif defined(_MSC_VER)
 # if _MSC_VER>=1300
@@ -73,3 +90,47 @@ typedef unsigned char u8;
 #define GETU32(p)   ((u32)(p)[0]<<24|(u32)(p)[1]<<16|(u32)(p)[2]<<8|(u32)(p)[3])
 #define PUTU32(p,v) ((p)[0]=(u8)((v)>>24),(p)[1]=(u8)((v)>>16),(p)[2]=(u8)((v)>>8),(p)[3]=(u8)(v))
 #endif
+
+/* GCM definitions */
+
+typedef struct { u64 hi,lo; } u128;
+
+#ifdef TABLE_BITS
+#undef TABLE_BITS
+#endif
+/*
+ * Even though permitted values for TABLE_BITS are 8, 4 and 1, it should
+ * never be set to 8 [or 1]. For further information see gcm128.c.
+ */
+#define TABLE_BITS 4
+
+struct gcm128_context {
+        /* Following 6 names follow names in GCM specification */
+        union { u64 u[2]; u32 d[4]; u8 c[16]; size_t t[16/sizeof(size_t)]; }
+          Yi,EKi,EK0,len,Xi,H;
+        /* Relative position of Xi, H and pre-computed Htable is used
+         * in some assembler modules, i.e. don't change the order! */
+#if TABLE_BITS==8
+        u128 Htable[256];
+#else
+        u128 Htable[16];
+        void (*gmult)(u64 Xi[2],const u128 Htable[16]);
+        void (*ghash)(u64 Xi[2],const u128 Htable[16],const u8 *inp,size_t len);
+#endif
+        unsigned int mres, ares;
+        block128_f block;
+        void *key;
+};
+
+struct xts128_context {
+        void *key1, *key2;
+        block128_f block1,block2;
+};
+
+struct ccm128_context {
+        union { u64 u[2]; u8 c[16]; } nonce, cmac;
+        u64 blocks;
+        block128_f block;
+        void *key;
+};
+
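
For reference, every BSWAP4/BSWAP8 variant above (x86 "bswap", ARM/AArch64 "rev") computes a plain byte-order reversal of a 32- or 64-bit value; because the result depends only on the input, the patch can drop the "volatile" qualifier and let the compiler merge or eliminate redundant swaps. The sketch below shows the same semantics in portable C. It is illustrative only and not part of the patch; the helper names bswap4/bswap8 are invented here to mirror the macros.

#include <stdint.h>
#include <stdio.h>

/* Portable stand-ins for BSWAP4/BSWAP8: reverse the byte order of a
 * 32-bit and a 64-bit value, respectively. */
static uint32_t bswap4(uint32_t x)
{
    return (x >> 24) | ((x >> 8) & 0x0000ff00u) |
           ((x << 8) & 0x00ff0000u) | (x << 24);
}

static uint64_t bswap8(uint64_t x)
{
    /* Same trick as the 32-bit i386/ARM branches of the macros:
     * swap each 32-bit half, then exchange the halves. */
    return ((uint64_t)bswap4((uint32_t)x) << 32) | bswap4((uint32_t)(x >> 32));
}

int main(void)
{
    printf("%08x\n", bswap4(0x01020304u));                      /* 04030201 */
    printf("%016llx\n",
           (unsigned long long)bswap8(0x0102030405060708ull));  /* 0807060504030201 */
    return 0;
}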
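
The gcm128_context structure introduced by the second hunk is the state behind the GCM128_CONTEXT type that OpenSSL drives through the CRYPTO_gcm128_* functions declared in <openssl/modes.h>: Yi is the counter block, EKi/EK0 the encrypted counters, Xi the running GHASH accumulator, H and Htable the hash subkey and its precomputed multiples, and block/key the underlying block cipher. A minimal usage sketch follows, assuming those public entry points together with the legacy AES_* routines from <openssl/aes.h>; the key, IV and plaintext are placeholder values chosen for illustration and are not taken from the patch.

#include <stdio.h>
#include <openssl/aes.h>
#include <openssl/modes.h>

int main(void)
{
    /* Placeholder key/IV, for illustration only. */
    static const unsigned char key[16] = {0};
    static const unsigned char iv[12] = {0};
    unsigned char pt[16] = "hello, gcm mode";   /* 15 chars + NUL = one block */
    unsigned char ct[16], tag[16];
    AES_KEY aes;
    GCM128_CONTEXT *gcm;
    size_t i;

    if (AES_set_encrypt_key(key, 128, &aes) != 0)
        return 1;

    /* gcm128_context stores the cipher as a block128_f pointer; casting
     * AES_encrypt to block128_f mirrors OpenSSL's own internal usage. */
    gcm = CRYPTO_gcm128_new(&aes, (block128_f)AES_encrypt);
    if (gcm == NULL)
        return 1;

    CRYPTO_gcm128_setiv(gcm, iv, sizeof(iv));        /* derives Y0 and EK0 */
    if (CRYPTO_gcm128_encrypt(gcm, pt, ct, sizeof(pt)) != 0) {
        CRYPTO_gcm128_release(gcm);
        return 1;
    }
    CRYPTO_gcm128_tag(gcm, tag, sizeof(tag));        /* tag = GHASH output ^ EK0 */
    CRYPTO_gcm128_release(gcm);

    for (i = 0; i < sizeof(tag); i++)
        printf("%02x", tag[i]);
    printf("\n");
    return 0;
}

Additional authenticated data would go through CRYPTO_gcm128_aad() between setiv and encrypt; decryption uses CRYPTO_gcm128_decrypt() followed by CRYPTO_gcm128_finish() to verify the received tag.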