From 1e8f69c6a5b52ac2f3941b49d710912ffe0f04ca Mon Sep 17 00:00:00 2001
From: Andy Polyakov
Date: Wed, 21 Jan 2015 11:11:32 +0100
Subject: [PATCH] modes/modes_lcl.h: make it indent-friendly.

Reviewed-by: Tim Hudson
---
 crypto/modes/modes_lcl.h | 62 ++++++++++++++++++++--------------------
 1 file changed, 31 insertions(+), 31 deletions(-)

diff --git a/crypto/modes/modes_lcl.h b/crypto/modes/modes_lcl.h
index 1d8315554d..15cfa17786 100644
--- a/crypto/modes/modes_lcl.h
+++ b/crypto/modes/modes_lcl.h
@@ -27,63 +27,63 @@ typedef unsigned char u8;
 #define STRICT_ALIGNMENT 1
 #ifndef PEDANTIC
-#if defined(__i386) || defined(__i386__) || \
-    defined(__x86_64) || defined(__x86_64__) || \
-    defined(_M_IX86) || defined(_M_AMD64) || defined(_M_X64) || \
-    defined(__aarch64__) || \
-    defined(__s390__) || defined(__s390x__)
-# undef STRICT_ALIGNMENT
-#endif
+# if defined(__i386) || defined(__i386__) || \
+     defined(__x86_64) || defined(__x86_64__) || \
+     defined(_M_IX86) || defined(_M_AMD64) || defined(_M_X64) || \
+     defined(__aarch64__) || \
+     defined(__s390__) || defined(__s390x__)
+#  undef STRICT_ALIGNMENT
+# endif
 #endif
 
 #if !defined(PEDANTIC) && !defined(OPENSSL_NO_ASM) && !defined(OPENSSL_NO_INLINE_ASM)
-#if defined(__GNUC__) && __GNUC__>=2
-# if defined(__x86_64) || defined(__x86_64__)
-#  define BSWAP8(x) ({ u64 ret=(x); \
+# if defined(__GNUC__) && __GNUC__>=2
+#  if defined(__x86_64) || defined(__x86_64__)
+#   define BSWAP8(x) ({ u64 ret=(x); \
                         asm ("bswapq %0" \
                         : "+r"(ret)); ret; })
-#  define BSWAP4(x) ({ u32 ret=(x); \
+#   define BSWAP4(x) ({ u32 ret=(x); \
                         asm ("bswapl %0" \
                         : "+r"(ret)); ret; })
-# elif (defined(__i386) || defined(__i386__)) && !defined(I386_ONLY)
-#  define BSWAP8(x) ({ u32 lo=(u64)(x)>>32,hi=(x); \
+#  elif (defined(__i386) || defined(__i386__)) && !defined(I386_ONLY)
+#   define BSWAP8(x) ({ u32 lo=(u64)(x)>>32,hi=(x); \
                         asm ("bswapl %0; bswapl %1" \
                         : "+r"(hi),"+r"(lo)); \
                         (u64)hi<<32|lo; })
-#  define BSWAP4(x) ({ u32 ret=(x); \
+#   define BSWAP4(x) ({ u32 ret=(x); \
                         asm ("bswapl %0" \
                         : "+r"(ret)); ret; })
-# elif defined(__aarch64__)
-#  define BSWAP8(x) ({ u64 ret; \
+#  elif defined(__aarch64__)
+#   define BSWAP8(x) ({ u64 ret; \
                         asm ("rev %0,%1" \
                         : "=r"(ret) : "r"(x)); ret; })
-#  define BSWAP4(x) ({ u32 ret; \
+#   define BSWAP4(x) ({ u32 ret; \
                         asm ("rev %w0,%w1" \
                         : "=r"(ret) : "r"(x)); ret; })
-# elif (defined(__arm__) || defined(__arm)) && !defined(STRICT_ALIGNMENT)
-#  define BSWAP8(x) ({ u32 lo=(u64)(x)>>32,hi=(x); \
+#  elif (defined(__arm__) || defined(__arm)) && !defined(STRICT_ALIGNMENT)
+#   define BSWAP8(x) ({ u32 lo=(u64)(x)>>32,hi=(x); \
                         asm ("rev %0,%0; rev %1,%1" \
                         : "+r"(hi),"+r"(lo)); \
                         (u64)hi<<32|lo; })
-#  define BSWAP4(x) ({ u32 ret; \
+#   define BSWAP4(x) ({ u32 ret; \
                         asm ("rev %0,%1" \
                         : "=r"(ret) : "r"((u32)(x))); \
                         ret; })
-# endif
-#elif defined(_MSC_VER)
-# if _MSC_VER>=1300
-#  pragma intrinsic(_byteswap_uint64,_byteswap_ulong)
-#  define BSWAP8(x) _byteswap_uint64((u64)(x))
-#  define BSWAP4(x) _byteswap_ulong((u32)(x))
-# elif defined(_M_IX86)
-   __inline u32 _bswap4(u32 val) {
+#  endif
+# elif defined(_MSC_VER)
+#  if _MSC_VER>=1300
+#   pragma intrinsic(_byteswap_uint64,_byteswap_ulong)
+#   define BSWAP8(x) _byteswap_uint64((u64)(x))
+#   define BSWAP4(x) _byteswap_ulong((u32)(x))
+#  elif defined(_M_IX86)
+    __inline u32 _bswap4(u32 val) {
         _asm mov eax,val
         _asm bswap eax
-   }
-#  define BSWAP4(x) _bswap4(x)
+    }
+#   define BSWAP4(x) _bswap4(x)
+#  endif
 # endif
 #endif
-#endif
 
 #if defined(BSWAP4) && !defined(STRICT_ALIGNMENT)
 #define GETU32(p) BSWAP4(*(const u32 *)(p))
-- 
2.34.1
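
For context (not part of the patch): the BSWAP4/BSWAP8 macros touched above are 32-/64-bit byte-swap helpers, which the header uses to implement big-endian loads such as GETU32 on little-endian targets. Below is a minimal standalone sketch of the same operation using portable shift/mask code instead of the inline-asm and intrinsic paths in the diff; the helper names bswap4_portable and bswap8_portable are illustrative placeholders, not OpenSSL identifiers.

/*
 * Illustration only: portable equivalents of what BSWAP4/BSWAP8 compute.
 * Names are assumptions for this sketch, not part of modes_lcl.h.
 */
#include <stdint.h>
#include <stdio.h>

static uint32_t bswap4_portable(uint32_t x)
{
    /* Reverse the four bytes of a 32-bit value. */
    return (x >> 24) | ((x >> 8) & 0x0000ff00u) |
           ((x << 8) & 0x00ff0000u) | (x << 24);
}

static uint64_t bswap8_portable(uint64_t x)
{
    /* Swap the 32-bit halves and byte-swap each half. */
    return ((uint64_t)bswap4_portable((uint32_t)x) << 32) |
           bswap4_portable((uint32_t)(x >> 32));
}

int main(void)
{
    printf("%08x\n", bswap4_portable(0x01020304u));            /* 04030201 */
    printf("%016llx\n",
           (unsigned long long)bswap8_portable(0x0102030405060708ULL));
                                                       /* 0807060504030201 */
    return 0;
}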