Initial aarch64 bits.
author     Andy Polyakov <appro@openssl.org>
           Sun, 13 Oct 2013 17:15:15 +0000 (19:15 +0200)
committer  Andy Polyakov <appro@openssl.org>
           Sun, 13 Oct 2013 17:15:15 +0000 (19:15 +0200)
crypto/bn/bn_lcl.h
crypto/md32_common.h
crypto/modes/modes_lcl.h
crypto/sha/sha512.c

crypto/bn/bn_lcl.h
index 64df27aaf93e22091a5b38854a7b8a5208b312b9..90787e576e88dfae14e7bd40dd469bd36313602d 100644
@@ -318,6 +318,15 @@ extern "C" {
             : "r"(a), "r"(b));
 #    endif
 #  endif
+# elif defined(__aarch64__) && defined(SIXTY_FOUR_BIT_LONG)
+#  if defined(__GNUC__) && __GNUC__>=2
+#   define BN_UMULT_HIGH(a,b)  ({      \
+       register BN_ULONG ret;          \
+       asm ("umulh     %0,%1,%2"       \
+            : "=r"(ret)                \
+            : "r"(a), "r"(b));         \
+       ret;                    })
+#  endif
 # endif                /* cpu */
 #endif         /* OPENSSL_NO_ASM */
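
For context: BN_UMULT_HIGH(a,b) must return the upper 64 bits of the
128-bit product a*b, which AArch64 provides as the single umulh
instruction. A minimal portable sketch of the same computation, assuming
a GCC/clang-style compiler with __int128 support (not part of the patch):

    /* Sketch only: what the aarch64 BN_UMULT_HIGH macro computes.
     * Relies on unsigned __int128, a GCC/clang extension. */
    #include <stdint.h>
    #include <stdio.h>

    static uint64_t umult_high(uint64_t a, uint64_t b)
    {
        return (uint64_t)(((unsigned __int128)a * b) >> 64);
    }

    int main(void)
    {
        /* (2^64-1)^2 = 2^128 - 2^65 + 1, so the high half is 2^64-2 */
        printf("%016llx\n",
               (unsigned long long)umult_high(~0ULL, ~0ULL));
        return 0;   /* prints fffffffffffffffe */
    }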
 
crypto/md32_common.h
index d7f7a26e7023241b1ca3b3a03a0bc2968a296c00..147a7a00c30e3649ad5695cc49b3776a0720d594 100644
                                   asm ("bswapl %0":"=r"(r):"0"(r));    \
                                   *((unsigned int *)(c))=r; (c)+=4; r; })
 #   endif
+#  elif defined(__aarch64__)
+#   if defined(__BYTE_ORDER__)
+#    if defined(__ORDER_LITTLE_ENDIAN__) && __BYTE_ORDER__==__ORDER_LITTLE_ENDIAN__
+#     define HOST_c2l(c,l)     ({ unsigned int r;              \
+                                  asm ("rev    %w0,%w1"        \
+                                       :"=r"(r)                \
+                                       :"r"(*((const unsigned int *)(c))));\
+                                  (c)+=4; (l)=r;               })
+#     define HOST_l2c(l,c)     ({ unsigned int r;              \
+                                  asm ("rev    %w0,%w1"        \
+                                       :"=r"(r)                \
+                                       :"r"((unsigned int)(l)));\
+                                  *((unsigned int *)(c))=r; (c)+=4; r; })
+#    elif defined(__ORDER_BIG_ENDIAN__) && __BYTE_ORDER__==__ORDER_BIG_ENDIAN__
+#     define HOST_c2l(c,l)     ((l)=*((const unsigned int *)(c)), (c)+=4, (l))
+#     define HOST_l2c(l,c)     (*((unsigned int *)(c))=(l), (c)+=4, (l))
+#    endif
+#   endif
 #  endif
 # endif
 #endif
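
The HOST_c2l/HOST_l2c macros added here read and write big-endian 32-bit
words while advancing the byte pointer c; on little-endian AArch64 a
single rev does the byte swap. A byte-at-a-time sketch of the same
semantics, written as functions rather than the macros'
statement-expressions (illustration only, not the real macros):

    /* Big-endian 32-bit load/store at *c, advancing c by 4. */
    static unsigned int host_c2l(const unsigned char **c)
    {
        unsigned int l = ((unsigned int)(*c)[0] << 24) |
                         ((unsigned int)(*c)[1] << 16) |
                         ((unsigned int)(*c)[2] <<  8) |
                         ((unsigned int)(*c)[3]);
        *c += 4;
        return l;
    }

    static unsigned int host_l2c(unsigned int l, unsigned char **c)
    {
        (*c)[0] = (unsigned char)(l >> 24);
        (*c)[1] = (unsigned char)(l >> 16);
        (*c)[2] = (unsigned char)(l >>  8);
        (*c)[3] = (unsigned char)(l);
        *c += 4;
        return l;   /* the real macro likewise yields the value */
    }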

crypto/modes/modes_lcl.h
index 9d83e128444e0bb583a8313753a0b5fd5e0ec388..35d4d9fa53a024839731e8e10891008ad765e5ad 100644
@@ -29,6 +29,7 @@ typedef unsigned char u8;
 #if defined(__i386)    || defined(__i386__)    || \
     defined(__x86_64)  || defined(__x86_64__)  || \
     defined(_M_IX86)   || defined(_M_AMD64)    || defined(_M_X64) || \
+    defined(__aarch64__)                       || \
     defined(__s390__)  || defined(__s390x__)
 # undef STRICT_ALIGNMENT
 #endif
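
Undefining STRICT_ALIGNMENT tells the mode implementations that
word-sized loads and stores need not be naturally aligned, which AArch64
handles in hardware. A sketch of the kind of access this permits,
paraphrased in the style of crypto/modes rather than copied from it:

    #include <stddef.h>

    /* size_t-at-a-time XOR over possibly unaligned buffers. On a
     * strict-alignment target the word-sized casts below could trap. */
    static void xor_words(unsigned char *out, const unsigned char *in,
                          const unsigned char *iv, size_t len)
    {
        size_t n = 0;
        while (n + sizeof(size_t) <= len) {
            *(size_t *)(out + n) = *(const size_t *)(in + n) ^
                                   *(const size_t *)(iv + n);
            n += sizeof(size_t);
        }
        while (n < len) {           /* byte-wise tail */
            out[n] = in[n] ^ iv[n];
            ++n;
        }
    }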
@@ -50,6 +51,13 @@ typedef unsigned char u8;
 #  define BSWAP4(x) ({ u32 ret=(x);                    \
                        asm ("bswapl %0"                \
                        : "+r"(ret));   ret;            })
+# elif defined(__aarch64__)
+#  define BSWAP8(x) ({ u64 ret;                        \
+                       asm ("rev %0,%1"                \
+                       : "=r"(ret) : "r"(x)); ret;     })
+#  define BSWAP4(x) ({ u32 ret;                        \
+                       asm ("rev %w0,%w1"              \
+                       : "=r"(ret) : "r"(x)); ret;     })
 # elif (defined(__arm__) || defined(__arm)) && !defined(STRICT_ALIGNMENT)
 #  define BSWAP8(x) ({ u32 lo=(u64)(x)>>32,hi=(x);     \
                        asm ("rev %0,%0; rev %1,%1"     \

crypto/sha/sha512.c
index aab1f1e8d457931fac527beddd9cdc74c6a7ff6c..59b4ace23496b6d8d5d9ba466e1c23eb267c21dc 100644
@@ -56,6 +56,7 @@ const char SHA512_version[]="SHA-512" OPENSSL_VERSION_PTEXT;
 #if defined(__i386) || defined(__i386__) || defined(_M_IX86) || \
     defined(__x86_64) || defined(_M_AMD64) || defined(_M_X64) || \
     defined(__s390__) || defined(__s390x__) || \
+    defined(__aarch64__) || \
     defined(SHA512_ASM)
 #define SHA512_BLOCK_CAN_MANAGE_UNALIGNED_DATA
 #endif
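
With SHA512_BLOCK_CAN_MANAGE_UNALIGNED_DATA defined, the update path may
hand the caller's buffer to the block function directly instead of
copying every 128-byte block into aligned storage first. A hypothetical
caller-side sketch of the difference (process and block_fn are
illustrative names, not the actual sha512.c control flow):

    #include <stdint.h>
    #include <string.h>
    #include <stddef.h>

    #define SHA512_CBLOCK 128

    static void process(const unsigned char *data, size_t blocks,
                        void (*block_fn)(const void *, size_t))
    {
    #ifdef SHA512_BLOCK_CAN_MANAGE_UNALIGNED_DATA
        block_fn(data, blocks);             /* no copy needed */
    #else
        uint64_t scratch[SHA512_CBLOCK / 8];    /* aligned staging area */
        while (blocks--) {
            memcpy(scratch, data, SHA512_CBLOCK);
            block_fn(scratch, 1);
            data += SHA512_CBLOCK;
        }
    #endif
    }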
@@ -342,6 +343,18 @@ static const SHA_LONG64 K512[80] = {
                                asm ("rotrdi %0,%1,%2"  \
                                : "=r"(ret)             \
                                : "r"(a),"K"(n)); ret;  })
+#  elif defined(__aarch64__)
+#   define ROTR(a,n)   ({ SHA_LONG64 ret;              \
+                               asm ("ror %0,%1,%2"     \
+                               : "=r"(ret)             \
+                               : "r"(a),"I"(n)); ret;  })
+#   if  defined(__BYTE_ORDER__) && defined(__ORDER_LITTLE_ENDIAN__) && \
+       __BYTE_ORDER__==__ORDER_LITTLE_ENDIAN__
+#    define PULL64(x)  ({ SHA_LONG64 ret;                      \
+                               asm ("rev       %0,%1"          \
+                               : "=r"(ret)                     \
+                               : "r"(*((const SHA_LONG64 *)(&(x))))); ret;             })
+#   endif
 #  endif
 # elif defined(_MSC_VER)
 #  if defined(_WIN64)  /* applies to both IA-64 and AMD64 */
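
ROTR is a 64-bit rotate right (a single ror with a constant rotate count
on AArch64, hence the "I" immediate constraint) and PULL64 is a
big-endian 64-bit load built from an unaligned load plus rev. Portable
sketches of both; SHA_LONG64 is sha512.c's 64-bit unsigned type, stood
in for by uint64_t here:

    #include <stdint.h>

    static uint64_t rotr64(uint64_t a, unsigned n)  /* ROTR(a,n), 0 < n < 64 */
    {
        return (a >> n) | (a << (64 - n));
    }

    static uint64_t pull64(const unsigned char *p)  /* big-endian load */
    {
        uint64_t r = 0;
        int i;
        for (i = 0; i < 8; i++)
            r = (r << 8) | p[i];
        return r;
    }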