Add ROTATE inline asm support for SM3
author Hongren (Zenithal) Zheng <i@zenithal.me>
Wed, 11 May 2022 09:18:27 +0000 (17:18 +0800)
committer Tomas Mraz <tomas@openssl.org>
Wed, 22 Jun 2022 10:46:50 +0000 (12:46 +0200)
Also move the ROTATE inline asm into the shared md32_common.h header.

This now benefits SM3, SHA (when built with Zbb only and without Zknh)
and other hash functions.

Reviewed-by: Paul Dale <pauli@openssl.org>
Reviewed-by: Tomas Mraz <tomas@openssl.org>
(Merged from https://github.com/openssl/openssl/pull/18287)
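
The gain comes from Zbb/Zbkb providing a rotate-right-by-immediate
instruction (rori, plus roriw on RV64), so the generic shift-and-or ROTATE
can collapse to a single instruction; a left rotate by n is issued as a
right rotate by 32 - n. A minimal sketch of that identity, assuming plain
C (ROTATE_C and ROTR_C are illustrative names, not part of the patch):

    #include <assert.h>
    #include <stdint.h>

    /* Portable rotate-left, mirroring the generic ROTATE in md32_common.h */
    #define ROTATE_C(a, n) (((a) << (n)) | (((a) & 0xffffffffU) >> (32 - (n))))

    /* Rotate-right, which is what rori/roriw compute for an immediate amount */
    #define ROTR_C(a, n)   (((a) >> (n)) | (((a) & 0xffffffffU) << (32 - (n))))

    int main(void)
    {
        uint32_t x = 0x12345678U;
        /* rotate-left by n equals rotate-right by 32 - n, for 0 < n < 32 */
        for (int n = 1; n < 32; n++)
            assert(ROTATE_C(x, n) == ROTR_C(x, 32 - n));
        return 0;
    }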

crypto/sm3/sm3_local.h
include/crypto/md32_common.h

index 48ec9ae90bb6f13485bdbcbab894b65b4073230a..cb5a187a12fe1a8e46ebe9a6003f870588bac961 100644 (file)
@@ -57,14 +57,14 @@ void ossl_sm3_transform(SM3_CTX *c, const unsigned char *data);
 # if defined(__GNUC__) && __GNUC__>=2 && \
      !defined(OPENSSL_NO_ASM) && !defined(OPENSSL_NO_INLINE_ASM)
 #  if defined(__riscv_zksh)
-#   define P0(x) ({ MD32_REG_T ret;                \
-                       asm ("sm3p0 %0, %1"         \
-                       : "=r"(ret)                 \
-                       : "r"(x)); ret;             })
-#   define P1(x) ({ MD32_REG_T ret;                \
-                       asm ("sm3p1 %0, %1"         \
-                       : "=r"(ret)                 \
-                       : "r"(x)); ret;             })
+#   define P0(x) ({ MD32_REG_T ret;        \
+                       asm ("sm3p0 %0, %1" \
+                       : "=r"(ret)         \
+                       : "r"(x)); ret;     })
+#   define P1(x) ({ MD32_REG_T ret;        \
+                       asm ("sm3p1 %0, %1" \
+                       : "=r"(ret)         \
+                       : "r"(x)); ret;     })
 #  endif
 # endif
 #endif
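
For reference, the sm3p0/sm3p1 instructions guarded by __riscv_zksh above
implement SM3's P0 and P1 permutation functions in a single instruction
each. A sketch of the portable equivalents per the SM3 specification
(ROTL32, P0_C and P1_C are illustrative names, not identifiers from this
file):

    /* SM3 permutations: P0(X) = X ^ (X <<< 9) ^ (X <<< 17),
     *                   P1(X) = X ^ (X <<< 15) ^ (X <<< 23) */
    #define ROTL32(x, n) (((x) << (n)) | (((x) & 0xffffffffU) >> (32 - (n))))
    #define P0_C(x) ((x) ^ ROTL32((x), 9)  ^ ROTL32((x), 17))
    #define P1_C(x) ((x) ^ ROTL32((x), 15) ^ ROTL32((x), 23))
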
index 262dc6503fcee395ea468e5811d89a9cf066e8ef..46214f3237ccb702bf57110260981f3bd9f8a9bb 100644 (file)
 
 # define ROTATE(a,n)     (((a)<<(n))|(((a)&0xffffffff)>>(32-(n))))
 
+#ifndef PEDANTIC
+# if defined(__GNUC__) && __GNUC__>=2 && \
+     !defined(OPENSSL_NO_ASM) && !defined(OPENSSL_NO_INLINE_ASM)
+#  if defined(__riscv_zbb) || defined(__riscv_zbkb)
+#   if __riscv_xlen == 64
+#   undef ROTATE
+#   define ROTATE(x, n) ({ MD32_REG_T ret;            \
+                       asm ("roriw %0, %1, %2"        \
+                       : "=r"(ret)                    \
+                       : "r"(x), "i"(32 - (n))); ret;})
+#   endif
+#   if __riscv_xlen == 32
+#   undef ROTATE
+#   define ROTATE(x, n) ({ MD32_REG_T ret;            \
+                       asm ("rori %0, %1, %2"         \
+                       : "=r"(ret)                    \
+                       : "r"(x), "i"(32 - (n))); ret;})
+#   endif
+#  endif
+# endif
+#endif
+
 # if defined(DATA_ORDER_IS_BIG_ENDIAN)
 
 #  define HOST_c2l(c,l)  (l =(((unsigned long)(*((c)++)))<<24),          \
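
With this change, any md32_common.h consumer built for RISC-V with Zbb (or
Zbkb) but without the dedicated hash crypto extensions gets
single-instruction rotates. A self-contained sketch comparing the new RV64
macro against the generic C fallback (ROTATE_ASM and ROTATE_C are
illustrative names, and MD32_REG_T here is an illustrative stand-in for the
register-sized type used by md32_common.h; a real build simply uses ROTATE,
and the asm path only compiles on an rv64 target with Zbb):

    #include <assert.h>
    #include <stdint.h>

    #define MD32_REG_T long

    #if defined(__riscv_zbb) && __riscv_xlen == 64
    /* Mirrors the roriw-based ROTATE added above: a left rotate by n is a
     * right rotate of the low 32 bits by 32 - n. */
    # define ROTATE_ASM(x, n) ({ MD32_REG_T ret;           \
                         asm ("roriw %0, %1, %2"           \
                         : "=r"(ret)                       \
                         : "r"(x), "i"(32 - (n))); ret; })
    #endif

    /* Generic shift-and-or rotate, as in md32_common.h */
    #define ROTATE_C(a, n) (((a) << (n)) | (((a) & 0xffffffffU) >> (32 - (n))))

    int main(void)
    {
        uint32_t x = 0x80000001U;
    #ifdef ROTATE_ASM
        assert((uint32_t)ROTATE_ASM(x, 3) == ROTATE_C(x, 3));
    #endif
        assert(ROTATE_C(x, 1) == 0x00000003U);
        return 0;
    }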