Further synchronizations with md32_common.h update.
author      Andy Polyakov <appro@openssl.org>
            Tue, 17 Oct 2006 13:38:10 +0000 (13:38 +0000)
committer   Andy Polyakov <appro@openssl.org>
            Tue, 17 Oct 2006 13:38:10 +0000 (13:38 +0000)
Configure
TABLE
crypto/md5/asm/md5-ia64.S
crypto/md5/asm/md5-sparcv9.S [deleted file]
crypto/sha/asm/sha1-ia64.pl

index 3b0df9c6f328aadc834b2f83ba6e10a1eb5a5bbf..80cc58a00653618ce0207dba0fb092735afe8e05 100755 (executable)
--- a/Configure
+++ b/Configure
@@ -120,7 +120,7 @@ my $x86_out_asm="x86cpuid-out.o:bn86-out.o co86-out.o mo86-out.o:dx86-out.o yx86
 
 my $x86_64_asm="x86_64cpuid.o:x86_64-gcc.o x86_64-mont.o::aes-x86_64.o::md5-x86_64.o:sha1-x86_64.o sha256-x86_64.o sha512-x86_64.o::rc4-x86_64.o:::wp-x86_64.o";
 my $ia64_asm="ia64cpuid.o:bn-ia64.o::aes_core.o aes_cbc.o aes-ia64.o::md5-ia64.o:sha1-ia64.o sha256-ia64.o sha512-ia64.o::rc4-ia64.o:::";
-my $sparcv9_asm="sparcv9cap.o sparccpuid.o:bn-sparcv9.o sparcv9-mont.o sparcv9a-mont.o:des_enc-sparc.o fcrypt_b.o:aes_core.o aes_cbc.o aes-sparcv9.o::md5-sparcv9.o::::::";
+my $sparcv9_asm="sparcv9cap.o sparccpuid.o:bn-sparcv9.o sparcv9-mont.o sparcv9a-mont.o:des_enc-sparc.o fcrypt_b.o:aes_core.o aes_cbc.o aes-sparcv9.o::::::::";
 my $no_asm=":::::::::::";
 
 # As for $BSDthreads. Idea is to maintain "collective" set of flags,
diff --git a/TABLE b/TABLE
index 88726ebf7a572ddbcb3891c8ca05e354b84d73fa..4e97ed42331fc6a26ed7ba5c574ffde329f39e97 100644 (file)
--- a/TABLE
+++ b/TABLE
@@ -153,7 +153,7 @@ $bn_obj       = bn-sparcv9.o sparcv9-mont.o sparcv9a-mont.o
 $des_obj      = des_enc-sparc.o fcrypt_b.o
 $aes_obj      = aes_core.o aes_cbc.o aes-sparcv9.o
 $bf_obj       = 
-$md5_obj      = md5-sparcv9.o
+$md5_obj      = 
 $sha1_obj     = 
 $cast_obj     = 
 $rc4_obj      = 
@@ -1777,7 +1777,7 @@ $bn_obj       = bn-sparcv9.o sparcv9-mont.o sparcv9a-mont.o
 $des_obj      = des_enc-sparc.o fcrypt_b.o
 $aes_obj      = aes_core.o aes_cbc.o aes-sparcv9.o
 $bf_obj       = 
-$md5_obj      = md5-sparcv9.o
+$md5_obj      = 
 $sha1_obj     = 
 $cast_obj     = 
 $rc4_obj      = 
@@ -1805,7 +1805,7 @@ $bn_obj       = bn-sparcv9.o sparcv9-mont.o sparcv9a-mont.o
 $des_obj      = des_enc-sparc.o fcrypt_b.o
 $aes_obj      = aes_core.o aes_cbc.o aes-sparcv9.o
 $bf_obj       = 
-$md5_obj      = md5-sparcv9.o
+$md5_obj      = 
 $sha1_obj     = 
 $cast_obj     = 
 $rc4_obj      = 
@@ -3121,7 +3121,7 @@ $bn_obj       = bn-sparcv9.o sparcv9-mont.o sparcv9a-mont.o
 $des_obj      = des_enc-sparc.o fcrypt_b.o
 $aes_obj      = aes_core.o aes_cbc.o aes-sparcv9.o
 $bf_obj       = 
-$md5_obj      = md5-sparcv9.o
+$md5_obj      = 
 $sha1_obj     = 
 $cast_obj     = 
 $rc4_obj      = 
@@ -3177,7 +3177,7 @@ $bn_obj       = bn-sparcv9.o sparcv9-mont.o sparcv9a-mont.o
 $des_obj      = des_enc-sparc.o fcrypt_b.o
 $aes_obj      = aes_core.o aes_cbc.o aes-sparcv9.o
 $bf_obj       = 
-$md5_obj      = md5-sparcv9.o
+$md5_obj      = 
 $sha1_obj     = 
 $cast_obj     = 
 $rc4_obj      = 
@@ -3793,7 +3793,7 @@ $bn_obj       = bn-sparcv9.o sparcv9-mont.o sparcv9a-mont.o
 $des_obj      = des_enc-sparc.o fcrypt_b.o
 $aes_obj      = aes_core.o aes_cbc.o aes-sparcv9.o
 $bf_obj       = 
-$md5_obj      = md5-sparcv9.o
+$md5_obj      = 
 $sha1_obj     = 
 $cast_obj     = 
 $rc4_obj      = 
@@ -3821,7 +3821,7 @@ $bn_obj       = bn-sparcv9.o sparcv9-mont.o sparcv9a-mont.o
 $des_obj      = des_enc-sparc.o fcrypt_b.o
 $aes_obj      = aes_core.o aes_cbc.o aes-sparcv9.o
 $bf_obj       = 
-$md5_obj      = md5-sparcv9.o
+$md5_obj      = 
 $sha1_obj     = 
 $cast_obj     = 
 $rc4_obj      = 
@@ -3905,7 +3905,7 @@ $bn_obj       = bn-sparcv9.o sparcv9-mont.o sparcv9a-mont.o
 $des_obj      = des_enc-sparc.o fcrypt_b.o
 $aes_obj      = aes_core.o aes_cbc.o aes-sparcv9.o
 $bf_obj       = 
-$md5_obj      = md5-sparcv9.o
+$md5_obj      = 
 $sha1_obj     = 
 $cast_obj     = 
 $rc4_obj      = 
@@ -3933,7 +3933,7 @@ $bn_obj       = bn-sparcv9.o sparcv9-mont.o sparcv9a-mont.o
 $des_obj      = des_enc-sparc.o fcrypt_b.o
 $aes_obj      = aes_core.o aes_cbc.o aes-sparcv9.o
 $bf_obj       = 
-$md5_obj      = md5-sparcv9.o
+$md5_obj      = 
 $sha1_obj     = 
 $cast_obj     = 
 $rc4_obj      = 
diff --git a/crypto/md5/asm/md5-ia64.S b/crypto/md5/asm/md5-ia64.S
index f6bb6aa9d1ef49c482094d1250f0f78cabe68256..2f9818aec264b4f84c50713f0c73404b59982261 100644 (file)
--- a/crypto/md5/asm/md5-ia64.S
+++ b/crypto/md5/asm/md5-ia64.S
@@ -86,9 +86,6 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.  */
 #define        pPad2           p12
 #define        pPad3           p13
 #define        pSkip           p8
-//     This two below shall remain constant througout whole routine
-#define        pDataOrder      p14
-#define        pHostOrder      p15
 
 #define        A_              out24
 #define        B_              out25
@@ -150,7 +147,7 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.  */
 #define        Y               in21
 #define        Z               in20
 
-/* register stack configuration for md5_block_asm_host_order(): */
+/* register stack configuration for md5_block_asm_data_order(): */
 #define        MD5_NINP        3
 #define        MD5_NLOC        0
 #define MD5_NOUT       29
@@ -242,7 +239,7 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.  */
 
        .text
 
-/* md5_block_asm_host_order(MD5_CTX *c, const void *data, size_t num)
+/* md5_block_asm_data_order(MD5_CTX *c, const void *data, size_t num)
 
      where:
       c: a pointer to a structure of this type:
@@ -267,22 +264,8 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.  */
        .align  32
        .proc   md5_block_asm_data_order
 md5_block_asm_data_order:
-{      .mib
-       cmp.eq  pDataOrder,pHostOrder = r0,r0
-       br.sptk.many    .md5_block
-};;
-       .endp   md5_block_asm_data_order
-
-       .type   md5_block_asm_host_order, @function
-       .global md5_block_asm_host_order
-
-       .proc   md5_block_asm_host_order
-md5_block_asm_host_order:
-       .prologue
-{      .mib
-       cmp.eq  pHostOrder,pDataOrder = r0,r0
-};;
 .md5_block:
+       .prologue
 {      .mmi
        .save   ar.pfs, PFSSave
        alloc   PFSSave = ar.pfs, MD5_NINP, MD5_NLOC, MD5_NOUT, MD5_NROT
@@ -296,10 +279,8 @@ md5_block_asm_host_order:
        mov     LCSave = ar.lc
 }
 ;;
-.pred.rel      "mutex",pDataOrder,pHostOrder
 {      .mmi
-(pDataOrder)   add     CTable = .md5_tbl_data_order#-.md5_block#, CTable
-(pHostOrder)   add     CTable = .md5_tbl_host_order#-.md5_block#, CTable       
+       add     CTable = .md5_tbl_data_order#-.md5_block#, CTable
        and     InAlign = 0x3, DPtrIn
 }
 
@@ -317,7 +298,7 @@ md5_block_asm_host_order:
        dep     DPtr_ = 0, DPtrIn, 0, 2
 } ;;
 #ifdef HOST_IS_BIG_ENDIAN
-(pDataOrder)   rum     psr.be;;        // switch to little-endian
+       rum     psr.be;;        // switch to little-endian
 #endif
 {      .mmb
        ld4     CTable0 = [CTable], 4
@@ -366,7 +347,7 @@ md5_block_asm_host_order:
 
 .md5_exit:
 #ifdef HOST_IS_BIG_ENDIAN
-(pDataOrder)   sum     psr.be;;        // switch back to big-endian mode
+       sum     psr.be;;        // switch back to big-endian mode
 #endif
 {      .mmi
        st4     [CtxPtr0] = AccumB, -4
@@ -452,7 +433,7 @@ md5_block_asm_host_order:
        MD5UNALIGNED(1)
        MD5UNALIGNED(2)
 
-       .endp md5_block_asm_host_order
+       .endp md5_block_asm_data_order
 
 
 // MD5 Perform the F function and load
@@ -1008,23 +989,4 @@ md5_constants:
        data1 0x35, 0xf2, 0x3a, 0xbd    //    61
        data1 0xbb, 0xd2, 0xd7, 0x2a    //    62
        data1 0x91, 0xd3, 0x86, 0xeb    //    63
-
-.md5_tbl_host_order:                   // OS data order, might as well
-                                       // be little-endian.
-       data4 0xd76aa478, 0xe8c7b756, 0x242070db, 0xc1bdceee    // 0
-       data4 0xf57c0faf, 0x4787c62a, 0xa8304613, 0xfd469501    // 4
-       data4 0x698098d8, 0x8b44f7af, 0xffff5bb1, 0x895cd7be    // 8
-       data4 0x6b901122, 0xfd987193, 0xa679438e, 0x49b40821    // 12
-       data4 0xf61e2562, 0xc040b340, 0x265e5a51, 0xe9b6c7aa    // 16
-       data4 0xd62f105d, 0x02441453, 0xd8a1e681, 0xe7d3fbc8    // 20
-       data4 0x21e1cde6, 0xc33707d6, 0xf4d50d87, 0x455a14ed    // 24
-       data4 0xa9e3e905, 0xfcefa3f8, 0x676f02d9, 0x8d2a4c8a    // 28
-       data4 0xfffa3942, 0x8771f681, 0x6d9d6122, 0xfde5380c    // 32
-       data4 0xa4beea44, 0x4bdecfa9, 0xf6bb4b60, 0xbebfbc70    // 36
-       data4 0x289b7ec6, 0xeaa127fa, 0xd4ef3085, 0x04881d05    // 40
-       data4 0xd9d4d039, 0xe6db99e5, 0x1fa27cf8, 0xc4ac5665    // 44
-       data4 0xf4292244, 0x432aff97, 0xab9423a7, 0xfc93a039    // 48
-       data4 0x655b59c3, 0x8f0ccc92, 0xffeff47d, 0x85845dd1    // 52
-       data4 0x6fa87e4f, 0xfe2ce6e0, 0xa3014314, 0x4e0811a1    // 56
-       data4 0xf7537e82, 0xbd3af235, 0x2ad7d2bb, 0xeb86d391    // 60
-.size  md5_constants#,64*4*2
+.size  md5_constants#,64*4
diff --git a/crypto/md5/asm/md5-sparcv9.S b/crypto/md5/asm/md5-sparcv9.S
deleted file mode 100644 (file)
index db45aa4..0000000
+++ /dev/null
@@ -1,1031 +0,0 @@
-.ident "md5-sparcv9.S, Version 1.0"
-.ident "SPARC V9 ISA artwork by Andy Polyakov <appro@fy.chalmers.se>"
-.file  "md5-sparcv9.S"
-
-/*
- * ====================================================================
- * Copyright (c) 1999 Andy Polyakov <appro@fy.chalmers.se>.
- *
- * Rights for redistribution and usage in source and binary forms are
- * granted as long as above copyright notices are retained. Warranty
- * of any kind is (of course:-) disclaimed.
- * ====================================================================
- */
-
-/*
- * This is my modest contribution to OpenSSL project (see
- * http://www.openssl.org/ for more information about it) and is an
- * assembler implementation of MD5 block hash function. I've hand-coded
- * this for the sole reason to reach UltraSPARC-specific "load in
- * little-endian byte order" instruction. This gives up to 15%
- * performance improvement for cases when input message is aligned at
- * 32 bits boundary. The module was tested under both 32 *and* 64 bit
- * kernels. For updates see http://fy.chalmers.se/~appro/hpe/.
- *
- * To compile with SC4.x/SC5.x:
- *
- *     cc -xarch=v[9|8plus] -DOPENSSL_SYSNAME_ULTRASPARC -DMD5_BLOCK_DATA_ORDER \
- *             -c md5-sparcv9.S
- *
- * and with gcc:
- *
- *     gcc -mcpu=ultrasparc -DOPENSSL_SYSNAME_ULTRASPARC -DMD5_BLOCK_DATA_ORDER \
- *             -c md5-sparcv9.S
- *
- * or if above fails (it does if you have gas):
- *
- *     gcc -E -DOPENSSL_SYSNAMEULTRASPARC -DMD5_BLOCK_DATA_ORDER md5_block.sparc.S | \
- *             as -xarch=v8plus /dev/fd/0 -o md5-sparcv9.o
- */
-
-#include <openssl/e_os2.h>
-
-#define        A       %o0
-#define B      %o1
-#define        C       %o2
-#define        D       %o3
-#define        T1      %o4
-#define        T2      %o5
-
-#define        R0      %l0
-#define        R1      %l1
-#define        R2      %l2
-#define        R3      %l3
-#define        R4      %l4
-#define        R5      %l5
-#define        R6      %l6
-#define        R7      %l7
-#define        R8      %i3
-#define        R9      %i4
-#define        R10     %i5
-#define        R11     %g1
-#define R12    %g2
-#define        R13     %g3
-#define RX     %g4
-
-#define Aptr   %i0+0
-#define Bptr   %i0+4
-#define Cptr   %i0+8
-#define Dptr   %i0+12
-
-#define Aval   R5      /* those not used at the end of the last round */
-#define Bval   R6
-#define Cval   R7
-#define Dval   R8
-
-#if defined(MD5_BLOCK_DATA_ORDER)
-# if defined(OPENSSL_SYSNAME_ULTRASPARC)
-#  define      LOAD                    lda
-#  define      X(i)                    [%i1+i*4]%asi
-#  define      md5_block               md5_block_asm_data_order_aligned
-#  define      ASI_PRIMARY_LITTLE      0x88
-# else
-#  error "MD5_BLOCK_DATA_ORDER is supported only on UltraSPARC!"
-# endif
-#else
-# define       LOAD                    ld
-# define       X(i)                    [%i1+i*4]
-# define       md5_block               md5_block_asm_host_order
-#endif
-
-.section        ".text",#alloc,#execinstr
-
-#if defined(__SUNPRO_C) && defined(__sparcv9)
-  /* They've said -xarch=v9 at command line */
-  .register    %g2,#scratch
-  .register    %g3,#scratch
-# define       FRAME   -192
-#elif defined(__GNUC__) && defined(__arch64__)
-  /* They've said -m64 at command line */
-  .register     %g2,#scratch
-  .register     %g3,#scratch
-# define        FRAME   -192
-#else
-# define       FRAME   -96
-#endif
-
-.align  32
-
-.global md5_block
-md5_block:
-       save    %sp,FRAME,%sp
-
-       ld      [Dptr],D
-       ld      [Cptr],C
-       ld      [Bptr],B
-       ld      [Aptr],A
-#ifdef ASI_PRIMARY_LITTLE
-       rd      %asi,%o7        ! How dare I? Well, I just do:-)
-       wr      %g0,ASI_PRIMARY_LITTLE,%asi
-#endif
-       LOAD    X(0),R0
-
-.Lmd5_block_loop:
-
-!!!!!!!!Round 0
-
-       xor     C,D,T1
-       sethi   %hi(0xd76aa478),T2
-       and     T1,B,T1
-       or      T2,%lo(0xd76aa478),T2   !=
-       xor     T1,D,T1
-       add     T1,R0,T1
-       LOAD    X(1),R1
-       add     T1,T2,T1                !=
-       add     A,T1,A
-       sll     A,7,T2
-       srl     A,32-7,A
-       or      A,T2,A                  !=
-        xor     B,C,T1
-       add     A,B,A
-
-       sethi   %hi(0xe8c7b756),T2
-       and     T1,A,T1                 !=
-       or      T2,%lo(0xe8c7b756),T2
-       xor     T1,C,T1
-       LOAD    X(2),R2
-       add     T1,R1,T1                !=
-       add     T1,T2,T1
-       add     D,T1,D
-       sll     D,12,T2
-       srl     D,32-12,D               !=
-       or      D,T2,D
-        xor     A,B,T1
-       add     D,A,D
-
-       sethi   %hi(0x242070db),T2      !=
-       and     T1,D,T1
-       or      T2,%lo(0x242070db),T2
-       xor     T1,B,T1
-       add     T1,R2,T1                !=
-       LOAD    X(3),R3
-       add     T1,T2,T1
-       add     C,T1,C
-       sll     C,17,T2                 !=
-       srl     C,32-17,C
-       or      C,T2,C
-        xor     D,A,T1
-       add     C,D,C                   !=
-
-       sethi   %hi(0xc1bdceee),T2
-       and     T1,C,T1
-       or      T2,%lo(0xc1bdceee),T2
-       xor     T1,A,T1                 !=
-       add     T1,R3,T1
-       LOAD    X(4),R4
-       add     T1,T2,T1
-       add     B,T1,B                  !=
-       sll     B,22,T2
-       srl     B,32-22,B
-       or      B,T2,B
-        xor     C,D,T1                 !=
-       add     B,C,B
-
-       sethi   %hi(0xf57c0faf),T2
-       and     T1,B,T1
-       or      T2,%lo(0xf57c0faf),T2   !=
-       xor     T1,D,T1
-       add     T1,R4,T1
-       LOAD    X(5),R5
-       add     T1,T2,T1                !=
-       add     A,T1,A
-       sll     A,7,T2
-       srl     A,32-7,A
-       or      A,T2,A                  !=
-        xor     B,C,T1
-       add     A,B,A
-
-       sethi   %hi(0x4787c62a),T2
-       and     T1,A,T1                 !=
-       or      T2,%lo(0x4787c62a),T2
-       xor     T1,C,T1
-       LOAD    X(6),R6
-       add     T1,R5,T1                !=
-       add     T1,T2,T1
-       add     D,T1,D
-       sll     D,12,T2
-       srl     D,32-12,D               !=
-       or      D,T2,D
-        xor     A,B,T1
-       add     D,A,D
-
-       sethi   %hi(0xa8304613),T2      !=
-       and     T1,D,T1
-       or      T2,%lo(0xa8304613),T2
-       xor     T1,B,T1
-       add     T1,R6,T1                !=
-       LOAD    X(7),R7
-       add     T1,T2,T1
-       add     C,T1,C
-       sll     C,17,T2                 !=
-       srl     C,32-17,C
-       or      C,T2,C
-        xor     D,A,T1
-       add     C,D,C                   !=
-
-       sethi   %hi(0xfd469501),T2
-       and     T1,C,T1
-       or      T2,%lo(0xfd469501),T2
-       xor     T1,A,T1                 !=
-       add     T1,R7,T1
-       LOAD    X(8),R8
-       add     T1,T2,T1
-       add     B,T1,B                  !=
-       sll     B,22,T2
-       srl     B,32-22,B
-       or      B,T2,B
-        xor     C,D,T1                 !=
-       add     B,C,B
-
-       sethi   %hi(0x698098d8),T2
-       and     T1,B,T1
-       or      T2,%lo(0x698098d8),T2   !=
-       xor     T1,D,T1
-       add     T1,R8,T1
-       LOAD    X(9),R9
-       add     T1,T2,T1                !=
-       add     A,T1,A
-       sll     A,7,T2
-       srl     A,32-7,A
-       or      A,T2,A                  !=
-        xor     B,C,T1
-       add     A,B,A
-
-       sethi   %hi(0x8b44f7af),T2
-       and     T1,A,T1                 !=
-       or      T2,%lo(0x8b44f7af),T2
-       xor     T1,C,T1
-       LOAD    X(10),R10
-       add     T1,R9,T1                !=
-       add     T1,T2,T1
-       add     D,T1,D
-       sll     D,12,T2
-       srl     D,32-12,D               !=
-       or      D,T2,D
-        xor     A,B,T1
-       add     D,A,D
-
-       sethi   %hi(0xffff5bb1),T2      !=
-       and     T1,D,T1
-       or      T2,%lo(0xffff5bb1),T2
-       xor     T1,B,T1
-       add     T1,R10,T1               !=
-       LOAD    X(11),R11
-       add     T1,T2,T1
-       add     C,T1,C
-       sll     C,17,T2                 !=
-       srl     C,32-17,C
-       or      C,T2,C
-        xor     D,A,T1
-       add     C,D,C                   !=
-
-       sethi   %hi(0x895cd7be),T2
-       and     T1,C,T1
-       or      T2,%lo(0x895cd7be),T2
-       xor     T1,A,T1                 !=
-       add     T1,R11,T1
-       LOAD    X(12),R12
-       add     T1,T2,T1
-       add     B,T1,B                  !=
-       sll     B,22,T2
-       srl     B,32-22,B
-       or      B,T2,B
-        xor     C,D,T1                 !=
-       add     B,C,B
-
-       sethi   %hi(0x6b901122),T2
-       and     T1,B,T1
-       or      T2,%lo(0x6b901122),T2   !=
-       xor     T1,D,T1
-       add     T1,R12,T1
-       LOAD    X(13),R13
-       add     T1,T2,T1                !=
-       add     A,T1,A
-       sll     A,7,T2
-       srl     A,32-7,A
-       or      A,T2,A                  !=
-        xor     B,C,T1
-       add     A,B,A
-
-       sethi   %hi(0xfd987193),T2
-       and     T1,A,T1                 !=
-       or      T2,%lo(0xfd987193),T2
-       xor     T1,C,T1
-       LOAD    X(14),RX
-       add     T1,R13,T1               !=
-       add     T1,T2,T1
-       add     D,T1,D
-       sll     D,12,T2
-       srl     D,32-12,D               !=
-       or      D,T2,D
-        xor     A,B,T1
-       add     D,A,D
-
-       sethi   %hi(0xa679438e),T2      !=
-       and     T1,D,T1
-       or      T2,%lo(0xa679438e),T2
-       xor     T1,B,T1
-       add     T1,RX,T1                !=
-       LOAD    X(15),RX
-       add     T1,T2,T1
-       add     C,T1,C
-       sll     C,17,T2                 !=
-       srl     C,32-17,C
-       or      C,T2,C
-        xor     D,A,T1
-       add     C,D,C                   !=
-
-       sethi   %hi(0x49b40821),T2
-       and     T1,C,T1
-       or      T2,%lo(0x49b40821),T2
-       xor     T1,A,T1                 !=
-       add     T1,RX,T1
-       !pre-LOADed     X(1),R1
-       add     T1,T2,T1
-       add     B,T1,B
-       sll     B,22,T2                 !=
-       srl     B,32-22,B
-       or      B,T2,B
-       add     B,C,B
-
-!!!!!!!!Round 1
-
-       xor     B,C,T1                  !=
-       sethi   %hi(0xf61e2562),T2
-       and     T1,D,T1
-       or      T2,%lo(0xf61e2562),T2
-       xor     T1,C,T1                 !=
-       add     T1,R1,T1
-       !pre-LOADed     X(6),R6
-       add     T1,T2,T1
-       add     A,T1,A
-       sll     A,5,T2                  !=
-       srl     A,32-5,A
-       or      A,T2,A
-       add     A,B,A
-
-       xor     A,B,T1                  !=
-       sethi   %hi(0xc040b340),T2
-       and     T1,C,T1
-       or      T2,%lo(0xc040b340),T2
-       xor     T1,B,T1                 !=
-       add     T1,R6,T1
-       !pre-LOADed     X(11),R11
-       add     T1,T2,T1
-       add     D,T1,D
-       sll     D,9,T2                  !=
-       srl     D,32-9,D
-       or      D,T2,D
-       add     D,A,D
-
-       xor     D,A,T1                  !=
-       sethi   %hi(0x265e5a51),T2
-       and     T1,B,T1
-       or      T2,%lo(0x265e5a51),T2
-       xor     T1,A,T1                 !=
-       add     T1,R11,T1
-       !pre-LOADed     X(0),R0
-       add     T1,T2,T1
-       add     C,T1,C
-       sll     C,14,T2                 !=
-       srl     C,32-14,C
-       or      C,T2,C
-       add     C,D,C
-
-       xor     C,D,T1                  !=
-       sethi   %hi(0xe9b6c7aa),T2
-       and     T1,A,T1
-       or      T2,%lo(0xe9b6c7aa),T2
-       xor     T1,D,T1                 !=
-       add     T1,R0,T1
-       !pre-LOADed     X(5),R5
-       add     T1,T2,T1
-       add     B,T1,B
-       sll     B,20,T2                 !=
-       srl     B,32-20,B
-       or      B,T2,B
-       add     B,C,B
-
-       xor     B,C,T1                  !=
-       sethi   %hi(0xd62f105d),T2
-       and     T1,D,T1
-       or      T2,%lo(0xd62f105d),T2
-       xor     T1,C,T1                 !=
-       add     T1,R5,T1
-       !pre-LOADed     X(10),R10
-       add     T1,T2,T1
-       add     A,T1,A
-       sll     A,5,T2                  !=
-       srl     A,32-5,A
-       or      A,T2,A
-       add     A,B,A
-
-       xor     A,B,T1                  !=
-       sethi   %hi(0x02441453),T2
-       and     T1,C,T1
-       or      T2,%lo(0x02441453),T2
-       xor     T1,B,T1                 !=
-       add     T1,R10,T1
-       LOAD    X(15),RX
-       add     T1,T2,T1
-       add     D,T1,D                  !=
-       sll     D,9,T2
-       srl     D,32-9,D
-       or      D,T2,D
-       add     D,A,D                   !=
-
-       xor     D,A,T1
-       sethi   %hi(0xd8a1e681),T2
-       and     T1,B,T1
-       or      T2,%lo(0xd8a1e681),T2   !=
-       xor     T1,A,T1
-       add     T1,RX,T1
-       !pre-LOADed     X(4),R4
-       add     T1,T2,T1
-       add     C,T1,C                  !=
-       sll     C,14,T2
-       srl     C,32-14,C
-       or      C,T2,C
-       add     C,D,C                   !=
-
-       xor     C,D,T1
-       sethi   %hi(0xe7d3fbc8),T2
-       and     T1,A,T1
-       or      T2,%lo(0xe7d3fbc8),T2   !=
-       xor     T1,D,T1
-       add     T1,R4,T1
-       !pre-LOADed     X(9),R9
-       add     T1,T2,T1
-       add     B,T1,B                  !=
-       sll     B,20,T2
-       srl     B,32-20,B
-       or      B,T2,B
-       add     B,C,B                   !=
-
-       xor     B,C,T1
-       sethi   %hi(0x21e1cde6),T2
-       and     T1,D,T1
-       or      T2,%lo(0x21e1cde6),T2   !=
-       xor     T1,C,T1
-       add     T1,R9,T1
-       LOAD    X(14),RX
-       add     T1,T2,T1                !=
-       add     A,T1,A
-       sll     A,5,T2
-       srl     A,32-5,A
-       or      A,T2,A                  !=
-       add     A,B,A
-
-       xor     A,B,T1
-       sethi   %hi(0xc33707d6),T2
-       and     T1,C,T1                 !=
-       or      T2,%lo(0xc33707d6),T2
-       xor     T1,B,T1
-       add     T1,RX,T1
-       !pre-LOADed     X(3),R3
-       add     T1,T2,T1                !=
-       add     D,T1,D
-       sll     D,9,T2
-       srl     D,32-9,D
-       or      D,T2,D                  !=
-       add     D,A,D
-
-       xor     D,A,T1
-       sethi   %hi(0xf4d50d87),T2
-       and     T1,B,T1                 !=
-       or      T2,%lo(0xf4d50d87),T2
-       xor     T1,A,T1
-       add     T1,R3,T1
-       !pre-LOADed     X(8),R8
-       add     T1,T2,T1                !=
-       add     C,T1,C
-       sll     C,14,T2
-       srl     C,32-14,C
-       or      C,T2,C                  !=
-       add     C,D,C
-
-       xor     C,D,T1
-       sethi   %hi(0x455a14ed),T2
-       and     T1,A,T1                 !=
-       or      T2,%lo(0x455a14ed),T2
-       xor     T1,D,T1
-       add     T1,R8,T1
-       !pre-LOADed     X(13),R13
-       add     T1,T2,T1                !=
-       add     B,T1,B
-       sll     B,20,T2
-       srl     B,32-20,B
-       or      B,T2,B                  !=
-       add     B,C,B
-
-       xor     B,C,T1
-       sethi   %hi(0xa9e3e905),T2
-       and     T1,D,T1                 !=
-       or      T2,%lo(0xa9e3e905),T2
-       xor     T1,C,T1
-       add     T1,R13,T1
-       !pre-LOADed     X(2),R2
-       add     T1,T2,T1                !=
-       add     A,T1,A
-       sll     A,5,T2
-       srl     A,32-5,A
-       or      A,T2,A                  !=
-       add     A,B,A
-
-       xor     A,B,T1
-       sethi   %hi(0xfcefa3f8),T2
-       and     T1,C,T1                 !=
-       or      T2,%lo(0xfcefa3f8),T2
-       xor     T1,B,T1
-       add     T1,R2,T1
-       !pre-LOADed     X(7),R7
-       add     T1,T2,T1                !=
-       add     D,T1,D
-       sll     D,9,T2
-       srl     D,32-9,D
-       or      D,T2,D                  !=
-       add     D,A,D
-
-       xor     D,A,T1
-       sethi   %hi(0x676f02d9),T2
-       and     T1,B,T1                 !=
-       or      T2,%lo(0x676f02d9),T2
-       xor     T1,A,T1
-       add     T1,R7,T1
-       !pre-LOADed     X(12),R12
-       add     T1,T2,T1                !=
-       add     C,T1,C
-       sll     C,14,T2
-       srl     C,32-14,C
-       or      C,T2,C                  !=
-       add     C,D,C
-
-       xor     C,D,T1
-       sethi   %hi(0x8d2a4c8a),T2
-       and     T1,A,T1                 !=
-       or      T2,%lo(0x8d2a4c8a),T2
-       xor     T1,D,T1
-       add     T1,R12,T1
-       !pre-LOADed     X(5),R5
-       add     T1,T2,T1                !=
-       add     B,T1,B
-       sll     B,20,T2
-       srl     B,32-20,B
-       or      B,T2,B                  !=
-       add     B,C,B
-
-!!!!!!!!Round 2
-
-       xor     B,C,T1
-       sethi   %hi(0xfffa3942),T2
-       xor     T1,D,T1                 !=
-       or      T2,%lo(0xfffa3942),T2
-       add     T1,R5,T1
-       !pre-LOADed     X(8),R8
-       add     T1,T2,T1
-       add     A,T1,A                  !=
-       sll     A,4,T2
-       srl     A,32-4,A
-       or      A,T2,A
-       add     A,B,A                   !=
-
-       xor     A,B,T1
-       sethi   %hi(0x8771f681),T2
-       xor     T1,C,T1
-       or      T2,%lo(0x8771f681),T2   !=
-       add     T1,R8,T1
-       !pre-LOADed     X(11),R11
-       add     T1,T2,T1
-       add     D,T1,D
-       sll     D,11,T2                 !=
-       srl     D,32-11,D
-       or      D,T2,D
-       add     D,A,D
-
-       xor     D,A,T1                  !=
-       sethi   %hi(0x6d9d6122),T2
-       xor     T1,B,T1
-       or      T2,%lo(0x6d9d6122),T2
-       add     T1,R11,T1               !=
-       LOAD    X(14),RX
-       add     T1,T2,T1
-       add     C,T1,C
-       sll     C,16,T2                 !=
-       srl     C,32-16,C
-       or      C,T2,C
-       add     C,D,C
-
-       xor     C,D,T1                  !=
-       sethi   %hi(0xfde5380c),T2
-       xor     T1,A,T1
-       or      T2,%lo(0xfde5380c),T2
-       add     T1,RX,T1                !=
-       !pre-LOADed     X(1),R1
-       add     T1,T2,T1
-       add     B,T1,B
-       sll     B,23,T2
-       srl     B,32-23,B               !=
-       or      B,T2,B
-       add     B,C,B
-
-       xor     B,C,T1
-       sethi   %hi(0xa4beea44),T2      !=
-       xor     T1,D,T1
-       or      T2,%lo(0xa4beea44),T2
-       add     T1,R1,T1
-       !pre-LOADed     X(4),R4
-       add     T1,T2,T1                !=
-       add     A,T1,A
-       sll     A,4,T2
-       srl     A,32-4,A
-       or      A,T2,A                  !=
-       add     A,B,A
-
-       xor     A,B,T1
-       sethi   %hi(0x4bdecfa9),T2
-       xor     T1,C,T1                 !=
-       or      T2,%lo(0x4bdecfa9),T2
-       add     T1,R4,T1
-       !pre-LOADed     X(7),R7
-       add     T1,T2,T1
-       add     D,T1,D                  !=
-       sll     D,11,T2
-       srl     D,32-11,D
-       or      D,T2,D
-       add     D,A,D                   !=
-
-       xor     D,A,T1
-       sethi   %hi(0xf6bb4b60),T2
-       xor     T1,B,T1
-       or      T2,%lo(0xf6bb4b60),T2   !=
-       add     T1,R7,T1
-       !pre-LOADed     X(10),R10
-       add     T1,T2,T1
-       add     C,T1,C
-       sll     C,16,T2                 !=
-       srl     C,32-16,C
-       or      C,T2,C
-       add     C,D,C
-
-       xor     C,D,T1                  !=
-       sethi   %hi(0xbebfbc70),T2
-       xor     T1,A,T1
-       or      T2,%lo(0xbebfbc70),T2
-       add     T1,R10,T1               !=
-       !pre-LOADed     X(13),R13
-       add     T1,T2,T1
-       add     B,T1,B
-       sll     B,23,T2
-       srl     B,32-23,B               !=
-       or      B,T2,B
-       add     B,C,B
-
-       xor     B,C,T1
-       sethi   %hi(0x289b7ec6),T2      !=
-       xor     T1,D,T1
-       or      T2,%lo(0x289b7ec6),T2
-       add     T1,R13,T1
-       !pre-LOADed     X(0),R0
-       add     T1,T2,T1                !=
-       add     A,T1,A
-       sll     A,4,T2
-       srl     A,32-4,A
-       or      A,T2,A                  !=
-       add     A,B,A
-
-       xor     A,B,T1
-       sethi   %hi(0xeaa127fa),T2
-       xor     T1,C,T1                 !=
-       or      T2,%lo(0xeaa127fa),T2
-       add     T1,R0,T1
-       !pre-LOADed     X(3),R3
-       add     T1,T2,T1
-       add     D,T1,D                  !=
-       sll     D,11,T2
-       srl     D,32-11,D
-       or      D,T2,D
-       add     D,A,D                   !=
-
-       xor     D,A,T1
-       sethi   %hi(0xd4ef3085),T2
-       xor     T1,B,T1
-       or      T2,%lo(0xd4ef3085),T2   !=
-       add     T1,R3,T1
-       !pre-LOADed     X(6),R6
-       add     T1,T2,T1
-       add     C,T1,C
-       sll     C,16,T2                 !=
-       srl     C,32-16,C
-       or      C,T2,C
-       add     C,D,C
-
-       xor     C,D,T1                  !=
-       sethi   %hi(0x04881d05),T2
-       xor     T1,A,T1
-       or      T2,%lo(0x04881d05),T2
-       add     T1,R6,T1                !=
-       !pre-LOADed     X(9),R9
-       add     T1,T2,T1
-       add     B,T1,B
-       sll     B,23,T2
-       srl     B,32-23,B               !=
-       or      B,T2,B
-       add     B,C,B
-
-       xor     B,C,T1
-       sethi   %hi(0xd9d4d039),T2      !=
-       xor     T1,D,T1
-       or      T2,%lo(0xd9d4d039),T2
-       add     T1,R9,T1
-       !pre-LOADed     X(12),R12
-       add     T1,T2,T1                !=
-       add     A,T1,A
-       sll     A,4,T2
-       srl     A,32-4,A
-       or      A,T2,A                  !=
-       add     A,B,A
-
-       xor     A,B,T1
-       sethi   %hi(0xe6db99e5),T2
-       xor     T1,C,T1                 !=
-       or      T2,%lo(0xe6db99e5),T2
-       add     T1,R12,T1
-       LOAD    X(15),RX
-       add     T1,T2,T1                !=
-       add     D,T1,D
-       sll     D,11,T2
-       srl     D,32-11,D
-       or      D,T2,D                  !=
-       add     D,A,D
-
-       xor     D,A,T1
-       sethi   %hi(0x1fa27cf8),T2
-       xor     T1,B,T1                 !=
-       or      T2,%lo(0x1fa27cf8),T2
-       add     T1,RX,T1
-       !pre-LOADed     X(2),R2
-       add     T1,T2,T1
-       add     C,T1,C                  !=
-       sll     C,16,T2
-       srl     C,32-16,C
-       or      C,T2,C
-       add     C,D,C                   !=
-
-       xor     C,D,T1
-       sethi   %hi(0xc4ac5665),T2
-       xor     T1,A,T1
-       or      T2,%lo(0xc4ac5665),T2   !=
-       add     T1,R2,T1
-       !pre-LOADed     X(0),R0
-       add     T1,T2,T1
-       add     B,T1,B
-       sll     B,23,T2                 !=
-       srl     B,32-23,B
-       or      B,T2,B
-       add     B,C,B
-
-!!!!!!!!Round 3
-
-       orn     B,D,T1                  !=
-       sethi   %hi(0xf4292244),T2
-       xor     T1,C,T1
-       or      T2,%lo(0xf4292244),T2
-       add     T1,R0,T1                !=
-       !pre-LOADed     X(7),R7
-       add     T1,T2,T1
-       add     A,T1,A
-       sll     A,6,T2
-       srl     A,32-6,A                !=
-       or      A,T2,A
-       add     A,B,A
-
-       orn     A,C,T1
-       sethi   %hi(0x432aff97),T2      !=
-       xor     T1,B,T1
-       or      T2,%lo(0x432aff97),T2
-       LOAD    X(14),RX
-       add     T1,R7,T1                !=
-       add     T1,T2,T1
-       add     D,T1,D
-       sll     D,10,T2
-       srl     D,32-10,D               !=
-       or      D,T2,D
-       add     D,A,D
-
-       orn     D,B,T1
-       sethi   %hi(0xab9423a7),T2      !=
-       xor     T1,A,T1
-       or      T2,%lo(0xab9423a7),T2
-       add     T1,RX,T1
-       !pre-LOADed     X(5),R5
-       add     T1,T2,T1                !=
-       add     C,T1,C
-       sll     C,15,T2
-       srl     C,32-15,C
-       or      C,T2,C                  !=
-       add     C,D,C
-
-       orn     C,A,T1
-       sethi   %hi(0xfc93a039),T2
-       xor     T1,D,T1                 !=
-       or      T2,%lo(0xfc93a039),T2
-       add     T1,R5,T1
-       !pre-LOADed     X(12),R12
-       add     T1,T2,T1
-       add     B,T1,B                  !=
-       sll     B,21,T2
-       srl     B,32-21,B
-       or      B,T2,B
-       add     B,C,B                   !=
-
-       orn     B,D,T1
-       sethi   %hi(0x655b59c3),T2
-       xor     T1,C,T1
-       or      T2,%lo(0x655b59c3),T2   !=
-       add     T1,R12,T1
-       !pre-LOADed     X(3),R3
-       add     T1,T2,T1
-       add     A,T1,A
-       sll     A,6,T2                  !=
-       srl     A,32-6,A
-       or      A,T2,A
-       add     A,B,A
-
-       orn     A,C,T1                  !=
-       sethi   %hi(0x8f0ccc92),T2
-       xor     T1,B,T1
-       or      T2,%lo(0x8f0ccc92),T2
-       add     T1,R3,T1                !=
-       !pre-LOADed     X(10),R10
-       add     T1,T2,T1
-       add     D,T1,D
-       sll     D,10,T2
-       srl     D,32-10,D               !=
-       or      D,T2,D
-       add     D,A,D
-
-       orn     D,B,T1
-       sethi   %hi(0xffeff47d),T2      !=
-       xor     T1,A,T1
-       or      T2,%lo(0xffeff47d),T2
-       add     T1,R10,T1
-       !pre-LOADed     X(1),R1
-       add     T1,T2,T1                !=
-       add     C,T1,C
-       sll     C,15,T2
-       srl     C,32-15,C
-       or      C,T2,C                  !=
-       add     C,D,C
-
-       orn     C,A,T1
-       sethi   %hi(0x85845dd1),T2
-       xor     T1,D,T1                 !=
-       or      T2,%lo(0x85845dd1),T2
-       add     T1,R1,T1
-       !pre-LOADed     X(8),R8
-       add     T1,T2,T1
-       add     B,T1,B                  !=
-       sll     B,21,T2
-       srl     B,32-21,B
-       or      B,T2,B
-       add     B,C,B                   !=
-
-       orn     B,D,T1
-       sethi   %hi(0x6fa87e4f),T2
-       xor     T1,C,T1
-       or      T2,%lo(0x6fa87e4f),T2   !=
-       add     T1,R8,T1
-       LOAD    X(15),RX
-       add     T1,T2,T1
-       add     A,T1,A                  !=
-       sll     A,6,T2
-       srl     A,32-6,A
-       or      A,T2,A
-       add     A,B,A                   !=
-
-       orn     A,C,T1
-       sethi   %hi(0xfe2ce6e0),T2
-       xor     T1,B,T1
-       or      T2,%lo(0xfe2ce6e0),T2   !=
-       add     T1,RX,T1
-       !pre-LOADed     X(6),R6
-       add     T1,T2,T1
-       add     D,T1,D
-       sll     D,10,T2                 !=
-       srl     D,32-10,D
-       or      D,T2,D
-       add     D,A,D
-
-       orn     D,B,T1                  !=
-       sethi   %hi(0xa3014314),T2
-       xor     T1,A,T1
-       or      T2,%lo(0xa3014314),T2
-       add     T1,R6,T1                !=
-       !pre-LOADed     X(13),R13
-       add     T1,T2,T1
-       add     C,T1,C
-       sll     C,15,T2
-       srl     C,32-15,C               !=
-       or      C,T2,C
-       add     C,D,C
-
-       orn     C,A,T1
-       sethi   %hi(0x4e0811a1),T2      !=
-       xor     T1,D,T1
-       or      T2,%lo(0x4e0811a1),T2
-       !pre-LOADed     X(4),R4
-        ld      [Aptr],Aval
-       add     T1,R13,T1               !=
-       add     T1,T2,T1
-       add     B,T1,B
-       sll     B,21,T2
-       srl     B,32-21,B               !=
-       or      B,T2,B
-       add     B,C,B
-
-       orn     B,D,T1
-       sethi   %hi(0xf7537e82),T2      !=
-       xor     T1,C,T1
-       or      T2,%lo(0xf7537e82),T2
-       !pre-LOADed     X(11),R11
-        ld      [Dptr],Dval
-       add     T1,R4,T1                !=
-       add     T1,T2,T1
-       add     A,T1,A
-       sll     A,6,T2
-       srl     A,32-6,A                !=
-       or      A,T2,A
-       add     A,B,A
-
-       orn     A,C,T1
-       sethi   %hi(0xbd3af235),T2      !=
-       xor     T1,B,T1
-       or      T2,%lo(0xbd3af235),T2
-       !pre-LOADed     X(2),R2
-        ld      [Cptr],Cval
-       add     T1,R11,T1               !=
-       add     T1,T2,T1
-       add     D,T1,D
-       sll     D,10,T2
-       srl     D,32-10,D               !=
-       or      D,T2,D
-       add     D,A,D
-
-       orn     D,B,T1
-       sethi   %hi(0x2ad7d2bb),T2      !=
-       xor     T1,A,T1
-       or      T2,%lo(0x2ad7d2bb),T2
-       !pre-LOADed     X(9),R9
-        ld      [Bptr],Bval
-       add     T1,R2,T1                !=
-        add     Aval,A,Aval
-       add     T1,T2,T1
-        st      Aval,[Aptr]
-       add     C,T1,C                  !=
-       sll     C,15,T2
-        add     Dval,D,Dval
-       srl     C,32-15,C
-       or      C,T2,C                  !=
-        st      Dval,[Dptr]
-       add     C,D,C
-
-       orn     C,A,T1
-       sethi   %hi(0xeb86d391),T2      !=
-       xor     T1,D,T1
-       or      T2,%lo(0xeb86d391),T2
-       add     T1,R9,T1
-       !pre-LOADed     X(0),R0
-        mov     Aval,A                 !=
-       add     T1,T2,T1
-        mov     Dval,D
-       add     B,T1,B
-       sll     B,21,T2                 !=
-        add     Cval,C,Cval
-       srl     B,32-21,B
-        st      Cval,[Cptr]
-       or      B,T2,B                  !=
-       add     B,C,B
-
-       deccc   %i2
-       mov     Cval,C
-       add     B,Bval,B                !=
-       inc     64,%i1
-       nop
-       st      B,[Bptr]
-       nop                             !=
-
-#ifdef OPENSSL_SYSNAME_ULTRASPARC
-       bg,a,pt %icc,.Lmd5_block_loop
-#else
-       bg,a    .Lmd5_block_loop
-#endif
-       LOAD    X(0),R0
-
-#ifdef ASI_PRIMARY_LITTLE
-       wr      %g0,%o7,%asi
-#endif
-       ret
-       restore %g0,0,%o0
-
-.type  md5_block,#function
-.size  md5_block,(.-md5_block)
diff --git a/crypto/sha/asm/sha1-ia64.pl b/crypto/sha/asm/sha1-ia64.pl
index a5d81979451ac7ed462446f0e48baab3a6713b1f..9e2f73560f3a97b9c7eda6cf0929b22963efbfb3 100644 (file)
--- a/crypto/sha/asm/sha1-ia64.pl
+++ b/crypto/sha/asm/sha1-ia64.pl
@@ -186,9 +186,7 @@ $code.=<<___;
        extr.u  tmp1=$a,27,5            }   // a>>27
 { .mib;        add     $f=$f,tmp4                  // f+=e+K_20_39
        add     $h1=$h1,$a              };; // wrap up
-{ .mmi;
-(p16)  ld4.s   $X[0]=[inp],4               // non-faulting prefetch
-       add     $f=$f,tmp0                  // f+=F_20_39(b,c,d)
+{ .mmi;        add     $f=$f,tmp0                  // f+=F_20_39(b,c,d)
        shrp    $b=tmp6,tmp6,2          }   // b=ROTATE(b,30) ;;?
 { .mmi;        or      tmp1=tmp1,tmp5              // ROTATE(a,5)
        add     $h3=$h3,$c              };; // wrap up