# ====================================================================
# Written by David S. Miller <davem@davemloft.net> and Andy Polyakov
# <appro@openssl.org>. The module is licensed under 2-clause BSD
# license. October 2012. All rights reserved.
# ====================================================================
######################################################################
# AES round instructions complete in 3 cycles and can be issued every
# cycle. It means that round calculations should take 4*rounds cycles,
# because any given round instruction depends on result of *both*
# previous instructions:
# Provided that fxor [with IV] takes 3 cycles to complete, critical
# path length for CBC encrypt would be 3+4*rounds, or in other words
# it should process one byte in at least (3+4*rounds)/16 cycles. This
# estimate doesn't account for "collateral" instructions, such as
# fetching input from memory, xor-ing it with zero-round key and
# storing the result. Yet, *measured* performance [for data aligned
# at 64-bit boundary!] deviates from this equation by less than 0.5%:
#		128-bit key	192-		256-
# CBC encrypt	2.70/2.90(*)	3.20/3.40	3.70/3.90
#			(*) numbers after slash are for
#			    misaligned data.
# Out-of-order execution logic managed to fully overlap "collateral"
# instructions with those on critical path. Amazing!
# As with Intel AES-NI, question is if it's possible to improve
# performance of parallelizable modes by interleaving round
# instructions. Provided round instruction latency and throughput,
# optimal interleave factor is 2. But can we expect 2x performance
# improvement? Well, as round instructions can be issued one per
# cycle, they don't saturate the 2-way issue pipeline and therefore
# there is room for "collateral" calculations... Yet, 2x speed-up
# over CBC encrypt remains unattainable:
#		128-bit key	192-		256-
# CBC decrypt	1.64/2.11	1.89/2.37	2.23/2.61
# CTR		1.64/2.08(*)	1.89/2.33	2.23/2.61
#			(*) numbers after slash are for
#			    misaligned data.
# Estimates based on amount of instructions under assumption that
# round instructions are not pairable with any other instruction
# suggest that the latter is the actual case and pipeline runs
# underutilized. It should be noted that T4 out-of-order execution
# logic is so capable that performance gain from 2x interleave is
# not even impressive, ~7-13% over non-interleaved code, largest
# for 256-bit keys.
# To anchor to something else, software implementation processes
# one byte in 29 cycles with 128-bit key on same processor. Intel
# Sandy Bridge encrypts byte in 5.07 cycles in CBC mode and decrypts
# in 0.93, naturally with AES-NI.
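
# A quick sanity check of the estimate quoted above. This helper is
# illustrative only and is never called by the generator; the name is
# ours, and the measured figures are copied from the tables in the
# comment above.
sub _cbc_enc_cycles_per_byte {
	my $rounds = shift;		# 10/12/14 for AES-128/192/256
	return (3 + 4*$rounds)/16;	# fxor latency + round chain, per 16 bytes
}
# _cbc_enc_cycles_per_byte(10) = 2.6875 vs. measured 2.70 (128-bit),
# _cbc_enc_cycles_per_byte(12) = 3.1875 vs. measured 3.20 (192-bit),
# _cbc_enc_cycles_per_byte(14) = 3.6875 vs. measured 3.70 (256-bit).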
$0 =~ m/(.*[\/\\])[^\/\\]+$/; $dir=$1;
push(@INC,"${dir}","${dir}../../perlasm");
require "sparcv9_modes.pl";
$::evp=1;	# if $evp is set to 0, script generates module with
# AES_[en|de]crypt, AES_set_[en|de]crypt_key and AES_cbc_encrypt entry
# points. These however are not fully compatible with openssl/aes.h,
# because they expect AES_KEY to be aligned at 64-bit boundary. When
# used through EVP, alignment is arranged at EVP layer. Second thing
# that is arranged by EVP is at least 32-bit alignment of IV.
######################################################################
# single-round subroutines
my ($inp,$out,$key,$rounds,$tmp,$mask)=map("%o$_",(0..5));
	andcc		$inp, 7, %g1		! is input aligned?
	ldx		[$inp + 16], $inp
	ld		[$key + 240], $rounds
	ldd		[$key + 16], %f12
	ldd		[$key + 24], %f14
	srl		$rounds, 1, $rounds
	ldd		[$key + 32], %f16
	sub		$rounds, 1, $rounds
	ldd		[$key + 40], %f18
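	! $rounds now holds rounds/2-1: each iteration of the loop below
	! performs two rounds, and the final two rounds are done after
	! the loop with the _l ("last round") forms of the instructions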
	aes_eround01	%f12, %f0, %f2, %f4
	aes_eround23	%f14, %f0, %f2, %f2
	sub		$rounds,1,$rounds
	aes_eround01	%f16, %f4, %f2, %f0
	aes_eround23	%f18, %f4, %f2, %f2
	ldd		[$key + 16], %f16
	ldd		[$key + 24], %f18
	brnz,pt		$rounds, .Lenc
	andcc		$out, 7, $tmp		! is output aligned?
	aes_eround01	%f12, %f0, %f2, %f4
	aes_eround23	%f14, %f0, %f2, %f2
	aes_eround01_l	%f16, %f4, %f2, %f0
	aes_eround23_l	%f18, %f4, %f2, %f2
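	! Misaligned-output path: alignaddrl rounds $out down to an
	! 8-byte boundary and latches the byte offset in %gsr, the
	! faligndata sequence shifts the 16-byte result across three
	! aligned doublewords, and the edge-masked stda (ASI 0xc0,
	! partial store) writes only the bytes inside the destination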
2:	alignaddrl	$out, %g0, $out
	srl		$mask, $tmp, $mask
	faligndata	%f0, %f0, %f4
	faligndata	%f0, %f2, %f6
	faligndata	%f2, %f2, %f8
	stda		%f4, [$out + $mask]0xc0	! partial store
	orn		%g0, $mask, $mask
	stda		%f8, [$out + $mask]0xc0	! partial store
.type	aes_t4_encrypt,#function
.size	aes_t4_encrypt,.-aes_t4_encrypt
.globl	aes_t4_decrypt
	andcc		$inp, 7, %g1		! is input aligned?
	ldx		[$inp + 16], $inp
	ld		[$key + 240], $rounds
	ldd		[$key + 16], %f12
	ldd		[$key + 24], %f14
	srl		$rounds, 1, $rounds
	ldd		[$key + 32], %f16
	sub		$rounds, 1, $rounds
	ldd		[$key + 40], %f18
	aes_dround01	%f12, %f0, %f2, %f4
	aes_dround23	%f14, %f0, %f2, %f2
	sub		$rounds,1,$rounds
	aes_dround01	%f16, %f4, %f2, %f0
	aes_dround23	%f18, %f4, %f2, %f2
	ldd		[$key + 16], %f16
	ldd		[$key + 24], %f18
	brnz,pt		$rounds, .Ldec
	andcc		$out, 7, $tmp		! is output aligned?
	aes_dround01	%f12, %f0, %f2, %f4
	aes_dround23	%f14, %f0, %f2, %f2
	aes_dround01_l	%f16, %f4, %f2, %f0
	aes_dround23_l	%f18, %f4, %f2, %f2
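	! same edge-masked partial-store technique as in aes_t4_encrypt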
2:	alignaddrl	$out, %g0, $out
	srl		$mask, $tmp, $mask
	faligndata	%f0, %f0, %f4
	faligndata	%f0, %f2, %f6
	faligndata	%f2, %f2, %f8
	stda		%f4, [$out + $mask]0xc0	! partial store
	orn		%g0, $mask, $mask
	stda		%f8, [$out + $mask]0xc0	! partial store
.type	aes_t4_decrypt,#function
.size	aes_t4_decrypt,.-aes_t4_decrypt
######################################################################
# key setup subroutines
my ($inp,$bits,$out,$tmp)=map("%o$_",(0..5));
.globl	aes_t4_set_encrypt_key
aes_t4_set_encrypt_key:
	alignaddr	$inp, %g0, $inp
	brz,pt		$tmp, .L256aligned
	faligndata	%f0, %f2, %f0
	faligndata	%f2, %f4, %f2
	faligndata	%f4, %f6, %f4
	faligndata	%f6, %f8, %f6
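# Each iteration below emits 32 bytes of the 256-bit key schedule:
# aes_kexpand1 performs the RotWord/SubWord/Rcon step (round constant
# selected by $i), aes_kexpand0 the SubWord-only step for the second
# key half, and aes_kexpand2 the plain xor-propagation steps.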
for ($i=0; $i<6; $i++) {
	std		%f0, [$out + `32*$i+0`]
	aes_kexpand1	%f0, %f6, $i, %f0
	std		%f2, [$out + `32*$i+8`]
	aes_kexpand2	%f2, %f0, %f2
	std		%f4, [$out + `32*$i+16`]
	aes_kexpand0	%f4, %f2, %f4
	std		%f6, [$out + `32*$i+24`]
	aes_kexpand2	%f6, %f4, %f6
	std		%f0, [$out + `32*$i+0`]
	aes_kexpand1	%f0, %f6, $i, %f0
	std		%f2, [$out + `32*$i+8`]
	aes_kexpand2	%f2, %f0, %f2
	std		%f4, [$out + `32*$i+16`]
	std		%f6, [$out + `32*$i+24`]
	std		%f0, [$out + `32*$i+32`]
	std		%f2, [$out + `32*$i+40`]
	st		$tmp, [$out + 240]
	brz,pt		$tmp, .L192aligned
	faligndata	%f0, %f2, %f0
	faligndata	%f2, %f4, %f2
	faligndata	%f4, %f6, %f4
for ($i=0; $i<7; $i++) {
	std		%f0, [$out + `24*$i+0`]
	aes_kexpand1	%f0, %f4, $i, %f0
	std		%f2, [$out + `24*$i+8`]
	aes_kexpand2	%f2, %f0, %f2
	std		%f4, [$out + `24*$i+16`]
	aes_kexpand2	%f4, %f2, %f4
	std		%f0, [$out + `24*$i+0`]
	aes_kexpand1	%f0, %f4, $i, %f0
	std		%f2, [$out + `24*$i+8`]
	aes_kexpand2	%f2, %f0, %f2
	std		%f4, [$out + `24*$i+16`]
	std		%f0, [$out + `24*$i+24`]
	std		%f2, [$out + `24*$i+32`]
	st		$tmp, [$out + 240]
	brz,pt		$tmp, .L128aligned
	faligndata	%f0, %f2, %f0
	faligndata	%f2, %f4, %f2
for ($i=0; $i<10; $i++) {
	std		%f0, [$out + `16*$i+0`]
	aes_kexpand1	%f0, %f2, $i, %f0
	std		%f2, [$out + `16*$i+8`]
	aes_kexpand2	%f2, %f0, %f2
	std		%f0, [$out + `16*$i+0`]
	std		%f2, [$out + `16*$i+8`]
	st		$tmp, [$out + 240]
.type	aes_t4_set_encrypt_key,#function
.size	aes_t4_set_encrypt_key,.-aes_t4_set_encrypt_key
.globl	aes_t4_set_decrypt_key
aes_t4_set_decrypt_key:
	call		.Lset_encrypt_key
	sll		$tmp, 4, $inp		! $tmp is number of rounds
	add		$out, $inp, $inp	! $inp=$out+16*rounds
	srl		$tmp, 2, $tmp		! $tmp=(rounds+2)/4
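	! flip the schedule in place: the loop below swaps 16-byte round
	! keys from the two ends, since the dround instructions consume
	! the encryption schedule in reverse order for decryption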
	ldd		[$inp - 16], %f12
	std		%f12, [$out + 16]
	std		%f14, [$out + 24]
	brnz		$tmp, .Lkey_flip
.type	aes_t4_set_decrypt_key,#function
.size	aes_t4_set_decrypt_key,.-aes_t4_set_decrypt_key
my ($inp,$out,$len,$key,$ivec,$enc)=map("%i$_",(0..5));
my ($ileft,$iright,$ooff,$omask,$ivoff)=map("%l$_",(1..7));
for ($i=2; $i<22;$i++) {			# load key schedule
	ldd		[$key + `8*$i`], %f`12+2*$i`
.type	_aes128_loadkey,#function
.size	_aes128_loadkey,.-_aes128_loadkey
_aes128_load_enckey=_aes128_loadkey
_aes128_load_deckey=_aes128_loadkey
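# The _aesNNN_{encrypt,decrypt}_{1x,2x} subroutines below assume the
# key schedule has been preloaded into the floating-point registers
# from %f16 up by the corresponding loadkey. The _2x variants
# interleave two independent blocks to hide round-instruction
# latency, per the performance discussion at the top of this file.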
for ($i=0; $i<4; $i++) {
	aes_eround01	%f`16+8*$i+0`, %f0, %f2, %f4
	aes_eround23	%f`16+8*$i+2`, %f0, %f2, %f2
	aes_eround01	%f`16+8*$i+4`, %f4, %f2, %f0
	aes_eround23	%f`16+8*$i+6`, %f4, %f2, %f2
	aes_eround01	%f48, %f0, %f2, %f4
	aes_eround23	%f50, %f0, %f2, %f2
	aes_eround01_l	%f52, %f4, %f2, %f0
	aes_eround23_l	%f54, %f4, %f2, %f2
.type	_aes128_encrypt_1x,#function
.size	_aes128_encrypt_1x,.-_aes128_encrypt_1x
for ($i=0; $i<4; $i++) {
	aes_eround01	%f`16+8*$i+0`, %f0, %f2, %f8
	aes_eround23	%f`16+8*$i+2`, %f0, %f2, %f2
	aes_eround01	%f`16+8*$i+0`, %f4, %f6, %f10
	aes_eround23	%f`16+8*$i+2`, %f4, %f6, %f6
	aes_eround01	%f`16+8*$i+4`, %f8, %f2, %f0
	aes_eround23	%f`16+8*$i+6`, %f8, %f2, %f2
	aes_eround01	%f`16+8*$i+4`, %f10, %f6, %f4
	aes_eround23	%f`16+8*$i+6`, %f10, %f6, %f6
	aes_eround01	%f48, %f0, %f2, %f8
	aes_eround23	%f50, %f0, %f2, %f2
	aes_eround01	%f48, %f4, %f6, %f10
	aes_eround23	%f50, %f4, %f6, %f6
	aes_eround01_l	%f52, %f8, %f2, %f0
	aes_eround23_l	%f54, %f8, %f2, %f2
	aes_eround01_l	%f52, %f10, %f6, %f4
	aes_eround23_l	%f54, %f10, %f6, %f6
.type	_aes128_encrypt_2x,#function
.size	_aes128_encrypt_2x,.-_aes128_encrypt_2x
for ($i=0; $i<4; $i++) {
	aes_dround01	%f`16+8*$i+0`, %f0, %f2, %f4
	aes_dround23	%f`16+8*$i+2`, %f0, %f2, %f2
	aes_dround01	%f`16+8*$i+4`, %f4, %f2, %f0
	aes_dround23	%f`16+8*$i+6`, %f4, %f2, %f2
	aes_dround01	%f48, %f0, %f2, %f4
	aes_dround23	%f50, %f0, %f2, %f2
	aes_dround01_l	%f52, %f4, %f2, %f0
	aes_dround23_l	%f54, %f4, %f2, %f2
.type	_aes128_decrypt_1x,#function
.size	_aes128_decrypt_1x,.-_aes128_decrypt_1x
for ($i=0; $i<4; $i++) {
	aes_dround01	%f`16+8*$i+0`, %f0, %f2, %f8
	aes_dround23	%f`16+8*$i+2`, %f0, %f2, %f2
	aes_dround01	%f`16+8*$i+0`, %f4, %f6, %f10
	aes_dround23	%f`16+8*$i+2`, %f4, %f6, %f6
	aes_dround01	%f`16+8*$i+4`, %f8, %f2, %f0
	aes_dround23	%f`16+8*$i+6`, %f8, %f2, %f2
	aes_dround01	%f`16+8*$i+4`, %f10, %f6, %f4
	aes_dround23	%f`16+8*$i+6`, %f10, %f6, %f6
	aes_dround01	%f48, %f0, %f2, %f8
	aes_dround23	%f50, %f0, %f2, %f2
	aes_dround01	%f48, %f4, %f6, %f10
	aes_dround23	%f50, %f4, %f6, %f6
	aes_dround01_l	%f52, %f8, %f2, %f0
	aes_dround23_l	%f54, %f8, %f2, %f2
	aes_dround01_l	%f52, %f10, %f6, %f4
	aes_dround23_l	%f54, %f10, %f6, %f6
.type	_aes128_decrypt_2x,#function
.size	_aes128_decrypt_2x,.-_aes128_decrypt_2x
for ($i=2; $i<26;$i++) {			# load key schedule
	ldd		[$key + `8*$i`], %f`12+2*$i`
.type	_aes192_loadkey,#function
.size	_aes192_loadkey,.-_aes192_loadkey
_aes192_load_enckey=_aes192_loadkey
_aes192_load_deckey=_aes192_loadkey
_aes256_load_enckey=_aes192_loadkey
_aes256_load_deckey=_aes192_loadkey
for ($i=0; $i<5; $i++) {
	aes_eround01	%f`16+8*$i+0`, %f0, %f2, %f4
	aes_eround23	%f`16+8*$i+2`, %f0, %f2, %f2
	aes_eround01	%f`16+8*$i+4`, %f4, %f2, %f0
	aes_eround23	%f`16+8*$i+6`, %f4, %f2, %f2
	aes_eround01	%f56, %f0, %f2, %f4
	aes_eround23	%f58, %f0, %f2, %f2
	aes_eround01_l	%f60, %f4, %f2, %f0
	aes_eround23_l	%f62, %f4, %f2, %f2
.type	_aes192_encrypt_1x,#function
.size	_aes192_encrypt_1x,.-_aes192_encrypt_1x
for ($i=0; $i<5; $i++) {
	aes_eround01	%f`16+8*$i+0`, %f0, %f2, %f8
	aes_eround23	%f`16+8*$i+2`, %f0, %f2, %f2
	aes_eround01	%f`16+8*$i+0`, %f4, %f6, %f10
	aes_eround23	%f`16+8*$i+2`, %f4, %f6, %f6
	aes_eround01	%f`16+8*$i+4`, %f8, %f2, %f0
	aes_eround23	%f`16+8*$i+6`, %f8, %f2, %f2
	aes_eround01	%f`16+8*$i+4`, %f10, %f6, %f4
	aes_eround23	%f`16+8*$i+6`, %f10, %f6, %f6
	aes_eround01	%f56, %f0, %f2, %f8
	aes_eround23	%f58, %f0, %f2, %f2
	aes_eround01	%f56, %f4, %f6, %f10
	aes_eround23	%f58, %f4, %f6, %f6
	aes_eround01_l	%f60, %f8, %f2, %f0
	aes_eround23_l	%f62, %f8, %f2, %f2
	aes_eround01_l	%f60, %f10, %f6, %f4
	aes_eround23_l	%f62, %f10, %f6, %f6
.type	_aes192_encrypt_2x,#function
.size	_aes192_encrypt_2x,.-_aes192_encrypt_2x
for ($i=0; $i<5; $i++) {
	aes_dround01	%f`16+8*$i+0`, %f0, %f2, %f4
	aes_dround23	%f`16+8*$i+2`, %f0, %f2, %f2
	aes_dround01	%f`16+8*$i+4`, %f4, %f2, %f0
	aes_dround23	%f`16+8*$i+6`, %f4, %f2, %f2
	aes_dround01	%f56, %f0, %f2, %f4
	aes_dround23	%f58, %f0, %f2, %f2
	aes_dround01_l	%f60, %f4, %f2, %f0
	aes_dround23_l	%f62, %f4, %f2, %f2
.type	_aes192_decrypt_1x,#function
.size	_aes192_decrypt_1x,.-_aes192_decrypt_1x
for ($i=0; $i<5; $i++) {
	aes_dround01	%f`16+8*$i+0`, %f0, %f2, %f8
	aes_dround23	%f`16+8*$i+2`, %f0, %f2, %f2
	aes_dround01	%f`16+8*$i+0`, %f4, %f6, %f10
	aes_dround23	%f`16+8*$i+2`, %f4, %f6, %f6
	aes_dround01	%f`16+8*$i+4`, %f8, %f2, %f0
	aes_dround23	%f`16+8*$i+6`, %f8, %f2, %f2
	aes_dround01	%f`16+8*$i+4`, %f10, %f6, %f4
	aes_dround23	%f`16+8*$i+6`, %f10, %f6, %f6
	aes_dround01	%f56, %f0, %f2, %f8
	aes_dround23	%f58, %f0, %f2, %f2
	aes_dround01	%f56, %f4, %f6, %f10
	aes_dround23	%f58, %f4, %f6, %f6
	aes_dround01_l	%f60, %f8, %f2, %f0
	aes_dround23_l	%f62, %f8, %f2, %f2
	aes_dround01_l	%f60, %f10, %f6, %f4
	aes_dround23_l	%f62, %f10, %f6, %f6
.type	_aes192_decrypt_2x,#function
.size	_aes192_decrypt_2x,.-_aes192_decrypt_2x
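
# A 256-bit schedule (15 round keys, 240 bytes) does not fit in
# %f16-%f62, so the _aes256 subroutines below consume %f16-%f22
# first, reload those registers with the tail of the schedule
# mid-flight, and restore them from the head of the schedule before
# returning.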
	aes_eround01	%f16, %f0, %f2, %f4
	aes_eround23	%f18, %f0, %f2, %f2
	ldd		[$key + 208], %f16
	ldd		[$key + 216], %f18
	aes_eround01	%f20, %f4, %f2, %f0
	aes_eround23	%f22, %f4, %f2, %f2
	ldd		[$key + 224], %f20
	ldd		[$key + 232], %f22
for ($i=1; $i<6; $i++) {
	aes_eround01	%f`16+8*$i+0`, %f0, %f2, %f4
	aes_eround23	%f`16+8*$i+2`, %f0, %f2, %f2
	aes_eround01	%f`16+8*$i+4`, %f4, %f2, %f0
	aes_eround23	%f`16+8*$i+6`, %f4, %f2, %f2
	aes_eround01	%f16, %f0, %f2, %f4
	aes_eround23	%f18, %f0, %f2, %f2
	ldd		[$key + 16], %f16
	ldd		[$key + 24], %f18
	aes_eround01_l	%f20, %f4, %f2, %f0
	aes_eround23_l	%f22, %f4, %f2, %f2
	ldd		[$key + 32], %f20
	ldd		[$key + 40], %f22
.type	_aes256_encrypt_1x,#function
.size	_aes256_encrypt_1x,.-_aes256_encrypt_1x
	aes_eround01	%f16, %f0, %f2, %f8
	aes_eround23	%f18, %f0, %f2, %f2
	aes_eround01	%f16, %f4, %f6, %f10
	aes_eround23	%f18, %f4, %f6, %f6
	ldd		[$key + 208], %f16
	ldd		[$key + 216], %f18
	aes_eround01	%f20, %f8, %f2, %f0
	aes_eround23	%f22, %f8, %f2, %f2
	aes_eround01	%f20, %f10, %f6, %f4
	aes_eround23	%f22, %f10, %f6, %f6
	ldd		[$key + 224], %f20
	ldd		[$key + 232], %f22
for ($i=1; $i<6; $i++) {
	aes_eround01	%f`16+8*$i+0`, %f0, %f2, %f8
	aes_eround23	%f`16+8*$i+2`, %f0, %f2, %f2
	aes_eround01	%f`16+8*$i+0`, %f4, %f6, %f10
	aes_eround23	%f`16+8*$i+2`, %f4, %f6, %f6
	aes_eround01	%f`16+8*$i+4`, %f8, %f2, %f0
	aes_eround23	%f`16+8*$i+6`, %f8, %f2, %f2
	aes_eround01	%f`16+8*$i+4`, %f10, %f6, %f4
	aes_eround23	%f`16+8*$i+6`, %f10, %f6, %f6
	aes_eround01	%f16, %f0, %f2, %f8
	aes_eround23	%f18, %f0, %f2, %f2
	aes_eround01	%f16, %f4, %f6, %f10
	aes_eround23	%f18, %f4, %f6, %f6
	ldd		[$key + 16], %f16
	ldd		[$key + 24], %f18
	aes_eround01_l	%f20, %f8, %f2, %f0
	aes_eround23_l	%f22, %f8, %f2, %f2
	aes_eround01_l	%f20, %f10, %f6, %f4
	aes_eround23_l	%f22, %f10, %f6, %f6
	ldd		[$key + 32], %f20
	ldd		[$key + 40], %f22
.type	_aes256_encrypt_2x,#function
.size	_aes256_encrypt_2x,.-_aes256_encrypt_2x
	aes_dround01	%f16, %f0, %f2, %f4
	aes_dround23	%f18, %f0, %f2, %f2
	ldd		[$key + 208], %f16
	ldd		[$key + 216], %f18
	aes_dround01	%f20, %f4, %f2, %f0
	aes_dround23	%f22, %f4, %f2, %f2
	ldd		[$key + 224], %f20
	ldd		[$key + 232], %f22
for ($i=1; $i<6; $i++) {
	aes_dround01	%f`16+8*$i+0`, %f0, %f2, %f4
	aes_dround23	%f`16+8*$i+2`, %f0, %f2, %f2
	aes_dround01	%f`16+8*$i+4`, %f4, %f2, %f0
	aes_dround23	%f`16+8*$i+6`, %f4, %f2, %f2
	aes_dround01	%f16, %f0, %f2, %f4
	aes_dround23	%f18, %f0, %f2, %f2
	ldd		[$key + 16], %f16
	ldd		[$key + 24], %f18
	aes_dround01_l	%f20, %f4, %f2, %f0
	aes_dround23_l	%f22, %f4, %f2, %f2
	ldd		[$key + 32], %f20
	ldd		[$key + 40], %f22
.type	_aes256_decrypt_1x,#function
.size	_aes256_decrypt_1x,.-_aes256_decrypt_1x
	aes_dround01	%f16, %f0, %f2, %f8
	aes_dround23	%f18, %f0, %f2, %f2
	aes_dround01	%f16, %f4, %f6, %f10
	aes_dround23	%f18, %f4, %f6, %f6
	ldd		[$key + 208], %f16
	ldd		[$key + 216], %f18
	aes_dround01	%f20, %f8, %f2, %f0
	aes_dround23	%f22, %f8, %f2, %f2
	aes_dround01	%f20, %f10, %f6, %f4
	aes_dround23	%f22, %f10, %f6, %f6
	ldd		[$key + 224], %f20
	ldd		[$key + 232], %f22
for ($i=1; $i<6; $i++) {
	aes_dround01	%f`16+8*$i+0`, %f0, %f2, %f8
	aes_dround23	%f`16+8*$i+2`, %f0, %f2, %f2
	aes_dround01	%f`16+8*$i+0`, %f4, %f6, %f10
	aes_dround23	%f`16+8*$i+2`, %f4, %f6, %f6
	aes_dround01	%f`16+8*$i+4`, %f8, %f2, %f0
	aes_dround23	%f`16+8*$i+6`, %f8, %f2, %f2
	aes_dround01	%f`16+8*$i+4`, %f10, %f6, %f4
	aes_dround23	%f`16+8*$i+6`, %f10, %f6, %f6
	aes_dround01	%f16, %f0, %f2, %f8
	aes_dround23	%f18, %f0, %f2, %f2
	aes_dround01	%f16, %f4, %f6, %f10
	aes_dround23	%f18, %f4, %f6, %f6
	ldd		[$key + 16], %f16
	ldd		[$key + 24], %f18
	aes_dround01_l	%f20, %f8, %f2, %f0
	aes_dround23_l	%f22, %f8, %f2, %f2
	aes_dround01_l	%f20, %f10, %f6, %f4
	aes_dround23_l	%f22, %f10, %f6, %f6
	ldd		[$key + 32], %f20
	ldd		[$key + 40], %f22
.type	_aes256_decrypt_2x,#function
.size	_aes256_decrypt_2x,.-_aes256_decrypt_2x
&alg_cbc_encrypt_implement("aes",128);
&alg_cbc_encrypt_implement("aes",192);
&alg_cbc_encrypt_implement("aes",256);

&alg_cbc_decrypt_implement("aes",128);
&alg_cbc_decrypt_implement("aes",192);
&alg_cbc_decrypt_implement("aes",256);

&alg_ctr32_implement("aes",128);
&alg_ctr32_implement("aes",192);
&alg_ctr32_implement("aes",256);
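
# These generators come from sparcv9_modes.pl (pulled in by the
# require near the top of this file); they emit the CBC and CTR mode
# loops that call the _aesNNN_*_{1x,2x} subroutines defined above.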
AES_encrypt=aes_t4_encrypt
AES_decrypt=aes_t4_decrypt
.global	AES_set_encrypt_key
	andcc		%o2, 7, %g0		! check alignment
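	! bits (%o1) must be 128, 192 or 256, i.e. have no bits set
	! outside the 0x1c0 mask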
	andncc		%o1, 0x1c0, %g0
	b		aes_t4_set_encrypt_key
.type	AES_set_encrypt_key,#function
.size	AES_set_encrypt_key,.-AES_set_encrypt_key
.global	AES_set_decrypt_key
	andcc		%o2, 7, %g0		! check alignment
	andncc		%o1, 0x1c0, %g0
	b		aes_t4_set_decrypt_key
.type	AES_set_decrypt_key,#function
.size	AES_set_decrypt_key,.-AES_set_decrypt_key
my ($inp,$out,$len,$key,$ivec,$enc)=map("%o$_",(0..5));

.globl	AES_cbc_encrypt
	brz		$enc, .Lcbc_decrypt
	bl,pt		%icc, aes128_t4_cbc_encrypt
	be,pn		%icc, aes192_t4_cbc_encrypt
	ba		aes256_t4_cbc_encrypt
	bl,pt		%icc, aes128_t4_cbc_decrypt
	be,pn		%icc, aes192_t4_cbc_decrypt
	ba		aes256_t4_cbc_decrypt
.type	AES_cbc_encrypt,#function
.size	AES_cbc_encrypt,.-AES_cbc_encrypt
.asciz	"AES for SPARC T4, David S. Miller, Andy Polyakov"