+ jz .L${mode}_exit
+ cmp $chunk,$len
+ jae .L${mode}_aligned_loop
+
+.L${mode}_aligned_skip:
+___
+$code.=<<___ if ($PADLOCK_PREFETCH{$mode});	# split off tail so hw prefetch can't cross into unmapped page
+	lea	($inp,$len),%rbp		# %rbp = $inp+$len, end of input
+	neg	%rbp				# with 'and' below: -(end) & 0xfff
+	and	\$0xfff,%rbp			# distance to page boundary
+	xor	%eax,%eax			# %rax = 0, "no remainder" value for cmov
+	cmp	\$$PADLOCK_PREFETCH{$mode},%rbp	# does prefetch window fit before page end?
+	mov	\$$PADLOCK_PREFETCH{$mode}-1,%rbp	# remainder mask; assumes prefetch size is a power of 2 — TODO confirm
+	cmovae	%rax,%rbp			# whole window fits -> no tail needed
+	and	$len,%rbp			# remainder
+	sub	%rbp,$len			# bulk length processed in place below
+	jz	.L${mode}_aligned_tail		# input shorter than window: all via tail path
+___
+$code.=<<___;	# bulk encryption: PadLock xcrypt consumes %rax/%rbx/$inp/$out/$len implicitly
+	lea	-16($ctx),%rax		# ivp (iv lives 16 bytes below $ctx)
+	lea	16($ctx),%rbx		# key (schedule 16 bytes above $ctx)
+	shr	\$4,$len		# len/=AES_BLOCK_SIZE
+	.byte	0xf3,0x0f,0xa7,$opcode	# rep xcrypt* (encoding varies by mode via $opcode)
+___
+$code.=<<___ if ($mode !~ /ecb|ctr/);	# chained modes only: ecb/ctr keep no iv
+	movdqa	(%rax),%xmm0		# %rax still points at ivp after xcrypt
+	movdqa	%xmm0,-16($ctx)		# copy [or refresh] iv
+___
+$code.=<<___ if ($PADLOCK_PREFETCH{$mode});
+ test %rbp,%rbp # check remainder
+ jz .L${mode}_exit
+
+.L${mode}_aligned_tail:
+ mov $out,%r8
+ mov %rbp,$chunk
+ mov %rbp,$len
+ lea (%rsp),%rbp
+ sub $len,%rsp
+ shr \$3,$len
+ lea (%rsp),$out
+ .byte 0xf3,0x48,0xa5 # rep movsq
+ lea (%r8),$out
+ lea (%rsp),$inp
+ mov $chunk,$len
+ jmp .L${mode}_loop