path: root/contrib/libs/openssl/asm/linux
author     orivej <orivej@yandex-team.ru>                      2022-02-10 16:45:01 +0300
committer  Daniil Cherednik <dcherednik@yandex-team.ru>        2022-02-10 16:45:01 +0300
commit     2d37894b1b037cf24231090eda8589bbb44fb6fc (patch)
tree       be835aa92c6248212e705f25388ebafcf84bc7a1 /contrib/libs/openssl/asm/linux
parent     718c552901d703c502ccbefdfc3c9028d608b947 (diff)
download   ydb-2d37894b1b037cf24231090eda8589bbb44fb6fc.tar.gz
Restoring authorship annotation for <orivej@yandex-team.ru>. Commit 2 of 2.
Diffstat (limited to 'contrib/libs/openssl/asm/linux')
-rw-r--r--  contrib/libs/openssl/asm/linux/crypto/chacha/chacha-x86_64.s      | 2470
-rw-r--r--  contrib/libs/openssl/asm/linux/crypto/poly1305/poly1305-x86_64.s  | 3012
2 files changed, 2741 insertions, 2741 deletions
diff --git a/contrib/libs/openssl/asm/linux/crypto/chacha/chacha-x86_64.s b/contrib/libs/openssl/asm/linux/crypto/chacha/chacha-x86_64.s
index 6ec329ee8e..1812bc84b1 100644
--- a/contrib/libs/openssl/asm/linux/crypto/chacha/chacha-x86_64.s
+++ b/contrib/libs/openssl/asm/linux/crypto/chacha/chacha-x86_64.s
@@ -41,10 +41,10 @@ ChaCha20_ctr32:
cmpq $0,%rdx
je .Lno_data
movq OPENSSL_ia32cap_P+4(%rip),%r10
- btq $48,%r10
- jc .LChaCha20_avx512
- testq %r10,%r10
- js .LChaCha20_avx512vl
+ btq $48,%r10
+ jc .LChaCha20_avx512
+ testq %r10,%r10
+ js .LChaCha20_avx512vl
testl $512,%r10d
jnz .LChaCha20_ssse3
@@ -2195,1234 +2195,1234 @@ ChaCha20_8x:
.byte 0xf3,0xc3
.cfi_endproc
.size ChaCha20_8x,.-ChaCha20_8x
-.type ChaCha20_avx512,@function
-.align 32
-ChaCha20_avx512:
-.cfi_startproc
-.LChaCha20_avx512:
- movq %rsp,%r9
-.cfi_def_cfa_register %r9
- cmpq $512,%rdx
- ja .LChaCha20_16x
-
- subq $64+8,%rsp
- vbroadcasti32x4 .Lsigma(%rip),%zmm0
- vbroadcasti32x4 (%rcx),%zmm1
- vbroadcasti32x4 16(%rcx),%zmm2
- vbroadcasti32x4 (%r8),%zmm3
-
- vmovdqa32 %zmm0,%zmm16
- vmovdqa32 %zmm1,%zmm17
- vmovdqa32 %zmm2,%zmm18
- vpaddd .Lzeroz(%rip),%zmm3,%zmm3
- vmovdqa32 .Lfourz(%rip),%zmm20
- movq $10,%r8
- vmovdqa32 %zmm3,%zmm19
- jmp .Loop_avx512
-
-.align 16
-.Loop_outer_avx512:
- vmovdqa32 %zmm16,%zmm0
- vmovdqa32 %zmm17,%zmm1
- vmovdqa32 %zmm18,%zmm2
- vpaddd %zmm20,%zmm19,%zmm3
- movq $10,%r8
- vmovdqa32 %zmm3,%zmm19
- jmp .Loop_avx512
-
-.align 32
-.Loop_avx512:
- vpaddd %zmm1,%zmm0,%zmm0
- vpxord %zmm0,%zmm3,%zmm3
- vprold $16,%zmm3,%zmm3
- vpaddd %zmm3,%zmm2,%zmm2
- vpxord %zmm2,%zmm1,%zmm1
- vprold $12,%zmm1,%zmm1
- vpaddd %zmm1,%zmm0,%zmm0
- vpxord %zmm0,%zmm3,%zmm3
- vprold $8,%zmm3,%zmm3
- vpaddd %zmm3,%zmm2,%zmm2
- vpxord %zmm2,%zmm1,%zmm1
- vprold $7,%zmm1,%zmm1
- vpshufd $78,%zmm2,%zmm2
- vpshufd $57,%zmm1,%zmm1
- vpshufd $147,%zmm3,%zmm3
- vpaddd %zmm1,%zmm0,%zmm0
- vpxord %zmm0,%zmm3,%zmm3
- vprold $16,%zmm3,%zmm3
- vpaddd %zmm3,%zmm2,%zmm2
- vpxord %zmm2,%zmm1,%zmm1
- vprold $12,%zmm1,%zmm1
- vpaddd %zmm1,%zmm0,%zmm0
- vpxord %zmm0,%zmm3,%zmm3
- vprold $8,%zmm3,%zmm3
- vpaddd %zmm3,%zmm2,%zmm2
- vpxord %zmm2,%zmm1,%zmm1
- vprold $7,%zmm1,%zmm1
- vpshufd $78,%zmm2,%zmm2
- vpshufd $147,%zmm1,%zmm1
- vpshufd $57,%zmm3,%zmm3
- decq %r8
- jnz .Loop_avx512
- vpaddd %zmm16,%zmm0,%zmm0
- vpaddd %zmm17,%zmm1,%zmm1
- vpaddd %zmm18,%zmm2,%zmm2
- vpaddd %zmm19,%zmm3,%zmm3
-
- subq $64,%rdx
- jb .Ltail64_avx512
-
- vpxor 0(%rsi),%xmm0,%xmm4
- vpxor 16(%rsi),%xmm1,%xmm5
- vpxor 32(%rsi),%xmm2,%xmm6
- vpxor 48(%rsi),%xmm3,%xmm7
- leaq 64(%rsi),%rsi
-
- vmovdqu %xmm4,0(%rdi)
- vmovdqu %xmm5,16(%rdi)
- vmovdqu %xmm6,32(%rdi)
- vmovdqu %xmm7,48(%rdi)
- leaq 64(%rdi),%rdi
-
- jz .Ldone_avx512
-
- vextracti32x4 $1,%zmm0,%xmm4
- vextracti32x4 $1,%zmm1,%xmm5
- vextracti32x4 $1,%zmm2,%xmm6
- vextracti32x4 $1,%zmm3,%xmm7
-
- subq $64,%rdx
- jb .Ltail_avx512
-
- vpxor 0(%rsi),%xmm4,%xmm4
- vpxor 16(%rsi),%xmm5,%xmm5
- vpxor 32(%rsi),%xmm6,%xmm6
- vpxor 48(%rsi),%xmm7,%xmm7
- leaq 64(%rsi),%rsi
-
- vmovdqu %xmm4,0(%rdi)
- vmovdqu %xmm5,16(%rdi)
- vmovdqu %xmm6,32(%rdi)
- vmovdqu %xmm7,48(%rdi)
- leaq 64(%rdi),%rdi
-
- jz .Ldone_avx512
-
- vextracti32x4 $2,%zmm0,%xmm4
- vextracti32x4 $2,%zmm1,%xmm5
- vextracti32x4 $2,%zmm2,%xmm6
- vextracti32x4 $2,%zmm3,%xmm7
-
- subq $64,%rdx
- jb .Ltail_avx512
-
- vpxor 0(%rsi),%xmm4,%xmm4
- vpxor 16(%rsi),%xmm5,%xmm5
- vpxor 32(%rsi),%xmm6,%xmm6
- vpxor 48(%rsi),%xmm7,%xmm7
- leaq 64(%rsi),%rsi
-
- vmovdqu %xmm4,0(%rdi)
- vmovdqu %xmm5,16(%rdi)
- vmovdqu %xmm6,32(%rdi)
- vmovdqu %xmm7,48(%rdi)
- leaq 64(%rdi),%rdi
-
- jz .Ldone_avx512
-
- vextracti32x4 $3,%zmm0,%xmm4
- vextracti32x4 $3,%zmm1,%xmm5
- vextracti32x4 $3,%zmm2,%xmm6
- vextracti32x4 $3,%zmm3,%xmm7
-
- subq $64,%rdx
- jb .Ltail_avx512
-
- vpxor 0(%rsi),%xmm4,%xmm4
- vpxor 16(%rsi),%xmm5,%xmm5
- vpxor 32(%rsi),%xmm6,%xmm6
- vpxor 48(%rsi),%xmm7,%xmm7
- leaq 64(%rsi),%rsi
-
- vmovdqu %xmm4,0(%rdi)
- vmovdqu %xmm5,16(%rdi)
- vmovdqu %xmm6,32(%rdi)
- vmovdqu %xmm7,48(%rdi)
- leaq 64(%rdi),%rdi
-
- jnz .Loop_outer_avx512
-
- jmp .Ldone_avx512
-
-.align 16
-.Ltail64_avx512:
- vmovdqa %xmm0,0(%rsp)
- vmovdqa %xmm1,16(%rsp)
- vmovdqa %xmm2,32(%rsp)
- vmovdqa %xmm3,48(%rsp)
- addq $64,%rdx
- jmp .Loop_tail_avx512
-
-.align 16
-.Ltail_avx512:
- vmovdqa %xmm4,0(%rsp)
- vmovdqa %xmm5,16(%rsp)
- vmovdqa %xmm6,32(%rsp)
- vmovdqa %xmm7,48(%rsp)
- addq $64,%rdx
-
-.Loop_tail_avx512:
- movzbl (%rsi,%r8,1),%eax
- movzbl (%rsp,%r8,1),%ecx
- leaq 1(%r8),%r8
- xorl %ecx,%eax
- movb %al,-1(%rdi,%r8,1)
- decq %rdx
- jnz .Loop_tail_avx512
-
- vmovdqu32 %zmm16,0(%rsp)
-
-.Ldone_avx512:
- vzeroall
- leaq (%r9),%rsp
-.cfi_def_cfa_register %rsp
-.Lavx512_epilogue:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size ChaCha20_avx512,.-ChaCha20_avx512
-.type ChaCha20_avx512vl,@function
-.align 32
-ChaCha20_avx512vl:
-.cfi_startproc
-.LChaCha20_avx512vl:
- movq %rsp,%r9
-.cfi_def_cfa_register %r9
- cmpq $128,%rdx
- ja .LChaCha20_8xvl
-
- subq $64+8,%rsp
- vbroadcasti128 .Lsigma(%rip),%ymm0
- vbroadcasti128 (%rcx),%ymm1
- vbroadcasti128 16(%rcx),%ymm2
- vbroadcasti128 (%r8),%ymm3
-
- vmovdqa32 %ymm0,%ymm16
- vmovdqa32 %ymm1,%ymm17
- vmovdqa32 %ymm2,%ymm18
- vpaddd .Lzeroz(%rip),%ymm3,%ymm3
- vmovdqa32 .Ltwoy(%rip),%ymm20
- movq $10,%r8
- vmovdqa32 %ymm3,%ymm19
- jmp .Loop_avx512vl
-
-.align 16
-.Loop_outer_avx512vl:
- vmovdqa32 %ymm18,%ymm2
- vpaddd %ymm20,%ymm19,%ymm3
- movq $10,%r8
- vmovdqa32 %ymm3,%ymm19
- jmp .Loop_avx512vl
-
-.align 32
-.Loop_avx512vl:
- vpaddd %ymm1,%ymm0,%ymm0
- vpxor %ymm0,%ymm3,%ymm3
- vprold $16,%ymm3,%ymm3
- vpaddd %ymm3,%ymm2,%ymm2
- vpxor %ymm2,%ymm1,%ymm1
- vprold $12,%ymm1,%ymm1
- vpaddd %ymm1,%ymm0,%ymm0
- vpxor %ymm0,%ymm3,%ymm3
- vprold $8,%ymm3,%ymm3
- vpaddd %ymm3,%ymm2,%ymm2
- vpxor %ymm2,%ymm1,%ymm1
- vprold $7,%ymm1,%ymm1
- vpshufd $78,%ymm2,%ymm2
- vpshufd $57,%ymm1,%ymm1
- vpshufd $147,%ymm3,%ymm3
- vpaddd %ymm1,%ymm0,%ymm0
- vpxor %ymm0,%ymm3,%ymm3
- vprold $16,%ymm3,%ymm3
- vpaddd %ymm3,%ymm2,%ymm2
- vpxor %ymm2,%ymm1,%ymm1
- vprold $12,%ymm1,%ymm1
- vpaddd %ymm1,%ymm0,%ymm0
- vpxor %ymm0,%ymm3,%ymm3
- vprold $8,%ymm3,%ymm3
- vpaddd %ymm3,%ymm2,%ymm2
- vpxor %ymm2,%ymm1,%ymm1
- vprold $7,%ymm1,%ymm1
- vpshufd $78,%ymm2,%ymm2
- vpshufd $147,%ymm1,%ymm1
- vpshufd $57,%ymm3,%ymm3
- decq %r8
- jnz .Loop_avx512vl
- vpaddd %ymm16,%ymm0,%ymm0
- vpaddd %ymm17,%ymm1,%ymm1
- vpaddd %ymm18,%ymm2,%ymm2
- vpaddd %ymm19,%ymm3,%ymm3
-
- subq $64,%rdx
- jb .Ltail64_avx512vl
-
- vpxor 0(%rsi),%xmm0,%xmm4
- vpxor 16(%rsi),%xmm1,%xmm5
- vpxor 32(%rsi),%xmm2,%xmm6
- vpxor 48(%rsi),%xmm3,%xmm7
- leaq 64(%rsi),%rsi
-
- vmovdqu %xmm4,0(%rdi)
- vmovdqu %xmm5,16(%rdi)
- vmovdqu %xmm6,32(%rdi)
- vmovdqu %xmm7,48(%rdi)
- leaq 64(%rdi),%rdi
-
- jz .Ldone_avx512vl
-
- vextracti128 $1,%ymm0,%xmm4
- vextracti128 $1,%ymm1,%xmm5
- vextracti128 $1,%ymm2,%xmm6
- vextracti128 $1,%ymm3,%xmm7
-
- subq $64,%rdx
- jb .Ltail_avx512vl
-
- vpxor 0(%rsi),%xmm4,%xmm4
- vpxor 16(%rsi),%xmm5,%xmm5
- vpxor 32(%rsi),%xmm6,%xmm6
- vpxor 48(%rsi),%xmm7,%xmm7
- leaq 64(%rsi),%rsi
-
- vmovdqu %xmm4,0(%rdi)
- vmovdqu %xmm5,16(%rdi)
- vmovdqu %xmm6,32(%rdi)
- vmovdqu %xmm7,48(%rdi)
- leaq 64(%rdi),%rdi
-
- vmovdqa32 %ymm16,%ymm0
- vmovdqa32 %ymm17,%ymm1
- jnz .Loop_outer_avx512vl
-
- jmp .Ldone_avx512vl
-
-.align 16
-.Ltail64_avx512vl:
- vmovdqa %xmm0,0(%rsp)
- vmovdqa %xmm1,16(%rsp)
- vmovdqa %xmm2,32(%rsp)
- vmovdqa %xmm3,48(%rsp)
- addq $64,%rdx
- jmp .Loop_tail_avx512vl
-
-.align 16
-.Ltail_avx512vl:
- vmovdqa %xmm4,0(%rsp)
- vmovdqa %xmm5,16(%rsp)
- vmovdqa %xmm6,32(%rsp)
- vmovdqa %xmm7,48(%rsp)
- addq $64,%rdx
-
-.Loop_tail_avx512vl:
- movzbl (%rsi,%r8,1),%eax
- movzbl (%rsp,%r8,1),%ecx
- leaq 1(%r8),%r8
- xorl %ecx,%eax
- movb %al,-1(%rdi,%r8,1)
- decq %rdx
- jnz .Loop_tail_avx512vl
-
- vmovdqu32 %ymm16,0(%rsp)
- vmovdqu32 %ymm16,32(%rsp)
-
-.Ldone_avx512vl:
- vzeroall
- leaq (%r9),%rsp
-.cfi_def_cfa_register %rsp
-.Lavx512vl_epilogue:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size ChaCha20_avx512vl,.-ChaCha20_avx512vl
-.type ChaCha20_16x,@function
-.align 32
-ChaCha20_16x:
-.cfi_startproc
-.LChaCha20_16x:
- movq %rsp,%r9
-.cfi_def_cfa_register %r9
- subq $64+8,%rsp
- andq $-64,%rsp
- vzeroupper
-
- leaq .Lsigma(%rip),%r10
- vbroadcasti32x4 (%r10),%zmm3
- vbroadcasti32x4 (%rcx),%zmm7
- vbroadcasti32x4 16(%rcx),%zmm11
- vbroadcasti32x4 (%r8),%zmm15
-
- vpshufd $0x00,%zmm3,%zmm0
- vpshufd $0x55,%zmm3,%zmm1
- vpshufd $0xaa,%zmm3,%zmm2
- vpshufd $0xff,%zmm3,%zmm3
- vmovdqa64 %zmm0,%zmm16
- vmovdqa64 %zmm1,%zmm17
- vmovdqa64 %zmm2,%zmm18
- vmovdqa64 %zmm3,%zmm19
-
- vpshufd $0x00,%zmm7,%zmm4
- vpshufd $0x55,%zmm7,%zmm5
- vpshufd $0xaa,%zmm7,%zmm6
- vpshufd $0xff,%zmm7,%zmm7
- vmovdqa64 %zmm4,%zmm20
- vmovdqa64 %zmm5,%zmm21
- vmovdqa64 %zmm6,%zmm22
- vmovdqa64 %zmm7,%zmm23
-
- vpshufd $0x00,%zmm11,%zmm8
- vpshufd $0x55,%zmm11,%zmm9
- vpshufd $0xaa,%zmm11,%zmm10
- vpshufd $0xff,%zmm11,%zmm11
- vmovdqa64 %zmm8,%zmm24
- vmovdqa64 %zmm9,%zmm25
- vmovdqa64 %zmm10,%zmm26
- vmovdqa64 %zmm11,%zmm27
-
- vpshufd $0x00,%zmm15,%zmm12
- vpshufd $0x55,%zmm15,%zmm13
- vpshufd $0xaa,%zmm15,%zmm14
- vpshufd $0xff,%zmm15,%zmm15
- vpaddd .Lincz(%rip),%zmm12,%zmm12
- vmovdqa64 %zmm12,%zmm28
- vmovdqa64 %zmm13,%zmm29
- vmovdqa64 %zmm14,%zmm30
- vmovdqa64 %zmm15,%zmm31
-
- movl $10,%eax
- jmp .Loop16x
-
-.align 32
-.Loop_outer16x:
- vpbroadcastd 0(%r10),%zmm0
- vpbroadcastd 4(%r10),%zmm1
- vpbroadcastd 8(%r10),%zmm2
- vpbroadcastd 12(%r10),%zmm3
- vpaddd .Lsixteen(%rip),%zmm28,%zmm28
- vmovdqa64 %zmm20,%zmm4
- vmovdqa64 %zmm21,%zmm5
- vmovdqa64 %zmm22,%zmm6
- vmovdqa64 %zmm23,%zmm7
- vmovdqa64 %zmm24,%zmm8
- vmovdqa64 %zmm25,%zmm9
- vmovdqa64 %zmm26,%zmm10
- vmovdqa64 %zmm27,%zmm11
- vmovdqa64 %zmm28,%zmm12
- vmovdqa64 %zmm29,%zmm13
- vmovdqa64 %zmm30,%zmm14
- vmovdqa64 %zmm31,%zmm15
-
- vmovdqa64 %zmm0,%zmm16
- vmovdqa64 %zmm1,%zmm17
- vmovdqa64 %zmm2,%zmm18
- vmovdqa64 %zmm3,%zmm19
-
- movl $10,%eax
- jmp .Loop16x
-
-.align 32
-.Loop16x:
- vpaddd %zmm4,%zmm0,%zmm0
- vpaddd %zmm5,%zmm1,%zmm1
- vpaddd %zmm6,%zmm2,%zmm2
- vpaddd %zmm7,%zmm3,%zmm3
- vpxord %zmm0,%zmm12,%zmm12
- vpxord %zmm1,%zmm13,%zmm13
- vpxord %zmm2,%zmm14,%zmm14
- vpxord %zmm3,%zmm15,%zmm15
- vprold $16,%zmm12,%zmm12
- vprold $16,%zmm13,%zmm13
- vprold $16,%zmm14,%zmm14
- vprold $16,%zmm15,%zmm15
- vpaddd %zmm12,%zmm8,%zmm8
- vpaddd %zmm13,%zmm9,%zmm9
- vpaddd %zmm14,%zmm10,%zmm10
- vpaddd %zmm15,%zmm11,%zmm11
- vpxord %zmm8,%zmm4,%zmm4
- vpxord %zmm9,%zmm5,%zmm5
- vpxord %zmm10,%zmm6,%zmm6
- vpxord %zmm11,%zmm7,%zmm7
- vprold $12,%zmm4,%zmm4
- vprold $12,%zmm5,%zmm5
- vprold $12,%zmm6,%zmm6
- vprold $12,%zmm7,%zmm7
- vpaddd %zmm4,%zmm0,%zmm0
- vpaddd %zmm5,%zmm1,%zmm1
- vpaddd %zmm6,%zmm2,%zmm2
- vpaddd %zmm7,%zmm3,%zmm3
- vpxord %zmm0,%zmm12,%zmm12
- vpxord %zmm1,%zmm13,%zmm13
- vpxord %zmm2,%zmm14,%zmm14
- vpxord %zmm3,%zmm15,%zmm15
- vprold $8,%zmm12,%zmm12
- vprold $8,%zmm13,%zmm13
- vprold $8,%zmm14,%zmm14
- vprold $8,%zmm15,%zmm15
- vpaddd %zmm12,%zmm8,%zmm8
- vpaddd %zmm13,%zmm9,%zmm9
- vpaddd %zmm14,%zmm10,%zmm10
- vpaddd %zmm15,%zmm11,%zmm11
- vpxord %zmm8,%zmm4,%zmm4
- vpxord %zmm9,%zmm5,%zmm5
- vpxord %zmm10,%zmm6,%zmm6
- vpxord %zmm11,%zmm7,%zmm7
- vprold $7,%zmm4,%zmm4
- vprold $7,%zmm5,%zmm5
- vprold $7,%zmm6,%zmm6
- vprold $7,%zmm7,%zmm7
- vpaddd %zmm5,%zmm0,%zmm0
- vpaddd %zmm6,%zmm1,%zmm1
- vpaddd %zmm7,%zmm2,%zmm2
- vpaddd %zmm4,%zmm3,%zmm3
- vpxord %zmm0,%zmm15,%zmm15
- vpxord %zmm1,%zmm12,%zmm12
- vpxord %zmm2,%zmm13,%zmm13
- vpxord %zmm3,%zmm14,%zmm14
- vprold $16,%zmm15,%zmm15
- vprold $16,%zmm12,%zmm12
- vprold $16,%zmm13,%zmm13
- vprold $16,%zmm14,%zmm14
- vpaddd %zmm15,%zmm10,%zmm10
- vpaddd %zmm12,%zmm11,%zmm11
- vpaddd %zmm13,%zmm8,%zmm8
- vpaddd %zmm14,%zmm9,%zmm9
- vpxord %zmm10,%zmm5,%zmm5
- vpxord %zmm11,%zmm6,%zmm6
- vpxord %zmm8,%zmm7,%zmm7
- vpxord %zmm9,%zmm4,%zmm4
- vprold $12,%zmm5,%zmm5
- vprold $12,%zmm6,%zmm6
- vprold $12,%zmm7,%zmm7
- vprold $12,%zmm4,%zmm4
- vpaddd %zmm5,%zmm0,%zmm0
- vpaddd %zmm6,%zmm1,%zmm1
- vpaddd %zmm7,%zmm2,%zmm2
- vpaddd %zmm4,%zmm3,%zmm3
- vpxord %zmm0,%zmm15,%zmm15
- vpxord %zmm1,%zmm12,%zmm12
- vpxord %zmm2,%zmm13,%zmm13
- vpxord %zmm3,%zmm14,%zmm14
- vprold $8,%zmm15,%zmm15
- vprold $8,%zmm12,%zmm12
- vprold $8,%zmm13,%zmm13
- vprold $8,%zmm14,%zmm14
- vpaddd %zmm15,%zmm10,%zmm10
- vpaddd %zmm12,%zmm11,%zmm11
- vpaddd %zmm13,%zmm8,%zmm8
- vpaddd %zmm14,%zmm9,%zmm9
- vpxord %zmm10,%zmm5,%zmm5
- vpxord %zmm11,%zmm6,%zmm6
- vpxord %zmm8,%zmm7,%zmm7
- vpxord %zmm9,%zmm4,%zmm4
- vprold $7,%zmm5,%zmm5
- vprold $7,%zmm6,%zmm6
- vprold $7,%zmm7,%zmm7
- vprold $7,%zmm4,%zmm4
- decl %eax
- jnz .Loop16x
-
- vpaddd %zmm16,%zmm0,%zmm0
- vpaddd %zmm17,%zmm1,%zmm1
- vpaddd %zmm18,%zmm2,%zmm2
- vpaddd %zmm19,%zmm3,%zmm3
-
- vpunpckldq %zmm1,%zmm0,%zmm18
- vpunpckldq %zmm3,%zmm2,%zmm19
- vpunpckhdq %zmm1,%zmm0,%zmm0
- vpunpckhdq %zmm3,%zmm2,%zmm2
- vpunpcklqdq %zmm19,%zmm18,%zmm1
- vpunpckhqdq %zmm19,%zmm18,%zmm18
- vpunpcklqdq %zmm2,%zmm0,%zmm3
- vpunpckhqdq %zmm2,%zmm0,%zmm0
- vpaddd %zmm20,%zmm4,%zmm4
- vpaddd %zmm21,%zmm5,%zmm5
- vpaddd %zmm22,%zmm6,%zmm6
- vpaddd %zmm23,%zmm7,%zmm7
-
- vpunpckldq %zmm5,%zmm4,%zmm2
- vpunpckldq %zmm7,%zmm6,%zmm19
- vpunpckhdq %zmm5,%zmm4,%zmm4
- vpunpckhdq %zmm7,%zmm6,%zmm6
- vpunpcklqdq %zmm19,%zmm2,%zmm5
- vpunpckhqdq %zmm19,%zmm2,%zmm2
- vpunpcklqdq %zmm6,%zmm4,%zmm7
- vpunpckhqdq %zmm6,%zmm4,%zmm4
- vshufi32x4 $0x44,%zmm5,%zmm1,%zmm19
- vshufi32x4 $0xee,%zmm5,%zmm1,%zmm5
- vshufi32x4 $0x44,%zmm2,%zmm18,%zmm1
- vshufi32x4 $0xee,%zmm2,%zmm18,%zmm2
- vshufi32x4 $0x44,%zmm7,%zmm3,%zmm18
- vshufi32x4 $0xee,%zmm7,%zmm3,%zmm7
- vshufi32x4 $0x44,%zmm4,%zmm0,%zmm3
- vshufi32x4 $0xee,%zmm4,%zmm0,%zmm4
- vpaddd %zmm24,%zmm8,%zmm8
- vpaddd %zmm25,%zmm9,%zmm9
- vpaddd %zmm26,%zmm10,%zmm10
- vpaddd %zmm27,%zmm11,%zmm11
-
- vpunpckldq %zmm9,%zmm8,%zmm6
- vpunpckldq %zmm11,%zmm10,%zmm0
- vpunpckhdq %zmm9,%zmm8,%zmm8
- vpunpckhdq %zmm11,%zmm10,%zmm10
- vpunpcklqdq %zmm0,%zmm6,%zmm9
- vpunpckhqdq %zmm0,%zmm6,%zmm6
- vpunpcklqdq %zmm10,%zmm8,%zmm11
- vpunpckhqdq %zmm10,%zmm8,%zmm8
- vpaddd %zmm28,%zmm12,%zmm12
- vpaddd %zmm29,%zmm13,%zmm13
- vpaddd %zmm30,%zmm14,%zmm14
- vpaddd %zmm31,%zmm15,%zmm15
-
- vpunpckldq %zmm13,%zmm12,%zmm10
- vpunpckldq %zmm15,%zmm14,%zmm0
- vpunpckhdq %zmm13,%zmm12,%zmm12
- vpunpckhdq %zmm15,%zmm14,%zmm14
- vpunpcklqdq %zmm0,%zmm10,%zmm13
- vpunpckhqdq %zmm0,%zmm10,%zmm10
- vpunpcklqdq %zmm14,%zmm12,%zmm15
- vpunpckhqdq %zmm14,%zmm12,%zmm12
- vshufi32x4 $0x44,%zmm13,%zmm9,%zmm0
- vshufi32x4 $0xee,%zmm13,%zmm9,%zmm13
- vshufi32x4 $0x44,%zmm10,%zmm6,%zmm9
- vshufi32x4 $0xee,%zmm10,%zmm6,%zmm10
- vshufi32x4 $0x44,%zmm15,%zmm11,%zmm6
- vshufi32x4 $0xee,%zmm15,%zmm11,%zmm15
- vshufi32x4 $0x44,%zmm12,%zmm8,%zmm11
- vshufi32x4 $0xee,%zmm12,%zmm8,%zmm12
- vshufi32x4 $0x88,%zmm0,%zmm19,%zmm16
- vshufi32x4 $0xdd,%zmm0,%zmm19,%zmm19
- vshufi32x4 $0x88,%zmm13,%zmm5,%zmm0
- vshufi32x4 $0xdd,%zmm13,%zmm5,%zmm13
- vshufi32x4 $0x88,%zmm9,%zmm1,%zmm17
- vshufi32x4 $0xdd,%zmm9,%zmm1,%zmm1
- vshufi32x4 $0x88,%zmm10,%zmm2,%zmm9
- vshufi32x4 $0xdd,%zmm10,%zmm2,%zmm10
- vshufi32x4 $0x88,%zmm6,%zmm18,%zmm14
- vshufi32x4 $0xdd,%zmm6,%zmm18,%zmm18
- vshufi32x4 $0x88,%zmm15,%zmm7,%zmm6
- vshufi32x4 $0xdd,%zmm15,%zmm7,%zmm15
- vshufi32x4 $0x88,%zmm11,%zmm3,%zmm8
- vshufi32x4 $0xdd,%zmm11,%zmm3,%zmm3
- vshufi32x4 $0x88,%zmm12,%zmm4,%zmm11
- vshufi32x4 $0xdd,%zmm12,%zmm4,%zmm12
- cmpq $1024,%rdx
- jb .Ltail16x
-
- vpxord 0(%rsi),%zmm16,%zmm16
- vpxord 64(%rsi),%zmm17,%zmm17
- vpxord 128(%rsi),%zmm14,%zmm14
- vpxord 192(%rsi),%zmm8,%zmm8
- vmovdqu32 %zmm16,0(%rdi)
- vmovdqu32 %zmm17,64(%rdi)
- vmovdqu32 %zmm14,128(%rdi)
- vmovdqu32 %zmm8,192(%rdi)
-
- vpxord 256(%rsi),%zmm19,%zmm19
- vpxord 320(%rsi),%zmm1,%zmm1
- vpxord 384(%rsi),%zmm18,%zmm18
- vpxord 448(%rsi),%zmm3,%zmm3
- vmovdqu32 %zmm19,256(%rdi)
- vmovdqu32 %zmm1,320(%rdi)
- vmovdqu32 %zmm18,384(%rdi)
- vmovdqu32 %zmm3,448(%rdi)
-
- vpxord 512(%rsi),%zmm0,%zmm0
- vpxord 576(%rsi),%zmm9,%zmm9
- vpxord 640(%rsi),%zmm6,%zmm6
- vpxord 704(%rsi),%zmm11,%zmm11
- vmovdqu32 %zmm0,512(%rdi)
- vmovdqu32 %zmm9,576(%rdi)
- vmovdqu32 %zmm6,640(%rdi)
- vmovdqu32 %zmm11,704(%rdi)
-
- vpxord 768(%rsi),%zmm13,%zmm13
- vpxord 832(%rsi),%zmm10,%zmm10
- vpxord 896(%rsi),%zmm15,%zmm15
- vpxord 960(%rsi),%zmm12,%zmm12
- leaq 1024(%rsi),%rsi
- vmovdqu32 %zmm13,768(%rdi)
- vmovdqu32 %zmm10,832(%rdi)
- vmovdqu32 %zmm15,896(%rdi)
- vmovdqu32 %zmm12,960(%rdi)
- leaq 1024(%rdi),%rdi
-
- subq $1024,%rdx
- jnz .Loop_outer16x
-
- jmp .Ldone16x
-
-.align 32
-.Ltail16x:
- xorq %r10,%r10
- subq %rsi,%rdi
- cmpq $64,%rdx
- jb .Less_than_64_16x
- vpxord (%rsi),%zmm16,%zmm16
- vmovdqu32 %zmm16,(%rdi,%rsi,1)
- je .Ldone16x
- vmovdqa32 %zmm17,%zmm16
- leaq 64(%rsi),%rsi
-
- cmpq $128,%rdx
- jb .Less_than_64_16x
- vpxord (%rsi),%zmm17,%zmm17
- vmovdqu32 %zmm17,(%rdi,%rsi,1)
- je .Ldone16x
- vmovdqa32 %zmm14,%zmm16
- leaq 64(%rsi),%rsi
-
- cmpq $192,%rdx
- jb .Less_than_64_16x
- vpxord (%rsi),%zmm14,%zmm14
- vmovdqu32 %zmm14,(%rdi,%rsi,1)
- je .Ldone16x
- vmovdqa32 %zmm8,%zmm16
- leaq 64(%rsi),%rsi
-
- cmpq $256,%rdx
- jb .Less_than_64_16x
- vpxord (%rsi),%zmm8,%zmm8
- vmovdqu32 %zmm8,(%rdi,%rsi,1)
- je .Ldone16x
- vmovdqa32 %zmm19,%zmm16
- leaq 64(%rsi),%rsi
-
- cmpq $320,%rdx
- jb .Less_than_64_16x
- vpxord (%rsi),%zmm19,%zmm19
- vmovdqu32 %zmm19,(%rdi,%rsi,1)
- je .Ldone16x
- vmovdqa32 %zmm1,%zmm16
- leaq 64(%rsi),%rsi
-
- cmpq $384,%rdx
- jb .Less_than_64_16x
- vpxord (%rsi),%zmm1,%zmm1
- vmovdqu32 %zmm1,(%rdi,%rsi,1)
- je .Ldone16x
- vmovdqa32 %zmm18,%zmm16
- leaq 64(%rsi),%rsi
-
- cmpq $448,%rdx
- jb .Less_than_64_16x
- vpxord (%rsi),%zmm18,%zmm18
- vmovdqu32 %zmm18,(%rdi,%rsi,1)
- je .Ldone16x
- vmovdqa32 %zmm3,%zmm16
- leaq 64(%rsi),%rsi
-
- cmpq $512,%rdx
- jb .Less_than_64_16x
- vpxord (%rsi),%zmm3,%zmm3
- vmovdqu32 %zmm3,(%rdi,%rsi,1)
- je .Ldone16x
- vmovdqa32 %zmm0,%zmm16
- leaq 64(%rsi),%rsi
-
- cmpq $576,%rdx
- jb .Less_than_64_16x
- vpxord (%rsi),%zmm0,%zmm0
- vmovdqu32 %zmm0,(%rdi,%rsi,1)
- je .Ldone16x
- vmovdqa32 %zmm9,%zmm16
- leaq 64(%rsi),%rsi
-
- cmpq $640,%rdx
- jb .Less_than_64_16x
- vpxord (%rsi),%zmm9,%zmm9
- vmovdqu32 %zmm9,(%rdi,%rsi,1)
- je .Ldone16x
- vmovdqa32 %zmm6,%zmm16
- leaq 64(%rsi),%rsi
-
- cmpq $704,%rdx
- jb .Less_than_64_16x
- vpxord (%rsi),%zmm6,%zmm6
- vmovdqu32 %zmm6,(%rdi,%rsi,1)
- je .Ldone16x
- vmovdqa32 %zmm11,%zmm16
- leaq 64(%rsi),%rsi
-
- cmpq $768,%rdx
- jb .Less_than_64_16x
- vpxord (%rsi),%zmm11,%zmm11
- vmovdqu32 %zmm11,(%rdi,%rsi,1)
- je .Ldone16x
- vmovdqa32 %zmm13,%zmm16
- leaq 64(%rsi),%rsi
-
- cmpq $832,%rdx
- jb .Less_than_64_16x
- vpxord (%rsi),%zmm13,%zmm13
- vmovdqu32 %zmm13,(%rdi,%rsi,1)
- je .Ldone16x
- vmovdqa32 %zmm10,%zmm16
- leaq 64(%rsi),%rsi
-
- cmpq $896,%rdx
- jb .Less_than_64_16x
- vpxord (%rsi),%zmm10,%zmm10
- vmovdqu32 %zmm10,(%rdi,%rsi,1)
- je .Ldone16x
- vmovdqa32 %zmm15,%zmm16
- leaq 64(%rsi),%rsi
-
- cmpq $960,%rdx
- jb .Less_than_64_16x
- vpxord (%rsi),%zmm15,%zmm15
- vmovdqu32 %zmm15,(%rdi,%rsi,1)
- je .Ldone16x
- vmovdqa32 %zmm12,%zmm16
- leaq 64(%rsi),%rsi
-
-.Less_than_64_16x:
- vmovdqa32 %zmm16,0(%rsp)
- leaq (%rdi,%rsi,1),%rdi
- andq $63,%rdx
-
-.Loop_tail16x:
- movzbl (%rsi,%r10,1),%eax
- movzbl (%rsp,%r10,1),%ecx
- leaq 1(%r10),%r10
- xorl %ecx,%eax
- movb %al,-1(%rdi,%r10,1)
- decq %rdx
- jnz .Loop_tail16x
-
- vpxord %zmm16,%zmm16,%zmm16
- vmovdqa32 %zmm16,0(%rsp)
-
-.Ldone16x:
- vzeroall
- leaq (%r9),%rsp
-.cfi_def_cfa_register %rsp
-.L16x_epilogue:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size ChaCha20_16x,.-ChaCha20_16x
-.type ChaCha20_8xvl,@function
-.align 32
-ChaCha20_8xvl:
-.cfi_startproc
-.LChaCha20_8xvl:
- movq %rsp,%r9
-.cfi_def_cfa_register %r9
- subq $64+8,%rsp
- andq $-64,%rsp
- vzeroupper
-
- leaq .Lsigma(%rip),%r10
- vbroadcasti128 (%r10),%ymm3
- vbroadcasti128 (%rcx),%ymm7
- vbroadcasti128 16(%rcx),%ymm11
- vbroadcasti128 (%r8),%ymm15
-
- vpshufd $0x00,%ymm3,%ymm0
- vpshufd $0x55,%ymm3,%ymm1
- vpshufd $0xaa,%ymm3,%ymm2
- vpshufd $0xff,%ymm3,%ymm3
- vmovdqa64 %ymm0,%ymm16
- vmovdqa64 %ymm1,%ymm17
- vmovdqa64 %ymm2,%ymm18
- vmovdqa64 %ymm3,%ymm19
-
- vpshufd $0x00,%ymm7,%ymm4
- vpshufd $0x55,%ymm7,%ymm5
- vpshufd $0xaa,%ymm7,%ymm6
- vpshufd $0xff,%ymm7,%ymm7
- vmovdqa64 %ymm4,%ymm20
- vmovdqa64 %ymm5,%ymm21
- vmovdqa64 %ymm6,%ymm22
- vmovdqa64 %ymm7,%ymm23
-
- vpshufd $0x00,%ymm11,%ymm8
- vpshufd $0x55,%ymm11,%ymm9
- vpshufd $0xaa,%ymm11,%ymm10
- vpshufd $0xff,%ymm11,%ymm11
- vmovdqa64 %ymm8,%ymm24
- vmovdqa64 %ymm9,%ymm25
- vmovdqa64 %ymm10,%ymm26
- vmovdqa64 %ymm11,%ymm27
-
- vpshufd $0x00,%ymm15,%ymm12
- vpshufd $0x55,%ymm15,%ymm13
- vpshufd $0xaa,%ymm15,%ymm14
- vpshufd $0xff,%ymm15,%ymm15
- vpaddd .Lincy(%rip),%ymm12,%ymm12
- vmovdqa64 %ymm12,%ymm28
- vmovdqa64 %ymm13,%ymm29
- vmovdqa64 %ymm14,%ymm30
- vmovdqa64 %ymm15,%ymm31
-
- movl $10,%eax
- jmp .Loop8xvl
-
-.align 32
-.Loop_outer8xvl:
-
-
- vpbroadcastd 8(%r10),%ymm2
- vpbroadcastd 12(%r10),%ymm3
- vpaddd .Leight(%rip),%ymm28,%ymm28
- vmovdqa64 %ymm20,%ymm4
- vmovdqa64 %ymm21,%ymm5
- vmovdqa64 %ymm22,%ymm6
- vmovdqa64 %ymm23,%ymm7
- vmovdqa64 %ymm24,%ymm8
- vmovdqa64 %ymm25,%ymm9
- vmovdqa64 %ymm26,%ymm10
- vmovdqa64 %ymm27,%ymm11
- vmovdqa64 %ymm28,%ymm12
- vmovdqa64 %ymm29,%ymm13
- vmovdqa64 %ymm30,%ymm14
- vmovdqa64 %ymm31,%ymm15
-
- vmovdqa64 %ymm0,%ymm16
- vmovdqa64 %ymm1,%ymm17
- vmovdqa64 %ymm2,%ymm18
- vmovdqa64 %ymm3,%ymm19
-
- movl $10,%eax
- jmp .Loop8xvl
-
-.align 32
-.Loop8xvl:
- vpaddd %ymm4,%ymm0,%ymm0
- vpaddd %ymm5,%ymm1,%ymm1
- vpaddd %ymm6,%ymm2,%ymm2
- vpaddd %ymm7,%ymm3,%ymm3
- vpxor %ymm0,%ymm12,%ymm12
- vpxor %ymm1,%ymm13,%ymm13
- vpxor %ymm2,%ymm14,%ymm14
- vpxor %ymm3,%ymm15,%ymm15
- vprold $16,%ymm12,%ymm12
- vprold $16,%ymm13,%ymm13
- vprold $16,%ymm14,%ymm14
- vprold $16,%ymm15,%ymm15
- vpaddd %ymm12,%ymm8,%ymm8
- vpaddd %ymm13,%ymm9,%ymm9
- vpaddd %ymm14,%ymm10,%ymm10
- vpaddd %ymm15,%ymm11,%ymm11
- vpxor %ymm8,%ymm4,%ymm4
- vpxor %ymm9,%ymm5,%ymm5
- vpxor %ymm10,%ymm6,%ymm6
- vpxor %ymm11,%ymm7,%ymm7
- vprold $12,%ymm4,%ymm4
- vprold $12,%ymm5,%ymm5
- vprold $12,%ymm6,%ymm6
- vprold $12,%ymm7,%ymm7
- vpaddd %ymm4,%ymm0,%ymm0
- vpaddd %ymm5,%ymm1,%ymm1
- vpaddd %ymm6,%ymm2,%ymm2
- vpaddd %ymm7,%ymm3,%ymm3
- vpxor %ymm0,%ymm12,%ymm12
- vpxor %ymm1,%ymm13,%ymm13
- vpxor %ymm2,%ymm14,%ymm14
- vpxor %ymm3,%ymm15,%ymm15
- vprold $8,%ymm12,%ymm12
- vprold $8,%ymm13,%ymm13
- vprold $8,%ymm14,%ymm14
- vprold $8,%ymm15,%ymm15
- vpaddd %ymm12,%ymm8,%ymm8
- vpaddd %ymm13,%ymm9,%ymm9
- vpaddd %ymm14,%ymm10,%ymm10
- vpaddd %ymm15,%ymm11,%ymm11
- vpxor %ymm8,%ymm4,%ymm4
- vpxor %ymm9,%ymm5,%ymm5
- vpxor %ymm10,%ymm6,%ymm6
- vpxor %ymm11,%ymm7,%ymm7
- vprold $7,%ymm4,%ymm4
- vprold $7,%ymm5,%ymm5
- vprold $7,%ymm6,%ymm6
- vprold $7,%ymm7,%ymm7
- vpaddd %ymm5,%ymm0,%ymm0
- vpaddd %ymm6,%ymm1,%ymm1
- vpaddd %ymm7,%ymm2,%ymm2
- vpaddd %ymm4,%ymm3,%ymm3
- vpxor %ymm0,%ymm15,%ymm15
- vpxor %ymm1,%ymm12,%ymm12
- vpxor %ymm2,%ymm13,%ymm13
- vpxor %ymm3,%ymm14,%ymm14
- vprold $16,%ymm15,%ymm15
- vprold $16,%ymm12,%ymm12
- vprold $16,%ymm13,%ymm13
- vprold $16,%ymm14,%ymm14
- vpaddd %ymm15,%ymm10,%ymm10
- vpaddd %ymm12,%ymm11,%ymm11
- vpaddd %ymm13,%ymm8,%ymm8
- vpaddd %ymm14,%ymm9,%ymm9
- vpxor %ymm10,%ymm5,%ymm5
- vpxor %ymm11,%ymm6,%ymm6
- vpxor %ymm8,%ymm7,%ymm7
- vpxor %ymm9,%ymm4,%ymm4
- vprold $12,%ymm5,%ymm5
- vprold $12,%ymm6,%ymm6
- vprold $12,%ymm7,%ymm7
- vprold $12,%ymm4,%ymm4
- vpaddd %ymm5,%ymm0,%ymm0
- vpaddd %ymm6,%ymm1,%ymm1
- vpaddd %ymm7,%ymm2,%ymm2
- vpaddd %ymm4,%ymm3,%ymm3
- vpxor %ymm0,%ymm15,%ymm15
- vpxor %ymm1,%ymm12,%ymm12
- vpxor %ymm2,%ymm13,%ymm13
- vpxor %ymm3,%ymm14,%ymm14
- vprold $8,%ymm15,%ymm15
- vprold $8,%ymm12,%ymm12
- vprold $8,%ymm13,%ymm13
- vprold $8,%ymm14,%ymm14
- vpaddd %ymm15,%ymm10,%ymm10
- vpaddd %ymm12,%ymm11,%ymm11
- vpaddd %ymm13,%ymm8,%ymm8
- vpaddd %ymm14,%ymm9,%ymm9
- vpxor %ymm10,%ymm5,%ymm5
- vpxor %ymm11,%ymm6,%ymm6
- vpxor %ymm8,%ymm7,%ymm7
- vpxor %ymm9,%ymm4,%ymm4
- vprold $7,%ymm5,%ymm5
- vprold $7,%ymm6,%ymm6
- vprold $7,%ymm7,%ymm7
- vprold $7,%ymm4,%ymm4
- decl %eax
- jnz .Loop8xvl
-
- vpaddd %ymm16,%ymm0,%ymm0
- vpaddd %ymm17,%ymm1,%ymm1
- vpaddd %ymm18,%ymm2,%ymm2
- vpaddd %ymm19,%ymm3,%ymm3
-
- vpunpckldq %ymm1,%ymm0,%ymm18
- vpunpckldq %ymm3,%ymm2,%ymm19
- vpunpckhdq %ymm1,%ymm0,%ymm0
- vpunpckhdq %ymm3,%ymm2,%ymm2
- vpunpcklqdq %ymm19,%ymm18,%ymm1
- vpunpckhqdq %ymm19,%ymm18,%ymm18
- vpunpcklqdq %ymm2,%ymm0,%ymm3
- vpunpckhqdq %ymm2,%ymm0,%ymm0
- vpaddd %ymm20,%ymm4,%ymm4
- vpaddd %ymm21,%ymm5,%ymm5
- vpaddd %ymm22,%ymm6,%ymm6
- vpaddd %ymm23,%ymm7,%ymm7
-
- vpunpckldq %ymm5,%ymm4,%ymm2
- vpunpckldq %ymm7,%ymm6,%ymm19
- vpunpckhdq %ymm5,%ymm4,%ymm4
- vpunpckhdq %ymm7,%ymm6,%ymm6
- vpunpcklqdq %ymm19,%ymm2,%ymm5
- vpunpckhqdq %ymm19,%ymm2,%ymm2
- vpunpcklqdq %ymm6,%ymm4,%ymm7
- vpunpckhqdq %ymm6,%ymm4,%ymm4
- vshufi32x4 $0,%ymm5,%ymm1,%ymm19
- vshufi32x4 $3,%ymm5,%ymm1,%ymm5
- vshufi32x4 $0,%ymm2,%ymm18,%ymm1
- vshufi32x4 $3,%ymm2,%ymm18,%ymm2
- vshufi32x4 $0,%ymm7,%ymm3,%ymm18
- vshufi32x4 $3,%ymm7,%ymm3,%ymm7
- vshufi32x4 $0,%ymm4,%ymm0,%ymm3
- vshufi32x4 $3,%ymm4,%ymm0,%ymm4
- vpaddd %ymm24,%ymm8,%ymm8
- vpaddd %ymm25,%ymm9,%ymm9
- vpaddd %ymm26,%ymm10,%ymm10
- vpaddd %ymm27,%ymm11,%ymm11
-
- vpunpckldq %ymm9,%ymm8,%ymm6
- vpunpckldq %ymm11,%ymm10,%ymm0
- vpunpckhdq %ymm9,%ymm8,%ymm8
- vpunpckhdq %ymm11,%ymm10,%ymm10
- vpunpcklqdq %ymm0,%ymm6,%ymm9
- vpunpckhqdq %ymm0,%ymm6,%ymm6
- vpunpcklqdq %ymm10,%ymm8,%ymm11
- vpunpckhqdq %ymm10,%ymm8,%ymm8
- vpaddd %ymm28,%ymm12,%ymm12
- vpaddd %ymm29,%ymm13,%ymm13
- vpaddd %ymm30,%ymm14,%ymm14
- vpaddd %ymm31,%ymm15,%ymm15
-
- vpunpckldq %ymm13,%ymm12,%ymm10
- vpunpckldq %ymm15,%ymm14,%ymm0
- vpunpckhdq %ymm13,%ymm12,%ymm12
- vpunpckhdq %ymm15,%ymm14,%ymm14
- vpunpcklqdq %ymm0,%ymm10,%ymm13
- vpunpckhqdq %ymm0,%ymm10,%ymm10
- vpunpcklqdq %ymm14,%ymm12,%ymm15
- vpunpckhqdq %ymm14,%ymm12,%ymm12
- vperm2i128 $0x20,%ymm13,%ymm9,%ymm0
- vperm2i128 $0x31,%ymm13,%ymm9,%ymm13
- vperm2i128 $0x20,%ymm10,%ymm6,%ymm9
- vperm2i128 $0x31,%ymm10,%ymm6,%ymm10
- vperm2i128 $0x20,%ymm15,%ymm11,%ymm6
- vperm2i128 $0x31,%ymm15,%ymm11,%ymm15
- vperm2i128 $0x20,%ymm12,%ymm8,%ymm11
- vperm2i128 $0x31,%ymm12,%ymm8,%ymm12
- cmpq $512,%rdx
- jb .Ltail8xvl
-
- movl $0x80,%eax
- vpxord 0(%rsi),%ymm19,%ymm19
- vpxor 32(%rsi),%ymm0,%ymm0
- vpxor 64(%rsi),%ymm5,%ymm5
- vpxor 96(%rsi),%ymm13,%ymm13
- leaq (%rsi,%rax,1),%rsi
- vmovdqu32 %ymm19,0(%rdi)
- vmovdqu %ymm0,32(%rdi)
- vmovdqu %ymm5,64(%rdi)
- vmovdqu %ymm13,96(%rdi)
- leaq (%rdi,%rax,1),%rdi
-
- vpxor 0(%rsi),%ymm1,%ymm1
- vpxor 32(%rsi),%ymm9,%ymm9
- vpxor 64(%rsi),%ymm2,%ymm2
- vpxor 96(%rsi),%ymm10,%ymm10
- leaq (%rsi,%rax,1),%rsi
- vmovdqu %ymm1,0(%rdi)
- vmovdqu %ymm9,32(%rdi)
- vmovdqu %ymm2,64(%rdi)
- vmovdqu %ymm10,96(%rdi)
- leaq (%rdi,%rax,1),%rdi
-
- vpxord 0(%rsi),%ymm18,%ymm18
- vpxor 32(%rsi),%ymm6,%ymm6
- vpxor 64(%rsi),%ymm7,%ymm7
- vpxor 96(%rsi),%ymm15,%ymm15
- leaq (%rsi,%rax,1),%rsi
- vmovdqu32 %ymm18,0(%rdi)
- vmovdqu %ymm6,32(%rdi)
- vmovdqu %ymm7,64(%rdi)
- vmovdqu %ymm15,96(%rdi)
- leaq (%rdi,%rax,1),%rdi
-
- vpxor 0(%rsi),%ymm3,%ymm3
- vpxor 32(%rsi),%ymm11,%ymm11
- vpxor 64(%rsi),%ymm4,%ymm4
- vpxor 96(%rsi),%ymm12,%ymm12
- leaq (%rsi,%rax,1),%rsi
- vmovdqu %ymm3,0(%rdi)
- vmovdqu %ymm11,32(%rdi)
- vmovdqu %ymm4,64(%rdi)
- vmovdqu %ymm12,96(%rdi)
- leaq (%rdi,%rax,1),%rdi
-
- vpbroadcastd 0(%r10),%ymm0
- vpbroadcastd 4(%r10),%ymm1
-
- subq $512,%rdx
- jnz .Loop_outer8xvl
-
- jmp .Ldone8xvl
-
-.align 32
-.Ltail8xvl:
- vmovdqa64 %ymm19,%ymm8
- xorq %r10,%r10
- subq %rsi,%rdi
- cmpq $64,%rdx
- jb .Less_than_64_8xvl
- vpxor 0(%rsi),%ymm8,%ymm8
- vpxor 32(%rsi),%ymm0,%ymm0
- vmovdqu %ymm8,0(%rdi,%rsi,1)
- vmovdqu %ymm0,32(%rdi,%rsi,1)
- je .Ldone8xvl
- vmovdqa %ymm5,%ymm8
- vmovdqa %ymm13,%ymm0
- leaq 64(%rsi),%rsi
-
- cmpq $128,%rdx
- jb .Less_than_64_8xvl
- vpxor 0(%rsi),%ymm5,%ymm5
- vpxor 32(%rsi),%ymm13,%ymm13
- vmovdqu %ymm5,0(%rdi,%rsi,1)
- vmovdqu %ymm13,32(%rdi,%rsi,1)
- je .Ldone8xvl
- vmovdqa %ymm1,%ymm8
- vmovdqa %ymm9,%ymm0
- leaq 64(%rsi),%rsi
-
- cmpq $192,%rdx
- jb .Less_than_64_8xvl
- vpxor 0(%rsi),%ymm1,%ymm1
- vpxor 32(%rsi),%ymm9,%ymm9
- vmovdqu %ymm1,0(%rdi,%rsi,1)
- vmovdqu %ymm9,32(%rdi,%rsi,1)
- je .Ldone8xvl
- vmovdqa %ymm2,%ymm8
- vmovdqa %ymm10,%ymm0
- leaq 64(%rsi),%rsi
-
- cmpq $256,%rdx
- jb .Less_than_64_8xvl
- vpxor 0(%rsi),%ymm2,%ymm2
- vpxor 32(%rsi),%ymm10,%ymm10
- vmovdqu %ymm2,0(%rdi,%rsi,1)
- vmovdqu %ymm10,32(%rdi,%rsi,1)
- je .Ldone8xvl
- vmovdqa32 %ymm18,%ymm8
- vmovdqa %ymm6,%ymm0
- leaq 64(%rsi),%rsi
-
- cmpq $320,%rdx
- jb .Less_than_64_8xvl
- vpxord 0(%rsi),%ymm18,%ymm18
- vpxor 32(%rsi),%ymm6,%ymm6
- vmovdqu32 %ymm18,0(%rdi,%rsi,1)
- vmovdqu %ymm6,32(%rdi,%rsi,1)
- je .Ldone8xvl
- vmovdqa %ymm7,%ymm8
- vmovdqa %ymm15,%ymm0
- leaq 64(%rsi),%rsi
-
- cmpq $384,%rdx
- jb .Less_than_64_8xvl
- vpxor 0(%rsi),%ymm7,%ymm7
- vpxor 32(%rsi),%ymm15,%ymm15
- vmovdqu %ymm7,0(%rdi,%rsi,1)
- vmovdqu %ymm15,32(%rdi,%rsi,1)
- je .Ldone8xvl
- vmovdqa %ymm3,%ymm8
- vmovdqa %ymm11,%ymm0
- leaq 64(%rsi),%rsi
-
- cmpq $448,%rdx
- jb .Less_than_64_8xvl
- vpxor 0(%rsi),%ymm3,%ymm3
- vpxor 32(%rsi),%ymm11,%ymm11
- vmovdqu %ymm3,0(%rdi,%rsi,1)
- vmovdqu %ymm11,32(%rdi,%rsi,1)
- je .Ldone8xvl
- vmovdqa %ymm4,%ymm8
- vmovdqa %ymm12,%ymm0
- leaq 64(%rsi),%rsi
-
-.Less_than_64_8xvl:
- vmovdqa %ymm8,0(%rsp)
- vmovdqa %ymm0,32(%rsp)
- leaq (%rdi,%rsi,1),%rdi
- andq $63,%rdx
-
-.Loop_tail8xvl:
- movzbl (%rsi,%r10,1),%eax
- movzbl (%rsp,%r10,1),%ecx
- leaq 1(%r10),%r10
- xorl %ecx,%eax
- movb %al,-1(%rdi,%r10,1)
- decq %rdx
- jnz .Loop_tail8xvl
-
- vpxor %ymm8,%ymm8,%ymm8
- vmovdqa %ymm8,0(%rsp)
- vmovdqa %ymm8,32(%rsp)
-
-.Ldone8xvl:
- vzeroall
- leaq (%r9),%rsp
-.cfi_def_cfa_register %rsp
-.L8xvl_epilogue:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size ChaCha20_8xvl,.-ChaCha20_8xvl
+.type ChaCha20_avx512,@function
+.align 32
+ChaCha20_avx512:
+.cfi_startproc
+.LChaCha20_avx512:
+ movq %rsp,%r9
+.cfi_def_cfa_register %r9
+ cmpq $512,%rdx
+ ja .LChaCha20_16x
+
+ subq $64+8,%rsp
+ vbroadcasti32x4 .Lsigma(%rip),%zmm0
+ vbroadcasti32x4 (%rcx),%zmm1
+ vbroadcasti32x4 16(%rcx),%zmm2
+ vbroadcasti32x4 (%r8),%zmm3
+
+ vmovdqa32 %zmm0,%zmm16
+ vmovdqa32 %zmm1,%zmm17
+ vmovdqa32 %zmm2,%zmm18
+ vpaddd .Lzeroz(%rip),%zmm3,%zmm3
+ vmovdqa32 .Lfourz(%rip),%zmm20
+ movq $10,%r8
+ vmovdqa32 %zmm3,%zmm19
+ jmp .Loop_avx512
+
+.align 16
+.Loop_outer_avx512:
+ vmovdqa32 %zmm16,%zmm0
+ vmovdqa32 %zmm17,%zmm1
+ vmovdqa32 %zmm18,%zmm2
+ vpaddd %zmm20,%zmm19,%zmm3
+ movq $10,%r8
+ vmovdqa32 %zmm3,%zmm19
+ jmp .Loop_avx512
+
+.align 32
+.Loop_avx512:
+ vpaddd %zmm1,%zmm0,%zmm0
+ vpxord %zmm0,%zmm3,%zmm3
+ vprold $16,%zmm3,%zmm3
+ vpaddd %zmm3,%zmm2,%zmm2
+ vpxord %zmm2,%zmm1,%zmm1
+ vprold $12,%zmm1,%zmm1
+ vpaddd %zmm1,%zmm0,%zmm0
+ vpxord %zmm0,%zmm3,%zmm3
+ vprold $8,%zmm3,%zmm3
+ vpaddd %zmm3,%zmm2,%zmm2
+ vpxord %zmm2,%zmm1,%zmm1
+ vprold $7,%zmm1,%zmm1
+ vpshufd $78,%zmm2,%zmm2
+ vpshufd $57,%zmm1,%zmm1
+ vpshufd $147,%zmm3,%zmm3
+ vpaddd %zmm1,%zmm0,%zmm0
+ vpxord %zmm0,%zmm3,%zmm3
+ vprold $16,%zmm3,%zmm3
+ vpaddd %zmm3,%zmm2,%zmm2
+ vpxord %zmm2,%zmm1,%zmm1
+ vprold $12,%zmm1,%zmm1
+ vpaddd %zmm1,%zmm0,%zmm0
+ vpxord %zmm0,%zmm3,%zmm3
+ vprold $8,%zmm3,%zmm3
+ vpaddd %zmm3,%zmm2,%zmm2
+ vpxord %zmm2,%zmm1,%zmm1
+ vprold $7,%zmm1,%zmm1
+ vpshufd $78,%zmm2,%zmm2
+ vpshufd $147,%zmm1,%zmm1
+ vpshufd $57,%zmm3,%zmm3
+ decq %r8
+ jnz .Loop_avx512
+ vpaddd %zmm16,%zmm0,%zmm0
+ vpaddd %zmm17,%zmm1,%zmm1
+ vpaddd %zmm18,%zmm2,%zmm2
+ vpaddd %zmm19,%zmm3,%zmm3
+
+ subq $64,%rdx
+ jb .Ltail64_avx512
+
+ vpxor 0(%rsi),%xmm0,%xmm4
+ vpxor 16(%rsi),%xmm1,%xmm5
+ vpxor 32(%rsi),%xmm2,%xmm6
+ vpxor 48(%rsi),%xmm3,%xmm7
+ leaq 64(%rsi),%rsi
+
+ vmovdqu %xmm4,0(%rdi)
+ vmovdqu %xmm5,16(%rdi)
+ vmovdqu %xmm6,32(%rdi)
+ vmovdqu %xmm7,48(%rdi)
+ leaq 64(%rdi),%rdi
+
+ jz .Ldone_avx512
+
+ vextracti32x4 $1,%zmm0,%xmm4
+ vextracti32x4 $1,%zmm1,%xmm5
+ vextracti32x4 $1,%zmm2,%xmm6
+ vextracti32x4 $1,%zmm3,%xmm7
+
+ subq $64,%rdx
+ jb .Ltail_avx512
+
+ vpxor 0(%rsi),%xmm4,%xmm4
+ vpxor 16(%rsi),%xmm5,%xmm5
+ vpxor 32(%rsi),%xmm6,%xmm6
+ vpxor 48(%rsi),%xmm7,%xmm7
+ leaq 64(%rsi),%rsi
+
+ vmovdqu %xmm4,0(%rdi)
+ vmovdqu %xmm5,16(%rdi)
+ vmovdqu %xmm6,32(%rdi)
+ vmovdqu %xmm7,48(%rdi)
+ leaq 64(%rdi),%rdi
+
+ jz .Ldone_avx512
+
+ vextracti32x4 $2,%zmm0,%xmm4
+ vextracti32x4 $2,%zmm1,%xmm5
+ vextracti32x4 $2,%zmm2,%xmm6
+ vextracti32x4 $2,%zmm3,%xmm7
+
+ subq $64,%rdx
+ jb .Ltail_avx512
+
+ vpxor 0(%rsi),%xmm4,%xmm4
+ vpxor 16(%rsi),%xmm5,%xmm5
+ vpxor 32(%rsi),%xmm6,%xmm6
+ vpxor 48(%rsi),%xmm7,%xmm7
+ leaq 64(%rsi),%rsi
+
+ vmovdqu %xmm4,0(%rdi)
+ vmovdqu %xmm5,16(%rdi)
+ vmovdqu %xmm6,32(%rdi)
+ vmovdqu %xmm7,48(%rdi)
+ leaq 64(%rdi),%rdi
+
+ jz .Ldone_avx512
+
+ vextracti32x4 $3,%zmm0,%xmm4
+ vextracti32x4 $3,%zmm1,%xmm5
+ vextracti32x4 $3,%zmm2,%xmm6
+ vextracti32x4 $3,%zmm3,%xmm7
+
+ subq $64,%rdx
+ jb .Ltail_avx512
+
+ vpxor 0(%rsi),%xmm4,%xmm4
+ vpxor 16(%rsi),%xmm5,%xmm5
+ vpxor 32(%rsi),%xmm6,%xmm6
+ vpxor 48(%rsi),%xmm7,%xmm7
+ leaq 64(%rsi),%rsi
+
+ vmovdqu %xmm4,0(%rdi)
+ vmovdqu %xmm5,16(%rdi)
+ vmovdqu %xmm6,32(%rdi)
+ vmovdqu %xmm7,48(%rdi)
+ leaq 64(%rdi),%rdi
+
+ jnz .Loop_outer_avx512
+
+ jmp .Ldone_avx512
+
+.align 16
+.Ltail64_avx512:
+ vmovdqa %xmm0,0(%rsp)
+ vmovdqa %xmm1,16(%rsp)
+ vmovdqa %xmm2,32(%rsp)
+ vmovdqa %xmm3,48(%rsp)
+ addq $64,%rdx
+ jmp .Loop_tail_avx512
+
+.align 16
+.Ltail_avx512:
+ vmovdqa %xmm4,0(%rsp)
+ vmovdqa %xmm5,16(%rsp)
+ vmovdqa %xmm6,32(%rsp)
+ vmovdqa %xmm7,48(%rsp)
+ addq $64,%rdx
+
+.Loop_tail_avx512:
+ movzbl (%rsi,%r8,1),%eax
+ movzbl (%rsp,%r8,1),%ecx
+ leaq 1(%r8),%r8
+ xorl %ecx,%eax
+ movb %al,-1(%rdi,%r8,1)
+ decq %rdx
+ jnz .Loop_tail_avx512
+
+ vmovdqu32 %zmm16,0(%rsp)
+
+.Ldone_avx512:
+ vzeroall
+ leaq (%r9),%rsp
+.cfi_def_cfa_register %rsp
+.Lavx512_epilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size ChaCha20_avx512,.-ChaCha20_avx512
+.type ChaCha20_avx512vl,@function
+.align 32
+ChaCha20_avx512vl:
+.cfi_startproc
+.LChaCha20_avx512vl:
+ movq %rsp,%r9
+.cfi_def_cfa_register %r9
+ cmpq $128,%rdx
+ ja .LChaCha20_8xvl
+
+ subq $64+8,%rsp
+ vbroadcasti128 .Lsigma(%rip),%ymm0
+ vbroadcasti128 (%rcx),%ymm1
+ vbroadcasti128 16(%rcx),%ymm2
+ vbroadcasti128 (%r8),%ymm3
+
+ vmovdqa32 %ymm0,%ymm16
+ vmovdqa32 %ymm1,%ymm17
+ vmovdqa32 %ymm2,%ymm18
+ vpaddd .Lzeroz(%rip),%ymm3,%ymm3
+ vmovdqa32 .Ltwoy(%rip),%ymm20
+ movq $10,%r8
+ vmovdqa32 %ymm3,%ymm19
+ jmp .Loop_avx512vl
+
+.align 16
+.Loop_outer_avx512vl:
+ vmovdqa32 %ymm18,%ymm2
+ vpaddd %ymm20,%ymm19,%ymm3
+ movq $10,%r8
+ vmovdqa32 %ymm3,%ymm19
+ jmp .Loop_avx512vl
+
+.align 32
+.Loop_avx512vl:
+ vpaddd %ymm1,%ymm0,%ymm0
+ vpxor %ymm0,%ymm3,%ymm3
+ vprold $16,%ymm3,%ymm3
+ vpaddd %ymm3,%ymm2,%ymm2
+ vpxor %ymm2,%ymm1,%ymm1
+ vprold $12,%ymm1,%ymm1
+ vpaddd %ymm1,%ymm0,%ymm0
+ vpxor %ymm0,%ymm3,%ymm3
+ vprold $8,%ymm3,%ymm3
+ vpaddd %ymm3,%ymm2,%ymm2
+ vpxor %ymm2,%ymm1,%ymm1
+ vprold $7,%ymm1,%ymm1
+ vpshufd $78,%ymm2,%ymm2
+ vpshufd $57,%ymm1,%ymm1
+ vpshufd $147,%ymm3,%ymm3
+ vpaddd %ymm1,%ymm0,%ymm0
+ vpxor %ymm0,%ymm3,%ymm3
+ vprold $16,%ymm3,%ymm3
+ vpaddd %ymm3,%ymm2,%ymm2
+ vpxor %ymm2,%ymm1,%ymm1
+ vprold $12,%ymm1,%ymm1
+ vpaddd %ymm1,%ymm0,%ymm0
+ vpxor %ymm0,%ymm3,%ymm3
+ vprold $8,%ymm3,%ymm3
+ vpaddd %ymm3,%ymm2,%ymm2
+ vpxor %ymm2,%ymm1,%ymm1
+ vprold $7,%ymm1,%ymm1
+ vpshufd $78,%ymm2,%ymm2
+ vpshufd $147,%ymm1,%ymm1
+ vpshufd $57,%ymm3,%ymm3
+ decq %r8
+ jnz .Loop_avx512vl
+ vpaddd %ymm16,%ymm0,%ymm0
+ vpaddd %ymm17,%ymm1,%ymm1
+ vpaddd %ymm18,%ymm2,%ymm2
+ vpaddd %ymm19,%ymm3,%ymm3
+
+ subq $64,%rdx
+ jb .Ltail64_avx512vl
+
+ vpxor 0(%rsi),%xmm0,%xmm4
+ vpxor 16(%rsi),%xmm1,%xmm5
+ vpxor 32(%rsi),%xmm2,%xmm6
+ vpxor 48(%rsi),%xmm3,%xmm7
+ leaq 64(%rsi),%rsi
+
+ vmovdqu %xmm4,0(%rdi)
+ vmovdqu %xmm5,16(%rdi)
+ vmovdqu %xmm6,32(%rdi)
+ vmovdqu %xmm7,48(%rdi)
+ leaq 64(%rdi),%rdi
+
+ jz .Ldone_avx512vl
+
+ vextracti128 $1,%ymm0,%xmm4
+ vextracti128 $1,%ymm1,%xmm5
+ vextracti128 $1,%ymm2,%xmm6
+ vextracti128 $1,%ymm3,%xmm7
+
+ subq $64,%rdx
+ jb .Ltail_avx512vl
+
+ vpxor 0(%rsi),%xmm4,%xmm4
+ vpxor 16(%rsi),%xmm5,%xmm5
+ vpxor 32(%rsi),%xmm6,%xmm6
+ vpxor 48(%rsi),%xmm7,%xmm7
+ leaq 64(%rsi),%rsi
+
+ vmovdqu %xmm4,0(%rdi)
+ vmovdqu %xmm5,16(%rdi)
+ vmovdqu %xmm6,32(%rdi)
+ vmovdqu %xmm7,48(%rdi)
+ leaq 64(%rdi),%rdi
+
+ vmovdqa32 %ymm16,%ymm0
+ vmovdqa32 %ymm17,%ymm1
+ jnz .Loop_outer_avx512vl
+
+ jmp .Ldone_avx512vl
+
+.align 16
+.Ltail64_avx512vl:
+ vmovdqa %xmm0,0(%rsp)
+ vmovdqa %xmm1,16(%rsp)
+ vmovdqa %xmm2,32(%rsp)
+ vmovdqa %xmm3,48(%rsp)
+ addq $64,%rdx
+ jmp .Loop_tail_avx512vl
+
+.align 16
+.Ltail_avx512vl:
+ vmovdqa %xmm4,0(%rsp)
+ vmovdqa %xmm5,16(%rsp)
+ vmovdqa %xmm6,32(%rsp)
+ vmovdqa %xmm7,48(%rsp)
+ addq $64,%rdx
+
+.Loop_tail_avx512vl:
+ movzbl (%rsi,%r8,1),%eax
+ movzbl (%rsp,%r8,1),%ecx
+ leaq 1(%r8),%r8
+ xorl %ecx,%eax
+ movb %al,-1(%rdi,%r8,1)
+ decq %rdx
+ jnz .Loop_tail_avx512vl
+
+ vmovdqu32 %ymm16,0(%rsp)
+ vmovdqu32 %ymm16,32(%rsp)
+
+.Ldone_avx512vl:
+ vzeroall
+ leaq (%r9),%rsp
+.cfi_def_cfa_register %rsp
+.Lavx512vl_epilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size ChaCha20_avx512vl,.-ChaCha20_avx512vl
+.type ChaCha20_16x,@function
+.align 32
+ChaCha20_16x:
+.cfi_startproc
+.LChaCha20_16x:
+ movq %rsp,%r9
+.cfi_def_cfa_register %r9
+ subq $64+8,%rsp
+ andq $-64,%rsp
+ vzeroupper
+
+ leaq .Lsigma(%rip),%r10
+ vbroadcasti32x4 (%r10),%zmm3
+ vbroadcasti32x4 (%rcx),%zmm7
+ vbroadcasti32x4 16(%rcx),%zmm11
+ vbroadcasti32x4 (%r8),%zmm15
+
+ vpshufd $0x00,%zmm3,%zmm0
+ vpshufd $0x55,%zmm3,%zmm1
+ vpshufd $0xaa,%zmm3,%zmm2
+ vpshufd $0xff,%zmm3,%zmm3
+ vmovdqa64 %zmm0,%zmm16
+ vmovdqa64 %zmm1,%zmm17
+ vmovdqa64 %zmm2,%zmm18
+ vmovdqa64 %zmm3,%zmm19
+
+ vpshufd $0x00,%zmm7,%zmm4
+ vpshufd $0x55,%zmm7,%zmm5
+ vpshufd $0xaa,%zmm7,%zmm6
+ vpshufd $0xff,%zmm7,%zmm7
+ vmovdqa64 %zmm4,%zmm20
+ vmovdqa64 %zmm5,%zmm21
+ vmovdqa64 %zmm6,%zmm22
+ vmovdqa64 %zmm7,%zmm23
+
+ vpshufd $0x00,%zmm11,%zmm8
+ vpshufd $0x55,%zmm11,%zmm9
+ vpshufd $0xaa,%zmm11,%zmm10
+ vpshufd $0xff,%zmm11,%zmm11
+ vmovdqa64 %zmm8,%zmm24
+ vmovdqa64 %zmm9,%zmm25
+ vmovdqa64 %zmm10,%zmm26
+ vmovdqa64 %zmm11,%zmm27
+
+ vpshufd $0x00,%zmm15,%zmm12
+ vpshufd $0x55,%zmm15,%zmm13
+ vpshufd $0xaa,%zmm15,%zmm14
+ vpshufd $0xff,%zmm15,%zmm15
+ vpaddd .Lincz(%rip),%zmm12,%zmm12
+ vmovdqa64 %zmm12,%zmm28
+ vmovdqa64 %zmm13,%zmm29
+ vmovdqa64 %zmm14,%zmm30
+ vmovdqa64 %zmm15,%zmm31
+
+ movl $10,%eax
+ jmp .Loop16x
+
+.align 32
+.Loop_outer16x:
+ vpbroadcastd 0(%r10),%zmm0
+ vpbroadcastd 4(%r10),%zmm1
+ vpbroadcastd 8(%r10),%zmm2
+ vpbroadcastd 12(%r10),%zmm3
+ vpaddd .Lsixteen(%rip),%zmm28,%zmm28
+ vmovdqa64 %zmm20,%zmm4
+ vmovdqa64 %zmm21,%zmm5
+ vmovdqa64 %zmm22,%zmm6
+ vmovdqa64 %zmm23,%zmm7
+ vmovdqa64 %zmm24,%zmm8
+ vmovdqa64 %zmm25,%zmm9
+ vmovdqa64 %zmm26,%zmm10
+ vmovdqa64 %zmm27,%zmm11
+ vmovdqa64 %zmm28,%zmm12
+ vmovdqa64 %zmm29,%zmm13
+ vmovdqa64 %zmm30,%zmm14
+ vmovdqa64 %zmm31,%zmm15
+
+ vmovdqa64 %zmm0,%zmm16
+ vmovdqa64 %zmm1,%zmm17
+ vmovdqa64 %zmm2,%zmm18
+ vmovdqa64 %zmm3,%zmm19
+
+ movl $10,%eax
+ jmp .Loop16x
+
+.align 32
+.Loop16x:
+ vpaddd %zmm4,%zmm0,%zmm0
+ vpaddd %zmm5,%zmm1,%zmm1
+ vpaddd %zmm6,%zmm2,%zmm2
+ vpaddd %zmm7,%zmm3,%zmm3
+ vpxord %zmm0,%zmm12,%zmm12
+ vpxord %zmm1,%zmm13,%zmm13
+ vpxord %zmm2,%zmm14,%zmm14
+ vpxord %zmm3,%zmm15,%zmm15
+ vprold $16,%zmm12,%zmm12
+ vprold $16,%zmm13,%zmm13
+ vprold $16,%zmm14,%zmm14
+ vprold $16,%zmm15,%zmm15
+ vpaddd %zmm12,%zmm8,%zmm8
+ vpaddd %zmm13,%zmm9,%zmm9
+ vpaddd %zmm14,%zmm10,%zmm10
+ vpaddd %zmm15,%zmm11,%zmm11
+ vpxord %zmm8,%zmm4,%zmm4
+ vpxord %zmm9,%zmm5,%zmm5
+ vpxord %zmm10,%zmm6,%zmm6
+ vpxord %zmm11,%zmm7,%zmm7
+ vprold $12,%zmm4,%zmm4
+ vprold $12,%zmm5,%zmm5
+ vprold $12,%zmm6,%zmm6
+ vprold $12,%zmm7,%zmm7
+ vpaddd %zmm4,%zmm0,%zmm0
+ vpaddd %zmm5,%zmm1,%zmm1
+ vpaddd %zmm6,%zmm2,%zmm2
+ vpaddd %zmm7,%zmm3,%zmm3
+ vpxord %zmm0,%zmm12,%zmm12
+ vpxord %zmm1,%zmm13,%zmm13
+ vpxord %zmm2,%zmm14,%zmm14
+ vpxord %zmm3,%zmm15,%zmm15
+ vprold $8,%zmm12,%zmm12
+ vprold $8,%zmm13,%zmm13
+ vprold $8,%zmm14,%zmm14
+ vprold $8,%zmm15,%zmm15
+ vpaddd %zmm12,%zmm8,%zmm8
+ vpaddd %zmm13,%zmm9,%zmm9
+ vpaddd %zmm14,%zmm10,%zmm10
+ vpaddd %zmm15,%zmm11,%zmm11
+ vpxord %zmm8,%zmm4,%zmm4
+ vpxord %zmm9,%zmm5,%zmm5
+ vpxord %zmm10,%zmm6,%zmm6
+ vpxord %zmm11,%zmm7,%zmm7
+ vprold $7,%zmm4,%zmm4
+ vprold $7,%zmm5,%zmm5
+ vprold $7,%zmm6,%zmm6
+ vprold $7,%zmm7,%zmm7
+ vpaddd %zmm5,%zmm0,%zmm0
+ vpaddd %zmm6,%zmm1,%zmm1
+ vpaddd %zmm7,%zmm2,%zmm2
+ vpaddd %zmm4,%zmm3,%zmm3
+ vpxord %zmm0,%zmm15,%zmm15
+ vpxord %zmm1,%zmm12,%zmm12
+ vpxord %zmm2,%zmm13,%zmm13
+ vpxord %zmm3,%zmm14,%zmm14
+ vprold $16,%zmm15,%zmm15
+ vprold $16,%zmm12,%zmm12
+ vprold $16,%zmm13,%zmm13
+ vprold $16,%zmm14,%zmm14
+ vpaddd %zmm15,%zmm10,%zmm10
+ vpaddd %zmm12,%zmm11,%zmm11
+ vpaddd %zmm13,%zmm8,%zmm8
+ vpaddd %zmm14,%zmm9,%zmm9
+ vpxord %zmm10,%zmm5,%zmm5
+ vpxord %zmm11,%zmm6,%zmm6
+ vpxord %zmm8,%zmm7,%zmm7
+ vpxord %zmm9,%zmm4,%zmm4
+ vprold $12,%zmm5,%zmm5
+ vprold $12,%zmm6,%zmm6
+ vprold $12,%zmm7,%zmm7
+ vprold $12,%zmm4,%zmm4
+ vpaddd %zmm5,%zmm0,%zmm0
+ vpaddd %zmm6,%zmm1,%zmm1
+ vpaddd %zmm7,%zmm2,%zmm2
+ vpaddd %zmm4,%zmm3,%zmm3
+ vpxord %zmm0,%zmm15,%zmm15
+ vpxord %zmm1,%zmm12,%zmm12
+ vpxord %zmm2,%zmm13,%zmm13
+ vpxord %zmm3,%zmm14,%zmm14
+ vprold $8,%zmm15,%zmm15
+ vprold $8,%zmm12,%zmm12
+ vprold $8,%zmm13,%zmm13
+ vprold $8,%zmm14,%zmm14
+ vpaddd %zmm15,%zmm10,%zmm10
+ vpaddd %zmm12,%zmm11,%zmm11
+ vpaddd %zmm13,%zmm8,%zmm8
+ vpaddd %zmm14,%zmm9,%zmm9
+ vpxord %zmm10,%zmm5,%zmm5
+ vpxord %zmm11,%zmm6,%zmm6
+ vpxord %zmm8,%zmm7,%zmm7
+ vpxord %zmm9,%zmm4,%zmm4
+ vprold $7,%zmm5,%zmm5
+ vprold $7,%zmm6,%zmm6
+ vprold $7,%zmm7,%zmm7
+ vprold $7,%zmm4,%zmm4
+ decl %eax
+ jnz .Loop16x
+
+ vpaddd %zmm16,%zmm0,%zmm0
+ vpaddd %zmm17,%zmm1,%zmm1
+ vpaddd %zmm18,%zmm2,%zmm2
+ vpaddd %zmm19,%zmm3,%zmm3
+
+ vpunpckldq %zmm1,%zmm0,%zmm18
+ vpunpckldq %zmm3,%zmm2,%zmm19
+ vpunpckhdq %zmm1,%zmm0,%zmm0
+ vpunpckhdq %zmm3,%zmm2,%zmm2
+ vpunpcklqdq %zmm19,%zmm18,%zmm1
+ vpunpckhqdq %zmm19,%zmm18,%zmm18
+ vpunpcklqdq %zmm2,%zmm0,%zmm3
+ vpunpckhqdq %zmm2,%zmm0,%zmm0
+ vpaddd %zmm20,%zmm4,%zmm4
+ vpaddd %zmm21,%zmm5,%zmm5
+ vpaddd %zmm22,%zmm6,%zmm6
+ vpaddd %zmm23,%zmm7,%zmm7
+
+ vpunpckldq %zmm5,%zmm4,%zmm2
+ vpunpckldq %zmm7,%zmm6,%zmm19
+ vpunpckhdq %zmm5,%zmm4,%zmm4
+ vpunpckhdq %zmm7,%zmm6,%zmm6
+ vpunpcklqdq %zmm19,%zmm2,%zmm5
+ vpunpckhqdq %zmm19,%zmm2,%zmm2
+ vpunpcklqdq %zmm6,%zmm4,%zmm7
+ vpunpckhqdq %zmm6,%zmm4,%zmm4
+ vshufi32x4 $0x44,%zmm5,%zmm1,%zmm19
+ vshufi32x4 $0xee,%zmm5,%zmm1,%zmm5
+ vshufi32x4 $0x44,%zmm2,%zmm18,%zmm1
+ vshufi32x4 $0xee,%zmm2,%zmm18,%zmm2
+ vshufi32x4 $0x44,%zmm7,%zmm3,%zmm18
+ vshufi32x4 $0xee,%zmm7,%zmm3,%zmm7
+ vshufi32x4 $0x44,%zmm4,%zmm0,%zmm3
+ vshufi32x4 $0xee,%zmm4,%zmm0,%zmm4
+ vpaddd %zmm24,%zmm8,%zmm8
+ vpaddd %zmm25,%zmm9,%zmm9
+ vpaddd %zmm26,%zmm10,%zmm10
+ vpaddd %zmm27,%zmm11,%zmm11
+
+ vpunpckldq %zmm9,%zmm8,%zmm6
+ vpunpckldq %zmm11,%zmm10,%zmm0
+ vpunpckhdq %zmm9,%zmm8,%zmm8
+ vpunpckhdq %zmm11,%zmm10,%zmm10
+ vpunpcklqdq %zmm0,%zmm6,%zmm9
+ vpunpckhqdq %zmm0,%zmm6,%zmm6
+ vpunpcklqdq %zmm10,%zmm8,%zmm11
+ vpunpckhqdq %zmm10,%zmm8,%zmm8
+ vpaddd %zmm28,%zmm12,%zmm12
+ vpaddd %zmm29,%zmm13,%zmm13
+ vpaddd %zmm30,%zmm14,%zmm14
+ vpaddd %zmm31,%zmm15,%zmm15
+
+ vpunpckldq %zmm13,%zmm12,%zmm10
+ vpunpckldq %zmm15,%zmm14,%zmm0
+ vpunpckhdq %zmm13,%zmm12,%zmm12
+ vpunpckhdq %zmm15,%zmm14,%zmm14
+ vpunpcklqdq %zmm0,%zmm10,%zmm13
+ vpunpckhqdq %zmm0,%zmm10,%zmm10
+ vpunpcklqdq %zmm14,%zmm12,%zmm15
+ vpunpckhqdq %zmm14,%zmm12,%zmm12
+ vshufi32x4 $0x44,%zmm13,%zmm9,%zmm0
+ vshufi32x4 $0xee,%zmm13,%zmm9,%zmm13
+ vshufi32x4 $0x44,%zmm10,%zmm6,%zmm9
+ vshufi32x4 $0xee,%zmm10,%zmm6,%zmm10
+ vshufi32x4 $0x44,%zmm15,%zmm11,%zmm6
+ vshufi32x4 $0xee,%zmm15,%zmm11,%zmm15
+ vshufi32x4 $0x44,%zmm12,%zmm8,%zmm11
+ vshufi32x4 $0xee,%zmm12,%zmm8,%zmm12
+ vshufi32x4 $0x88,%zmm0,%zmm19,%zmm16
+ vshufi32x4 $0xdd,%zmm0,%zmm19,%zmm19
+ vshufi32x4 $0x88,%zmm13,%zmm5,%zmm0
+ vshufi32x4 $0xdd,%zmm13,%zmm5,%zmm13
+ vshufi32x4 $0x88,%zmm9,%zmm1,%zmm17
+ vshufi32x4 $0xdd,%zmm9,%zmm1,%zmm1
+ vshufi32x4 $0x88,%zmm10,%zmm2,%zmm9
+ vshufi32x4 $0xdd,%zmm10,%zmm2,%zmm10
+ vshufi32x4 $0x88,%zmm6,%zmm18,%zmm14
+ vshufi32x4 $0xdd,%zmm6,%zmm18,%zmm18
+ vshufi32x4 $0x88,%zmm15,%zmm7,%zmm6
+ vshufi32x4 $0xdd,%zmm15,%zmm7,%zmm15
+ vshufi32x4 $0x88,%zmm11,%zmm3,%zmm8
+ vshufi32x4 $0xdd,%zmm11,%zmm3,%zmm3
+ vshufi32x4 $0x88,%zmm12,%zmm4,%zmm11
+ vshufi32x4 $0xdd,%zmm12,%zmm4,%zmm12
+ cmpq $1024,%rdx
+ jb .Ltail16x
+
+ vpxord 0(%rsi),%zmm16,%zmm16
+ vpxord 64(%rsi),%zmm17,%zmm17
+ vpxord 128(%rsi),%zmm14,%zmm14
+ vpxord 192(%rsi),%zmm8,%zmm8
+ vmovdqu32 %zmm16,0(%rdi)
+ vmovdqu32 %zmm17,64(%rdi)
+ vmovdqu32 %zmm14,128(%rdi)
+ vmovdqu32 %zmm8,192(%rdi)
+
+ vpxord 256(%rsi),%zmm19,%zmm19
+ vpxord 320(%rsi),%zmm1,%zmm1
+ vpxord 384(%rsi),%zmm18,%zmm18
+ vpxord 448(%rsi),%zmm3,%zmm3
+ vmovdqu32 %zmm19,256(%rdi)
+ vmovdqu32 %zmm1,320(%rdi)
+ vmovdqu32 %zmm18,384(%rdi)
+ vmovdqu32 %zmm3,448(%rdi)
+
+ vpxord 512(%rsi),%zmm0,%zmm0
+ vpxord 576(%rsi),%zmm9,%zmm9
+ vpxord 640(%rsi),%zmm6,%zmm6
+ vpxord 704(%rsi),%zmm11,%zmm11
+ vmovdqu32 %zmm0,512(%rdi)
+ vmovdqu32 %zmm9,576(%rdi)
+ vmovdqu32 %zmm6,640(%rdi)
+ vmovdqu32 %zmm11,704(%rdi)
+
+ vpxord 768(%rsi),%zmm13,%zmm13
+ vpxord 832(%rsi),%zmm10,%zmm10
+ vpxord 896(%rsi),%zmm15,%zmm15
+ vpxord 960(%rsi),%zmm12,%zmm12
+ leaq 1024(%rsi),%rsi
+ vmovdqu32 %zmm13,768(%rdi)
+ vmovdqu32 %zmm10,832(%rdi)
+ vmovdqu32 %zmm15,896(%rdi)
+ vmovdqu32 %zmm12,960(%rdi)
+ leaq 1024(%rdi),%rdi
+
+ subq $1024,%rdx
+ jnz .Loop_outer16x
+
+ jmp .Ldone16x
+
+.align 32
+.Ltail16x:
+ xorq %r10,%r10
+ subq %rsi,%rdi
+ cmpq $64,%rdx
+ jb .Less_than_64_16x
+ vpxord (%rsi),%zmm16,%zmm16
+ vmovdqu32 %zmm16,(%rdi,%rsi,1)
+ je .Ldone16x
+ vmovdqa32 %zmm17,%zmm16
+ leaq 64(%rsi),%rsi
+
+ cmpq $128,%rdx
+ jb .Less_than_64_16x
+ vpxord (%rsi),%zmm17,%zmm17
+ vmovdqu32 %zmm17,(%rdi,%rsi,1)
+ je .Ldone16x
+ vmovdqa32 %zmm14,%zmm16
+ leaq 64(%rsi),%rsi
+
+ cmpq $192,%rdx
+ jb .Less_than_64_16x
+ vpxord (%rsi),%zmm14,%zmm14
+ vmovdqu32 %zmm14,(%rdi,%rsi,1)
+ je .Ldone16x
+ vmovdqa32 %zmm8,%zmm16
+ leaq 64(%rsi),%rsi
+
+ cmpq $256,%rdx
+ jb .Less_than_64_16x
+ vpxord (%rsi),%zmm8,%zmm8
+ vmovdqu32 %zmm8,(%rdi,%rsi,1)
+ je .Ldone16x
+ vmovdqa32 %zmm19,%zmm16
+ leaq 64(%rsi),%rsi
+
+ cmpq $320,%rdx
+ jb .Less_than_64_16x
+ vpxord (%rsi),%zmm19,%zmm19
+ vmovdqu32 %zmm19,(%rdi,%rsi,1)
+ je .Ldone16x
+ vmovdqa32 %zmm1,%zmm16
+ leaq 64(%rsi),%rsi
+
+ cmpq $384,%rdx
+ jb .Less_than_64_16x
+ vpxord (%rsi),%zmm1,%zmm1
+ vmovdqu32 %zmm1,(%rdi,%rsi,1)
+ je .Ldone16x
+ vmovdqa32 %zmm18,%zmm16
+ leaq 64(%rsi),%rsi
+
+ cmpq $448,%rdx
+ jb .Less_than_64_16x
+ vpxord (%rsi),%zmm18,%zmm18
+ vmovdqu32 %zmm18,(%rdi,%rsi,1)
+ je .Ldone16x
+ vmovdqa32 %zmm3,%zmm16
+ leaq 64(%rsi),%rsi
+
+ cmpq $512,%rdx
+ jb .Less_than_64_16x
+ vpxord (%rsi),%zmm3,%zmm3
+ vmovdqu32 %zmm3,(%rdi,%rsi,1)
+ je .Ldone16x
+ vmovdqa32 %zmm0,%zmm16
+ leaq 64(%rsi),%rsi
+
+ cmpq $576,%rdx
+ jb .Less_than_64_16x
+ vpxord (%rsi),%zmm0,%zmm0
+ vmovdqu32 %zmm0,(%rdi,%rsi,1)
+ je .Ldone16x
+ vmovdqa32 %zmm9,%zmm16
+ leaq 64(%rsi),%rsi
+
+ cmpq $640,%rdx
+ jb .Less_than_64_16x
+ vpxord (%rsi),%zmm9,%zmm9
+ vmovdqu32 %zmm9,(%rdi,%rsi,1)
+ je .Ldone16x
+ vmovdqa32 %zmm6,%zmm16
+ leaq 64(%rsi),%rsi
+
+ cmpq $704,%rdx
+ jb .Less_than_64_16x
+ vpxord (%rsi),%zmm6,%zmm6
+ vmovdqu32 %zmm6,(%rdi,%rsi,1)
+ je .Ldone16x
+ vmovdqa32 %zmm11,%zmm16
+ leaq 64(%rsi),%rsi
+
+ cmpq $768,%rdx
+ jb .Less_than_64_16x
+ vpxord (%rsi),%zmm11,%zmm11
+ vmovdqu32 %zmm11,(%rdi,%rsi,1)
+ je .Ldone16x
+ vmovdqa32 %zmm13,%zmm16
+ leaq 64(%rsi),%rsi
+
+ cmpq $832,%rdx
+ jb .Less_than_64_16x
+ vpxord (%rsi),%zmm13,%zmm13
+ vmovdqu32 %zmm13,(%rdi,%rsi,1)
+ je .Ldone16x
+ vmovdqa32 %zmm10,%zmm16
+ leaq 64(%rsi),%rsi
+
+ cmpq $896,%rdx
+ jb .Less_than_64_16x
+ vpxord (%rsi),%zmm10,%zmm10
+ vmovdqu32 %zmm10,(%rdi,%rsi,1)
+ je .Ldone16x
+ vmovdqa32 %zmm15,%zmm16
+ leaq 64(%rsi),%rsi
+
+ cmpq $960,%rdx
+ jb .Less_than_64_16x
+ vpxord (%rsi),%zmm15,%zmm15
+ vmovdqu32 %zmm15,(%rdi,%rsi,1)
+ je .Ldone16x
+ vmovdqa32 %zmm12,%zmm16
+ leaq 64(%rsi),%rsi
+
+.Less_than_64_16x:
+ vmovdqa32 %zmm16,0(%rsp)
+ leaq (%rdi,%rsi,1),%rdi
+ andq $63,%rdx
+
+.Loop_tail16x:
+ movzbl (%rsi,%r10,1),%eax
+ movzbl (%rsp,%r10,1),%ecx
+ leaq 1(%r10),%r10
+ xorl %ecx,%eax
+ movb %al,-1(%rdi,%r10,1)
+ decq %rdx
+ jnz .Loop_tail16x
+
+ vpxord %zmm16,%zmm16,%zmm16
+ vmovdqa32 %zmm16,0(%rsp)
+
+.Ldone16x:
+ vzeroall
+ leaq (%r9),%rsp
+.cfi_def_cfa_register %rsp
+.L16x_epilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size ChaCha20_16x,.-ChaCha20_16x
+.type ChaCha20_8xvl,@function
+.align 32
+ChaCha20_8xvl:
+.cfi_startproc
+.LChaCha20_8xvl:
+ movq %rsp,%r9
+.cfi_def_cfa_register %r9
+ subq $64+8,%rsp
+ andq $-64,%rsp
+ vzeroupper
+
+ leaq .Lsigma(%rip),%r10
+ vbroadcasti128 (%r10),%ymm3
+ vbroadcasti128 (%rcx),%ymm7
+ vbroadcasti128 16(%rcx),%ymm11
+ vbroadcasti128 (%r8),%ymm15
+
+ vpshufd $0x00,%ymm3,%ymm0
+ vpshufd $0x55,%ymm3,%ymm1
+ vpshufd $0xaa,%ymm3,%ymm2
+ vpshufd $0xff,%ymm3,%ymm3
+ vmovdqa64 %ymm0,%ymm16
+ vmovdqa64 %ymm1,%ymm17
+ vmovdqa64 %ymm2,%ymm18
+ vmovdqa64 %ymm3,%ymm19
+
+ vpshufd $0x00,%ymm7,%ymm4
+ vpshufd $0x55,%ymm7,%ymm5
+ vpshufd $0xaa,%ymm7,%ymm6
+ vpshufd $0xff,%ymm7,%ymm7
+ vmovdqa64 %ymm4,%ymm20
+ vmovdqa64 %ymm5,%ymm21
+ vmovdqa64 %ymm6,%ymm22
+ vmovdqa64 %ymm7,%ymm23
+
+ vpshufd $0x00,%ymm11,%ymm8
+ vpshufd $0x55,%ymm11,%ymm9
+ vpshufd $0xaa,%ymm11,%ymm10
+ vpshufd $0xff,%ymm11,%ymm11
+ vmovdqa64 %ymm8,%ymm24
+ vmovdqa64 %ymm9,%ymm25
+ vmovdqa64 %ymm10,%ymm26
+ vmovdqa64 %ymm11,%ymm27
+
+ vpshufd $0x00,%ymm15,%ymm12
+ vpshufd $0x55,%ymm15,%ymm13
+ vpshufd $0xaa,%ymm15,%ymm14
+ vpshufd $0xff,%ymm15,%ymm15
+ vpaddd .Lincy(%rip),%ymm12,%ymm12
+ vmovdqa64 %ymm12,%ymm28
+ vmovdqa64 %ymm13,%ymm29
+ vmovdqa64 %ymm14,%ymm30
+ vmovdqa64 %ymm15,%ymm31
+
+ movl $10,%eax
+ jmp .Loop8xvl
+
+.align 32
+.Loop_outer8xvl:
+
+
+ vpbroadcastd 8(%r10),%ymm2
+ vpbroadcastd 12(%r10),%ymm3
+ vpaddd .Leight(%rip),%ymm28,%ymm28
+ vmovdqa64 %ymm20,%ymm4
+ vmovdqa64 %ymm21,%ymm5
+ vmovdqa64 %ymm22,%ymm6
+ vmovdqa64 %ymm23,%ymm7
+ vmovdqa64 %ymm24,%ymm8
+ vmovdqa64 %ymm25,%ymm9
+ vmovdqa64 %ymm26,%ymm10
+ vmovdqa64 %ymm27,%ymm11
+ vmovdqa64 %ymm28,%ymm12
+ vmovdqa64 %ymm29,%ymm13
+ vmovdqa64 %ymm30,%ymm14
+ vmovdqa64 %ymm31,%ymm15
+
+ vmovdqa64 %ymm0,%ymm16
+ vmovdqa64 %ymm1,%ymm17
+ vmovdqa64 %ymm2,%ymm18
+ vmovdqa64 %ymm3,%ymm19
+
+ movl $10,%eax
+ jmp .Loop8xvl
+
+.align 32
+.Loop8xvl:
+ vpaddd %ymm4,%ymm0,%ymm0
+ vpaddd %ymm5,%ymm1,%ymm1
+ vpaddd %ymm6,%ymm2,%ymm2
+ vpaddd %ymm7,%ymm3,%ymm3
+ vpxor %ymm0,%ymm12,%ymm12
+ vpxor %ymm1,%ymm13,%ymm13
+ vpxor %ymm2,%ymm14,%ymm14
+ vpxor %ymm3,%ymm15,%ymm15
+ vprold $16,%ymm12,%ymm12
+ vprold $16,%ymm13,%ymm13
+ vprold $16,%ymm14,%ymm14
+ vprold $16,%ymm15,%ymm15
+ vpaddd %ymm12,%ymm8,%ymm8
+ vpaddd %ymm13,%ymm9,%ymm9
+ vpaddd %ymm14,%ymm10,%ymm10
+ vpaddd %ymm15,%ymm11,%ymm11
+ vpxor %ymm8,%ymm4,%ymm4
+ vpxor %ymm9,%ymm5,%ymm5
+ vpxor %ymm10,%ymm6,%ymm6
+ vpxor %ymm11,%ymm7,%ymm7
+ vprold $12,%ymm4,%ymm4
+ vprold $12,%ymm5,%ymm5
+ vprold $12,%ymm6,%ymm6
+ vprold $12,%ymm7,%ymm7
+ vpaddd %ymm4,%ymm0,%ymm0
+ vpaddd %ymm5,%ymm1,%ymm1
+ vpaddd %ymm6,%ymm2,%ymm2
+ vpaddd %ymm7,%ymm3,%ymm3
+ vpxor %ymm0,%ymm12,%ymm12
+ vpxor %ymm1,%ymm13,%ymm13
+ vpxor %ymm2,%ymm14,%ymm14
+ vpxor %ymm3,%ymm15,%ymm15
+ vprold $8,%ymm12,%ymm12
+ vprold $8,%ymm13,%ymm13
+ vprold $8,%ymm14,%ymm14
+ vprold $8,%ymm15,%ymm15
+ vpaddd %ymm12,%ymm8,%ymm8
+ vpaddd %ymm13,%ymm9,%ymm9
+ vpaddd %ymm14,%ymm10,%ymm10
+ vpaddd %ymm15,%ymm11,%ymm11
+ vpxor %ymm8,%ymm4,%ymm4
+ vpxor %ymm9,%ymm5,%ymm5
+ vpxor %ymm10,%ymm6,%ymm6
+ vpxor %ymm11,%ymm7,%ymm7
+ vprold $7,%ymm4,%ymm4
+ vprold $7,%ymm5,%ymm5
+ vprold $7,%ymm6,%ymm6
+ vprold $7,%ymm7,%ymm7
+ vpaddd %ymm5,%ymm0,%ymm0
+ vpaddd %ymm6,%ymm1,%ymm1
+ vpaddd %ymm7,%ymm2,%ymm2
+ vpaddd %ymm4,%ymm3,%ymm3
+ vpxor %ymm0,%ymm15,%ymm15
+ vpxor %ymm1,%ymm12,%ymm12
+ vpxor %ymm2,%ymm13,%ymm13
+ vpxor %ymm3,%ymm14,%ymm14
+ vprold $16,%ymm15,%ymm15
+ vprold $16,%ymm12,%ymm12
+ vprold $16,%ymm13,%ymm13
+ vprold $16,%ymm14,%ymm14
+ vpaddd %ymm15,%ymm10,%ymm10
+ vpaddd %ymm12,%ymm11,%ymm11
+ vpaddd %ymm13,%ymm8,%ymm8
+ vpaddd %ymm14,%ymm9,%ymm9
+ vpxor %ymm10,%ymm5,%ymm5
+ vpxor %ymm11,%ymm6,%ymm6
+ vpxor %ymm8,%ymm7,%ymm7
+ vpxor %ymm9,%ymm4,%ymm4
+ vprold $12,%ymm5,%ymm5
+ vprold $12,%ymm6,%ymm6
+ vprold $12,%ymm7,%ymm7
+ vprold $12,%ymm4,%ymm4
+ vpaddd %ymm5,%ymm0,%ymm0
+ vpaddd %ymm6,%ymm1,%ymm1
+ vpaddd %ymm7,%ymm2,%ymm2
+ vpaddd %ymm4,%ymm3,%ymm3
+ vpxor %ymm0,%ymm15,%ymm15
+ vpxor %ymm1,%ymm12,%ymm12
+ vpxor %ymm2,%ymm13,%ymm13
+ vpxor %ymm3,%ymm14,%ymm14
+ vprold $8,%ymm15,%ymm15
+ vprold $8,%ymm12,%ymm12
+ vprold $8,%ymm13,%ymm13
+ vprold $8,%ymm14,%ymm14
+ vpaddd %ymm15,%ymm10,%ymm10
+ vpaddd %ymm12,%ymm11,%ymm11
+ vpaddd %ymm13,%ymm8,%ymm8
+ vpaddd %ymm14,%ymm9,%ymm9
+ vpxor %ymm10,%ymm5,%ymm5
+ vpxor %ymm11,%ymm6,%ymm6
+ vpxor %ymm8,%ymm7,%ymm7
+ vpxor %ymm9,%ymm4,%ymm4
+ vprold $7,%ymm5,%ymm5
+ vprold $7,%ymm6,%ymm6
+ vprold $7,%ymm7,%ymm7
+ vprold $7,%ymm4,%ymm4
+ decl %eax
+ jnz .Loop8xvl
+
+ vpaddd %ymm16,%ymm0,%ymm0
+ vpaddd %ymm17,%ymm1,%ymm1
+ vpaddd %ymm18,%ymm2,%ymm2
+ vpaddd %ymm19,%ymm3,%ymm3
+
+ vpunpckldq %ymm1,%ymm0,%ymm18
+ vpunpckldq %ymm3,%ymm2,%ymm19
+ vpunpckhdq %ymm1,%ymm0,%ymm0
+ vpunpckhdq %ymm3,%ymm2,%ymm2
+ vpunpcklqdq %ymm19,%ymm18,%ymm1
+ vpunpckhqdq %ymm19,%ymm18,%ymm18
+ vpunpcklqdq %ymm2,%ymm0,%ymm3
+ vpunpckhqdq %ymm2,%ymm0,%ymm0
+ vpaddd %ymm20,%ymm4,%ymm4
+ vpaddd %ymm21,%ymm5,%ymm5
+ vpaddd %ymm22,%ymm6,%ymm6
+ vpaddd %ymm23,%ymm7,%ymm7
+
+ vpunpckldq %ymm5,%ymm4,%ymm2
+ vpunpckldq %ymm7,%ymm6,%ymm19
+ vpunpckhdq %ymm5,%ymm4,%ymm4
+ vpunpckhdq %ymm7,%ymm6,%ymm6
+ vpunpcklqdq %ymm19,%ymm2,%ymm5
+ vpunpckhqdq %ymm19,%ymm2,%ymm2
+ vpunpcklqdq %ymm6,%ymm4,%ymm7
+ vpunpckhqdq %ymm6,%ymm4,%ymm4
+ vshufi32x4 $0,%ymm5,%ymm1,%ymm19
+ vshufi32x4 $3,%ymm5,%ymm1,%ymm5
+ vshufi32x4 $0,%ymm2,%ymm18,%ymm1
+ vshufi32x4 $3,%ymm2,%ymm18,%ymm2
+ vshufi32x4 $0,%ymm7,%ymm3,%ymm18
+ vshufi32x4 $3,%ymm7,%ymm3,%ymm7
+ vshufi32x4 $0,%ymm4,%ymm0,%ymm3
+ vshufi32x4 $3,%ymm4,%ymm0,%ymm4
+ vpaddd %ymm24,%ymm8,%ymm8
+ vpaddd %ymm25,%ymm9,%ymm9
+ vpaddd %ymm26,%ymm10,%ymm10
+ vpaddd %ymm27,%ymm11,%ymm11
+
+ vpunpckldq %ymm9,%ymm8,%ymm6
+ vpunpckldq %ymm11,%ymm10,%ymm0
+ vpunpckhdq %ymm9,%ymm8,%ymm8
+ vpunpckhdq %ymm11,%ymm10,%ymm10
+ vpunpcklqdq %ymm0,%ymm6,%ymm9
+ vpunpckhqdq %ymm0,%ymm6,%ymm6
+ vpunpcklqdq %ymm10,%ymm8,%ymm11
+ vpunpckhqdq %ymm10,%ymm8,%ymm8
+ vpaddd %ymm28,%ymm12,%ymm12
+ vpaddd %ymm29,%ymm13,%ymm13
+ vpaddd %ymm30,%ymm14,%ymm14
+ vpaddd %ymm31,%ymm15,%ymm15
+
+ vpunpckldq %ymm13,%ymm12,%ymm10
+ vpunpckldq %ymm15,%ymm14,%ymm0
+ vpunpckhdq %ymm13,%ymm12,%ymm12
+ vpunpckhdq %ymm15,%ymm14,%ymm14
+ vpunpcklqdq %ymm0,%ymm10,%ymm13
+ vpunpckhqdq %ymm0,%ymm10,%ymm10
+ vpunpcklqdq %ymm14,%ymm12,%ymm15
+ vpunpckhqdq %ymm14,%ymm12,%ymm12
+ vperm2i128 $0x20,%ymm13,%ymm9,%ymm0
+ vperm2i128 $0x31,%ymm13,%ymm9,%ymm13
+ vperm2i128 $0x20,%ymm10,%ymm6,%ymm9
+ vperm2i128 $0x31,%ymm10,%ymm6,%ymm10
+ vperm2i128 $0x20,%ymm15,%ymm11,%ymm6
+ vperm2i128 $0x31,%ymm15,%ymm11,%ymm15
+ vperm2i128 $0x20,%ymm12,%ymm8,%ymm11
+ vperm2i128 $0x31,%ymm12,%ymm8,%ymm12
+ cmpq $512,%rdx
+ jb .Ltail8xvl
+
+ movl $0x80,%eax
+ vpxord 0(%rsi),%ymm19,%ymm19
+ vpxor 32(%rsi),%ymm0,%ymm0
+ vpxor 64(%rsi),%ymm5,%ymm5
+ vpxor 96(%rsi),%ymm13,%ymm13
+ leaq (%rsi,%rax,1),%rsi
+ vmovdqu32 %ymm19,0(%rdi)
+ vmovdqu %ymm0,32(%rdi)
+ vmovdqu %ymm5,64(%rdi)
+ vmovdqu %ymm13,96(%rdi)
+ leaq (%rdi,%rax,1),%rdi
+
+ vpxor 0(%rsi),%ymm1,%ymm1
+ vpxor 32(%rsi),%ymm9,%ymm9
+ vpxor 64(%rsi),%ymm2,%ymm2
+ vpxor 96(%rsi),%ymm10,%ymm10
+ leaq (%rsi,%rax,1),%rsi
+ vmovdqu %ymm1,0(%rdi)
+ vmovdqu %ymm9,32(%rdi)
+ vmovdqu %ymm2,64(%rdi)
+ vmovdqu %ymm10,96(%rdi)
+ leaq (%rdi,%rax,1),%rdi
+
+ vpxord 0(%rsi),%ymm18,%ymm18
+ vpxor 32(%rsi),%ymm6,%ymm6
+ vpxor 64(%rsi),%ymm7,%ymm7
+ vpxor 96(%rsi),%ymm15,%ymm15
+ leaq (%rsi,%rax,1),%rsi
+ vmovdqu32 %ymm18,0(%rdi)
+ vmovdqu %ymm6,32(%rdi)
+ vmovdqu %ymm7,64(%rdi)
+ vmovdqu %ymm15,96(%rdi)
+ leaq (%rdi,%rax,1),%rdi
+
+ vpxor 0(%rsi),%ymm3,%ymm3
+ vpxor 32(%rsi),%ymm11,%ymm11
+ vpxor 64(%rsi),%ymm4,%ymm4
+ vpxor 96(%rsi),%ymm12,%ymm12
+ leaq (%rsi,%rax,1),%rsi
+ vmovdqu %ymm3,0(%rdi)
+ vmovdqu %ymm11,32(%rdi)
+ vmovdqu %ymm4,64(%rdi)
+ vmovdqu %ymm12,96(%rdi)
+ leaq (%rdi,%rax,1),%rdi
+
+ vpbroadcastd 0(%r10),%ymm0
+ vpbroadcastd 4(%r10),%ymm1
+
+ subq $512,%rdx
+ jnz .Loop_outer8xvl
+
+ jmp .Ldone8xvl
+
+.align 32
+.Ltail8xvl:
+ vmovdqa64 %ymm19,%ymm8
+ xorq %r10,%r10
+ subq %rsi,%rdi
+ cmpq $64,%rdx
+ jb .Less_than_64_8xvl
+ vpxor 0(%rsi),%ymm8,%ymm8
+ vpxor 32(%rsi),%ymm0,%ymm0
+ vmovdqu %ymm8,0(%rdi,%rsi,1)
+ vmovdqu %ymm0,32(%rdi,%rsi,1)
+ je .Ldone8xvl
+ vmovdqa %ymm5,%ymm8
+ vmovdqa %ymm13,%ymm0
+ leaq 64(%rsi),%rsi
+
+ cmpq $128,%rdx
+ jb .Less_than_64_8xvl
+ vpxor 0(%rsi),%ymm5,%ymm5
+ vpxor 32(%rsi),%ymm13,%ymm13
+ vmovdqu %ymm5,0(%rdi,%rsi,1)
+ vmovdqu %ymm13,32(%rdi,%rsi,1)
+ je .Ldone8xvl
+ vmovdqa %ymm1,%ymm8
+ vmovdqa %ymm9,%ymm0
+ leaq 64(%rsi),%rsi
+
+ cmpq $192,%rdx
+ jb .Less_than_64_8xvl
+ vpxor 0(%rsi),%ymm1,%ymm1
+ vpxor 32(%rsi),%ymm9,%ymm9
+ vmovdqu %ymm1,0(%rdi,%rsi,1)
+ vmovdqu %ymm9,32(%rdi,%rsi,1)
+ je .Ldone8xvl
+ vmovdqa %ymm2,%ymm8
+ vmovdqa %ymm10,%ymm0
+ leaq 64(%rsi),%rsi
+
+ cmpq $256,%rdx
+ jb .Less_than_64_8xvl
+ vpxor 0(%rsi),%ymm2,%ymm2
+ vpxor 32(%rsi),%ymm10,%ymm10
+ vmovdqu %ymm2,0(%rdi,%rsi,1)
+ vmovdqu %ymm10,32(%rdi,%rsi,1)
+ je .Ldone8xvl
+ vmovdqa32 %ymm18,%ymm8
+ vmovdqa %ymm6,%ymm0
+ leaq 64(%rsi),%rsi
+
+ cmpq $320,%rdx
+ jb .Less_than_64_8xvl
+ vpxord 0(%rsi),%ymm18,%ymm18
+ vpxor 32(%rsi),%ymm6,%ymm6
+ vmovdqu32 %ymm18,0(%rdi,%rsi,1)
+ vmovdqu %ymm6,32(%rdi,%rsi,1)
+ je .Ldone8xvl
+ vmovdqa %ymm7,%ymm8
+ vmovdqa %ymm15,%ymm0
+ leaq 64(%rsi),%rsi
+
+ cmpq $384,%rdx
+ jb .Less_than_64_8xvl
+ vpxor 0(%rsi),%ymm7,%ymm7
+ vpxor 32(%rsi),%ymm15,%ymm15
+ vmovdqu %ymm7,0(%rdi,%rsi,1)
+ vmovdqu %ymm15,32(%rdi,%rsi,1)
+ je .Ldone8xvl
+ vmovdqa %ymm3,%ymm8
+ vmovdqa %ymm11,%ymm0
+ leaq 64(%rsi),%rsi
+
+ cmpq $448,%rdx
+ jb .Less_than_64_8xvl
+ vpxor 0(%rsi),%ymm3,%ymm3
+ vpxor 32(%rsi),%ymm11,%ymm11
+ vmovdqu %ymm3,0(%rdi,%rsi,1)
+ vmovdqu %ymm11,32(%rdi,%rsi,1)
+ je .Ldone8xvl
+ vmovdqa %ymm4,%ymm8
+ vmovdqa %ymm12,%ymm0
+ leaq 64(%rsi),%rsi
+
+.Less_than_64_8xvl:
+ vmovdqa %ymm8,0(%rsp)
+ vmovdqa %ymm0,32(%rsp)
+ leaq (%rdi,%rsi,1),%rdi
+ andq $63,%rdx
+
+.Loop_tail8xvl:
+ movzbl (%rsi,%r10,1),%eax
+ movzbl (%rsp,%r10,1),%ecx
+ leaq 1(%r10),%r10
+ xorl %ecx,%eax
+ movb %al,-1(%rdi,%r10,1)
+ decq %rdx
+ jnz .Loop_tail8xvl
+
+ vpxor %ymm8,%ymm8,%ymm8
+ vmovdqa %ymm8,0(%rsp)
+ vmovdqa %ymm8,32(%rsp)
+
+.Ldone8xvl:
+ vzeroall
+ leaq (%r9),%rsp
+.cfi_def_cfa_register %rsp
+.L8xvl_epilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size ChaCha20_8xvl,.-ChaCha20_8xvl
diff --git a/contrib/libs/openssl/asm/linux/crypto/poly1305/poly1305-x86_64.s b/contrib/libs/openssl/asm/linux/crypto/poly1305/poly1305-x86_64.s
index fc8673f244..9bb9be4632 100644
--- a/contrib/libs/openssl/asm/linux/crypto/poly1305/poly1305-x86_64.s
+++ b/contrib/libs/openssl/asm/linux/crypto/poly1305/poly1305-x86_64.s
@@ -32,11 +32,11 @@ poly1305_init:
leaq poly1305_blocks_avx2(%rip),%rax
btq $37,%r9
cmovcq %rax,%r10
- movq $2149646336,%rax
- shrq $32,%r9
- andq %rax,%r9
- cmpq %rax,%r9
- je .Linit_base2_44
+ movq $2149646336,%rax
+ shrq $32,%r9
+ andq %rax,%r9
+ cmpq %rax,%r9
+ je .Linit_base2_44
movq $0x0ffffffc0fffffff,%rax
movq $0x0ffffffc0ffffffc,%rcx
andq 0(%rsi),%rax
@@ -1575,12 +1575,12 @@ poly1305_blocks_avx2:
vmovd 16(%rdi),%xmm4
.Ldo_avx2:
- cmpq $512,%rdx
- jb .Lskip_avx512
- andl %r11d,%r10d
- testl $65536,%r10d
- jnz .Lblocks_avx512
-.Lskip_avx512:
+ cmpq $512,%rdx
+ jb .Lskip_avx512
+ andl %r11d,%r10d
+ testl $65536,%r10d
+ jnz .Lblocks_avx512
+.Lskip_avx512:
leaq -8(%rsp),%r11
.cfi_def_cfa %r11,16
subq $0x128,%rsp
@@ -1943,1511 +1943,1511 @@ poly1305_blocks_avx2:
.byte 0xf3,0xc3
.cfi_endproc
.size poly1305_blocks_avx2,.-poly1305_blocks_avx2
-.type poly1305_blocks_avx512,@function
-.align 32
-poly1305_blocks_avx512:
-.cfi_startproc
-.Lblocks_avx512:
- movl $15,%eax
- kmovw %eax,%k2
- leaq -8(%rsp),%r11
-.cfi_def_cfa %r11,16
- subq $0x128,%rsp
- leaq .Lconst(%rip),%rcx
- leaq 48+64(%rdi),%rdi
- vmovdqa 96(%rcx),%ymm9
-
-
- vmovdqu -64(%rdi),%xmm11
- andq $-512,%rsp
- vmovdqu -48(%rdi),%xmm12
- movq $0x20,%rax
- vmovdqu -32(%rdi),%xmm7
- vmovdqu -16(%rdi),%xmm13
- vmovdqu 0(%rdi),%xmm8
- vmovdqu 16(%rdi),%xmm14
- vmovdqu 32(%rdi),%xmm10
- vmovdqu 48(%rdi),%xmm15
- vmovdqu 64(%rdi),%xmm6
- vpermd %zmm11,%zmm9,%zmm16
- vpbroadcastq 64(%rcx),%zmm5
- vpermd %zmm12,%zmm9,%zmm17
- vpermd %zmm7,%zmm9,%zmm21
- vpermd %zmm13,%zmm9,%zmm18
- vmovdqa64 %zmm16,0(%rsp){%k2}
- vpsrlq $32,%zmm16,%zmm7
- vpermd %zmm8,%zmm9,%zmm22
- vmovdqu64 %zmm17,0(%rsp,%rax,1){%k2}
- vpsrlq $32,%zmm17,%zmm8
- vpermd %zmm14,%zmm9,%zmm19
- vmovdqa64 %zmm21,64(%rsp){%k2}
- vpermd %zmm10,%zmm9,%zmm23
- vpermd %zmm15,%zmm9,%zmm20
- vmovdqu64 %zmm18,64(%rsp,%rax,1){%k2}
- vpermd %zmm6,%zmm9,%zmm24
- vmovdqa64 %zmm22,128(%rsp){%k2}
- vmovdqu64 %zmm19,128(%rsp,%rax,1){%k2}
- vmovdqa64 %zmm23,192(%rsp){%k2}
- vmovdqu64 %zmm20,192(%rsp,%rax,1){%k2}
- vmovdqa64 %zmm24,256(%rsp){%k2}
-
-
-
-
-
-
-
-
-
-
- vpmuludq %zmm7,%zmm16,%zmm11
- vpmuludq %zmm7,%zmm17,%zmm12
- vpmuludq %zmm7,%zmm18,%zmm13
- vpmuludq %zmm7,%zmm19,%zmm14
- vpmuludq %zmm7,%zmm20,%zmm15
- vpsrlq $32,%zmm18,%zmm9
-
- vpmuludq %zmm8,%zmm24,%zmm25
- vpmuludq %zmm8,%zmm16,%zmm26
- vpmuludq %zmm8,%zmm17,%zmm27
- vpmuludq %zmm8,%zmm18,%zmm28
- vpmuludq %zmm8,%zmm19,%zmm29
- vpsrlq $32,%zmm19,%zmm10
- vpaddq %zmm25,%zmm11,%zmm11
- vpaddq %zmm26,%zmm12,%zmm12
- vpaddq %zmm27,%zmm13,%zmm13
- vpaddq %zmm28,%zmm14,%zmm14
- vpaddq %zmm29,%zmm15,%zmm15
-
- vpmuludq %zmm9,%zmm23,%zmm25
- vpmuludq %zmm9,%zmm24,%zmm26
- vpmuludq %zmm9,%zmm17,%zmm28
- vpmuludq %zmm9,%zmm18,%zmm29
- vpmuludq %zmm9,%zmm16,%zmm27
- vpsrlq $32,%zmm20,%zmm6
- vpaddq %zmm25,%zmm11,%zmm11
- vpaddq %zmm26,%zmm12,%zmm12
- vpaddq %zmm28,%zmm14,%zmm14
- vpaddq %zmm29,%zmm15,%zmm15
- vpaddq %zmm27,%zmm13,%zmm13
-
- vpmuludq %zmm10,%zmm22,%zmm25
- vpmuludq %zmm10,%zmm16,%zmm28
- vpmuludq %zmm10,%zmm17,%zmm29
- vpmuludq %zmm10,%zmm23,%zmm26
- vpmuludq %zmm10,%zmm24,%zmm27
- vpaddq %zmm25,%zmm11,%zmm11
- vpaddq %zmm28,%zmm14,%zmm14
- vpaddq %zmm29,%zmm15,%zmm15
- vpaddq %zmm26,%zmm12,%zmm12
- vpaddq %zmm27,%zmm13,%zmm13
-
- vpmuludq %zmm6,%zmm24,%zmm28
- vpmuludq %zmm6,%zmm16,%zmm29
- vpmuludq %zmm6,%zmm21,%zmm25
- vpmuludq %zmm6,%zmm22,%zmm26
- vpmuludq %zmm6,%zmm23,%zmm27
- vpaddq %zmm28,%zmm14,%zmm14
- vpaddq %zmm29,%zmm15,%zmm15
- vpaddq %zmm25,%zmm11,%zmm11
- vpaddq %zmm26,%zmm12,%zmm12
- vpaddq %zmm27,%zmm13,%zmm13
-
-
-
- vmovdqu64 0(%rsi),%zmm10
- vmovdqu64 64(%rsi),%zmm6
- leaq 128(%rsi),%rsi
-
-
-
-
- vpsrlq $26,%zmm14,%zmm28
- vpandq %zmm5,%zmm14,%zmm14
- vpaddq %zmm28,%zmm15,%zmm15
-
- vpsrlq $26,%zmm11,%zmm25
- vpandq %zmm5,%zmm11,%zmm11
- vpaddq %zmm25,%zmm12,%zmm12
-
- vpsrlq $26,%zmm15,%zmm29
- vpandq %zmm5,%zmm15,%zmm15
-
- vpsrlq $26,%zmm12,%zmm26
- vpandq %zmm5,%zmm12,%zmm12
- vpaddq %zmm26,%zmm13,%zmm13
-
- vpaddq %zmm29,%zmm11,%zmm11
- vpsllq $2,%zmm29,%zmm29
- vpaddq %zmm29,%zmm11,%zmm11
-
- vpsrlq $26,%zmm13,%zmm27
- vpandq %zmm5,%zmm13,%zmm13
- vpaddq %zmm27,%zmm14,%zmm14
-
- vpsrlq $26,%zmm11,%zmm25
- vpandq %zmm5,%zmm11,%zmm11
- vpaddq %zmm25,%zmm12,%zmm12
-
- vpsrlq $26,%zmm14,%zmm28
- vpandq %zmm5,%zmm14,%zmm14
- vpaddq %zmm28,%zmm15,%zmm15
-
-
-
-
-
- vpunpcklqdq %zmm6,%zmm10,%zmm7
- vpunpckhqdq %zmm6,%zmm10,%zmm6
-
-
-
-
-
-
- vmovdqa32 128(%rcx),%zmm25
- movl $0x7777,%eax
- kmovw %eax,%k1
-
- vpermd %zmm16,%zmm25,%zmm16
- vpermd %zmm17,%zmm25,%zmm17
- vpermd %zmm18,%zmm25,%zmm18
- vpermd %zmm19,%zmm25,%zmm19
- vpermd %zmm20,%zmm25,%zmm20
-
- vpermd %zmm11,%zmm25,%zmm16{%k1}
- vpermd %zmm12,%zmm25,%zmm17{%k1}
- vpermd %zmm13,%zmm25,%zmm18{%k1}
- vpermd %zmm14,%zmm25,%zmm19{%k1}
- vpermd %zmm15,%zmm25,%zmm20{%k1}
-
- vpslld $2,%zmm17,%zmm21
- vpslld $2,%zmm18,%zmm22
- vpslld $2,%zmm19,%zmm23
- vpslld $2,%zmm20,%zmm24
- vpaddd %zmm17,%zmm21,%zmm21
- vpaddd %zmm18,%zmm22,%zmm22
- vpaddd %zmm19,%zmm23,%zmm23
- vpaddd %zmm20,%zmm24,%zmm24
-
- vpbroadcastq 32(%rcx),%zmm30
-
- vpsrlq $52,%zmm7,%zmm9
- vpsllq $12,%zmm6,%zmm10
- vporq %zmm10,%zmm9,%zmm9
- vpsrlq $26,%zmm7,%zmm8
- vpsrlq $14,%zmm6,%zmm10
- vpsrlq $40,%zmm6,%zmm6
- vpandq %zmm5,%zmm9,%zmm9
- vpandq %zmm5,%zmm7,%zmm7
-
-
-
-
- vpaddq %zmm2,%zmm9,%zmm2
- subq $192,%rdx
- jbe .Ltail_avx512
- jmp .Loop_avx512
-
-.align 32
-.Loop_avx512:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- vpmuludq %zmm2,%zmm17,%zmm14
- vpaddq %zmm0,%zmm7,%zmm0
- vpmuludq %zmm2,%zmm18,%zmm15
- vpandq %zmm5,%zmm8,%zmm8
- vpmuludq %zmm2,%zmm23,%zmm11
- vpandq %zmm5,%zmm10,%zmm10
- vpmuludq %zmm2,%zmm24,%zmm12
- vporq %zmm30,%zmm6,%zmm6
- vpmuludq %zmm2,%zmm16,%zmm13
- vpaddq %zmm1,%zmm8,%zmm1
- vpaddq %zmm3,%zmm10,%zmm3
- vpaddq %zmm4,%zmm6,%zmm4
-
- vmovdqu64 0(%rsi),%zmm10
- vmovdqu64 64(%rsi),%zmm6
- leaq 128(%rsi),%rsi
- vpmuludq %zmm0,%zmm19,%zmm28
- vpmuludq %zmm0,%zmm20,%zmm29
- vpmuludq %zmm0,%zmm16,%zmm25
- vpmuludq %zmm0,%zmm17,%zmm26
- vpaddq %zmm28,%zmm14,%zmm14
- vpaddq %zmm29,%zmm15,%zmm15
- vpaddq %zmm25,%zmm11,%zmm11
- vpaddq %zmm26,%zmm12,%zmm12
-
- vpmuludq %zmm1,%zmm18,%zmm28
- vpmuludq %zmm1,%zmm19,%zmm29
- vpmuludq %zmm1,%zmm24,%zmm25
- vpmuludq %zmm0,%zmm18,%zmm27
- vpaddq %zmm28,%zmm14,%zmm14
- vpaddq %zmm29,%zmm15,%zmm15
- vpaddq %zmm25,%zmm11,%zmm11
- vpaddq %zmm27,%zmm13,%zmm13
-
- vpunpcklqdq %zmm6,%zmm10,%zmm7
- vpunpckhqdq %zmm6,%zmm10,%zmm6
-
- vpmuludq %zmm3,%zmm16,%zmm28
- vpmuludq %zmm3,%zmm17,%zmm29
- vpmuludq %zmm1,%zmm16,%zmm26
- vpmuludq %zmm1,%zmm17,%zmm27
- vpaddq %zmm28,%zmm14,%zmm14
- vpaddq %zmm29,%zmm15,%zmm15
- vpaddq %zmm26,%zmm12,%zmm12
- vpaddq %zmm27,%zmm13,%zmm13
-
- vpmuludq %zmm4,%zmm24,%zmm28
- vpmuludq %zmm4,%zmm16,%zmm29
- vpmuludq %zmm3,%zmm22,%zmm25
- vpmuludq %zmm3,%zmm23,%zmm26
- vpaddq %zmm28,%zmm14,%zmm14
- vpmuludq %zmm3,%zmm24,%zmm27
- vpaddq %zmm29,%zmm15,%zmm15
- vpaddq %zmm25,%zmm11,%zmm11
- vpaddq %zmm26,%zmm12,%zmm12
- vpaddq %zmm27,%zmm13,%zmm13
-
- vpmuludq %zmm4,%zmm21,%zmm25
- vpmuludq %zmm4,%zmm22,%zmm26
- vpmuludq %zmm4,%zmm23,%zmm27
- vpaddq %zmm25,%zmm11,%zmm0
- vpaddq %zmm26,%zmm12,%zmm1
- vpaddq %zmm27,%zmm13,%zmm2
-
-
-
-
- vpsrlq $52,%zmm7,%zmm9
- vpsllq $12,%zmm6,%zmm10
-
- vpsrlq $26,%zmm14,%zmm3
- vpandq %zmm5,%zmm14,%zmm14
- vpaddq %zmm3,%zmm15,%zmm4
-
- vporq %zmm10,%zmm9,%zmm9
-
- vpsrlq $26,%zmm0,%zmm11
- vpandq %zmm5,%zmm0,%zmm0
- vpaddq %zmm11,%zmm1,%zmm1
-
- vpandq %zmm5,%zmm9,%zmm9
-
- vpsrlq $26,%zmm4,%zmm15
- vpandq %zmm5,%zmm4,%zmm4
-
- vpsrlq $26,%zmm1,%zmm12
- vpandq %zmm5,%zmm1,%zmm1
- vpaddq %zmm12,%zmm2,%zmm2
-
- vpaddq %zmm15,%zmm0,%zmm0
- vpsllq $2,%zmm15,%zmm15
- vpaddq %zmm15,%zmm0,%zmm0
-
- vpaddq %zmm9,%zmm2,%zmm2
- vpsrlq $26,%zmm7,%zmm8
-
- vpsrlq $26,%zmm2,%zmm13
- vpandq %zmm5,%zmm2,%zmm2
- vpaddq %zmm13,%zmm14,%zmm3
-
- vpsrlq $14,%zmm6,%zmm10
-
- vpsrlq $26,%zmm0,%zmm11
- vpandq %zmm5,%zmm0,%zmm0
- vpaddq %zmm11,%zmm1,%zmm1
-
- vpsrlq $40,%zmm6,%zmm6
-
- vpsrlq $26,%zmm3,%zmm14
- vpandq %zmm5,%zmm3,%zmm3
- vpaddq %zmm14,%zmm4,%zmm4
-
- vpandq %zmm5,%zmm7,%zmm7
-
-
-
-
- subq $128,%rdx
- ja .Loop_avx512
-
-.Ltail_avx512:
-
-
-
-
-
- vpsrlq $32,%zmm16,%zmm16
- vpsrlq $32,%zmm17,%zmm17
- vpsrlq $32,%zmm18,%zmm18
- vpsrlq $32,%zmm23,%zmm23
- vpsrlq $32,%zmm24,%zmm24
- vpsrlq $32,%zmm19,%zmm19
- vpsrlq $32,%zmm20,%zmm20
- vpsrlq $32,%zmm21,%zmm21
- vpsrlq $32,%zmm22,%zmm22
-
-
-
- leaq (%rsi,%rdx,1),%rsi
-
-
- vpaddq %zmm0,%zmm7,%zmm0
-
- vpmuludq %zmm2,%zmm17,%zmm14
- vpmuludq %zmm2,%zmm18,%zmm15
- vpmuludq %zmm2,%zmm23,%zmm11
- vpandq %zmm5,%zmm8,%zmm8
- vpmuludq %zmm2,%zmm24,%zmm12
- vpandq %zmm5,%zmm10,%zmm10
- vpmuludq %zmm2,%zmm16,%zmm13
- vporq %zmm30,%zmm6,%zmm6
- vpaddq %zmm1,%zmm8,%zmm1
- vpaddq %zmm3,%zmm10,%zmm3
- vpaddq %zmm4,%zmm6,%zmm4
-
- vmovdqu 0(%rsi),%xmm7
- vpmuludq %zmm0,%zmm19,%zmm28
- vpmuludq %zmm0,%zmm20,%zmm29
- vpmuludq %zmm0,%zmm16,%zmm25
- vpmuludq %zmm0,%zmm17,%zmm26
- vpaddq %zmm28,%zmm14,%zmm14
- vpaddq %zmm29,%zmm15,%zmm15
- vpaddq %zmm25,%zmm11,%zmm11
- vpaddq %zmm26,%zmm12,%zmm12
-
- vmovdqu 16(%rsi),%xmm8
- vpmuludq %zmm1,%zmm18,%zmm28
- vpmuludq %zmm1,%zmm19,%zmm29
- vpmuludq %zmm1,%zmm24,%zmm25
- vpmuludq %zmm0,%zmm18,%zmm27
- vpaddq %zmm28,%zmm14,%zmm14
- vpaddq %zmm29,%zmm15,%zmm15
- vpaddq %zmm25,%zmm11,%zmm11
- vpaddq %zmm27,%zmm13,%zmm13
-
- vinserti128 $1,32(%rsi),%ymm7,%ymm7
- vpmuludq %zmm3,%zmm16,%zmm28
- vpmuludq %zmm3,%zmm17,%zmm29
- vpmuludq %zmm1,%zmm16,%zmm26
- vpmuludq %zmm1,%zmm17,%zmm27
- vpaddq %zmm28,%zmm14,%zmm14
- vpaddq %zmm29,%zmm15,%zmm15
- vpaddq %zmm26,%zmm12,%zmm12
- vpaddq %zmm27,%zmm13,%zmm13
-
- vinserti128 $1,48(%rsi),%ymm8,%ymm8
- vpmuludq %zmm4,%zmm24,%zmm28
- vpmuludq %zmm4,%zmm16,%zmm29
- vpmuludq %zmm3,%zmm22,%zmm25
- vpmuludq %zmm3,%zmm23,%zmm26
- vpmuludq %zmm3,%zmm24,%zmm27
- vpaddq %zmm28,%zmm14,%zmm3
- vpaddq %zmm29,%zmm15,%zmm15
- vpaddq %zmm25,%zmm11,%zmm11
- vpaddq %zmm26,%zmm12,%zmm12
- vpaddq %zmm27,%zmm13,%zmm13
-
- vpmuludq %zmm4,%zmm21,%zmm25
- vpmuludq %zmm4,%zmm22,%zmm26
- vpmuludq %zmm4,%zmm23,%zmm27
- vpaddq %zmm25,%zmm11,%zmm0
- vpaddq %zmm26,%zmm12,%zmm1
- vpaddq %zmm27,%zmm13,%zmm2
-
-
-
-
- movl $1,%eax
- vpermq $0xb1,%zmm3,%zmm14
- vpermq $0xb1,%zmm15,%zmm4
- vpermq $0xb1,%zmm0,%zmm11
- vpermq $0xb1,%zmm1,%zmm12
- vpermq $0xb1,%zmm2,%zmm13
- vpaddq %zmm14,%zmm3,%zmm3
- vpaddq %zmm15,%zmm4,%zmm4
- vpaddq %zmm11,%zmm0,%zmm0
- vpaddq %zmm12,%zmm1,%zmm1
- vpaddq %zmm13,%zmm2,%zmm2
-
- kmovw %eax,%k3
- vpermq $0x2,%zmm3,%zmm14
- vpermq $0x2,%zmm4,%zmm15
- vpermq $0x2,%zmm0,%zmm11
- vpermq $0x2,%zmm1,%zmm12
- vpermq $0x2,%zmm2,%zmm13
- vpaddq %zmm14,%zmm3,%zmm3
- vpaddq %zmm15,%zmm4,%zmm4
- vpaddq %zmm11,%zmm0,%zmm0
- vpaddq %zmm12,%zmm1,%zmm1
- vpaddq %zmm13,%zmm2,%zmm2
-
- vextracti64x4 $0x1,%zmm3,%ymm14
- vextracti64x4 $0x1,%zmm4,%ymm15
- vextracti64x4 $0x1,%zmm0,%ymm11
- vextracti64x4 $0x1,%zmm1,%ymm12
- vextracti64x4 $0x1,%zmm2,%ymm13
- vpaddq %zmm14,%zmm3,%zmm3{%k3}{z}
- vpaddq %zmm15,%zmm4,%zmm4{%k3}{z}
- vpaddq %zmm11,%zmm0,%zmm0{%k3}{z}
- vpaddq %zmm12,%zmm1,%zmm1{%k3}{z}
- vpaddq %zmm13,%zmm2,%zmm2{%k3}{z}
-
-
-
- vpsrlq $26,%ymm3,%ymm14
- vpand %ymm5,%ymm3,%ymm3
- vpsrldq $6,%ymm7,%ymm9
- vpsrldq $6,%ymm8,%ymm10
- vpunpckhqdq %ymm8,%ymm7,%ymm6
- vpaddq %ymm14,%ymm4,%ymm4
-
- vpsrlq $26,%ymm0,%ymm11
- vpand %ymm5,%ymm0,%ymm0
- vpunpcklqdq %ymm10,%ymm9,%ymm9
- vpunpcklqdq %ymm8,%ymm7,%ymm7
- vpaddq %ymm11,%ymm1,%ymm1
-
- vpsrlq $26,%ymm4,%ymm15
- vpand %ymm5,%ymm4,%ymm4
-
- vpsrlq $26,%ymm1,%ymm12
- vpand %ymm5,%ymm1,%ymm1
- vpsrlq $30,%ymm9,%ymm10
- vpsrlq $4,%ymm9,%ymm9
- vpaddq %ymm12,%ymm2,%ymm2
-
- vpaddq %ymm15,%ymm0,%ymm0
- vpsllq $2,%ymm15,%ymm15
- vpsrlq $26,%ymm7,%ymm8
- vpsrlq $40,%ymm6,%ymm6
- vpaddq %ymm15,%ymm0,%ymm0
-
- vpsrlq $26,%ymm2,%ymm13
- vpand %ymm5,%ymm2,%ymm2
- vpand %ymm5,%ymm9,%ymm9
- vpand %ymm5,%ymm7,%ymm7
- vpaddq %ymm13,%ymm3,%ymm3
-
- vpsrlq $26,%ymm0,%ymm11
- vpand %ymm5,%ymm0,%ymm0
- vpaddq %ymm2,%ymm9,%ymm2
- vpand %ymm5,%ymm8,%ymm8
- vpaddq %ymm11,%ymm1,%ymm1
-
- vpsrlq $26,%ymm3,%ymm14
- vpand %ymm5,%ymm3,%ymm3
- vpand %ymm5,%ymm10,%ymm10
- vpor 32(%rcx),%ymm6,%ymm6
- vpaddq %ymm14,%ymm4,%ymm4
-
- leaq 144(%rsp),%rax
- addq $64,%rdx
- jnz .Ltail_avx2
-
- vpsubq %ymm9,%ymm2,%ymm2
- vmovd %xmm0,-112(%rdi)
- vmovd %xmm1,-108(%rdi)
- vmovd %xmm2,-104(%rdi)
- vmovd %xmm3,-100(%rdi)
- vmovd %xmm4,-96(%rdi)
- vzeroall
- leaq 8(%r11),%rsp
-.cfi_def_cfa %rsp,8
- .byte 0xf3,0xc3
-.cfi_endproc
-.size poly1305_blocks_avx512,.-poly1305_blocks_avx512
-.type poly1305_init_base2_44,@function
-.align 32
-poly1305_init_base2_44:
+.type poly1305_blocks_avx512,@function
+.align 32
+poly1305_blocks_avx512:
.cfi_startproc
- xorq %rax,%rax
- movq %rax,0(%rdi)
- movq %rax,8(%rdi)
- movq %rax,16(%rdi)
-
-.Linit_base2_44:
- leaq poly1305_blocks_vpmadd52(%rip),%r10
- leaq poly1305_emit_base2_44(%rip),%r11
-
- movq $0x0ffffffc0fffffff,%rax
- movq $0x0ffffffc0ffffffc,%rcx
- andq 0(%rsi),%rax
- movq $0x00000fffffffffff,%r8
- andq 8(%rsi),%rcx
- movq $0x00000fffffffffff,%r9
- andq %rax,%r8
- shrdq $44,%rcx,%rax
- movq %r8,40(%rdi)
- andq %r9,%rax
- shrq $24,%rcx
- movq %rax,48(%rdi)
- leaq (%rax,%rax,4),%rax
- movq %rcx,56(%rdi)
- shlq $2,%rax
- leaq (%rcx,%rcx,4),%rcx
- shlq $2,%rcx
- movq %rax,24(%rdi)
- movq %rcx,32(%rdi)
- movq $-1,64(%rdi)
- movq %r10,0(%rdx)
- movq %r11,8(%rdx)
- movl $1,%eax
- .byte 0xf3,0xc3
+.Lblocks_avx512:
+ movl $15,%eax
+ kmovw %eax,%k2
+ leaq -8(%rsp),%r11
+.cfi_def_cfa %r11,16
+ subq $0x128,%rsp
+ leaq .Lconst(%rip),%rcx
+ leaq 48+64(%rdi),%rdi
+ vmovdqa 96(%rcx),%ymm9
+
+
+ vmovdqu -64(%rdi),%xmm11
+ andq $-512,%rsp
+ vmovdqu -48(%rdi),%xmm12
+ movq $0x20,%rax
+ vmovdqu -32(%rdi),%xmm7
+ vmovdqu -16(%rdi),%xmm13
+ vmovdqu 0(%rdi),%xmm8
+ vmovdqu 16(%rdi),%xmm14
+ vmovdqu 32(%rdi),%xmm10
+ vmovdqu 48(%rdi),%xmm15
+ vmovdqu 64(%rdi),%xmm6
+ vpermd %zmm11,%zmm9,%zmm16
+ vpbroadcastq 64(%rcx),%zmm5
+ vpermd %zmm12,%zmm9,%zmm17
+ vpermd %zmm7,%zmm9,%zmm21
+ vpermd %zmm13,%zmm9,%zmm18
+ vmovdqa64 %zmm16,0(%rsp){%k2}
+ vpsrlq $32,%zmm16,%zmm7
+ vpermd %zmm8,%zmm9,%zmm22
+ vmovdqu64 %zmm17,0(%rsp,%rax,1){%k2}
+ vpsrlq $32,%zmm17,%zmm8
+ vpermd %zmm14,%zmm9,%zmm19
+ vmovdqa64 %zmm21,64(%rsp){%k2}
+ vpermd %zmm10,%zmm9,%zmm23
+ vpermd %zmm15,%zmm9,%zmm20
+ vmovdqu64 %zmm18,64(%rsp,%rax,1){%k2}
+ vpermd %zmm6,%zmm9,%zmm24
+ vmovdqa64 %zmm22,128(%rsp){%k2}
+ vmovdqu64 %zmm19,128(%rsp,%rax,1){%k2}
+ vmovdqa64 %zmm23,192(%rsp){%k2}
+ vmovdqu64 %zmm20,192(%rsp,%rax,1){%k2}
+ vmovdqa64 %zmm24,256(%rsp){%k2}
+
+
+
+
+
+
+
+
+
+
+ vpmuludq %zmm7,%zmm16,%zmm11
+ vpmuludq %zmm7,%zmm17,%zmm12
+ vpmuludq %zmm7,%zmm18,%zmm13
+ vpmuludq %zmm7,%zmm19,%zmm14
+ vpmuludq %zmm7,%zmm20,%zmm15
+ vpsrlq $32,%zmm18,%zmm9
+
+ vpmuludq %zmm8,%zmm24,%zmm25
+ vpmuludq %zmm8,%zmm16,%zmm26
+ vpmuludq %zmm8,%zmm17,%zmm27
+ vpmuludq %zmm8,%zmm18,%zmm28
+ vpmuludq %zmm8,%zmm19,%zmm29
+ vpsrlq $32,%zmm19,%zmm10
+ vpaddq %zmm25,%zmm11,%zmm11
+ vpaddq %zmm26,%zmm12,%zmm12
+ vpaddq %zmm27,%zmm13,%zmm13
+ vpaddq %zmm28,%zmm14,%zmm14
+ vpaddq %zmm29,%zmm15,%zmm15
+
+ vpmuludq %zmm9,%zmm23,%zmm25
+ vpmuludq %zmm9,%zmm24,%zmm26
+ vpmuludq %zmm9,%zmm17,%zmm28
+ vpmuludq %zmm9,%zmm18,%zmm29
+ vpmuludq %zmm9,%zmm16,%zmm27
+ vpsrlq $32,%zmm20,%zmm6
+ vpaddq %zmm25,%zmm11,%zmm11
+ vpaddq %zmm26,%zmm12,%zmm12
+ vpaddq %zmm28,%zmm14,%zmm14
+ vpaddq %zmm29,%zmm15,%zmm15
+ vpaddq %zmm27,%zmm13,%zmm13
+
+ vpmuludq %zmm10,%zmm22,%zmm25
+ vpmuludq %zmm10,%zmm16,%zmm28
+ vpmuludq %zmm10,%zmm17,%zmm29
+ vpmuludq %zmm10,%zmm23,%zmm26
+ vpmuludq %zmm10,%zmm24,%zmm27
+ vpaddq %zmm25,%zmm11,%zmm11
+ vpaddq %zmm28,%zmm14,%zmm14
+ vpaddq %zmm29,%zmm15,%zmm15
+ vpaddq %zmm26,%zmm12,%zmm12
+ vpaddq %zmm27,%zmm13,%zmm13
+
+ vpmuludq %zmm6,%zmm24,%zmm28
+ vpmuludq %zmm6,%zmm16,%zmm29
+ vpmuludq %zmm6,%zmm21,%zmm25
+ vpmuludq %zmm6,%zmm22,%zmm26
+ vpmuludq %zmm6,%zmm23,%zmm27
+ vpaddq %zmm28,%zmm14,%zmm14
+ vpaddq %zmm29,%zmm15,%zmm15
+ vpaddq %zmm25,%zmm11,%zmm11
+ vpaddq %zmm26,%zmm12,%zmm12
+ vpaddq %zmm27,%zmm13,%zmm13
+
+
+
+ vmovdqu64 0(%rsi),%zmm10
+ vmovdqu64 64(%rsi),%zmm6
+ leaq 128(%rsi),%rsi
+
+
+
+
+ vpsrlq $26,%zmm14,%zmm28
+ vpandq %zmm5,%zmm14,%zmm14
+ vpaddq %zmm28,%zmm15,%zmm15
+
+ vpsrlq $26,%zmm11,%zmm25
+ vpandq %zmm5,%zmm11,%zmm11
+ vpaddq %zmm25,%zmm12,%zmm12
+
+ vpsrlq $26,%zmm15,%zmm29
+ vpandq %zmm5,%zmm15,%zmm15
+
+ vpsrlq $26,%zmm12,%zmm26
+ vpandq %zmm5,%zmm12,%zmm12
+ vpaddq %zmm26,%zmm13,%zmm13
+
+ vpaddq %zmm29,%zmm11,%zmm11
+ vpsllq $2,%zmm29,%zmm29
+ vpaddq %zmm29,%zmm11,%zmm11
+
+ vpsrlq $26,%zmm13,%zmm27
+ vpandq %zmm5,%zmm13,%zmm13
+ vpaddq %zmm27,%zmm14,%zmm14
+
+ vpsrlq $26,%zmm11,%zmm25
+ vpandq %zmm5,%zmm11,%zmm11
+ vpaddq %zmm25,%zmm12,%zmm12
+
+ vpsrlq $26,%zmm14,%zmm28
+ vpandq %zmm5,%zmm14,%zmm14
+ vpaddq %zmm28,%zmm15,%zmm15
+
+
+
+
+
+ vpunpcklqdq %zmm6,%zmm10,%zmm7
+ vpunpckhqdq %zmm6,%zmm10,%zmm6
+
+
+
+
+
+
+ vmovdqa32 128(%rcx),%zmm25
+ movl $0x7777,%eax
+ kmovw %eax,%k1
+
+ vpermd %zmm16,%zmm25,%zmm16
+ vpermd %zmm17,%zmm25,%zmm17
+ vpermd %zmm18,%zmm25,%zmm18
+ vpermd %zmm19,%zmm25,%zmm19
+ vpermd %zmm20,%zmm25,%zmm20
+
+ vpermd %zmm11,%zmm25,%zmm16{%k1}
+ vpermd %zmm12,%zmm25,%zmm17{%k1}
+ vpermd %zmm13,%zmm25,%zmm18{%k1}
+ vpermd %zmm14,%zmm25,%zmm19{%k1}
+ vpermd %zmm15,%zmm25,%zmm20{%k1}
+
+ vpslld $2,%zmm17,%zmm21
+ vpslld $2,%zmm18,%zmm22
+ vpslld $2,%zmm19,%zmm23
+ vpslld $2,%zmm20,%zmm24
+ vpaddd %zmm17,%zmm21,%zmm21
+ vpaddd %zmm18,%zmm22,%zmm22
+ vpaddd %zmm19,%zmm23,%zmm23
+ vpaddd %zmm20,%zmm24,%zmm24
+
+ vpbroadcastq 32(%rcx),%zmm30
+
+ vpsrlq $52,%zmm7,%zmm9
+ vpsllq $12,%zmm6,%zmm10
+ vporq %zmm10,%zmm9,%zmm9
+ vpsrlq $26,%zmm7,%zmm8
+ vpsrlq $14,%zmm6,%zmm10
+ vpsrlq $40,%zmm6,%zmm6
+ vpandq %zmm5,%zmm9,%zmm9
+ vpandq %zmm5,%zmm7,%zmm7
+
+
+
+
+ vpaddq %zmm2,%zmm9,%zmm2
+ subq $192,%rdx
+ jbe .Ltail_avx512
+ jmp .Loop_avx512
+
+.align 32
+.Loop_avx512:
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ vpmuludq %zmm2,%zmm17,%zmm14
+ vpaddq %zmm0,%zmm7,%zmm0
+ vpmuludq %zmm2,%zmm18,%zmm15
+ vpandq %zmm5,%zmm8,%zmm8
+ vpmuludq %zmm2,%zmm23,%zmm11
+ vpandq %zmm5,%zmm10,%zmm10
+ vpmuludq %zmm2,%zmm24,%zmm12
+ vporq %zmm30,%zmm6,%zmm6
+ vpmuludq %zmm2,%zmm16,%zmm13
+ vpaddq %zmm1,%zmm8,%zmm1
+ vpaddq %zmm3,%zmm10,%zmm3
+ vpaddq %zmm4,%zmm6,%zmm4
+
+ vmovdqu64 0(%rsi),%zmm10
+ vmovdqu64 64(%rsi),%zmm6
+ leaq 128(%rsi),%rsi
+ vpmuludq %zmm0,%zmm19,%zmm28
+ vpmuludq %zmm0,%zmm20,%zmm29
+ vpmuludq %zmm0,%zmm16,%zmm25
+ vpmuludq %zmm0,%zmm17,%zmm26
+ vpaddq %zmm28,%zmm14,%zmm14
+ vpaddq %zmm29,%zmm15,%zmm15
+ vpaddq %zmm25,%zmm11,%zmm11
+ vpaddq %zmm26,%zmm12,%zmm12
+
+ vpmuludq %zmm1,%zmm18,%zmm28
+ vpmuludq %zmm1,%zmm19,%zmm29
+ vpmuludq %zmm1,%zmm24,%zmm25
+ vpmuludq %zmm0,%zmm18,%zmm27
+ vpaddq %zmm28,%zmm14,%zmm14
+ vpaddq %zmm29,%zmm15,%zmm15
+ vpaddq %zmm25,%zmm11,%zmm11
+ vpaddq %zmm27,%zmm13,%zmm13
+
+ vpunpcklqdq %zmm6,%zmm10,%zmm7
+ vpunpckhqdq %zmm6,%zmm10,%zmm6
+
+ vpmuludq %zmm3,%zmm16,%zmm28
+ vpmuludq %zmm3,%zmm17,%zmm29
+ vpmuludq %zmm1,%zmm16,%zmm26
+ vpmuludq %zmm1,%zmm17,%zmm27
+ vpaddq %zmm28,%zmm14,%zmm14
+ vpaddq %zmm29,%zmm15,%zmm15
+ vpaddq %zmm26,%zmm12,%zmm12
+ vpaddq %zmm27,%zmm13,%zmm13
+
+ vpmuludq %zmm4,%zmm24,%zmm28
+ vpmuludq %zmm4,%zmm16,%zmm29
+ vpmuludq %zmm3,%zmm22,%zmm25
+ vpmuludq %zmm3,%zmm23,%zmm26
+ vpaddq %zmm28,%zmm14,%zmm14
+ vpmuludq %zmm3,%zmm24,%zmm27
+ vpaddq %zmm29,%zmm15,%zmm15
+ vpaddq %zmm25,%zmm11,%zmm11
+ vpaddq %zmm26,%zmm12,%zmm12
+ vpaddq %zmm27,%zmm13,%zmm13
+
+ vpmuludq %zmm4,%zmm21,%zmm25
+ vpmuludq %zmm4,%zmm22,%zmm26
+ vpmuludq %zmm4,%zmm23,%zmm27
+ vpaddq %zmm25,%zmm11,%zmm0
+ vpaddq %zmm26,%zmm12,%zmm1
+ vpaddq %zmm27,%zmm13,%zmm2
+
+
+
+
+ vpsrlq $52,%zmm7,%zmm9
+ vpsllq $12,%zmm6,%zmm10
+
+ vpsrlq $26,%zmm14,%zmm3
+ vpandq %zmm5,%zmm14,%zmm14
+ vpaddq %zmm3,%zmm15,%zmm4
+
+ vporq %zmm10,%zmm9,%zmm9
+
+ vpsrlq $26,%zmm0,%zmm11
+ vpandq %zmm5,%zmm0,%zmm0
+ vpaddq %zmm11,%zmm1,%zmm1
+
+ vpandq %zmm5,%zmm9,%zmm9
+
+ vpsrlq $26,%zmm4,%zmm15
+ vpandq %zmm5,%zmm4,%zmm4
+
+ vpsrlq $26,%zmm1,%zmm12
+ vpandq %zmm5,%zmm1,%zmm1
+ vpaddq %zmm12,%zmm2,%zmm2
+
+ vpaddq %zmm15,%zmm0,%zmm0
+ vpsllq $2,%zmm15,%zmm15
+ vpaddq %zmm15,%zmm0,%zmm0
+
+ vpaddq %zmm9,%zmm2,%zmm2
+ vpsrlq $26,%zmm7,%zmm8
+
+ vpsrlq $26,%zmm2,%zmm13
+ vpandq %zmm5,%zmm2,%zmm2
+ vpaddq %zmm13,%zmm14,%zmm3
+
+ vpsrlq $14,%zmm6,%zmm10
+
+ vpsrlq $26,%zmm0,%zmm11
+ vpandq %zmm5,%zmm0,%zmm0
+ vpaddq %zmm11,%zmm1,%zmm1
+
+ vpsrlq $40,%zmm6,%zmm6
+
+ vpsrlq $26,%zmm3,%zmm14
+ vpandq %zmm5,%zmm3,%zmm3
+ vpaddq %zmm14,%zmm4,%zmm4
+
+ vpandq %zmm5,%zmm7,%zmm7
+
+
+
+
+ subq $128,%rdx
+ ja .Loop_avx512
+
+.Ltail_avx512:
+
+
+
+
+
+ vpsrlq $32,%zmm16,%zmm16
+ vpsrlq $32,%zmm17,%zmm17
+ vpsrlq $32,%zmm18,%zmm18
+ vpsrlq $32,%zmm23,%zmm23
+ vpsrlq $32,%zmm24,%zmm24
+ vpsrlq $32,%zmm19,%zmm19
+ vpsrlq $32,%zmm20,%zmm20
+ vpsrlq $32,%zmm21,%zmm21
+ vpsrlq $32,%zmm22,%zmm22
+
+
+
+ leaq (%rsi,%rdx,1),%rsi
+
+
+ vpaddq %zmm0,%zmm7,%zmm0
+
+ vpmuludq %zmm2,%zmm17,%zmm14
+ vpmuludq %zmm2,%zmm18,%zmm15
+ vpmuludq %zmm2,%zmm23,%zmm11
+ vpandq %zmm5,%zmm8,%zmm8
+ vpmuludq %zmm2,%zmm24,%zmm12
+ vpandq %zmm5,%zmm10,%zmm10
+ vpmuludq %zmm2,%zmm16,%zmm13
+ vporq %zmm30,%zmm6,%zmm6
+ vpaddq %zmm1,%zmm8,%zmm1
+ vpaddq %zmm3,%zmm10,%zmm3
+ vpaddq %zmm4,%zmm6,%zmm4
+
+ vmovdqu 0(%rsi),%xmm7
+ vpmuludq %zmm0,%zmm19,%zmm28
+ vpmuludq %zmm0,%zmm20,%zmm29
+ vpmuludq %zmm0,%zmm16,%zmm25
+ vpmuludq %zmm0,%zmm17,%zmm26
+ vpaddq %zmm28,%zmm14,%zmm14
+ vpaddq %zmm29,%zmm15,%zmm15
+ vpaddq %zmm25,%zmm11,%zmm11
+ vpaddq %zmm26,%zmm12,%zmm12
+
+ vmovdqu 16(%rsi),%xmm8
+ vpmuludq %zmm1,%zmm18,%zmm28
+ vpmuludq %zmm1,%zmm19,%zmm29
+ vpmuludq %zmm1,%zmm24,%zmm25
+ vpmuludq %zmm0,%zmm18,%zmm27
+ vpaddq %zmm28,%zmm14,%zmm14
+ vpaddq %zmm29,%zmm15,%zmm15
+ vpaddq %zmm25,%zmm11,%zmm11
+ vpaddq %zmm27,%zmm13,%zmm13
+
+ vinserti128 $1,32(%rsi),%ymm7,%ymm7
+ vpmuludq %zmm3,%zmm16,%zmm28
+ vpmuludq %zmm3,%zmm17,%zmm29
+ vpmuludq %zmm1,%zmm16,%zmm26
+ vpmuludq %zmm1,%zmm17,%zmm27
+ vpaddq %zmm28,%zmm14,%zmm14
+ vpaddq %zmm29,%zmm15,%zmm15
+ vpaddq %zmm26,%zmm12,%zmm12
+ vpaddq %zmm27,%zmm13,%zmm13
+
+ vinserti128 $1,48(%rsi),%ymm8,%ymm8
+ vpmuludq %zmm4,%zmm24,%zmm28
+ vpmuludq %zmm4,%zmm16,%zmm29
+ vpmuludq %zmm3,%zmm22,%zmm25
+ vpmuludq %zmm3,%zmm23,%zmm26
+ vpmuludq %zmm3,%zmm24,%zmm27
+ vpaddq %zmm28,%zmm14,%zmm3
+ vpaddq %zmm29,%zmm15,%zmm15
+ vpaddq %zmm25,%zmm11,%zmm11
+ vpaddq %zmm26,%zmm12,%zmm12
+ vpaddq %zmm27,%zmm13,%zmm13
+
+ vpmuludq %zmm4,%zmm21,%zmm25
+ vpmuludq %zmm4,%zmm22,%zmm26
+ vpmuludq %zmm4,%zmm23,%zmm27
+ vpaddq %zmm25,%zmm11,%zmm0
+ vpaddq %zmm26,%zmm12,%zmm1
+ vpaddq %zmm27,%zmm13,%zmm2
+
+
+
+
+ movl $1,%eax
+ vpermq $0xb1,%zmm3,%zmm14
+ vpermq $0xb1,%zmm15,%zmm4
+ vpermq $0xb1,%zmm0,%zmm11
+ vpermq $0xb1,%zmm1,%zmm12
+ vpermq $0xb1,%zmm2,%zmm13
+ vpaddq %zmm14,%zmm3,%zmm3
+ vpaddq %zmm15,%zmm4,%zmm4
+ vpaddq %zmm11,%zmm0,%zmm0
+ vpaddq %zmm12,%zmm1,%zmm1
+ vpaddq %zmm13,%zmm2,%zmm2
+
+ kmovw %eax,%k3
+ vpermq $0x2,%zmm3,%zmm14
+ vpermq $0x2,%zmm4,%zmm15
+ vpermq $0x2,%zmm0,%zmm11
+ vpermq $0x2,%zmm1,%zmm12
+ vpermq $0x2,%zmm2,%zmm13
+ vpaddq %zmm14,%zmm3,%zmm3
+ vpaddq %zmm15,%zmm4,%zmm4
+ vpaddq %zmm11,%zmm0,%zmm0
+ vpaddq %zmm12,%zmm1,%zmm1
+ vpaddq %zmm13,%zmm2,%zmm2
+
+ vextracti64x4 $0x1,%zmm3,%ymm14
+ vextracti64x4 $0x1,%zmm4,%ymm15
+ vextracti64x4 $0x1,%zmm0,%ymm11
+ vextracti64x4 $0x1,%zmm1,%ymm12
+ vextracti64x4 $0x1,%zmm2,%ymm13
+ vpaddq %zmm14,%zmm3,%zmm3{%k3}{z}
+ vpaddq %zmm15,%zmm4,%zmm4{%k3}{z}
+ vpaddq %zmm11,%zmm0,%zmm0{%k3}{z}
+ vpaddq %zmm12,%zmm1,%zmm1{%k3}{z}
+ vpaddq %zmm13,%zmm2,%zmm2{%k3}{z}
+
+
+
+ vpsrlq $26,%ymm3,%ymm14
+ vpand %ymm5,%ymm3,%ymm3
+ vpsrldq $6,%ymm7,%ymm9
+ vpsrldq $6,%ymm8,%ymm10
+ vpunpckhqdq %ymm8,%ymm7,%ymm6
+ vpaddq %ymm14,%ymm4,%ymm4
+
+ vpsrlq $26,%ymm0,%ymm11
+ vpand %ymm5,%ymm0,%ymm0
+ vpunpcklqdq %ymm10,%ymm9,%ymm9
+ vpunpcklqdq %ymm8,%ymm7,%ymm7
+ vpaddq %ymm11,%ymm1,%ymm1
+
+ vpsrlq $26,%ymm4,%ymm15
+ vpand %ymm5,%ymm4,%ymm4
+
+ vpsrlq $26,%ymm1,%ymm12
+ vpand %ymm5,%ymm1,%ymm1
+ vpsrlq $30,%ymm9,%ymm10
+ vpsrlq $4,%ymm9,%ymm9
+ vpaddq %ymm12,%ymm2,%ymm2
+
+ vpaddq %ymm15,%ymm0,%ymm0
+ vpsllq $2,%ymm15,%ymm15
+ vpsrlq $26,%ymm7,%ymm8
+ vpsrlq $40,%ymm6,%ymm6
+ vpaddq %ymm15,%ymm0,%ymm0
+
+ vpsrlq $26,%ymm2,%ymm13
+ vpand %ymm5,%ymm2,%ymm2
+ vpand %ymm5,%ymm9,%ymm9
+ vpand %ymm5,%ymm7,%ymm7
+ vpaddq %ymm13,%ymm3,%ymm3
+
+ vpsrlq $26,%ymm0,%ymm11
+ vpand %ymm5,%ymm0,%ymm0
+ vpaddq %ymm2,%ymm9,%ymm2
+ vpand %ymm5,%ymm8,%ymm8
+ vpaddq %ymm11,%ymm1,%ymm1
+
+ vpsrlq $26,%ymm3,%ymm14
+ vpand %ymm5,%ymm3,%ymm3
+ vpand %ymm5,%ymm10,%ymm10
+ vpor 32(%rcx),%ymm6,%ymm6
+ vpaddq %ymm14,%ymm4,%ymm4
+
+ leaq 144(%rsp),%rax
+ addq $64,%rdx
+ jnz .Ltail_avx2
+
+ vpsubq %ymm9,%ymm2,%ymm2
+ vmovd %xmm0,-112(%rdi)
+ vmovd %xmm1,-108(%rdi)
+ vmovd %xmm2,-104(%rdi)
+ vmovd %xmm3,-100(%rdi)
+ vmovd %xmm4,-96(%rdi)
+ vzeroall
+ leaq 8(%r11),%rsp
+.cfi_def_cfa %rsp,8
+ .byte 0xf3,0xc3
.cfi_endproc
-.size poly1305_init_base2_44,.-poly1305_init_base2_44
-.type poly1305_blocks_vpmadd52,@function
-.align 32
-poly1305_blocks_vpmadd52:
+.size poly1305_blocks_avx512,.-poly1305_blocks_avx512
+.type poly1305_init_base2_44,@function
+.align 32
+poly1305_init_base2_44:
.cfi_startproc
- shrq $4,%rdx
- jz .Lno_data_vpmadd52
-
- shlq $40,%rcx
- movq 64(%rdi),%r8
-
-
-
-
-
-
- movq $3,%rax
- movq $1,%r10
- cmpq $4,%rdx
- cmovaeq %r10,%rax
- testq %r8,%r8
- cmovnsq %r10,%rax
-
- andq %rdx,%rax
- jz .Lblocks_vpmadd52_4x
-
- subq %rax,%rdx
- movl $7,%r10d
- movl $1,%r11d
- kmovw %r10d,%k7
- leaq .L2_44_inp_permd(%rip),%r10
- kmovw %r11d,%k1
-
- vmovq %rcx,%xmm21
- vmovdqa64 0(%r10),%ymm19
- vmovdqa64 32(%r10),%ymm20
- vpermq $0xcf,%ymm21,%ymm21
- vmovdqa64 64(%r10),%ymm22
-
- vmovdqu64 0(%rdi),%ymm16{%k7}{z}
- vmovdqu64 40(%rdi),%ymm3{%k7}{z}
- vmovdqu64 32(%rdi),%ymm4{%k7}{z}
- vmovdqu64 24(%rdi),%ymm5{%k7}{z}
-
- vmovdqa64 96(%r10),%ymm23
- vmovdqa64 128(%r10),%ymm24
-
- jmp .Loop_vpmadd52
-
-.align 32
-.Loop_vpmadd52:
- vmovdqu32 0(%rsi),%xmm18
- leaq 16(%rsi),%rsi
-
- vpermd %ymm18,%ymm19,%ymm18
- vpsrlvq %ymm20,%ymm18,%ymm18
- vpandq %ymm22,%ymm18,%ymm18
- vporq %ymm21,%ymm18,%ymm18
-
- vpaddq %ymm18,%ymm16,%ymm16
-
- vpermq $0,%ymm16,%ymm0{%k7}{z}
- vpermq $85,%ymm16,%ymm1{%k7}{z}
- vpermq $170,%ymm16,%ymm2{%k7}{z}
-
- vpxord %ymm16,%ymm16,%ymm16
- vpxord %ymm17,%ymm17,%ymm17
-
- vpmadd52luq %ymm3,%ymm0,%ymm16
- vpmadd52huq %ymm3,%ymm0,%ymm17
-
- vpmadd52luq %ymm4,%ymm1,%ymm16
- vpmadd52huq %ymm4,%ymm1,%ymm17
-
- vpmadd52luq %ymm5,%ymm2,%ymm16
- vpmadd52huq %ymm5,%ymm2,%ymm17
-
- vpsrlvq %ymm23,%ymm16,%ymm18
- vpsllvq %ymm24,%ymm17,%ymm17
- vpandq %ymm22,%ymm16,%ymm16
-
- vpaddq %ymm18,%ymm17,%ymm17
-
- vpermq $147,%ymm17,%ymm17
-
- vpaddq %ymm17,%ymm16,%ymm16
-
- vpsrlvq %ymm23,%ymm16,%ymm18
- vpandq %ymm22,%ymm16,%ymm16
-
- vpermq $147,%ymm18,%ymm18
-
- vpaddq %ymm18,%ymm16,%ymm16
-
- vpermq $147,%ymm16,%ymm18{%k1}{z}
-
- vpaddq %ymm18,%ymm16,%ymm16
- vpsllq $2,%ymm18,%ymm18
-
- vpaddq %ymm18,%ymm16,%ymm16
-
- decq %rax
- jnz .Loop_vpmadd52
-
- vmovdqu64 %ymm16,0(%rdi){%k7}
-
- testq %rdx,%rdx
- jnz .Lblocks_vpmadd52_4x
-
-.Lno_data_vpmadd52:
- .byte 0xf3,0xc3
+ xorq %rax,%rax
+ movq %rax,0(%rdi)
+ movq %rax,8(%rdi)
+ movq %rax,16(%rdi)
+
+.Linit_base2_44:
+ leaq poly1305_blocks_vpmadd52(%rip),%r10
+ leaq poly1305_emit_base2_44(%rip),%r11
+
+ movq $0x0ffffffc0fffffff,%rax
+ movq $0x0ffffffc0ffffffc,%rcx
+ andq 0(%rsi),%rax
+ movq $0x00000fffffffffff,%r8
+ andq 8(%rsi),%rcx
+ movq $0x00000fffffffffff,%r9
+ andq %rax,%r8
+ shrdq $44,%rcx,%rax
+ movq %r8,40(%rdi)
+ andq %r9,%rax
+ shrq $24,%rcx
+ movq %rax,48(%rdi)
+ leaq (%rax,%rax,4),%rax
+ movq %rcx,56(%rdi)
+ shlq $2,%rax
+ leaq (%rcx,%rcx,4),%rcx
+ shlq $2,%rcx
+ movq %rax,24(%rdi)
+ movq %rcx,32(%rdi)
+ movq $-1,64(%rdi)
+ movq %r10,0(%rdx)
+ movq %r11,8(%rdx)
+ movl $1,%eax
+ .byte 0xf3,0xc3
.cfi_endproc
-.size poly1305_blocks_vpmadd52,.-poly1305_blocks_vpmadd52
-.type poly1305_blocks_vpmadd52_4x,@function
-.align 32
-poly1305_blocks_vpmadd52_4x:
+.size poly1305_init_base2_44,.-poly1305_init_base2_44
+.type poly1305_blocks_vpmadd52,@function
+.align 32
+poly1305_blocks_vpmadd52:
.cfi_startproc
- shrq $4,%rdx
- jz .Lno_data_vpmadd52_4x
-
- shlq $40,%rcx
- movq 64(%rdi),%r8
-
-.Lblocks_vpmadd52_4x:
- vpbroadcastq %rcx,%ymm31
-
- vmovdqa64 .Lx_mask44(%rip),%ymm28
- movl $5,%eax
- vmovdqa64 .Lx_mask42(%rip),%ymm29
- kmovw %eax,%k1
-
- testq %r8,%r8
- js .Linit_vpmadd52
-
- vmovq 0(%rdi),%xmm0
- vmovq 8(%rdi),%xmm1
- vmovq 16(%rdi),%xmm2
-
- testq $3,%rdx
- jnz .Lblocks_vpmadd52_2x_do
-
-.Lblocks_vpmadd52_4x_do:
- vpbroadcastq 64(%rdi),%ymm3
- vpbroadcastq 96(%rdi),%ymm4
- vpbroadcastq 128(%rdi),%ymm5
- vpbroadcastq 160(%rdi),%ymm16
-
-.Lblocks_vpmadd52_4x_key_loaded:
- vpsllq $2,%ymm5,%ymm17
- vpaddq %ymm5,%ymm17,%ymm17
- vpsllq $2,%ymm17,%ymm17
-
- testq $7,%rdx
- jz .Lblocks_vpmadd52_8x
-
- vmovdqu64 0(%rsi),%ymm26
- vmovdqu64 32(%rsi),%ymm27
- leaq 64(%rsi),%rsi
-
- vpunpcklqdq %ymm27,%ymm26,%ymm25
- vpunpckhqdq %ymm27,%ymm26,%ymm27
-
-
-
- vpsrlq $24,%ymm27,%ymm26
- vporq %ymm31,%ymm26,%ymm26
- vpaddq %ymm26,%ymm2,%ymm2
- vpandq %ymm28,%ymm25,%ymm24
- vpsrlq $44,%ymm25,%ymm25
- vpsllq $20,%ymm27,%ymm27
- vporq %ymm27,%ymm25,%ymm25
- vpandq %ymm28,%ymm25,%ymm25
-
- subq $4,%rdx
- jz .Ltail_vpmadd52_4x
- jmp .Loop_vpmadd52_4x
- ud2
-
-.align 32
-.Linit_vpmadd52:
- vmovq 24(%rdi),%xmm16
- vmovq 56(%rdi),%xmm2
- vmovq 32(%rdi),%xmm17
- vmovq 40(%rdi),%xmm3
- vmovq 48(%rdi),%xmm4
-
- vmovdqa %ymm3,%ymm0
- vmovdqa %ymm4,%ymm1
- vmovdqa %ymm2,%ymm5
-
- movl $2,%eax
-
-.Lmul_init_vpmadd52:
- vpxorq %ymm18,%ymm18,%ymm18
- vpmadd52luq %ymm2,%ymm16,%ymm18
- vpxorq %ymm19,%ymm19,%ymm19
- vpmadd52huq %ymm2,%ymm16,%ymm19
- vpxorq %ymm20,%ymm20,%ymm20
- vpmadd52luq %ymm2,%ymm17,%ymm20
- vpxorq %ymm21,%ymm21,%ymm21
- vpmadd52huq %ymm2,%ymm17,%ymm21
- vpxorq %ymm22,%ymm22,%ymm22
- vpmadd52luq %ymm2,%ymm3,%ymm22
- vpxorq %ymm23,%ymm23,%ymm23
- vpmadd52huq %ymm2,%ymm3,%ymm23
-
- vpmadd52luq %ymm0,%ymm3,%ymm18
- vpmadd52huq %ymm0,%ymm3,%ymm19
- vpmadd52luq %ymm0,%ymm4,%ymm20
- vpmadd52huq %ymm0,%ymm4,%ymm21
- vpmadd52luq %ymm0,%ymm5,%ymm22
- vpmadd52huq %ymm0,%ymm5,%ymm23
-
- vpmadd52luq %ymm1,%ymm17,%ymm18
- vpmadd52huq %ymm1,%ymm17,%ymm19
- vpmadd52luq %ymm1,%ymm3,%ymm20
- vpmadd52huq %ymm1,%ymm3,%ymm21
- vpmadd52luq %ymm1,%ymm4,%ymm22
- vpmadd52huq %ymm1,%ymm4,%ymm23
-
-
-
- vpsrlq $44,%ymm18,%ymm30
- vpsllq $8,%ymm19,%ymm19
- vpandq %ymm28,%ymm18,%ymm0
- vpaddq %ymm30,%ymm19,%ymm19
-
- vpaddq %ymm19,%ymm20,%ymm20
-
- vpsrlq $44,%ymm20,%ymm30
- vpsllq $8,%ymm21,%ymm21
- vpandq %ymm28,%ymm20,%ymm1
- vpaddq %ymm30,%ymm21,%ymm21
-
- vpaddq %ymm21,%ymm22,%ymm22
-
- vpsrlq $42,%ymm22,%ymm30
- vpsllq $10,%ymm23,%ymm23
- vpandq %ymm29,%ymm22,%ymm2
- vpaddq %ymm30,%ymm23,%ymm23
-
- vpaddq %ymm23,%ymm0,%ymm0
- vpsllq $2,%ymm23,%ymm23
-
- vpaddq %ymm23,%ymm0,%ymm0
-
- vpsrlq $44,%ymm0,%ymm30
- vpandq %ymm28,%ymm0,%ymm0
-
- vpaddq %ymm30,%ymm1,%ymm1
-
- decl %eax
- jz .Ldone_init_vpmadd52
-
- vpunpcklqdq %ymm4,%ymm1,%ymm4
- vpbroadcastq %xmm1,%xmm1
- vpunpcklqdq %ymm5,%ymm2,%ymm5
- vpbroadcastq %xmm2,%xmm2
- vpunpcklqdq %ymm3,%ymm0,%ymm3
- vpbroadcastq %xmm0,%xmm0
-
- vpsllq $2,%ymm4,%ymm16
- vpsllq $2,%ymm5,%ymm17
- vpaddq %ymm4,%ymm16,%ymm16
- vpaddq %ymm5,%ymm17,%ymm17
- vpsllq $2,%ymm16,%ymm16
- vpsllq $2,%ymm17,%ymm17
-
- jmp .Lmul_init_vpmadd52
- ud2
-
-.align 32
-.Ldone_init_vpmadd52:
- vinserti128 $1,%xmm4,%ymm1,%ymm4
- vinserti128 $1,%xmm5,%ymm2,%ymm5
- vinserti128 $1,%xmm3,%ymm0,%ymm3
-
- vpermq $216,%ymm4,%ymm4
- vpermq $216,%ymm5,%ymm5
- vpermq $216,%ymm3,%ymm3
-
- vpsllq $2,%ymm4,%ymm16
- vpaddq %ymm4,%ymm16,%ymm16
- vpsllq $2,%ymm16,%ymm16
-
- vmovq 0(%rdi),%xmm0
- vmovq 8(%rdi),%xmm1
- vmovq 16(%rdi),%xmm2
-
- testq $3,%rdx
- jnz .Ldone_init_vpmadd52_2x
-
- vmovdqu64 %ymm3,64(%rdi)
- vpbroadcastq %xmm3,%ymm3
- vmovdqu64 %ymm4,96(%rdi)
- vpbroadcastq %xmm4,%ymm4
- vmovdqu64 %ymm5,128(%rdi)
- vpbroadcastq %xmm5,%ymm5
- vmovdqu64 %ymm16,160(%rdi)
- vpbroadcastq %xmm16,%ymm16
-
- jmp .Lblocks_vpmadd52_4x_key_loaded
- ud2
-
-.align 32
-.Ldone_init_vpmadd52_2x:
- vmovdqu64 %ymm3,64(%rdi)
- vpsrldq $8,%ymm3,%ymm3
- vmovdqu64 %ymm4,96(%rdi)
- vpsrldq $8,%ymm4,%ymm4
- vmovdqu64 %ymm5,128(%rdi)
- vpsrldq $8,%ymm5,%ymm5
- vmovdqu64 %ymm16,160(%rdi)
- vpsrldq $8,%ymm16,%ymm16
- jmp .Lblocks_vpmadd52_2x_key_loaded
- ud2
-
-.align 32
-.Lblocks_vpmadd52_2x_do:
- vmovdqu64 128+8(%rdi),%ymm5{%k1}{z}
- vmovdqu64 160+8(%rdi),%ymm16{%k1}{z}
- vmovdqu64 64+8(%rdi),%ymm3{%k1}{z}
- vmovdqu64 96+8(%rdi),%ymm4{%k1}{z}
-
-.Lblocks_vpmadd52_2x_key_loaded:
- vmovdqu64 0(%rsi),%ymm26
- vpxorq %ymm27,%ymm27,%ymm27
- leaq 32(%rsi),%rsi
-
- vpunpcklqdq %ymm27,%ymm26,%ymm25
- vpunpckhqdq %ymm27,%ymm26,%ymm27
-
-
-
- vpsrlq $24,%ymm27,%ymm26
- vporq %ymm31,%ymm26,%ymm26
- vpaddq %ymm26,%ymm2,%ymm2
- vpandq %ymm28,%ymm25,%ymm24
- vpsrlq $44,%ymm25,%ymm25
- vpsllq $20,%ymm27,%ymm27
- vporq %ymm27,%ymm25,%ymm25
- vpandq %ymm28,%ymm25,%ymm25
-
- jmp .Ltail_vpmadd52_2x
- ud2
-
-.align 32
-.Loop_vpmadd52_4x:
-
- vpaddq %ymm24,%ymm0,%ymm0
- vpaddq %ymm25,%ymm1,%ymm1
-
- vpxorq %ymm18,%ymm18,%ymm18
- vpmadd52luq %ymm2,%ymm16,%ymm18
- vpxorq %ymm19,%ymm19,%ymm19
- vpmadd52huq %ymm2,%ymm16,%ymm19
- vpxorq %ymm20,%ymm20,%ymm20
- vpmadd52luq %ymm2,%ymm17,%ymm20
- vpxorq %ymm21,%ymm21,%ymm21
- vpmadd52huq %ymm2,%ymm17,%ymm21
- vpxorq %ymm22,%ymm22,%ymm22
- vpmadd52luq %ymm2,%ymm3,%ymm22
- vpxorq %ymm23,%ymm23,%ymm23
- vpmadd52huq %ymm2,%ymm3,%ymm23
-
- vmovdqu64 0(%rsi),%ymm26
- vmovdqu64 32(%rsi),%ymm27
- leaq 64(%rsi),%rsi
- vpmadd52luq %ymm0,%ymm3,%ymm18
- vpmadd52huq %ymm0,%ymm3,%ymm19
- vpmadd52luq %ymm0,%ymm4,%ymm20
- vpmadd52huq %ymm0,%ymm4,%ymm21
- vpmadd52luq %ymm0,%ymm5,%ymm22
- vpmadd52huq %ymm0,%ymm5,%ymm23
-
- vpunpcklqdq %ymm27,%ymm26,%ymm25
- vpunpckhqdq %ymm27,%ymm26,%ymm27
- vpmadd52luq %ymm1,%ymm17,%ymm18
- vpmadd52huq %ymm1,%ymm17,%ymm19
- vpmadd52luq %ymm1,%ymm3,%ymm20
- vpmadd52huq %ymm1,%ymm3,%ymm21
- vpmadd52luq %ymm1,%ymm4,%ymm22
- vpmadd52huq %ymm1,%ymm4,%ymm23
-
-
-
- vpsrlq $44,%ymm18,%ymm30
- vpsllq $8,%ymm19,%ymm19
- vpandq %ymm28,%ymm18,%ymm0
- vpaddq %ymm30,%ymm19,%ymm19
-
- vpsrlq $24,%ymm27,%ymm26
- vporq %ymm31,%ymm26,%ymm26
- vpaddq %ymm19,%ymm20,%ymm20
-
- vpsrlq $44,%ymm20,%ymm30
- vpsllq $8,%ymm21,%ymm21
- vpandq %ymm28,%ymm20,%ymm1
- vpaddq %ymm30,%ymm21,%ymm21
-
- vpandq %ymm28,%ymm25,%ymm24
- vpsrlq $44,%ymm25,%ymm25
- vpsllq $20,%ymm27,%ymm27
- vpaddq %ymm21,%ymm22,%ymm22
-
- vpsrlq $42,%ymm22,%ymm30
- vpsllq $10,%ymm23,%ymm23
- vpandq %ymm29,%ymm22,%ymm2
- vpaddq %ymm30,%ymm23,%ymm23
-
- vpaddq %ymm26,%ymm2,%ymm2
- vpaddq %ymm23,%ymm0,%ymm0
- vpsllq $2,%ymm23,%ymm23
-
- vpaddq %ymm23,%ymm0,%ymm0
- vporq %ymm27,%ymm25,%ymm25
- vpandq %ymm28,%ymm25,%ymm25
-
- vpsrlq $44,%ymm0,%ymm30
- vpandq %ymm28,%ymm0,%ymm0
-
- vpaddq %ymm30,%ymm1,%ymm1
-
- subq $4,%rdx
- jnz .Loop_vpmadd52_4x
-
-.Ltail_vpmadd52_4x:
- vmovdqu64 128(%rdi),%ymm5
- vmovdqu64 160(%rdi),%ymm16
- vmovdqu64 64(%rdi),%ymm3
- vmovdqu64 96(%rdi),%ymm4
-
-.Ltail_vpmadd52_2x:
- vpsllq $2,%ymm5,%ymm17
- vpaddq %ymm5,%ymm17,%ymm17
- vpsllq $2,%ymm17,%ymm17
-
-
- vpaddq %ymm24,%ymm0,%ymm0
- vpaddq %ymm25,%ymm1,%ymm1
-
- vpxorq %ymm18,%ymm18,%ymm18
- vpmadd52luq %ymm2,%ymm16,%ymm18
- vpxorq %ymm19,%ymm19,%ymm19
- vpmadd52huq %ymm2,%ymm16,%ymm19
- vpxorq %ymm20,%ymm20,%ymm20
- vpmadd52luq %ymm2,%ymm17,%ymm20
- vpxorq %ymm21,%ymm21,%ymm21
- vpmadd52huq %ymm2,%ymm17,%ymm21
- vpxorq %ymm22,%ymm22,%ymm22
- vpmadd52luq %ymm2,%ymm3,%ymm22
- vpxorq %ymm23,%ymm23,%ymm23
- vpmadd52huq %ymm2,%ymm3,%ymm23
-
- vpmadd52luq %ymm0,%ymm3,%ymm18
- vpmadd52huq %ymm0,%ymm3,%ymm19
- vpmadd52luq %ymm0,%ymm4,%ymm20
- vpmadd52huq %ymm0,%ymm4,%ymm21
- vpmadd52luq %ymm0,%ymm5,%ymm22
- vpmadd52huq %ymm0,%ymm5,%ymm23
-
- vpmadd52luq %ymm1,%ymm17,%ymm18
- vpmadd52huq %ymm1,%ymm17,%ymm19
- vpmadd52luq %ymm1,%ymm3,%ymm20
- vpmadd52huq %ymm1,%ymm3,%ymm21
- vpmadd52luq %ymm1,%ymm4,%ymm22
- vpmadd52huq %ymm1,%ymm4,%ymm23
-
-
-
-
- movl $1,%eax
- kmovw %eax,%k1
- vpsrldq $8,%ymm18,%ymm24
- vpsrldq $8,%ymm19,%ymm0
- vpsrldq $8,%ymm20,%ymm25
- vpsrldq $8,%ymm21,%ymm1
- vpaddq %ymm24,%ymm18,%ymm18
- vpaddq %ymm0,%ymm19,%ymm19
- vpsrldq $8,%ymm22,%ymm26
- vpsrldq $8,%ymm23,%ymm2
- vpaddq %ymm25,%ymm20,%ymm20
- vpaddq %ymm1,%ymm21,%ymm21
- vpermq $0x2,%ymm18,%ymm24
- vpermq $0x2,%ymm19,%ymm0
- vpaddq %ymm26,%ymm22,%ymm22
- vpaddq %ymm2,%ymm23,%ymm23
-
- vpermq $0x2,%ymm20,%ymm25
- vpermq $0x2,%ymm21,%ymm1
- vpaddq %ymm24,%ymm18,%ymm18{%k1}{z}
- vpaddq %ymm0,%ymm19,%ymm19{%k1}{z}
- vpermq $0x2,%ymm22,%ymm26
- vpermq $0x2,%ymm23,%ymm2
- vpaddq %ymm25,%ymm20,%ymm20{%k1}{z}
- vpaddq %ymm1,%ymm21,%ymm21{%k1}{z}
- vpaddq %ymm26,%ymm22,%ymm22{%k1}{z}
- vpaddq %ymm2,%ymm23,%ymm23{%k1}{z}
-
-
-
- vpsrlq $44,%ymm18,%ymm30
- vpsllq $8,%ymm19,%ymm19
- vpandq %ymm28,%ymm18,%ymm0
- vpaddq %ymm30,%ymm19,%ymm19
-
- vpaddq %ymm19,%ymm20,%ymm20
-
- vpsrlq $44,%ymm20,%ymm30
- vpsllq $8,%ymm21,%ymm21
- vpandq %ymm28,%ymm20,%ymm1
- vpaddq %ymm30,%ymm21,%ymm21
-
- vpaddq %ymm21,%ymm22,%ymm22
-
- vpsrlq $42,%ymm22,%ymm30
- vpsllq $10,%ymm23,%ymm23
- vpandq %ymm29,%ymm22,%ymm2
- vpaddq %ymm30,%ymm23,%ymm23
-
- vpaddq %ymm23,%ymm0,%ymm0
- vpsllq $2,%ymm23,%ymm23
-
- vpaddq %ymm23,%ymm0,%ymm0
-
- vpsrlq $44,%ymm0,%ymm30
- vpandq %ymm28,%ymm0,%ymm0
-
- vpaddq %ymm30,%ymm1,%ymm1
-
-
- subq $2,%rdx
- ja .Lblocks_vpmadd52_4x_do
-
- vmovq %xmm0,0(%rdi)
- vmovq %xmm1,8(%rdi)
- vmovq %xmm2,16(%rdi)
- vzeroall
-
-.Lno_data_vpmadd52_4x:
- .byte 0xf3,0xc3
+ shrq $4,%rdx
+ jz .Lno_data_vpmadd52
+
+ shlq $40,%rcx
+ movq 64(%rdi),%r8
+
+
+
+
+
+
+ movq $3,%rax
+ movq $1,%r10
+ cmpq $4,%rdx
+ cmovaeq %r10,%rax
+ testq %r8,%r8
+ cmovnsq %r10,%rax
+
+ andq %rdx,%rax
+ jz .Lblocks_vpmadd52_4x
+
+ subq %rax,%rdx
+ movl $7,%r10d
+ movl $1,%r11d
+ kmovw %r10d,%k7
+ leaq .L2_44_inp_permd(%rip),%r10
+ kmovw %r11d,%k1
+
+ vmovq %rcx,%xmm21
+ vmovdqa64 0(%r10),%ymm19
+ vmovdqa64 32(%r10),%ymm20
+ vpermq $0xcf,%ymm21,%ymm21
+ vmovdqa64 64(%r10),%ymm22
+
+ vmovdqu64 0(%rdi),%ymm16{%k7}{z}
+ vmovdqu64 40(%rdi),%ymm3{%k7}{z}
+ vmovdqu64 32(%rdi),%ymm4{%k7}{z}
+ vmovdqu64 24(%rdi),%ymm5{%k7}{z}
+
+ vmovdqa64 96(%r10),%ymm23
+ vmovdqa64 128(%r10),%ymm24
+
+ jmp .Loop_vpmadd52
+
+.align 32
+.Loop_vpmadd52:
+ vmovdqu32 0(%rsi),%xmm18
+ leaq 16(%rsi),%rsi
+
+ vpermd %ymm18,%ymm19,%ymm18
+ vpsrlvq %ymm20,%ymm18,%ymm18
+ vpandq %ymm22,%ymm18,%ymm18
+ vporq %ymm21,%ymm18,%ymm18
+
+ vpaddq %ymm18,%ymm16,%ymm16
+
+ vpermq $0,%ymm16,%ymm0{%k7}{z}
+ vpermq $85,%ymm16,%ymm1{%k7}{z}
+ vpermq $170,%ymm16,%ymm2{%k7}{z}
+
+ vpxord %ymm16,%ymm16,%ymm16
+ vpxord %ymm17,%ymm17,%ymm17
+
+ vpmadd52luq %ymm3,%ymm0,%ymm16
+ vpmadd52huq %ymm3,%ymm0,%ymm17
+
+ vpmadd52luq %ymm4,%ymm1,%ymm16
+ vpmadd52huq %ymm4,%ymm1,%ymm17
+
+ vpmadd52luq %ymm5,%ymm2,%ymm16
+ vpmadd52huq %ymm5,%ymm2,%ymm17
+
+ vpsrlvq %ymm23,%ymm16,%ymm18
+ vpsllvq %ymm24,%ymm17,%ymm17
+ vpandq %ymm22,%ymm16,%ymm16
+
+ vpaddq %ymm18,%ymm17,%ymm17
+
+ vpermq $147,%ymm17,%ymm17
+
+ vpaddq %ymm17,%ymm16,%ymm16
+
+ vpsrlvq %ymm23,%ymm16,%ymm18
+ vpandq %ymm22,%ymm16,%ymm16
+
+ vpermq $147,%ymm18,%ymm18
+
+ vpaddq %ymm18,%ymm16,%ymm16
+
+ vpermq $147,%ymm16,%ymm18{%k1}{z}
+
+ vpaddq %ymm18,%ymm16,%ymm16
+ vpsllq $2,%ymm18,%ymm18
+
+ vpaddq %ymm18,%ymm16,%ymm16
+
+ decq %rax
+ jnz .Loop_vpmadd52
+
+ vmovdqu64 %ymm16,0(%rdi){%k7}
+
+ testq %rdx,%rdx
+ jnz .Lblocks_vpmadd52_4x
+
+.Lno_data_vpmadd52:
+ .byte 0xf3,0xc3
.cfi_endproc
-.size poly1305_blocks_vpmadd52_4x,.-poly1305_blocks_vpmadd52_4x
-.type poly1305_blocks_vpmadd52_8x,@function
-.align 32
-poly1305_blocks_vpmadd52_8x:
+.size poly1305_blocks_vpmadd52,.-poly1305_blocks_vpmadd52
+.type poly1305_blocks_vpmadd52_4x,@function
+.align 32
+poly1305_blocks_vpmadd52_4x:
.cfi_startproc
- shrq $4,%rdx
- jz .Lno_data_vpmadd52_8x
-
- shlq $40,%rcx
- movq 64(%rdi),%r8
-
- vmovdqa64 .Lx_mask44(%rip),%ymm28
- vmovdqa64 .Lx_mask42(%rip),%ymm29
-
- testq %r8,%r8
- js .Linit_vpmadd52
-
- vmovq 0(%rdi),%xmm0
- vmovq 8(%rdi),%xmm1
- vmovq 16(%rdi),%xmm2
-
-.Lblocks_vpmadd52_8x:
-
-
-
- vmovdqu64 128(%rdi),%ymm5
- vmovdqu64 160(%rdi),%ymm16
- vmovdqu64 64(%rdi),%ymm3
- vmovdqu64 96(%rdi),%ymm4
-
- vpsllq $2,%ymm5,%ymm17
- vpaddq %ymm5,%ymm17,%ymm17
- vpsllq $2,%ymm17,%ymm17
-
- vpbroadcastq %xmm5,%ymm8
- vpbroadcastq %xmm3,%ymm6
- vpbroadcastq %xmm4,%ymm7
-
- vpxorq %ymm18,%ymm18,%ymm18
- vpmadd52luq %ymm8,%ymm16,%ymm18
- vpxorq %ymm19,%ymm19,%ymm19
- vpmadd52huq %ymm8,%ymm16,%ymm19
- vpxorq %ymm20,%ymm20,%ymm20
- vpmadd52luq %ymm8,%ymm17,%ymm20
- vpxorq %ymm21,%ymm21,%ymm21
- vpmadd52huq %ymm8,%ymm17,%ymm21
- vpxorq %ymm22,%ymm22,%ymm22
- vpmadd52luq %ymm8,%ymm3,%ymm22
- vpxorq %ymm23,%ymm23,%ymm23
- vpmadd52huq %ymm8,%ymm3,%ymm23
-
- vpmadd52luq %ymm6,%ymm3,%ymm18
- vpmadd52huq %ymm6,%ymm3,%ymm19
- vpmadd52luq %ymm6,%ymm4,%ymm20
- vpmadd52huq %ymm6,%ymm4,%ymm21
- vpmadd52luq %ymm6,%ymm5,%ymm22
- vpmadd52huq %ymm6,%ymm5,%ymm23
-
- vpmadd52luq %ymm7,%ymm17,%ymm18
- vpmadd52huq %ymm7,%ymm17,%ymm19
- vpmadd52luq %ymm7,%ymm3,%ymm20
- vpmadd52huq %ymm7,%ymm3,%ymm21
- vpmadd52luq %ymm7,%ymm4,%ymm22
- vpmadd52huq %ymm7,%ymm4,%ymm23
-
-
-
- vpsrlq $44,%ymm18,%ymm30
- vpsllq $8,%ymm19,%ymm19
- vpandq %ymm28,%ymm18,%ymm6
- vpaddq %ymm30,%ymm19,%ymm19
-
- vpaddq %ymm19,%ymm20,%ymm20
-
- vpsrlq $44,%ymm20,%ymm30
- vpsllq $8,%ymm21,%ymm21
- vpandq %ymm28,%ymm20,%ymm7
- vpaddq %ymm30,%ymm21,%ymm21
-
- vpaddq %ymm21,%ymm22,%ymm22
-
- vpsrlq $42,%ymm22,%ymm30
- vpsllq $10,%ymm23,%ymm23
- vpandq %ymm29,%ymm22,%ymm8
- vpaddq %ymm30,%ymm23,%ymm23
-
- vpaddq %ymm23,%ymm6,%ymm6
- vpsllq $2,%ymm23,%ymm23
-
- vpaddq %ymm23,%ymm6,%ymm6
-
- vpsrlq $44,%ymm6,%ymm30
- vpandq %ymm28,%ymm6,%ymm6
-
- vpaddq %ymm30,%ymm7,%ymm7
-
-
-
-
-
- vpunpcklqdq %ymm5,%ymm8,%ymm26
- vpunpckhqdq %ymm5,%ymm8,%ymm5
- vpunpcklqdq %ymm3,%ymm6,%ymm24
- vpunpckhqdq %ymm3,%ymm6,%ymm3
- vpunpcklqdq %ymm4,%ymm7,%ymm25
- vpunpckhqdq %ymm4,%ymm7,%ymm4
- vshufi64x2 $0x44,%zmm5,%zmm26,%zmm8
- vshufi64x2 $0x44,%zmm3,%zmm24,%zmm6
- vshufi64x2 $0x44,%zmm4,%zmm25,%zmm7
-
- vmovdqu64 0(%rsi),%zmm26
- vmovdqu64 64(%rsi),%zmm27
- leaq 128(%rsi),%rsi
-
- vpsllq $2,%zmm8,%zmm10
- vpsllq $2,%zmm7,%zmm9
- vpaddq %zmm8,%zmm10,%zmm10
- vpaddq %zmm7,%zmm9,%zmm9
- vpsllq $2,%zmm10,%zmm10
- vpsllq $2,%zmm9,%zmm9
-
- vpbroadcastq %rcx,%zmm31
- vpbroadcastq %xmm28,%zmm28
- vpbroadcastq %xmm29,%zmm29
-
- vpbroadcastq %xmm9,%zmm16
- vpbroadcastq %xmm10,%zmm17
- vpbroadcastq %xmm6,%zmm3
- vpbroadcastq %xmm7,%zmm4
- vpbroadcastq %xmm8,%zmm5
-
- vpunpcklqdq %zmm27,%zmm26,%zmm25
- vpunpckhqdq %zmm27,%zmm26,%zmm27
-
-
-
- vpsrlq $24,%zmm27,%zmm26
- vporq %zmm31,%zmm26,%zmm26
- vpaddq %zmm26,%zmm2,%zmm2
- vpandq %zmm28,%zmm25,%zmm24
- vpsrlq $44,%zmm25,%zmm25
- vpsllq $20,%zmm27,%zmm27
- vporq %zmm27,%zmm25,%zmm25
- vpandq %zmm28,%zmm25,%zmm25
-
- subq $8,%rdx
- jz .Ltail_vpmadd52_8x
- jmp .Loop_vpmadd52_8x
-
-.align 32
-.Loop_vpmadd52_8x:
-
- vpaddq %zmm24,%zmm0,%zmm0
- vpaddq %zmm25,%zmm1,%zmm1
-
- vpxorq %zmm18,%zmm18,%zmm18
- vpmadd52luq %zmm2,%zmm16,%zmm18
- vpxorq %zmm19,%zmm19,%zmm19
- vpmadd52huq %zmm2,%zmm16,%zmm19
- vpxorq %zmm20,%zmm20,%zmm20
- vpmadd52luq %zmm2,%zmm17,%zmm20
- vpxorq %zmm21,%zmm21,%zmm21
- vpmadd52huq %zmm2,%zmm17,%zmm21
- vpxorq %zmm22,%zmm22,%zmm22
- vpmadd52luq %zmm2,%zmm3,%zmm22
- vpxorq %zmm23,%zmm23,%zmm23
- vpmadd52huq %zmm2,%zmm3,%zmm23
-
- vmovdqu64 0(%rsi),%zmm26
- vmovdqu64 64(%rsi),%zmm27
- leaq 128(%rsi),%rsi
- vpmadd52luq %zmm0,%zmm3,%zmm18
- vpmadd52huq %zmm0,%zmm3,%zmm19
- vpmadd52luq %zmm0,%zmm4,%zmm20
- vpmadd52huq %zmm0,%zmm4,%zmm21
- vpmadd52luq %zmm0,%zmm5,%zmm22
- vpmadd52huq %zmm0,%zmm5,%zmm23
-
- vpunpcklqdq %zmm27,%zmm26,%zmm25
- vpunpckhqdq %zmm27,%zmm26,%zmm27
- vpmadd52luq %zmm1,%zmm17,%zmm18
- vpmadd52huq %zmm1,%zmm17,%zmm19
- vpmadd52luq %zmm1,%zmm3,%zmm20
- vpmadd52huq %zmm1,%zmm3,%zmm21
- vpmadd52luq %zmm1,%zmm4,%zmm22
- vpmadd52huq %zmm1,%zmm4,%zmm23
-
-
-
- vpsrlq $44,%zmm18,%zmm30
- vpsllq $8,%zmm19,%zmm19
- vpandq %zmm28,%zmm18,%zmm0
- vpaddq %zmm30,%zmm19,%zmm19
-
- vpsrlq $24,%zmm27,%zmm26
- vporq %zmm31,%zmm26,%zmm26
- vpaddq %zmm19,%zmm20,%zmm20
-
- vpsrlq $44,%zmm20,%zmm30
- vpsllq $8,%zmm21,%zmm21
- vpandq %zmm28,%zmm20,%zmm1
- vpaddq %zmm30,%zmm21,%zmm21
-
- vpandq %zmm28,%zmm25,%zmm24
- vpsrlq $44,%zmm25,%zmm25
- vpsllq $20,%zmm27,%zmm27
- vpaddq %zmm21,%zmm22,%zmm22
-
- vpsrlq $42,%zmm22,%zmm30
- vpsllq $10,%zmm23,%zmm23
- vpandq %zmm29,%zmm22,%zmm2
- vpaddq %zmm30,%zmm23,%zmm23
-
- vpaddq %zmm26,%zmm2,%zmm2
- vpaddq %zmm23,%zmm0,%zmm0
- vpsllq $2,%zmm23,%zmm23
-
- vpaddq %zmm23,%zmm0,%zmm0
- vporq %zmm27,%zmm25,%zmm25
- vpandq %zmm28,%zmm25,%zmm25
-
- vpsrlq $44,%zmm0,%zmm30
- vpandq %zmm28,%zmm0,%zmm0
-
- vpaddq %zmm30,%zmm1,%zmm1
-
- subq $8,%rdx
- jnz .Loop_vpmadd52_8x
-
-.Ltail_vpmadd52_8x:
-
- vpaddq %zmm24,%zmm0,%zmm0
- vpaddq %zmm25,%zmm1,%zmm1
-
- vpxorq %zmm18,%zmm18,%zmm18
- vpmadd52luq %zmm2,%zmm9,%zmm18
- vpxorq %zmm19,%zmm19,%zmm19
- vpmadd52huq %zmm2,%zmm9,%zmm19
- vpxorq %zmm20,%zmm20,%zmm20
- vpmadd52luq %zmm2,%zmm10,%zmm20
- vpxorq %zmm21,%zmm21,%zmm21
- vpmadd52huq %zmm2,%zmm10,%zmm21
- vpxorq %zmm22,%zmm22,%zmm22
- vpmadd52luq %zmm2,%zmm6,%zmm22
- vpxorq %zmm23,%zmm23,%zmm23
- vpmadd52huq %zmm2,%zmm6,%zmm23
-
- vpmadd52luq %zmm0,%zmm6,%zmm18
- vpmadd52huq %zmm0,%zmm6,%zmm19
- vpmadd52luq %zmm0,%zmm7,%zmm20
- vpmadd52huq %zmm0,%zmm7,%zmm21
- vpmadd52luq %zmm0,%zmm8,%zmm22
- vpmadd52huq %zmm0,%zmm8,%zmm23
-
- vpmadd52luq %zmm1,%zmm10,%zmm18
- vpmadd52huq %zmm1,%zmm10,%zmm19
- vpmadd52luq %zmm1,%zmm6,%zmm20
- vpmadd52huq %zmm1,%zmm6,%zmm21
- vpmadd52luq %zmm1,%zmm7,%zmm22
- vpmadd52huq %zmm1,%zmm7,%zmm23
-
-
-
-
- movl $1,%eax
- kmovw %eax,%k1
- vpsrldq $8,%zmm18,%zmm24
- vpsrldq $8,%zmm19,%zmm0
- vpsrldq $8,%zmm20,%zmm25
- vpsrldq $8,%zmm21,%zmm1
- vpaddq %zmm24,%zmm18,%zmm18
- vpaddq %zmm0,%zmm19,%zmm19
- vpsrldq $8,%zmm22,%zmm26
- vpsrldq $8,%zmm23,%zmm2
- vpaddq %zmm25,%zmm20,%zmm20
- vpaddq %zmm1,%zmm21,%zmm21
- vpermq $0x2,%zmm18,%zmm24
- vpermq $0x2,%zmm19,%zmm0
- vpaddq %zmm26,%zmm22,%zmm22
- vpaddq %zmm2,%zmm23,%zmm23
-
- vpermq $0x2,%zmm20,%zmm25
- vpermq $0x2,%zmm21,%zmm1
- vpaddq %zmm24,%zmm18,%zmm18
- vpaddq %zmm0,%zmm19,%zmm19
- vpermq $0x2,%zmm22,%zmm26
- vpermq $0x2,%zmm23,%zmm2
- vpaddq %zmm25,%zmm20,%zmm20
- vpaddq %zmm1,%zmm21,%zmm21
- vextracti64x4 $1,%zmm18,%ymm24
- vextracti64x4 $1,%zmm19,%ymm0
- vpaddq %zmm26,%zmm22,%zmm22
- vpaddq %zmm2,%zmm23,%zmm23
-
- vextracti64x4 $1,%zmm20,%ymm25
- vextracti64x4 $1,%zmm21,%ymm1
- vextracti64x4 $1,%zmm22,%ymm26
- vextracti64x4 $1,%zmm23,%ymm2
- vpaddq %ymm24,%ymm18,%ymm18{%k1}{z}
- vpaddq %ymm0,%ymm19,%ymm19{%k1}{z}
- vpaddq %ymm25,%ymm20,%ymm20{%k1}{z}
- vpaddq %ymm1,%ymm21,%ymm21{%k1}{z}
- vpaddq %ymm26,%ymm22,%ymm22{%k1}{z}
- vpaddq %ymm2,%ymm23,%ymm23{%k1}{z}
-
-
-
- vpsrlq $44,%ymm18,%ymm30
- vpsllq $8,%ymm19,%ymm19
- vpandq %ymm28,%ymm18,%ymm0
- vpaddq %ymm30,%ymm19,%ymm19
-
- vpaddq %ymm19,%ymm20,%ymm20
-
- vpsrlq $44,%ymm20,%ymm30
- vpsllq $8,%ymm21,%ymm21
- vpandq %ymm28,%ymm20,%ymm1
- vpaddq %ymm30,%ymm21,%ymm21
-
- vpaddq %ymm21,%ymm22,%ymm22
-
- vpsrlq $42,%ymm22,%ymm30
- vpsllq $10,%ymm23,%ymm23
- vpandq %ymm29,%ymm22,%ymm2
- vpaddq %ymm30,%ymm23,%ymm23
-
- vpaddq %ymm23,%ymm0,%ymm0
- vpsllq $2,%ymm23,%ymm23
-
- vpaddq %ymm23,%ymm0,%ymm0
-
- vpsrlq $44,%ymm0,%ymm30
- vpandq %ymm28,%ymm0,%ymm0
-
- vpaddq %ymm30,%ymm1,%ymm1
-
-
-
- vmovq %xmm0,0(%rdi)
- vmovq %xmm1,8(%rdi)
- vmovq %xmm2,16(%rdi)
- vzeroall
-
-.Lno_data_vpmadd52_8x:
- .byte 0xf3,0xc3
+ shrq $4,%rdx
+ jz .Lno_data_vpmadd52_4x
+
+ shlq $40,%rcx
+ movq 64(%rdi),%r8
+
+.Lblocks_vpmadd52_4x:
+ vpbroadcastq %rcx,%ymm31
+
+ vmovdqa64 .Lx_mask44(%rip),%ymm28
+ movl $5,%eax
+ vmovdqa64 .Lx_mask42(%rip),%ymm29
+ kmovw %eax,%k1
+
+ testq %r8,%r8
+ js .Linit_vpmadd52
+
+ vmovq 0(%rdi),%xmm0
+ vmovq 8(%rdi),%xmm1
+ vmovq 16(%rdi),%xmm2
+
+ testq $3,%rdx
+ jnz .Lblocks_vpmadd52_2x_do
+
+.Lblocks_vpmadd52_4x_do:
+ vpbroadcastq 64(%rdi),%ymm3
+ vpbroadcastq 96(%rdi),%ymm4
+ vpbroadcastq 128(%rdi),%ymm5
+ vpbroadcastq 160(%rdi),%ymm16
+
+.Lblocks_vpmadd52_4x_key_loaded:
+ vpsllq $2,%ymm5,%ymm17
+ vpaddq %ymm5,%ymm17,%ymm17
+ vpsllq $2,%ymm17,%ymm17
+
+ testq $7,%rdx
+ jz .Lblocks_vpmadd52_8x
+
+ vmovdqu64 0(%rsi),%ymm26
+ vmovdqu64 32(%rsi),%ymm27
+ leaq 64(%rsi),%rsi
+
+ vpunpcklqdq %ymm27,%ymm26,%ymm25
+ vpunpckhqdq %ymm27,%ymm26,%ymm27
+
+
+
+ vpsrlq $24,%ymm27,%ymm26
+ vporq %ymm31,%ymm26,%ymm26
+ vpaddq %ymm26,%ymm2,%ymm2
+ vpandq %ymm28,%ymm25,%ymm24
+ vpsrlq $44,%ymm25,%ymm25
+ vpsllq $20,%ymm27,%ymm27
+ vporq %ymm27,%ymm25,%ymm25
+ vpandq %ymm28,%ymm25,%ymm25
+
+ subq $4,%rdx
+ jz .Ltail_vpmadd52_4x
+ jmp .Loop_vpmadd52_4x
+ ud2
+
+.align 32
+.Linit_vpmadd52:
+ vmovq 24(%rdi),%xmm16
+ vmovq 56(%rdi),%xmm2
+ vmovq 32(%rdi),%xmm17
+ vmovq 40(%rdi),%xmm3
+ vmovq 48(%rdi),%xmm4
+
+ vmovdqa %ymm3,%ymm0
+ vmovdqa %ymm4,%ymm1
+ vmovdqa %ymm2,%ymm5
+
+ movl $2,%eax
+
+.Lmul_init_vpmadd52:
+ vpxorq %ymm18,%ymm18,%ymm18
+ vpmadd52luq %ymm2,%ymm16,%ymm18
+ vpxorq %ymm19,%ymm19,%ymm19
+ vpmadd52huq %ymm2,%ymm16,%ymm19
+ vpxorq %ymm20,%ymm20,%ymm20
+ vpmadd52luq %ymm2,%ymm17,%ymm20
+ vpxorq %ymm21,%ymm21,%ymm21
+ vpmadd52huq %ymm2,%ymm17,%ymm21
+ vpxorq %ymm22,%ymm22,%ymm22
+ vpmadd52luq %ymm2,%ymm3,%ymm22
+ vpxorq %ymm23,%ymm23,%ymm23
+ vpmadd52huq %ymm2,%ymm3,%ymm23
+
+ vpmadd52luq %ymm0,%ymm3,%ymm18
+ vpmadd52huq %ymm0,%ymm3,%ymm19
+ vpmadd52luq %ymm0,%ymm4,%ymm20
+ vpmadd52huq %ymm0,%ymm4,%ymm21
+ vpmadd52luq %ymm0,%ymm5,%ymm22
+ vpmadd52huq %ymm0,%ymm5,%ymm23
+
+ vpmadd52luq %ymm1,%ymm17,%ymm18
+ vpmadd52huq %ymm1,%ymm17,%ymm19
+ vpmadd52luq %ymm1,%ymm3,%ymm20
+ vpmadd52huq %ymm1,%ymm3,%ymm21
+ vpmadd52luq %ymm1,%ymm4,%ymm22
+ vpmadd52huq %ymm1,%ymm4,%ymm23
+
+
+
+ vpsrlq $44,%ymm18,%ymm30
+ vpsllq $8,%ymm19,%ymm19
+ vpandq %ymm28,%ymm18,%ymm0
+ vpaddq %ymm30,%ymm19,%ymm19
+
+ vpaddq %ymm19,%ymm20,%ymm20
+
+ vpsrlq $44,%ymm20,%ymm30
+ vpsllq $8,%ymm21,%ymm21
+ vpandq %ymm28,%ymm20,%ymm1
+ vpaddq %ymm30,%ymm21,%ymm21
+
+ vpaddq %ymm21,%ymm22,%ymm22
+
+ vpsrlq $42,%ymm22,%ymm30
+ vpsllq $10,%ymm23,%ymm23
+ vpandq %ymm29,%ymm22,%ymm2
+ vpaddq %ymm30,%ymm23,%ymm23
+
+ vpaddq %ymm23,%ymm0,%ymm0
+ vpsllq $2,%ymm23,%ymm23
+
+ vpaddq %ymm23,%ymm0,%ymm0
+
+ vpsrlq $44,%ymm0,%ymm30
+ vpandq %ymm28,%ymm0,%ymm0
+
+ vpaddq %ymm30,%ymm1,%ymm1
+
+ decl %eax
+ jz .Ldone_init_vpmadd52
+
+ vpunpcklqdq %ymm4,%ymm1,%ymm4
+ vpbroadcastq %xmm1,%xmm1
+ vpunpcklqdq %ymm5,%ymm2,%ymm5
+ vpbroadcastq %xmm2,%xmm2
+ vpunpcklqdq %ymm3,%ymm0,%ymm3
+ vpbroadcastq %xmm0,%xmm0
+
+ vpsllq $2,%ymm4,%ymm16
+ vpsllq $2,%ymm5,%ymm17
+ vpaddq %ymm4,%ymm16,%ymm16
+ vpaddq %ymm5,%ymm17,%ymm17
+ vpsllq $2,%ymm16,%ymm16
+ vpsllq $2,%ymm17,%ymm17
+
+ jmp .Lmul_init_vpmadd52
+ ud2
+
+.align 32
+.Ldone_init_vpmadd52:
+ vinserti128 $1,%xmm4,%ymm1,%ymm4
+ vinserti128 $1,%xmm5,%ymm2,%ymm5
+ vinserti128 $1,%xmm3,%ymm0,%ymm3
+
+ vpermq $216,%ymm4,%ymm4
+ vpermq $216,%ymm5,%ymm5
+ vpermq $216,%ymm3,%ymm3
+
+ vpsllq $2,%ymm4,%ymm16
+ vpaddq %ymm4,%ymm16,%ymm16
+ vpsllq $2,%ymm16,%ymm16
+
+ vmovq 0(%rdi),%xmm0
+ vmovq 8(%rdi),%xmm1
+ vmovq 16(%rdi),%xmm2
+
+ testq $3,%rdx
+ jnz .Ldone_init_vpmadd52_2x
+
+ vmovdqu64 %ymm3,64(%rdi)
+ vpbroadcastq %xmm3,%ymm3
+ vmovdqu64 %ymm4,96(%rdi)
+ vpbroadcastq %xmm4,%ymm4
+ vmovdqu64 %ymm5,128(%rdi)
+ vpbroadcastq %xmm5,%ymm5
+ vmovdqu64 %ymm16,160(%rdi)
+ vpbroadcastq %xmm16,%ymm16
+
+ jmp .Lblocks_vpmadd52_4x_key_loaded
+ ud2
+
+.align 32
+.Ldone_init_vpmadd52_2x:
+ vmovdqu64 %ymm3,64(%rdi)
+ vpsrldq $8,%ymm3,%ymm3
+ vmovdqu64 %ymm4,96(%rdi)
+ vpsrldq $8,%ymm4,%ymm4
+ vmovdqu64 %ymm5,128(%rdi)
+ vpsrldq $8,%ymm5,%ymm5
+ vmovdqu64 %ymm16,160(%rdi)
+ vpsrldq $8,%ymm16,%ymm16
+ jmp .Lblocks_vpmadd52_2x_key_loaded
+ ud2
+
+.align 32
+.Lblocks_vpmadd52_2x_do:
+ vmovdqu64 128+8(%rdi),%ymm5{%k1}{z}
+ vmovdqu64 160+8(%rdi),%ymm16{%k1}{z}
+ vmovdqu64 64+8(%rdi),%ymm3{%k1}{z}
+ vmovdqu64 96+8(%rdi),%ymm4{%k1}{z}
+
+.Lblocks_vpmadd52_2x_key_loaded:
+ vmovdqu64 0(%rsi),%ymm26
+ vpxorq %ymm27,%ymm27,%ymm27
+ leaq 32(%rsi),%rsi
+
+ vpunpcklqdq %ymm27,%ymm26,%ymm25
+ vpunpckhqdq %ymm27,%ymm26,%ymm27
+
+
+
+ vpsrlq $24,%ymm27,%ymm26
+ vporq %ymm31,%ymm26,%ymm26
+ vpaddq %ymm26,%ymm2,%ymm2
+ vpandq %ymm28,%ymm25,%ymm24
+ vpsrlq $44,%ymm25,%ymm25
+ vpsllq $20,%ymm27,%ymm27
+ vporq %ymm27,%ymm25,%ymm25
+ vpandq %ymm28,%ymm25,%ymm25
+
+ jmp .Ltail_vpmadd52_2x
+ ud2
+
+.align 32
+.Loop_vpmadd52_4x:
+
+ vpaddq %ymm24,%ymm0,%ymm0
+ vpaddq %ymm25,%ymm1,%ymm1
+
+ vpxorq %ymm18,%ymm18,%ymm18
+ vpmadd52luq %ymm2,%ymm16,%ymm18
+ vpxorq %ymm19,%ymm19,%ymm19
+ vpmadd52huq %ymm2,%ymm16,%ymm19
+ vpxorq %ymm20,%ymm20,%ymm20
+ vpmadd52luq %ymm2,%ymm17,%ymm20
+ vpxorq %ymm21,%ymm21,%ymm21
+ vpmadd52huq %ymm2,%ymm17,%ymm21
+ vpxorq %ymm22,%ymm22,%ymm22
+ vpmadd52luq %ymm2,%ymm3,%ymm22
+ vpxorq %ymm23,%ymm23,%ymm23
+ vpmadd52huq %ymm2,%ymm3,%ymm23
+
+ vmovdqu64 0(%rsi),%ymm26
+ vmovdqu64 32(%rsi),%ymm27
+ leaq 64(%rsi),%rsi
+ vpmadd52luq %ymm0,%ymm3,%ymm18
+ vpmadd52huq %ymm0,%ymm3,%ymm19
+ vpmadd52luq %ymm0,%ymm4,%ymm20
+ vpmadd52huq %ymm0,%ymm4,%ymm21
+ vpmadd52luq %ymm0,%ymm5,%ymm22
+ vpmadd52huq %ymm0,%ymm5,%ymm23
+
+ vpunpcklqdq %ymm27,%ymm26,%ymm25
+ vpunpckhqdq %ymm27,%ymm26,%ymm27
+ vpmadd52luq %ymm1,%ymm17,%ymm18
+ vpmadd52huq %ymm1,%ymm17,%ymm19
+ vpmadd52luq %ymm1,%ymm3,%ymm20
+ vpmadd52huq %ymm1,%ymm3,%ymm21
+ vpmadd52luq %ymm1,%ymm4,%ymm22
+ vpmadd52huq %ymm1,%ymm4,%ymm23
+
+
+
+ vpsrlq $44,%ymm18,%ymm30
+ vpsllq $8,%ymm19,%ymm19
+ vpandq %ymm28,%ymm18,%ymm0
+ vpaddq %ymm30,%ymm19,%ymm19
+
+ vpsrlq $24,%ymm27,%ymm26
+ vporq %ymm31,%ymm26,%ymm26
+ vpaddq %ymm19,%ymm20,%ymm20
+
+ vpsrlq $44,%ymm20,%ymm30
+ vpsllq $8,%ymm21,%ymm21
+ vpandq %ymm28,%ymm20,%ymm1
+ vpaddq %ymm30,%ymm21,%ymm21
+
+ vpandq %ymm28,%ymm25,%ymm24
+ vpsrlq $44,%ymm25,%ymm25
+ vpsllq $20,%ymm27,%ymm27
+ vpaddq %ymm21,%ymm22,%ymm22
+
+ vpsrlq $42,%ymm22,%ymm30
+ vpsllq $10,%ymm23,%ymm23
+ vpandq %ymm29,%ymm22,%ymm2
+ vpaddq %ymm30,%ymm23,%ymm23
+
+ vpaddq %ymm26,%ymm2,%ymm2
+ vpaddq %ymm23,%ymm0,%ymm0
+ vpsllq $2,%ymm23,%ymm23
+
+ vpaddq %ymm23,%ymm0,%ymm0
+ vporq %ymm27,%ymm25,%ymm25
+ vpandq %ymm28,%ymm25,%ymm25
+
+ vpsrlq $44,%ymm0,%ymm30
+ vpandq %ymm28,%ymm0,%ymm0
+
+ vpaddq %ymm30,%ymm1,%ymm1
+
+ subq $4,%rdx
+ jnz .Loop_vpmadd52_4x
+
+.Ltail_vpmadd52_4x:
+ vmovdqu64 128(%rdi),%ymm5
+ vmovdqu64 160(%rdi),%ymm16
+ vmovdqu64 64(%rdi),%ymm3
+ vmovdqu64 96(%rdi),%ymm4
+
+.Ltail_vpmadd52_2x:
+ vpsllq $2,%ymm5,%ymm17
+ vpaddq %ymm5,%ymm17,%ymm17
+ vpsllq $2,%ymm17,%ymm17
+
+
+ vpaddq %ymm24,%ymm0,%ymm0
+ vpaddq %ymm25,%ymm1,%ymm1
+
+ vpxorq %ymm18,%ymm18,%ymm18
+ vpmadd52luq %ymm2,%ymm16,%ymm18
+ vpxorq %ymm19,%ymm19,%ymm19
+ vpmadd52huq %ymm2,%ymm16,%ymm19
+ vpxorq %ymm20,%ymm20,%ymm20
+ vpmadd52luq %ymm2,%ymm17,%ymm20
+ vpxorq %ymm21,%ymm21,%ymm21
+ vpmadd52huq %ymm2,%ymm17,%ymm21
+ vpxorq %ymm22,%ymm22,%ymm22
+ vpmadd52luq %ymm2,%ymm3,%ymm22
+ vpxorq %ymm23,%ymm23,%ymm23
+ vpmadd52huq %ymm2,%ymm3,%ymm23
+
+ vpmadd52luq %ymm0,%ymm3,%ymm18
+ vpmadd52huq %ymm0,%ymm3,%ymm19
+ vpmadd52luq %ymm0,%ymm4,%ymm20
+ vpmadd52huq %ymm0,%ymm4,%ymm21
+ vpmadd52luq %ymm0,%ymm5,%ymm22
+ vpmadd52huq %ymm0,%ymm5,%ymm23
+
+ vpmadd52luq %ymm1,%ymm17,%ymm18
+ vpmadd52huq %ymm1,%ymm17,%ymm19
+ vpmadd52luq %ymm1,%ymm3,%ymm20
+ vpmadd52huq %ymm1,%ymm3,%ymm21
+ vpmadd52luq %ymm1,%ymm4,%ymm22
+ vpmadd52huq %ymm1,%ymm4,%ymm23
+
+
+
+
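+# horizontal reduction: add odd qwords onto even ones (vpsrldq) and the upper 128-bit
+# halves onto the lower (vpermq $0x2); %k1 = 0b0001 keeps only lane 0 of each sum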
+ movl $1,%eax
+ kmovw %eax,%k1
+ vpsrldq $8,%ymm18,%ymm24
+ vpsrldq $8,%ymm19,%ymm0
+ vpsrldq $8,%ymm20,%ymm25
+ vpsrldq $8,%ymm21,%ymm1
+ vpaddq %ymm24,%ymm18,%ymm18
+ vpaddq %ymm0,%ymm19,%ymm19
+ vpsrldq $8,%ymm22,%ymm26
+ vpsrldq $8,%ymm23,%ymm2
+ vpaddq %ymm25,%ymm20,%ymm20
+ vpaddq %ymm1,%ymm21,%ymm21
+ vpermq $0x2,%ymm18,%ymm24
+ vpermq $0x2,%ymm19,%ymm0
+ vpaddq %ymm26,%ymm22,%ymm22
+ vpaddq %ymm2,%ymm23,%ymm23
+
+ vpermq $0x2,%ymm20,%ymm25
+ vpermq $0x2,%ymm21,%ymm1
+ vpaddq %ymm24,%ymm18,%ymm18{%k1}{z}
+ vpaddq %ymm0,%ymm19,%ymm19{%k1}{z}
+ vpermq $0x2,%ymm22,%ymm26
+ vpermq $0x2,%ymm23,%ymm2
+ vpaddq %ymm25,%ymm20,%ymm20{%k1}{z}
+ vpaddq %ymm1,%ymm21,%ymm21{%k1}{z}
+ vpaddq %ymm26,%ymm22,%ymm22{%k1}{z}
+ vpaddq %ymm2,%ymm23,%ymm23{%k1}{z}
+
+
+
+ vpsrlq $44,%ymm18,%ymm30
+ vpsllq $8,%ymm19,%ymm19
+ vpandq %ymm28,%ymm18,%ymm0
+ vpaddq %ymm30,%ymm19,%ymm19
+
+ vpaddq %ymm19,%ymm20,%ymm20
+
+ vpsrlq $44,%ymm20,%ymm30
+ vpsllq $8,%ymm21,%ymm21
+ vpandq %ymm28,%ymm20,%ymm1
+ vpaddq %ymm30,%ymm21,%ymm21
+
+ vpaddq %ymm21,%ymm22,%ymm22
+
+ vpsrlq $42,%ymm22,%ymm30
+ vpsllq $10,%ymm23,%ymm23
+ vpandq %ymm29,%ymm22,%ymm2
+ vpaddq %ymm30,%ymm23,%ymm23
+
+ vpaddq %ymm23,%ymm0,%ymm0
+ vpsllq $2,%ymm23,%ymm23
+
+ vpaddq %ymm23,%ymm0,%ymm0
+
+ vpsrlq $44,%ymm0,%ymm30
+ vpandq %ymm28,%ymm0,%ymm0
+
+ vpaddq %ymm30,%ymm1,%ymm1
+
+
+ subq $2,%rdx
+ ja .Lblocks_vpmadd52_4x_do
+
+ vmovq %xmm0,0(%rdi)
+ vmovq %xmm1,8(%rdi)
+ vmovq %xmm2,16(%rdi)
+ vzeroall
+
+.Lno_data_vpmadd52_4x:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size poly1305_blocks_vpmadd52_4x,.-poly1305_blocks_vpmadd52_4x
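+# 8-way variant: same base 2^44 IFMA scheme, but 512-bit zmm registers process
+# 128 bytes per iteration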
+.type poly1305_blocks_vpmadd52_8x,@function
+.align 32
+poly1305_blocks_vpmadd52_8x:
+.cfi_startproc
+ shrq $4,%rdx
+ jz .Lno_data_vpmadd52_8x
+
+ shlq $40,%rcx
+ movq 64(%rdi),%r8
+
+ vmovdqa64 .Lx_mask44(%rip),%ymm28
+ vmovdqa64 .Lx_mask42(%rip),%ymm29
+
+ testq %r8,%r8
+ js .Linit_vpmadd52
+
+ vmovq 0(%rdi),%xmm0
+ vmovq 8(%rdi),%xmm1
+ vmovq 16(%rdi),%xmm2
+
+.Lblocks_vpmadd52_8x:
+
+
+
+ vmovdqu64 128(%rdi),%ymm5
+ vmovdqu64 160(%rdi),%ymm16
+ vmovdqu64 64(%rdi),%ymm3
+ vmovdqu64 96(%rdi),%ymm4
+
+ vpsllq $2,%ymm5,%ymm17
+ vpaddq %ymm5,%ymm17,%ymm17
+ vpsllq $2,%ymm17,%ymm17
+
+ vpbroadcastq %xmm5,%ymm8
+ vpbroadcastq %xmm3,%ymm6
+ vpbroadcastq %xmm4,%ymm7
+
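+# multiply the stored power vector by its lane-0 power (broadcast above), apparently
+# extending the table to the next four powers needed for the 8-way loop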
+ vpxorq %ymm18,%ymm18,%ymm18
+ vpmadd52luq %ymm8,%ymm16,%ymm18
+ vpxorq %ymm19,%ymm19,%ymm19
+ vpmadd52huq %ymm8,%ymm16,%ymm19
+ vpxorq %ymm20,%ymm20,%ymm20
+ vpmadd52luq %ymm8,%ymm17,%ymm20
+ vpxorq %ymm21,%ymm21,%ymm21
+ vpmadd52huq %ymm8,%ymm17,%ymm21
+ vpxorq %ymm22,%ymm22,%ymm22
+ vpmadd52luq %ymm8,%ymm3,%ymm22
+ vpxorq %ymm23,%ymm23,%ymm23
+ vpmadd52huq %ymm8,%ymm3,%ymm23
+
+ vpmadd52luq %ymm6,%ymm3,%ymm18
+ vpmadd52huq %ymm6,%ymm3,%ymm19
+ vpmadd52luq %ymm6,%ymm4,%ymm20
+ vpmadd52huq %ymm6,%ymm4,%ymm21
+ vpmadd52luq %ymm6,%ymm5,%ymm22
+ vpmadd52huq %ymm6,%ymm5,%ymm23
+
+ vpmadd52luq %ymm7,%ymm17,%ymm18
+ vpmadd52huq %ymm7,%ymm17,%ymm19
+ vpmadd52luq %ymm7,%ymm3,%ymm20
+ vpmadd52huq %ymm7,%ymm3,%ymm21
+ vpmadd52luq %ymm7,%ymm4,%ymm22
+ vpmadd52huq %ymm7,%ymm4,%ymm23
+
+
+
+ vpsrlq $44,%ymm18,%ymm30
+ vpsllq $8,%ymm19,%ymm19
+ vpandq %ymm28,%ymm18,%ymm6
+ vpaddq %ymm30,%ymm19,%ymm19
+
+ vpaddq %ymm19,%ymm20,%ymm20
+
+ vpsrlq $44,%ymm20,%ymm30
+ vpsllq $8,%ymm21,%ymm21
+ vpandq %ymm28,%ymm20,%ymm7
+ vpaddq %ymm30,%ymm21,%ymm21
+
+ vpaddq %ymm21,%ymm22,%ymm22
+
+ vpsrlq $42,%ymm22,%ymm30
+ vpsllq $10,%ymm23,%ymm23
+ vpandq %ymm29,%ymm22,%ymm8
+ vpaddq %ymm30,%ymm23,%ymm23
+
+ vpaddq %ymm23,%ymm6,%ymm6
+ vpsllq $2,%ymm23,%ymm23
+
+ vpaddq %ymm23,%ymm6,%ymm6
+
+ vpsrlq $44,%ymm6,%ymm30
+ vpandq %ymm28,%ymm6,%ymm6
+
+ vpaddq %ymm30,%ymm7,%ymm7
+
+
+
+
+
+ vpunpcklqdq %ymm5,%ymm8,%ymm26
+ vpunpckhqdq %ymm5,%ymm8,%ymm5
+ vpunpcklqdq %ymm3,%ymm6,%ymm24
+ vpunpckhqdq %ymm3,%ymm6,%ymm3
+ vpunpcklqdq %ymm4,%ymm7,%ymm25
+ vpunpckhqdq %ymm4,%ymm7,%ymm4
+ vshufi64x2 $0x44,%zmm5,%zmm26,%zmm8
+ vshufi64x2 $0x44,%zmm3,%zmm24,%zmm6
+ vshufi64x2 $0x44,%zmm4,%zmm25,%zmm7
+
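+# load the first 128 bytes of input and broadcast the loop constants: pad bit
+# (%zmm31), the 44- and 42-bit masks, and the lane-0 key power with its *20 multiples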
+ vmovdqu64 0(%rsi),%zmm26
+ vmovdqu64 64(%rsi),%zmm27
+ leaq 128(%rsi),%rsi
+
+ vpsllq $2,%zmm8,%zmm10
+ vpsllq $2,%zmm7,%zmm9
+ vpaddq %zmm8,%zmm10,%zmm10
+ vpaddq %zmm7,%zmm9,%zmm9
+ vpsllq $2,%zmm10,%zmm10
+ vpsllq $2,%zmm9,%zmm9
+
+ vpbroadcastq %rcx,%zmm31
+ vpbroadcastq %xmm28,%zmm28
+ vpbroadcastq %xmm29,%zmm29
+
+ vpbroadcastq %xmm9,%zmm16
+ vpbroadcastq %xmm10,%zmm17
+ vpbroadcastq %xmm6,%zmm3
+ vpbroadcastq %xmm7,%zmm4
+ vpbroadcastq %xmm8,%zmm5
+
+ vpunpcklqdq %zmm27,%zmm26,%zmm25
+ vpunpckhqdq %zmm27,%zmm26,%zmm27
+
+
+
+ vpsrlq $24,%zmm27,%zmm26
+ vporq %zmm31,%zmm26,%zmm26
+ vpaddq %zmm26,%zmm2,%zmm2
+ vpandq %zmm28,%zmm25,%zmm24
+ vpsrlq $44,%zmm25,%zmm25
+ vpsllq $20,%zmm27,%zmm27
+ vporq %zmm27,%zmm25,%zmm25
+ vpandq %zmm28,%zmm25,%zmm25
+
+ subq $8,%rdx
+ jz .Ltail_vpmadd52_8x
+ jmp .Loop_vpmadd52_8x
+
+.align 32
+.Loop_vpmadd52_8x:
+
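+# steady-state loop, 128 bytes per iteration; the next blocks are loaded and converted
+# to base 2^44 while the current multiply/reduction is in flight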
+ vpaddq %zmm24,%zmm0,%zmm0
+ vpaddq %zmm25,%zmm1,%zmm1
+
+ vpxorq %zmm18,%zmm18,%zmm18
+ vpmadd52luq %zmm2,%zmm16,%zmm18
+ vpxorq %zmm19,%zmm19,%zmm19
+ vpmadd52huq %zmm2,%zmm16,%zmm19
+ vpxorq %zmm20,%zmm20,%zmm20
+ vpmadd52luq %zmm2,%zmm17,%zmm20
+ vpxorq %zmm21,%zmm21,%zmm21
+ vpmadd52huq %zmm2,%zmm17,%zmm21
+ vpxorq %zmm22,%zmm22,%zmm22
+ vpmadd52luq %zmm2,%zmm3,%zmm22
+ vpxorq %zmm23,%zmm23,%zmm23
+ vpmadd52huq %zmm2,%zmm3,%zmm23
+
+ vmovdqu64 0(%rsi),%zmm26
+ vmovdqu64 64(%rsi),%zmm27
+ leaq 128(%rsi),%rsi
+ vpmadd52luq %zmm0,%zmm3,%zmm18
+ vpmadd52huq %zmm0,%zmm3,%zmm19
+ vpmadd52luq %zmm0,%zmm4,%zmm20
+ vpmadd52huq %zmm0,%zmm4,%zmm21
+ vpmadd52luq %zmm0,%zmm5,%zmm22
+ vpmadd52huq %zmm0,%zmm5,%zmm23
+
+ vpunpcklqdq %zmm27,%zmm26,%zmm25
+ vpunpckhqdq %zmm27,%zmm26,%zmm27
+ vpmadd52luq %zmm1,%zmm17,%zmm18
+ vpmadd52huq %zmm1,%zmm17,%zmm19
+ vpmadd52luq %zmm1,%zmm3,%zmm20
+ vpmadd52huq %zmm1,%zmm3,%zmm21
+ vpmadd52luq %zmm1,%zmm4,%zmm22
+ vpmadd52huq %zmm1,%zmm4,%zmm23
+
+
+
+ vpsrlq $44,%zmm18,%zmm30
+ vpsllq $8,%zmm19,%zmm19
+ vpandq %zmm28,%zmm18,%zmm0
+ vpaddq %zmm30,%zmm19,%zmm19
+
+ vpsrlq $24,%zmm27,%zmm26
+ vporq %zmm31,%zmm26,%zmm26
+ vpaddq %zmm19,%zmm20,%zmm20
+
+ vpsrlq $44,%zmm20,%zmm30
+ vpsllq $8,%zmm21,%zmm21
+ vpandq %zmm28,%zmm20,%zmm1
+ vpaddq %zmm30,%zmm21,%zmm21
+
+ vpandq %zmm28,%zmm25,%zmm24
+ vpsrlq $44,%zmm25,%zmm25
+ vpsllq $20,%zmm27,%zmm27
+ vpaddq %zmm21,%zmm22,%zmm22
+
+ vpsrlq $42,%zmm22,%zmm30
+ vpsllq $10,%zmm23,%zmm23
+ vpandq %zmm29,%zmm22,%zmm2
+ vpaddq %zmm30,%zmm23,%zmm23
+
+ vpaddq %zmm26,%zmm2,%zmm2
+ vpaddq %zmm23,%zmm0,%zmm0
+ vpsllq $2,%zmm23,%zmm23
+
+ vpaddq %zmm23,%zmm0,%zmm0
+ vporq %zmm27,%zmm25,%zmm25
+ vpandq %zmm28,%zmm25,%zmm25
+
+ vpsrlq $44,%zmm0,%zmm30
+ vpandq %zmm28,%zmm0,%zmm0
+
+ vpaddq %zmm30,%zmm1,%zmm1
+
+ subq $8,%rdx
+ jnz .Loop_vpmadd52_8x
+
+.Ltail_vpmadd52_8x:
+
+ vpaddq %zmm24,%zmm0,%zmm0
+ vpaddq %zmm25,%zmm1,%zmm1
+
+ vpxorq %zmm18,%zmm18,%zmm18
+ vpmadd52luq %zmm2,%zmm9,%zmm18
+ vpxorq %zmm19,%zmm19,%zmm19
+ vpmadd52huq %zmm2,%zmm9,%zmm19
+ vpxorq %zmm20,%zmm20,%zmm20
+ vpmadd52luq %zmm2,%zmm10,%zmm20
+ vpxorq %zmm21,%zmm21,%zmm21
+ vpmadd52huq %zmm2,%zmm10,%zmm21
+ vpxorq %zmm22,%zmm22,%zmm22
+ vpmadd52luq %zmm2,%zmm6,%zmm22
+ vpxorq %zmm23,%zmm23,%zmm23
+ vpmadd52huq %zmm2,%zmm6,%zmm23
+
+ vpmadd52luq %zmm0,%zmm6,%zmm18
+ vpmadd52huq %zmm0,%zmm6,%zmm19
+ vpmadd52luq %zmm0,%zmm7,%zmm20
+ vpmadd52huq %zmm0,%zmm7,%zmm21
+ vpmadd52luq %zmm0,%zmm8,%zmm22
+ vpmadd52huq %zmm0,%zmm8,%zmm23
+
+ vpmadd52luq %zmm1,%zmm10,%zmm18
+ vpmadd52huq %zmm1,%zmm10,%zmm19
+ vpmadd52luq %zmm1,%zmm6,%zmm20
+ vpmadd52huq %zmm1,%zmm6,%zmm21
+ vpmadd52luq %zmm1,%zmm7,%zmm22
+ vpmadd52huq %zmm1,%zmm7,%zmm23
+
+
+
+
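+# fold the eight lanes into one: pairwise qword adds (vpsrldq), cross-128-bit adds
+# (vpermq), then vextracti64x4 to add the upper 256 bits; %k1 keeps only lane 0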
+ movl $1,%eax
+ kmovw %eax,%k1
+ vpsrldq $8,%zmm18,%zmm24
+ vpsrldq $8,%zmm19,%zmm0
+ vpsrldq $8,%zmm20,%zmm25
+ vpsrldq $8,%zmm21,%zmm1
+ vpaddq %zmm24,%zmm18,%zmm18
+ vpaddq %zmm0,%zmm19,%zmm19
+ vpsrldq $8,%zmm22,%zmm26
+ vpsrldq $8,%zmm23,%zmm2
+ vpaddq %zmm25,%zmm20,%zmm20
+ vpaddq %zmm1,%zmm21,%zmm21
+ vpermq $0x2,%zmm18,%zmm24
+ vpermq $0x2,%zmm19,%zmm0
+ vpaddq %zmm26,%zmm22,%zmm22
+ vpaddq %zmm2,%zmm23,%zmm23
+
+ vpermq $0x2,%zmm20,%zmm25
+ vpermq $0x2,%zmm21,%zmm1
+ vpaddq %zmm24,%zmm18,%zmm18
+ vpaddq %zmm0,%zmm19,%zmm19
+ vpermq $0x2,%zmm22,%zmm26
+ vpermq $0x2,%zmm23,%zmm2
+ vpaddq %zmm25,%zmm20,%zmm20
+ vpaddq %zmm1,%zmm21,%zmm21
+ vextracti64x4 $1,%zmm18,%ymm24
+ vextracti64x4 $1,%zmm19,%ymm0
+ vpaddq %zmm26,%zmm22,%zmm22
+ vpaddq %zmm2,%zmm23,%zmm23
+
+ vextracti64x4 $1,%zmm20,%ymm25
+ vextracti64x4 $1,%zmm21,%ymm1
+ vextracti64x4 $1,%zmm22,%ymm26
+ vextracti64x4 $1,%zmm23,%ymm2
+ vpaddq %ymm24,%ymm18,%ymm18{%k1}{z}
+ vpaddq %ymm0,%ymm19,%ymm19{%k1}{z}
+ vpaddq %ymm25,%ymm20,%ymm20{%k1}{z}
+ vpaddq %ymm1,%ymm21,%ymm21{%k1}{z}
+ vpaddq %ymm26,%ymm22,%ymm22{%k1}{z}
+ vpaddq %ymm2,%ymm23,%ymm23{%k1}{z}
+
+
+
+ vpsrlq $44,%ymm18,%ymm30
+ vpsllq $8,%ymm19,%ymm19
+ vpandq %ymm28,%ymm18,%ymm0
+ vpaddq %ymm30,%ymm19,%ymm19
+
+ vpaddq %ymm19,%ymm20,%ymm20
+
+ vpsrlq $44,%ymm20,%ymm30
+ vpsllq $8,%ymm21,%ymm21
+ vpandq %ymm28,%ymm20,%ymm1
+ vpaddq %ymm30,%ymm21,%ymm21
+
+ vpaddq %ymm21,%ymm22,%ymm22
+
+ vpsrlq $42,%ymm22,%ymm30
+ vpsllq $10,%ymm23,%ymm23
+ vpandq %ymm29,%ymm22,%ymm2
+ vpaddq %ymm30,%ymm23,%ymm23
+
+ vpaddq %ymm23,%ymm0,%ymm0
+ vpsllq $2,%ymm23,%ymm23
+
+ vpaddq %ymm23,%ymm0,%ymm0
+
+ vpsrlq $44,%ymm0,%ymm30
+ vpandq %ymm28,%ymm0,%ymm0
+
+ vpaddq %ymm30,%ymm1,%ymm1
+
+
+
+ vmovq %xmm0,0(%rdi)
+ vmovq %xmm1,8(%rdi)
+ vmovq %xmm2,16(%rdi)
+ vzeroall
+
+.Lno_data_vpmadd52_8x:
+ .byte 0xf3,0xc3
.cfi_endproc
-.size poly1305_blocks_vpmadd52_8x,.-poly1305_blocks_vpmadd52_8x
-.type poly1305_emit_base2_44,@function
-.align 32
-poly1305_emit_base2_44:
+.size poly1305_blocks_vpmadd52_8x,.-poly1305_blocks_vpmadd52_8x
+.type poly1305_emit_base2_44,@function
+.align 32
+poly1305_emit_base2_44:
.cfi_startproc
- movq 0(%rdi),%r8
- movq 8(%rdi),%r9
- movq 16(%rdi),%r10
-
- movq %r9,%rax
- shrq $20,%r9
- shlq $44,%rax
- movq %r10,%rcx
- shrq $40,%r10
- shlq $24,%rcx
-
- addq %rax,%r8
- adcq %rcx,%r9
- adcq $0,%r10
-
- movq %r8,%rax
- addq $5,%r8
- movq %r9,%rcx
- adcq $0,%r9
- adcq $0,%r10
- shrq $2,%r10
- cmovnzq %r8,%rax
- cmovnzq %r9,%rcx
-
- addq 0(%rdx),%rax
- adcq 8(%rdx),%rcx
- movq %rax,0(%rsi)
- movq %rcx,8(%rsi)
-
- .byte 0xf3,0xc3
+ movq 0(%rdi),%r8
+ movq 8(%rdi),%r9
+ movq 16(%rdi),%r10
+
+ movq %r9,%rax
+ shrq $20,%r9
+ shlq $44,%rax
+ movq %r10,%rcx
+ shrq $40,%r10
+ shlq $24,%rcx
+
+ addq %rax,%r8
+ adcq %rcx,%r9
+ adcq $0,%r10
+
+ movq %r8,%rax
+ addq $5,%r8
+ movq %r9,%rcx
+ adcq $0,%r9
+ adcq $0,%r10
+ shrq $2,%r10
+ cmovnzq %r8,%rax
+ cmovnzq %r9,%rcx
+
+ addq 0(%rdx),%rax
+ adcq 8(%rdx),%rcx
+ movq %rax,0(%rsi)
+ movq %rcx,8(%rsi)
+
+ .byte 0xf3,0xc3
.cfi_endproc
-.size poly1305_emit_base2_44,.-poly1305_emit_base2_44
+.size poly1305_emit_base2_44,.-poly1305_emit_base2_44
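+# poly1305_emit_base2_44 (above) repacks the 44/44/42-bit limbs into two 64-bit words,
+# performs the final reduction mod 2^130-5 (conditional add of 5) and adds the 128-bit
+# nonce from (%rdx)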
.align 64
.Lconst:
.Lmask24: