# This file is generated from a similarly-named Perl script in the BoringSSL
# source tree. Do not edit by hand.
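# P-256 (secp256r1) field and point arithmetic for 32-bit x86. Field
# elements are eight 32-bit little-endian words in Montgomery form with
# R = 2^256; points use Jacobian coordinates (X, Y, Z) except where noted.
# The Montgomery multiplication below requires SSE2.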

#if defined(__i386__)
#if defined(BORINGSSL_PREFIX)
#include <boringssl_prefix_symbols_asm.h>
#endif
.text
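# The field element 1 in Montgomery form, i.e. R mod p = 2^256 mod p256.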
.LONE_mont:
.long 1,0,0,-1,-1,-1,-2,0
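# _ecp_nistz256_div_by_2: %edi[0..7] = %esi[0..7]/2 mod p256. If the input
# is odd, p is added first (branchlessly, via a mask derived from the low
# bit) so the value becomes even; the carry out of that addition is saved
# in %esi and shifted back in as bit 255 during the one-bit right shift.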
.hidden _ecp_nistz256_div_by_2
.type _ecp_nistz256_div_by_2,@function
.align 16
_ecp_nistz256_div_by_2:
movl (%esi),%ebp
xorl %edx,%edx
movl 4(%esi),%ebx
movl %ebp,%eax
andl $1,%ebp
movl 8(%esi),%ecx
subl %ebp,%edx
addl %edx,%eax
adcl %edx,%ebx
movl %eax,(%edi)
adcl %edx,%ecx
movl %ebx,4(%edi)
movl %ecx,8(%edi)
movl 12(%esi),%eax
movl 16(%esi),%ebx
adcl $0,%eax
movl 20(%esi),%ecx
adcl $0,%ebx
movl %eax,12(%edi)
adcl $0,%ecx
movl %ebx,16(%edi)
movl %ecx,20(%edi)
movl 24(%esi),%eax
movl 28(%esi),%ebx
adcl %ebp,%eax
adcl %edx,%ebx
movl %eax,24(%edi)
sbbl %esi,%esi
movl %ebx,28(%edi)
movl (%edi),%eax
movl 4(%edi),%ebx
movl 8(%edi),%ecx
movl 12(%edi),%edx
shrl $1,%eax
movl %ebx,%ebp
shll $31,%ebx
orl %ebx,%eax
shrl $1,%ebp
movl %ecx,%ebx
shll $31,%ecx
movl %eax,(%edi)
orl %ecx,%ebp
movl 16(%edi),%eax
shrl $1,%ebx
movl %edx,%ecx
shll $31,%edx
movl %ebp,4(%edi)
orl %edx,%ebx
movl 20(%edi),%ebp
shrl $1,%ecx
movl %eax,%edx
shll $31,%eax
movl %ebx,8(%edi)
orl %eax,%ecx
movl 24(%edi),%ebx
shrl $1,%edx
movl %ebp,%eax
shll $31,%ebp
movl %ecx,12(%edi)
orl %ebp,%edx
movl 28(%edi),%ecx
shrl $1,%eax
movl %ebx,%ebp
shll $31,%ebx
movl %edx,16(%edi)
orl %ebx,%eax
shrl $1,%ebp
movl %ecx,%ebx
shll $31,%ecx
movl %eax,20(%edi)
orl %ecx,%ebp
shrl $1,%ebx
shll $31,%esi
movl %ebp,24(%edi)
orl %esi,%ebx
movl %ebx,28(%edi)
ret
.size _ecp_nistz256_div_by_2,.-_ecp_nistz256_div_by_2
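# GFp_nistz256_add(res, a, b): res = a + b mod p256. cdecl wrapper that
# loads the three pointer arguments into %edi/%esi/%ebp for the worker.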
.globl GFp_nistz256_add
.hidden GFp_nistz256_add
.type GFp_nistz256_add,@function
.align 16
GFp_nistz256_add:
.L_GFp_nistz256_add_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 24(%esp),%esi
movl 28(%esp),%ebp
movl 20(%esp),%edi
call _ecp_nistz256_add
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size GFp_nistz256_add,.-.L_GFp_nistz256_add_begin
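# Worker: %edi[] = %esi[] + %ebp[] mod p256, constant time. After the
# 256-bit addition (carry kept in %esi), p is trial-subtracted and the
# final borrow is widened into an all-zero/all-one mask; a masked copy of
# p is then subtracted from the stored sum, so no branch is taken. The
# word pattern of p, {-1,-1,-1,0,0,0,1,-1}, is rebuilt from the mask
# itself (shrl $31 supplies the lone 1 word).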
.hidden _ecp_nistz256_add
.type _ecp_nistz256_add,@function
.align 16
_ecp_nistz256_add:
movl (%esi),%eax
movl 4(%esi),%ebx
movl 8(%esi),%ecx
addl (%ebp),%eax
movl 12(%esi),%edx
adcl 4(%ebp),%ebx
movl %eax,(%edi)
adcl 8(%ebp),%ecx
movl %ebx,4(%edi)
adcl 12(%ebp),%edx
movl %ecx,8(%edi)
movl %edx,12(%edi)
movl 16(%esi),%eax
movl 20(%esi),%ebx
movl 24(%esi),%ecx
adcl 16(%ebp),%eax
movl 28(%esi),%edx
adcl 20(%ebp),%ebx
movl %eax,16(%edi)
adcl 24(%ebp),%ecx
movl %ebx,20(%edi)
movl $0,%esi
adcl 28(%ebp),%edx
movl %ecx,24(%edi)
adcl $0,%esi
movl %edx,28(%edi)
movl (%edi),%eax
movl 4(%edi),%ebx
movl 8(%edi),%ecx
subl $-1,%eax
movl 12(%edi),%edx
sbbl $-1,%ebx
movl 16(%edi),%eax
sbbl $-1,%ecx
movl 20(%edi),%ebx
sbbl $0,%edx
movl 24(%edi),%ecx
sbbl $0,%eax
movl 28(%edi),%edx
sbbl $0,%ebx
sbbl $1,%ecx
sbbl $-1,%edx
sbbl $0,%esi
notl %esi
movl (%edi),%eax
movl %esi,%ebp
movl 4(%edi),%ebx
shrl $31,%ebp
movl 8(%edi),%ecx
subl %esi,%eax
movl 12(%edi),%edx
sbbl %esi,%ebx
movl %eax,(%edi)
sbbl %esi,%ecx
movl %ebx,4(%edi)
sbbl $0,%edx
movl %ecx,8(%edi)
movl %edx,12(%edi)
movl 16(%edi),%eax
movl 20(%edi),%ebx
movl 24(%edi),%ecx
sbbl $0,%eax
movl 28(%edi),%edx
sbbl $0,%ebx
movl %eax,16(%edi)
sbbl %ebp,%ecx
movl %ebx,20(%edi)
sbbl %esi,%edx
movl %ecx,24(%edi)
movl %edx,28(%edi)
ret
.size _ecp_nistz256_add,.-_ecp_nistz256_add
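# Worker: %edi[] = %esi[] - %ebp[] mod p256, constant time. The borrow of
# the 256-bit subtraction is widened into a mask (sbbl %esi,%esi) and a
# masked copy of p is added back, mirroring the adder's reduction trick.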
.hidden _ecp_nistz256_sub
.type _ecp_nistz256_sub,@function
.align 16
_ecp_nistz256_sub:
movl (%esi),%eax
movl 4(%esi),%ebx
movl 8(%esi),%ecx
subl (%ebp),%eax
movl 12(%esi),%edx
sbbl 4(%ebp),%ebx
movl %eax,(%edi)
sbbl 8(%ebp),%ecx
movl %ebx,4(%edi)
sbbl 12(%ebp),%edx
movl %ecx,8(%edi)
movl %edx,12(%edi)
movl 16(%esi),%eax
movl 20(%esi),%ebx
movl 24(%esi),%ecx
sbbl 16(%ebp),%eax
movl 28(%esi),%edx
sbbl 20(%ebp),%ebx
sbbl 24(%ebp),%ecx
movl %eax,16(%edi)
sbbl 28(%ebp),%edx
movl %ebx,20(%edi)
sbbl %esi,%esi
movl %ecx,24(%edi)
movl %edx,28(%edi)
movl (%edi),%eax
movl %esi,%ebp
movl 4(%edi),%ebx
shrl $31,%ebp
movl 8(%edi),%ecx
addl %esi,%eax
movl 12(%edi),%edx
adcl %esi,%ebx
movl %eax,(%edi)
adcl %esi,%ecx
movl %ebx,4(%edi)
adcl $0,%edx
movl %ecx,8(%edi)
movl %edx,12(%edi)
movl 16(%edi),%eax
movl 20(%edi),%ebx
movl 24(%edi),%ecx
adcl $0,%eax
movl 28(%edi),%edx
adcl $0,%ebx
movl %eax,16(%edi)
adcl %ebp,%ecx
movl %ebx,20(%edi)
adcl %esi,%edx
movl %ecx,24(%edi)
movl %edx,28(%edi)
ret
.size _ecp_nistz256_sub,.-_ecp_nistz256_sub
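# GFp_nistz256_neg(res, a): res = -a mod p256, computed as 0 - a by
# pointing the subtraction worker at a zeroed 32-byte stack buffer.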
.globl GFp_nistz256_neg
.hidden GFp_nistz256_neg
.type GFp_nistz256_neg,@function
.align 16
GFp_nistz256_neg:
.L_GFp_nistz256_neg_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 24(%esp),%ebp
movl 20(%esp),%edi
xorl %eax,%eax
subl $32,%esp
movl %eax,(%esp)
movl %esp,%esi
movl %eax,4(%esp)
movl %eax,8(%esp)
movl %eax,12(%esp)
movl %eax,16(%esp)
movl %eax,20(%esp)
movl %eax,24(%esp)
movl %eax,28(%esp)
call _ecp_nistz256_sub
addl $32,%esp
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size GFp_nistz256_neg,.-.L_GFp_nistz256_neg_begin
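# Position-independent-code helper: returns its own return address in
# %eax so callers can address GFp_ia32cap_P relative to the current
# instruction.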
.hidden _picup_eax
.type _picup_eax,@function
.align 16
_picup_eax:
movl (%esp),%eax
ret
.size _picup_eax,.-_picup_eax
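# GFp_nistz256_mul_mont(res, a, b): res = a*b*R^-1 mod p256 (Montgomery
# multiplication, R = 2^256). The GFp_ia32cap_P word is loaded into %eax,
# though the worker in this build contains only the SSE2 path and does
# not appear to consult it.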
.globl GFp_nistz256_mul_mont
.hidden GFp_nistz256_mul_mont
.type GFp_nistz256_mul_mont,@function
.align 16
GFp_nistz256_mul_mont:
.L_GFp_nistz256_mul_mont_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 24(%esp),%esi
movl 28(%esp),%ebp
call _picup_eax
.L000pic:
leal GFp_ia32cap_P-.L000pic(%eax),%eax
movl (%eax),%eax
movl 20(%esp),%edi
call _ecp_nistz256_mul_mont
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size GFp_nistz256_mul_mont,.-.L_GFp_nistz256_mul_mont_begin
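# SSE2 Montgomery multiplication. The words of a are expanded into a
# table at 128(%esp) and multiplied by one word of b per iteration
# (pmuludq), with partial products accumulated in a redundant base-2^32
# representation across the XMM registers. Because p256 = -1 mod 2^32,
# the Montgomery constant -p^-1 mod 2^32 is 1, so each reduction step can
# reuse the low accumulator word as the multiplier. Eight iterations in
# all: one peeled before the loop, six in .L001madd_sse2, one after,
# followed by carry propagation and a constant-time final reduction.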
.hidden _ecp_nistz256_mul_mont
.type _ecp_nistz256_mul_mont,@function
.align 16
_ecp_nistz256_mul_mont:
movl %esp,%edx
subl $256,%esp
movd (%ebp),%xmm7
leal 4(%ebp),%ebp
pcmpeqd %xmm6,%xmm6
psrlq $48,%xmm6
pshuflw $220,%xmm7,%xmm7
andl $-64,%esp
pshufd $220,%xmm7,%xmm7
leal 128(%esp),%ebx
movd (%esi),%xmm0
pshufd $204,%xmm0,%xmm0
movd 4(%esi),%xmm1
movdqa %xmm0,(%ebx)
pmuludq %xmm7,%xmm0
movd 8(%esi),%xmm2
pshufd $204,%xmm1,%xmm1
movdqa %xmm1,16(%ebx)
pmuludq %xmm7,%xmm1
movq %xmm0,%xmm4
pslldq $6,%xmm4
paddq %xmm0,%xmm4
movdqa %xmm4,%xmm5
psrldq $10,%xmm4
pand %xmm6,%xmm5
movd 12(%esi),%xmm3
pshufd $204,%xmm2,%xmm2
movdqa %xmm2,32(%ebx)
pmuludq %xmm7,%xmm2
paddq %xmm4,%xmm1
movdqa %xmm1,(%esp)
movd 16(%esi),%xmm0
pshufd $204,%xmm3,%xmm3
movdqa %xmm3,48(%ebx)
pmuludq %xmm7,%xmm3
movdqa %xmm2,16(%esp)
movd 20(%esi),%xmm1
pshufd $204,%xmm0,%xmm0
movdqa %xmm0,64(%ebx)
pmuludq %xmm7,%xmm0
paddq %xmm5,%xmm3
movdqa %xmm3,32(%esp)
movd 24(%esi),%xmm2
pshufd $204,%xmm1,%xmm1
movdqa %xmm1,80(%ebx)
pmuludq %xmm7,%xmm1
movdqa %xmm0,48(%esp)
pshufd $177,%xmm5,%xmm4
movd 28(%esi),%xmm3
pshufd $204,%xmm2,%xmm2
movdqa %xmm2,96(%ebx)
pmuludq %xmm7,%xmm2
movdqa %xmm1,64(%esp)
psubq %xmm5,%xmm4
movd (%ebp),%xmm0
pshufd $204,%xmm3,%xmm3
movdqa %xmm3,112(%ebx)
pmuludq %xmm7,%xmm3
pshuflw $220,%xmm0,%xmm7
movdqa (%ebx),%xmm0
pshufd $220,%xmm7,%xmm7
movl $6,%ecx
leal 4(%ebp),%ebp
jmp .L001madd_sse2
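# Each pass folds a[0..7]*b[i] into the accumulator, performs one step of
# Montgomery reduction, and fetches the next word of b into %xmm7.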
.align 16
.L001madd_sse2:
paddq %xmm5,%xmm2
paddq %xmm4,%xmm3
movdqa 16(%ebx),%xmm1
pmuludq %xmm7,%xmm0
movdqa %xmm2,80(%esp)
movdqa 32(%ebx),%xmm2
pmuludq %xmm7,%xmm1
movdqa %xmm3,96(%esp)
paddq (%esp),%xmm0
movdqa 48(%ebx),%xmm3
pmuludq %xmm7,%xmm2
movq %xmm0,%xmm4
pslldq $6,%xmm4
paddq 16(%esp),%xmm1
paddq %xmm0,%xmm4
movdqa %xmm4,%xmm5
psrldq $10,%xmm4
movdqa 64(%ebx),%xmm0
pmuludq %xmm7,%xmm3
paddq %xmm4,%xmm1
paddq 32(%esp),%xmm2
movdqa %xmm1,(%esp)
movdqa 80(%ebx),%xmm1
pmuludq %xmm7,%xmm0
paddq 48(%esp),%xmm3
movdqa %xmm2,16(%esp)
pand %xmm6,%xmm5
movdqa 96(%ebx),%xmm2
pmuludq %xmm7,%xmm1
paddq %xmm5,%xmm3
paddq 64(%esp),%xmm0
movdqa %xmm3,32(%esp)
pshufd $177,%xmm5,%xmm4
movdqa %xmm7,%xmm3
pmuludq %xmm7,%xmm2
movd (%ebp),%xmm7
leal 4(%ebp),%ebp
paddq 80(%esp),%xmm1
psubq %xmm5,%xmm4
movdqa %xmm0,48(%esp)
pshuflw $220,%xmm7,%xmm7
pmuludq 112(%ebx),%xmm3
pshufd $220,%xmm7,%xmm7
movdqa (%ebx),%xmm0
movdqa %xmm1,64(%esp)
paddq 96(%esp),%xmm2
decl %ecx
jnz .L001madd_sse2
paddq %xmm5,%xmm2
paddq %xmm4,%xmm3
movdqa 16(%ebx),%xmm1
pmuludq %xmm7,%xmm0
movdqa %xmm2,80(%esp)
movdqa 32(%ebx),%xmm2
pmuludq %xmm7,%xmm1
movdqa %xmm3,96(%esp)
paddq (%esp),%xmm0
movdqa 48(%ebx),%xmm3
pmuludq %xmm7,%xmm2
movq %xmm0,%xmm4
pslldq $6,%xmm4
paddq 16(%esp),%xmm1
paddq %xmm0,%xmm4
movdqa %xmm4,%xmm5
psrldq $10,%xmm4
movdqa 64(%ebx),%xmm0
pmuludq %xmm7,%xmm3
paddq %xmm4,%xmm1
paddq 32(%esp),%xmm2
movdqa %xmm1,(%esp)
movdqa 80(%ebx),%xmm1
pmuludq %xmm7,%xmm0
paddq 48(%esp),%xmm3
movdqa %xmm2,16(%esp)
pand %xmm6,%xmm5
movdqa 96(%ebx),%xmm2
pmuludq %xmm7,%xmm1
paddq %xmm5,%xmm3
paddq 64(%esp),%xmm0
movdqa %xmm3,32(%esp)
pshufd $177,%xmm5,%xmm4
movdqa 112(%ebx),%xmm3
pmuludq %xmm7,%xmm2
paddq 80(%esp),%xmm1
psubq %xmm5,%xmm4
movdqa %xmm0,48(%esp)
pmuludq %xmm7,%xmm3
pcmpeqd %xmm7,%xmm7
movdqa (%esp),%xmm0
pslldq $8,%xmm7
movdqa %xmm1,64(%esp)
paddq 96(%esp),%xmm2
paddq %xmm5,%xmm2
paddq %xmm4,%xmm3
movdqa %xmm2,80(%esp)
movdqa %xmm3,96(%esp)
movdqa 16(%esp),%xmm1
movdqa 32(%esp),%xmm2
movdqa 48(%esp),%xmm3
movq %xmm0,%xmm4
pand %xmm7,%xmm0
xorl %ebp,%ebp
pslldq $6,%xmm4
movq %xmm1,%xmm5
paddq %xmm4,%xmm0
pand %xmm7,%xmm1
psrldq $6,%xmm0
movd %xmm0,%eax
psrldq $4,%xmm0
paddq %xmm0,%xmm5
movdqa 64(%esp),%xmm0
subl $-1,%eax
pslldq $6,%xmm5
movq %xmm2,%xmm4
paddq %xmm5,%xmm1
pand %xmm7,%xmm2
psrldq $6,%xmm1
movl %eax,(%edi)
movd %xmm1,%eax
psrldq $4,%xmm1
paddq %xmm1,%xmm4
movdqa 80(%esp),%xmm1
sbbl $-1,%eax
pslldq $6,%xmm4
movq %xmm3,%xmm5
paddq %xmm4,%xmm2
pand %xmm7,%xmm3
psrldq $6,%xmm2
movl %eax,4(%edi)
movd %xmm2,%eax
psrldq $4,%xmm2
paddq %xmm2,%xmm5
movdqa 96(%esp),%xmm2
sbbl $-1,%eax
pslldq $6,%xmm5
movq %xmm0,%xmm4
paddq %xmm5,%xmm3
pand %xmm7,%xmm0
psrldq $6,%xmm3
movl %eax,8(%edi)
movd %xmm3,%eax
psrldq $4,%xmm3
paddq %xmm3,%xmm4
sbbl $0,%eax
pslldq $6,%xmm4
movq %xmm1,%xmm5
paddq %xmm4,%xmm0
pand %xmm7,%xmm1
psrldq $6,%xmm0
movl %eax,12(%edi)
movd %xmm0,%eax
psrldq $4,%xmm0
paddq %xmm0,%xmm5
sbbl $0,%eax
pslldq $6,%xmm5
movq %xmm2,%xmm4
paddq %xmm5,%xmm1
pand %xmm7,%xmm2
psrldq $6,%xmm1
movd %xmm1,%ebx
psrldq $4,%xmm1
movl %edx,%esp
paddq %xmm1,%xmm4
pslldq $6,%xmm4
paddq %xmm4,%xmm2
psrldq $6,%xmm2
movd %xmm2,%ecx
psrldq $4,%xmm2
sbbl $0,%ebx
movd %xmm2,%edx
pextrw $2,%xmm2,%esi
sbbl $1,%ecx
sbbl $-1,%edx
sbbl $0,%esi
subl %esi,%ebp
addl %esi,(%edi)
adcl %esi,4(%edi)
adcl %esi,8(%edi)
adcl $0,12(%edi)
adcl $0,%eax
adcl $0,%ebx
movl %eax,16(%edi)
adcl %ebp,%ecx
movl %ebx,20(%edi)
adcl %esi,%edx
movl %ecx,24(%edi)
movl %edx,28(%edi)
ret
.size _ecp_nistz256_mul_mont,.-_ecp_nistz256_mul_mont
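# GFp_nistz256_point_double(r, a): Jacobian doubling, r = 2*a. Points are
# 96 bytes: X, Y, Z as consecutive Montgomery-form field elements. The
# routine is a straight-line chain of the field helpers above, with
# intermediates on the 164-byte frame; 160(%esp) caches the GFp_ia32cap_P
# word passed to each multiplication. .Lpoint_double_shortcut appears to
# be retained from the original BoringSSL code, which enters there when
# doubling is reached from full point addition.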
.globl GFp_nistz256_point_double
.hidden GFp_nistz256_point_double
.type GFp_nistz256_point_double,@function
.align 16
GFp_nistz256_point_double:
.L_GFp_nistz256_point_double_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 24(%esp),%esi
subl $164,%esp
call _picup_eax
.L002pic:
leal GFp_ia32cap_P-.L002pic(%eax),%edx
movl (%edx),%ebp
.Lpoint_double_shortcut:
movl (%esi),%eax
movl 4(%esi),%ebx
movl 8(%esi),%ecx
movl 12(%esi),%edx
movl %eax,96(%esp)
movl %ebx,100(%esp)
movl %ecx,104(%esp)
movl %edx,108(%esp)
movl 16(%esi),%eax
movl 20(%esi),%ebx
movl 24(%esi),%ecx
movl 28(%esi),%edx
movl %eax,112(%esp)
movl %ebx,116(%esp)
movl %ecx,120(%esp)
movl %edx,124(%esp)
movl %ebp,160(%esp)
leal 32(%esi),%ebp
leal 32(%esi),%esi
leal (%esp),%edi
call _ecp_nistz256_add
movl 160(%esp),%eax
movl $64,%esi
addl 188(%esp),%esi
leal 64(%esp),%edi
movl %esi,%ebp
call _ecp_nistz256_mul_mont
movl 160(%esp),%eax
leal (%esp),%esi
leal (%esp),%ebp
leal (%esp),%edi
call _ecp_nistz256_mul_mont
movl 160(%esp),%eax
movl 188(%esp),%ebp
leal 32(%ebp),%esi
leal 64(%ebp),%ebp
leal 128(%esp),%edi
call _ecp_nistz256_mul_mont
leal 96(%esp),%esi
leal 64(%esp),%ebp
leal 32(%esp),%edi
call _ecp_nistz256_add
movl $64,%edi
leal 128(%esp),%esi
leal 128(%esp),%ebp
addl 184(%esp),%edi
call _ecp_nistz256_add
leal 96(%esp),%esi
leal 64(%esp),%ebp
leal 64(%esp),%edi
call _ecp_nistz256_sub
movl 160(%esp),%eax
leal (%esp),%esi
leal (%esp),%ebp
leal 128(%esp),%edi
call _ecp_nistz256_mul_mont
movl 160(%esp),%eax
leal 32(%esp),%esi
leal 64(%esp),%ebp
leal 32(%esp),%edi
call _ecp_nistz256_mul_mont
movl $32,%edi
leal 128(%esp),%esi
addl 184(%esp),%edi
call _ecp_nistz256_div_by_2
leal 32(%esp),%esi
leal 32(%esp),%ebp
leal 128(%esp),%edi
call _ecp_nistz256_add
movl 160(%esp),%eax
leal 96(%esp),%esi
leal (%esp),%ebp
leal (%esp),%edi
call _ecp_nistz256_mul_mont
leal 128(%esp),%esi
leal 32(%esp),%ebp
leal 32(%esp),%edi
call _ecp_nistz256_add
leal (%esp),%esi
leal (%esp),%ebp
leal 128(%esp),%edi
call _ecp_nistz256_add
movl 160(%esp),%eax
leal 32(%esp),%esi
leal 32(%esp),%ebp
movl 184(%esp),%edi
call _ecp_nistz256_mul_mont
movl %edi,%esi
leal 128(%esp),%ebp
call _ecp_nistz256_sub
leal (%esp),%esi
movl %edi,%ebp
leal (%esp),%edi
call _ecp_nistz256_sub
movl 160(%esp),%eax
movl %edi,%esi
leal 32(%esp),%ebp
call _ecp_nistz256_mul_mont
movl $32,%ebp
leal (%esp),%esi
addl 184(%esp),%ebp
movl %ebp,%edi
call _ecp_nistz256_sub
addl $164,%esp
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size GFp_nistz256_point_double,.-.L_GFp_nistz256_point_double_begin
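# GFp_nistz256_point_add_affine(r, a, b): mixed addition, r = a + b, where
# r and a are 96-byte Jacobian points and b is a 64-byte affine (x, y)
# pair, all in Montgomery form. Both inputs are copied to the stack while
# all-zero encodings (the point at infinity) are detected; the resulting
# masks are saved at 480(%esp) and 484(%esp) for the final selection. As
# in the other ecp_nistz256 ports, the doubling case a = b is not
# special-cased here and is expected to be avoided by the caller.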
.globl GFp_nistz256_point_add_affine
.hidden GFp_nistz256_point_add_affine
.type GFp_nistz256_point_add_affine,@function
.align 16
GFp_nistz256_point_add_affine:
.L_GFp_nistz256_point_add_affine_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 24(%esp),%esi
subl $492,%esp
call _picup_eax
.L003pic:
leal GFp_ia32cap_P-.L003pic(%eax),%edx
movl (%edx),%ebp
leal 96(%esp),%edi
movl (%esi),%eax
movl 4(%esi),%ebx
movl 8(%esi),%ecx
movl 12(%esi),%edx
movl %eax,(%edi)
movl %ebp,488(%esp)
movl %ebx,4(%edi)
movl %ecx,8(%edi)
movl %edx,12(%edi)
movl 16(%esi),%eax
movl 20(%esi),%ebx
movl 24(%esi),%ecx
movl 28(%esi),%edx
movl %eax,16(%edi)
movl %ebx,20(%edi)
movl %ecx,24(%edi)
movl %edx,28(%edi)
movl 32(%esi),%eax
movl 36(%esi),%ebx
movl 40(%esi),%ecx
movl 44(%esi),%edx
movl %eax,32(%edi)
movl %ebx,36(%edi)
movl %ecx,40(%edi)
movl %edx,44(%edi)
movl 48(%esi),%eax
movl 52(%esi),%ebx
movl 56(%esi),%ecx
movl 60(%esi),%edx
movl %eax,48(%edi)
movl %ebx,52(%edi)
movl %ecx,56(%edi)
movl %edx,60(%edi)
movl 64(%esi),%eax
movl 68(%esi),%ebx
movl 72(%esi),%ecx
movl 76(%esi),%edx
movl %eax,64(%edi)
movl %eax,%ebp
movl %ebx,68(%edi)
orl %ebx,%ebp
movl %ecx,72(%edi)
orl %ecx,%ebp
movl %edx,76(%edi)
orl %edx,%ebp
movl 80(%esi),%eax
movl 84(%esi),%ebx
movl 88(%esi),%ecx
movl 92(%esi),%edx
movl %eax,80(%edi)
orl %eax,%ebp
movl %ebx,84(%edi)
orl %ebx,%ebp
movl %ecx,88(%edi)
orl %ecx,%ebp
movl %edx,92(%edi)
orl %edx,%ebp
xorl %eax,%eax
movl 520(%esp),%esi
subl %ebp,%eax
orl %eax,%ebp
sarl $31,%ebp
movl %ebp,480(%esp)
leal 192(%esp),%edi
movl (%esi),%eax
movl 4(%esi),%ebx
movl 8(%esi),%ecx
movl 12(%esi),%edx
movl %eax,(%edi)
movl %eax,%ebp
movl %ebx,4(%edi)
orl %ebx,%ebp
movl %ecx,8(%edi)
orl %ecx,%ebp
movl %edx,12(%edi)
orl %edx,%ebp
movl 16(%esi),%eax
movl 20(%esi),%ebx
movl 24(%esi),%ecx
movl 28(%esi),%edx
movl %eax,16(%edi)
orl %eax,%ebp
movl %ebx,20(%edi)
orl %ebx,%ebp
movl %ecx,24(%edi)
orl %ecx,%ebp
movl %edx,28(%edi)
orl %edx,%ebp
movl 32(%esi),%eax
movl 36(%esi),%ebx
movl 40(%esi),%ecx
movl 44(%esi),%edx
movl %eax,32(%edi)
orl %eax,%ebp
movl %ebx,36(%edi)
orl %ebx,%ebp
movl %ecx,40(%edi)
orl %ecx,%ebp
movl %edx,44(%edi)
orl %edx,%ebp
movl 48(%esi),%eax
movl 52(%esi),%ebx
movl 56(%esi),%ecx
movl 60(%esi),%edx
movl %eax,48(%edi)
orl %eax,%ebp
movl %ebx,52(%edi)
orl %ebx,%ebp
movl %ecx,56(%edi)
orl %ecx,%ebp
movl %edx,60(%edi)
orl %edx,%ebp
xorl %ebx,%ebx
movl 488(%esp),%eax
subl %ebp,%ebx
leal 160(%esp),%esi
orl %ebp,%ebx
leal 160(%esp),%ebp
sarl $31,%ebx
leal 288(%esp),%edi
movl %ebx,484(%esp)
call _ecp_nistz256_mul_mont
movl 488(%esp),%eax
leal 192(%esp),%esi
movl %edi,%ebp
leal 256(%esp),%edi
call _ecp_nistz256_mul_mont
movl 488(%esp),%eax
leal 160(%esp),%esi
leal 288(%esp),%ebp
leal 288(%esp),%edi
call _ecp_nistz256_mul_mont
leal 256(%esp),%esi
leal 96(%esp),%ebp
leal 320(%esp),%edi
call _ecp_nistz256_sub
movl 488(%esp),%eax
leal 224(%esp),%esi
leal 288(%esp),%ebp
leal 288(%esp),%edi
call _ecp_nistz256_mul_mont
movl 488(%esp),%eax
leal 160(%esp),%esi
leal 320(%esp),%ebp
leal 64(%esp),%edi
call _ecp_nistz256_mul_mont
leal 288(%esp),%esi
leal 128(%esp),%ebp
leal 352(%esp),%edi
call _ecp_nistz256_sub
movl 488(%esp),%eax
leal 320(%esp),%esi
leal 320(%esp),%ebp
leal 384(%esp),%edi
call _ecp_nistz256_mul_mont
movl 488(%esp),%eax
leal 352(%esp),%esi
leal 352(%esp),%ebp
leal 448(%esp),%edi
call _ecp_nistz256_mul_mont
movl 488(%esp),%eax
leal 96(%esp),%esi
leal 384(%esp),%ebp
leal 256(%esp),%edi
call _ecp_nistz256_mul_mont
movl 488(%esp),%eax
leal 320(%esp),%esi
leal 384(%esp),%ebp
leal 416(%esp),%edi
call _ecp_nistz256_mul_mont
leal 256(%esp),%esi
leal 256(%esp),%ebp
leal 384(%esp),%edi
call _ecp_nistz256_add
leal 448(%esp),%esi
leal 384(%esp),%ebp
leal (%esp),%edi
call _ecp_nistz256_sub
leal (%esp),%esi
leal 416(%esp),%ebp
leal (%esp),%edi
call _ecp_nistz256_sub
leal 256(%esp),%esi
leal (%esp),%ebp
leal 32(%esp),%edi
call _ecp_nistz256_sub
movl 488(%esp),%eax
leal 416(%esp),%esi
leal 128(%esp),%ebp
leal 288(%esp),%edi
call _ecp_nistz256_mul_mont
movl 488(%esp),%eax
leal 352(%esp),%esi
leal 32(%esp),%ebp
leal 32(%esp),%edi
call _ecp_nistz256_mul_mont
leal 32(%esp),%esi
leal 288(%esp),%ebp
leal 32(%esp),%edi
call _ecp_nistz256_sub
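# Constant-time selection of the result from the saved infinity masks:
# %edx keeps the computed sum, %ebp covers "a was infinity" (result is b
# with Z = 1 in Montgomery form; note the literal 1 and -2 words of
# ONE_mont being OR-ed in), and %esi covers "b was infinity" (result is
# the stacked copy of a).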
movl 480(%esp),%ebp
movl 484(%esp),%esi
movl 512(%esp),%edi
movl %ebp,%edx
notl %ebp
andl %esi,%edx
andl %esi,%ebp
notl %esi
movl %edx,%eax
andl 64(%esp),%eax
movl %ebp,%ebx
andl $1,%ebx
movl %esi,%ecx
andl 160(%esp),%ecx
orl %ebx,%eax
orl %ecx,%eax
movl %eax,64(%edi)
movl %edx,%eax
andl 68(%esp),%eax
movl %esi,%ecx
andl 164(%esp),%ecx
orl %ecx,%eax
movl %eax,68(%edi)
movl %edx,%eax
andl 72(%esp),%eax
movl %esi,%ecx
andl 168(%esp),%ecx
orl %ecx,%eax
movl %eax,72(%edi)
movl %edx,%eax
andl 76(%esp),%eax
movl %esi,%ecx
andl 172(%esp),%ecx
orl %ebp,%eax
orl %ecx,%eax
movl %eax,76(%edi)
movl %edx,%eax
andl 80(%esp),%eax
movl %esi,%ecx
andl 176(%esp),%ecx
orl %ebp,%eax
orl %ecx,%eax
movl %eax,80(%edi)
movl %edx,%eax
andl 84(%esp),%eax
movl %esi,%ecx
andl 180(%esp),%ecx
orl %ebp,%eax
orl %ecx,%eax
movl %eax,84(%edi)
movl %edx,%eax
andl 88(%esp),%eax
movl %ebp,%ebx
andl $-2,%ebx
movl %esi,%ecx
andl 184(%esp),%ecx
orl %ebx,%eax
orl %ecx,%eax
movl %eax,88(%edi)
movl %edx,%eax
andl 92(%esp),%eax
movl %esi,%ecx
andl 188(%esp),%ecx
orl %ecx,%eax
movl %eax,92(%edi)
movl %edx,%eax
andl (%esp),%eax
movl %ebp,%ebx
andl 192(%esp),%ebx
movl %esi,%ecx
andl 96(%esp),%ecx
orl %ebx,%eax
orl %ecx,%eax
movl %eax,(%edi)
movl %edx,%eax
andl 4(%esp),%eax
movl %ebp,%ebx
andl 196(%esp),%ebx
movl %esi,%ecx
andl 100(%esp),%ecx
orl %ebx,%eax
orl %ecx,%eax
movl %eax,4(%edi)
movl %edx,%eax
andl 8(%esp),%eax
movl %ebp,%ebx
andl 200(%esp),%ebx
movl %esi,%ecx
andl 104(%esp),%ecx
orl %ebx,%eax
orl %ecx,%eax
movl %eax,8(%edi)
movl %edx,%eax
andl 12(%esp),%eax
movl %ebp,%ebx
andl 204(%esp),%ebx
movl %esi,%ecx
andl 108(%esp),%ecx
orl %ebx,%eax
orl %ecx,%eax
movl %eax,12(%edi)
movl %edx,%eax
andl 16(%esp),%eax
movl %ebp,%ebx
andl 208(%esp),%ebx
movl %esi,%ecx
andl 112(%esp),%ecx
orl %ebx,%eax
orl %ecx,%eax
movl %eax,16(%edi)
movl %edx,%eax
andl 20(%esp),%eax
movl %ebp,%ebx
andl 212(%esp),%ebx
movl %esi,%ecx
andl 116(%esp),%ecx
orl %ebx,%eax
orl %ecx,%eax
movl %eax,20(%edi)
movl %edx,%eax
andl 24(%esp),%eax
movl %ebp,%ebx
andl 216(%esp),%ebx
movl %esi,%ecx
andl 120(%esp),%ecx
orl %ebx,%eax
orl %ecx,%eax
movl %eax,24(%edi)
movl %edx,%eax
andl 28(%esp),%eax
movl %ebp,%ebx
andl 220(%esp),%ebx
movl %esi,%ecx
andl 124(%esp),%ecx
orl %ebx,%eax
orl %ecx,%eax
movl %eax,28(%edi)
movl %edx,%eax
andl 32(%esp),%eax
movl %ebp,%ebx
andl 224(%esp),%ebx
movl %esi,%ecx
andl 128(%esp),%ecx
orl %ebx,%eax
orl %ecx,%eax
movl %eax,32(%edi)
movl %edx,%eax
andl 36(%esp),%eax
movl %ebp,%ebx
andl 228(%esp),%ebx
movl %esi,%ecx
andl 132(%esp),%ecx
orl %ebx,%eax
orl %ecx,%eax
movl %eax,36(%edi)
movl %edx,%eax
andl 40(%esp),%eax
movl %ebp,%ebx
andl 232(%esp),%ebx
movl %esi,%ecx
andl 136(%esp),%ecx
orl %ebx,%eax
orl %ecx,%eax
movl %eax,40(%edi)
movl %edx,%eax
andl 44(%esp),%eax
movl %ebp,%ebx
andl 236(%esp),%ebx
movl %esi,%ecx
andl 140(%esp),%ecx
orl %ebx,%eax
orl %ecx,%eax
movl %eax,44(%edi)
movl %edx,%eax
andl 48(%esp),%eax
movl %ebp,%ebx
andl 240(%esp),%ebx
movl %esi,%ecx
andl 144(%esp),%ecx
orl %ebx,%eax
orl %ecx,%eax
movl %eax,48(%edi)
movl %edx,%eax
andl 52(%esp),%eax
movl %ebp,%ebx
andl 244(%esp),%ebx
movl %esi,%ecx
andl 148(%esp),%ecx
orl %ebx,%eax
orl %ecx,%eax
movl %eax,52(%edi)
movl %edx,%eax
andl 56(%esp),%eax
movl %ebp,%ebx
andl 248(%esp),%ebx
movl %esi,%ecx
andl 152(%esp),%ecx
orl %ebx,%eax
orl %ecx,%eax
movl %eax,56(%edi)
movl %edx,%eax
andl 60(%esp),%eax
movl %ebp,%ebx
andl 252(%esp),%ebx
movl %esi,%ecx
andl 156(%esp),%ecx
orl %ebx,%eax
orl %ecx,%eax
movl %eax,60(%edi)
addl $492,%esp
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size GFp_nistz256_point_add_affine,.-.L_GFp_nistz256_point_add_affine_begin
#endif
.section .note.GNU-stack,"",@progbits