leapfrog asm with numpy operator overloading
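The dump below is the x86-64 assembly Numba emits for a jitted leapfrog_slow function: first the NRT (Numba Runtime) reference-counting helpers, then the fused array-expression kernels and the broadcast helper they rely on, and finally the integrator itself. The gist does not include the Python source, so the sketch below is a hypothetical reconstruction inferred from the symbol names, the Array(double, 2d, C) signatures, the double constants in .LCPI0_* (the first two decode to 0.04 and 0.08), and the sqrt/divide core of the inner loop; the argument names pos, vel, acc, and mass are guesses, not taken from the dump.

# Hypothetical reconstruction -- not the original gist source.
# A Numba-jitted leapfrog step whose "drift" uses NumPy operator overloading
# (vel * dt on whole arrays), which is what generates the __numba_array_expr_*
# kernels and the NRT_MemInfo_alloc_safe_aligned temporaries seen below.
import numpy as np
from numba import njit

@njit
def leapfrog_slow(pos, vel, acc, mass):
    dt = 0.04                             # plausibly the 0.04 constant in .LCPI0_0
    pos += vel * dt                       # array expression: temp buffer + vmulpd/vaddsd loops
    for i in range(pos.shape[0]):         # pairwise kick: the vsqrtsd / vdivsd inner core
        for j in range(pos.shape[0]):
            if i == j:
                continue
            dx = pos[j, 0] - pos[i, 0]
            dy = pos[j, 1] - pos[i, 1]
            dz = pos[j, 2] - pos[i, 2]
            r = np.sqrt(dx * dx + dy * dy + dz * dz)
            s = mass[j] * dt / (r * r * r)
            vel[i, 0] += s * dx
            vel[i, 1] += s * dy
            vel[i, 2] += s * dz

With pos and vel as (N, 3) C-contiguous float64 arrays and mass as (N,), a function like this should compile under njit; the real source may differ in constants, loop bounds, and argument order (acc is unused in this sketch).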
| ----------------------------------ASSEMBLY nrt---------------------------------- | |
| .text | |
| .file "<string>" | |
| .globl nrt_atomic_add | |
| .p2align 4, 0x90 | |
| .type nrt_atomic_add,@function | |
| nrt_atomic_add: | |
| movl $1, %eax | |
| lock xaddq %rax, (%rdi) | |
| addq $1, %rax | |
| retq | |
| .Lfunc_end0: | |
| .size nrt_atomic_add, .Lfunc_end0-nrt_atomic_add | |
| .globl nrt_atomic_sub | |
| .p2align 4, 0x90 | |
| .type nrt_atomic_sub,@function | |
| nrt_atomic_sub: | |
| movq $-1, %rax | |
| lock xaddq %rax, (%rdi) | |
| addq $-1, %rax | |
| retq | |
| .Lfunc_end1: | |
| .size nrt_atomic_sub, .Lfunc_end1-nrt_atomic_sub | |
| .globl nrt_atomic_cas | |
| .p2align 4, 0x90 | |
| .type nrt_atomic_cas,@function | |
| nrt_atomic_cas: | |
| xorl %r8d, %r8d | |
| movq %rsi, %rax | |
| lock cmpxchgq %rdx, (%rdi) | |
| sete %r8b | |
| movq %rax, (%rcx) | |
| movl %r8d, %eax | |
| retq | |
| .Lfunc_end2: | |
| .size nrt_atomic_cas, .Lfunc_end2-nrt_atomic_cas | |
| .globl NRT_MemInfo_data_fast | |
| .p2align 4, 0x90 | |
| .type NRT_MemInfo_data_fast,@function | |
| NRT_MemInfo_data_fast: | |
| movq 24(%rdi), %rax | |
| retq | |
| .Lfunc_end3: | |
| .size NRT_MemInfo_data_fast, .Lfunc_end3-NRT_MemInfo_data_fast | |
| .globl NRT_incref | |
| .p2align 4, 0x90 | |
| .type NRT_incref,@function | |
| NRT_incref: | |
| testq %rdi, %rdi | |
| je .LBB4_1 | |
| lock addq $1, (%rdi) | |
| retq | |
| .LBB4_1: | |
| retq | |
| .Lfunc_end4: | |
| .size NRT_incref, .Lfunc_end4-NRT_incref | |
| .globl NRT_decref | |
| .p2align 4, 0x90 | |
| .type NRT_decref,@function | |
| NRT_decref: | |
| .cfi_startproc | |
| testq %rdi, %rdi | |
| je .LBB5_2 | |
| movq $-1, %rax | |
| lock xaddq %rax, (%rdi) | |
| cmpq $1, %rax | |
| je .LBB5_3 | |
| .LBB5_2: | |
| retq | |
| .LBB5_3: | |
| movabsq $NRT_MemInfo_call_dtor, %rax | |
| jmpq *%rax | |
| .Lfunc_end5: | |
| .size NRT_decref, .Lfunc_end5-NRT_decref | |
| .cfi_endproc | |
| .globl nrt_unresolved_abort | |
| .p2align 4, 0x90 | |
| .type nrt_unresolved_abort,@function | |
| nrt_unresolved_abort: | |
| movabsq $.const.picklebuf.139995443290696, %rax | |
| movq %rax, (%rsi) | |
| movl $1, %eax | |
| retq | |
| .Lfunc_end6: | |
| .size nrt_unresolved_abort, .Lfunc_end6-nrt_unresolved_abort | |
| .type .const.picklebuf.139995443290696,@object | |
| .section .rodata,"a",@progbits | |
| .p2align 3 | |
| .const.picklebuf.139995443290696: | |
| .quad .const.pickledata.139995443290696 | |
| .long 101 | |
| .zero 4 | |
| .size .const.picklebuf.139995443290696, 16 | |
| .type .const.pickledata.139995443290696,@object | |
| .p2align 4 | |
| .const.pickledata.139995443290696: | |
| .ascii "\200\004\225Z\000\000\000\000\000\000\000\214\bbuiltins\224\214\fRuntimeError\224\223\224\2146numba jitted function aborted due to unresolved symbol\224\205\224\206\224." | |
| .size .const.pickledata.139995443290696, 101 | |
| .section ".note.GNU-stack","",@progbits | |
| ================================================================================ | |
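The block above is Numba's NRT (Numba Runtime) support code: atomic reference-count add/sub/compare-and-swap, plus NRT_incref/NRT_decref, which tail-call NRT_MemInfo_call_dtor once the count drops to zero (refct sits at offset 0 of the MemInfo struct, the data pointer at offset 24, as the loads show). A behavioral model in Python, not Numba's actual C implementation, and ignoring the atomicity the lock prefixes provide:

class MemInfo:
    # Toy stand-in for Numba's MemInfo record.
    def __init__(self, data, dtor):
        self.refct = 1
        self.data = data
        self.dtor = dtor

def nrt_incref(mi):
    if mi is not None:
        mi.refct += 1            # lock addq $1, (%rdi)

def nrt_decref(mi):
    if mi is None:
        return
    old = mi.refct               # lock xaddq returns the pre-decrement value
    mi.refct -= 1
    if old == 1:                 # count just dropped to zero
        mi.dtor(mi.data)         # NRT_MemInfo_call_dtor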
| -------------------ASSEMBLY __numba_array_expr_0x7f5339d37b7-------------------- | |
| .text | |
| .file "__numba_array_expr_0x7f5339d37b7" | |
| .globl _ZN13$3cdynamic$3e36__numba_array_expr_0x7f5339d37b7$242Edd | |
| .p2align 4, 0x90 | |
| .type _ZN13$3cdynamic$3e36__numba_array_expr_0x7f5339d37b7$242Edd,@function | |
| _ZN13$3cdynamic$3e36__numba_array_expr_0x7f5339d37b7$242Edd: | |
| vmulsd %xmm1, %xmm0, %xmm0 | |
| vmovsd %xmm0, (%rdi) | |
| xorl %eax, %eax | |
| retq | |
| .Lfunc_end0: | |
| .size _ZN13$3cdynamic$3e36__numba_array_expr_0x7f5339d37b7$242Edd, .Lfunc_end0-_ZN13$3cdynamic$3e36__numba_array_expr_0x7f5339d37b7$242Edd | |
| .section ".note.GNU-stack","",@progbits | |
| ================================================================================ | |
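Each __numba_array_expr_* kernel in this dump is the same scalar multiply (vmulsd): Numba fuses an elementwise array expression into a per-element function, allocates a temporary result with NRT_MemInfo_alloc_safe_aligned, and loops the kernel over it (vectorized with vmulpd inside leapfrog_slow). Roughly, for an expression like out = x * dt on float64 arrays (names illustrative, not from the dump):

import numpy as np

def array_expr_mul(x, dt):
    # Rough model of a fused array expression: allocate the result, then
    # apply the scalar kernel element by element.
    out = np.empty_like(x)                # NRT_MemInfo_alloc_safe_aligned in the dump
    xf, of = x.ravel(), out.ravel()
    for i in range(xf.size):
        of[i] = xf[i] * dt                # the vmulsd kernel
    return out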
| ----------------------------ASSEMBLY _broadcast_onto---------------------------- | |
| .text | |
| .file "_broadcast_onto" | |
| .globl _ZN5numba7targets7npyimpl19_broadcast_onto$243Ex8int64$2ax8int64$2a | |
| .p2align 4, 0x90 | |
| .type _ZN5numba7targets7npyimpl19_broadcast_onto$243Ex8int64$2ax8int64$2a,@function | |
| _ZN5numba7targets7npyimpl19_broadcast_onto$243Ex8int64$2ax8int64$2a: | |
| pushq %r14 | |
| pushq %rbx | |
| movq %r9, %r11 | |
| subq %rcx, %r11 | |
| jge .LBB0_3 | |
| movq $0, (%rdi) | |
| jmp .LBB0_2 | |
| .LBB0_3: | |
| testq %rcx, %rcx | |
| jle .LBB0_13 | |
| movq 24(%rsp), %r10 | |
| leaq (,%r9,8), %rsi | |
| leaq (,%rcx,8), %rax | |
| subq %rax, %rsi | |
| leaq (%r10,%r11,8), %r14 | |
| xorl %edx, %edx | |
| .p2align 4, 0x90 | |
| .LBB0_5: | |
| movq (%r8,%rdx,8), %rbx | |
| movq (%r14,%rdx,8), %rax | |
| cmpq $1, %rax | |
| jne .LBB0_6 | |
| cmpq $1, %rbx | |
| je .LBB0_11 | |
| movq %rbx, (%rsi,%r10) | |
| jmp .LBB0_11 | |
| .p2align 4, 0x90 | |
| .LBB0_6: | |
| cmpq $1, %rbx | |
| je .LBB0_11 | |
| cmpq %rax, %rbx | |
| jne .LBB0_8 | |
| .LBB0_11: | |
| addq $1, %rdx | |
| addq $8, %rsi | |
| cmpq %rcx, %rdx | |
| jl .LBB0_5 | |
| addq %rdx, %r11 | |
| .LBB0_13: | |
| movq %r11, (%rdi) | |
| .LBB0_2: | |
| xorl %eax, %eax | |
| popq %rbx | |
| popq %r14 | |
| retq | |
| .LBB0_8: | |
| addq $-1, %rcx | |
| subq %r9, %rcx | |
| subq %rdx, %rcx | |
| movq %rcx, (%rdi) | |
| jmp .LBB0_2 | |
| .Lfunc_end0: | |
| .size _ZN5numba7targets7npyimpl19_broadcast_onto$243Ex8int64$2ax8int64$2a, .Lfunc_end0-_ZN5numba7targets7npyimpl19_broadcast_onto$243Ex8int64$2ax8int64$2a | |
| .section ".note.GNU-stack","",@progbits | |
| ================================================================================ | |
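_broadcast_onto (from numba.targets.npyimpl) is the shape-compatibility walk used when computing an implicit output shape: the source's dimensions are matched against the destination's trailing dimensions, size-1 destination entries adopt the source extent, and a mismatch produces a non-positive return value. A rough Python rendering of the logic visible above (the real helper takes explicit ndim arguments and raw int64 pointers):

def broadcast_onto(src_shape, dest_shape):
    # dest_shape is a mutable list, conventionally pre-filled with 1s.
    # Returns len(dest_shape) on success, 0 if the source has too many
    # dimensions, or a negative index for an incompatible extent.
    src_ndim, dest_ndim = len(src_shape), len(dest_shape)
    if src_ndim > dest_ndim:
        return 0
    offset = dest_ndim - src_ndim
    for i in range(src_ndim):
        src_dim = src_shape[i]
        dest_dim = dest_shape[offset + i]
        if dest_dim == 1:
            if src_dim != 1:
                dest_shape[offset + i] = src_dim   # adopt the source extent
        elif src_dim != 1 and src_dim != dest_dim:
            return -(offset + i + 1)               # incompatible extents
    return dest_ndim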
| ----------------ASSEMBLY __numba_array_expr__0x7ffff80acc62c81c----------------- | |
| .text | |
| .file "__numba_array_expr__0x7ffff80acc62c81c" | |
| .globl _ZN13$3cdynamic$3e42__numba_array_expr__0x7ffff80acc62c81c$244Edd | |
| .p2align 4, 0x90 | |
| .type _ZN13$3cdynamic$3e42__numba_array_expr__0x7ffff80acc62c81c$244Edd,@function | |
| _ZN13$3cdynamic$3e42__numba_array_expr__0x7ffff80acc62c81c$244Edd: | |
| vmulsd %xmm1, %xmm0, %xmm0 | |
| vmovsd %xmm0, (%rdi) | |
| xorl %eax, %eax | |
| retq | |
| .Lfunc_end0: | |
| .size _ZN13$3cdynamic$3e42__numba_array_expr__0x7ffff80acc62c81c$244Edd, .Lfunc_end0-_ZN13$3cdynamic$3e42__numba_array_expr__0x7ffff80acc62c81c$244Edd | |
| .section ".note.GNU-stack","",@progbits | |
| ================================================================================ | |
| ----------------ASSEMBLY __numba_array_expr__0x7ffff80acc62c82a----------------- | |
| .text | |
| .file "__numba_array_expr__0x7ffff80acc62c82a" | |
| .globl _ZN13$3cdynamic$3e42__numba_array_expr__0x7ffff80acc62c82a$245Edd | |
| .p2align 4, 0x90 | |
| .type _ZN13$3cdynamic$3e42__numba_array_expr__0x7ffff80acc62c82a$245Edd,@function | |
| _ZN13$3cdynamic$3e42__numba_array_expr__0x7ffff80acc62c82a$245Edd: | |
| vmulsd %xmm1, %xmm0, %xmm0 | |
| vmovsd %xmm0, (%rdi) | |
| xorl %eax, %eax | |
| retq | |
| .Lfunc_end0: | |
| .size _ZN13$3cdynamic$3e42__numba_array_expr__0x7ffff80acc62c82a$245Edd, .Lfunc_end0-_ZN13$3cdynamic$3e42__numba_array_expr__0x7ffff80acc62c82a$245Edd | |
| .section ".note.GNU-stack","",@progbits | |
| ================================================================================ | |
| -----------------------------ASSEMBLY leapfrog_slow----------------------------- | |
| .text | |
| .file "<string>" | |
| .section .rodata.cst8,"aM",@progbits,8 | |
| .p2align 3 | |
| .LCPI0_0: | |
| .quad 4585925428558828667 | |
| .LCPI0_1: | |
| .quad 4590429028186199163 | |
| .LCPI0_2: | |
| .quad 4645133162144333824 | |
| .LCPI0_3: | |
| .quad -4768654942473954831 | |
| .text | |
| .globl _ZN8__main__17leapfrog_slow$241E5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi1E1C7mutable7alignedE | |
| .p2align 4, 0x90 | |
| .type _ZN8__main__17leapfrog_slow$241E5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi1E1C7mutable7alignedE,@function | |
| _ZN8__main__17leapfrog_slow$241E5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi1E1C7mutable7alignedE: | |
| .cfi_startproc | |
| pushq %rbp | |
| .Lcfi0: | |
| .cfi_def_cfa_offset 16 | |
| pushq %r15 | |
| .Lcfi1: | |
| .cfi_def_cfa_offset 24 | |
| pushq %r14 | |
| .Lcfi2: | |
| .cfi_def_cfa_offset 32 | |
| pushq %r13 | |
| .Lcfi3: | |
| .cfi_def_cfa_offset 40 | |
| pushq %r12 | |
| .Lcfi4: | |
| .cfi_def_cfa_offset 48 | |
| pushq %rbx | |
| .Lcfi5: | |
| .cfi_def_cfa_offset 56 | |
| subq $520, %rsp | |
| .Lcfi6: | |
| .cfi_def_cfa_offset 576 | |
| .Lcfi7: | |
| .cfi_offset %rbx, -56 | |
| .Lcfi8: | |
| .cfi_offset %r12, -48 | |
| .Lcfi9: | |
| .cfi_offset %r13, -40 | |
| .Lcfi10: | |
| .cfi_offset %r14, -32 | |
| .Lcfi11: | |
| .cfi_offset %r15, -24 | |
| .Lcfi12: | |
| .cfi_offset %rbp, -16 | |
| movq %r9, 304(%rsp) | |
| movq %r8, 296(%rsp) | |
| movq %rcx, %r15 | |
| movq %rsi, 184(%rsp) | |
| movq %rdi, 312(%rsp) | |
| movq 768(%rsp), %r12 | |
| movq 744(%rsp), %r13 | |
| movq 696(%rsp), %rbp | |
| movq 624(%rsp), %rbx | |
| movabsq $NRT_incref, %r14 | |
| movq %r15, %rdi | |
| callq *%r14 | |
| movq %rbx, %rdi | |
| callq *%r14 | |
| movq %rbp, %rdi | |
| callq *%r14 | |
| movq %r12, %rdi | |
| movq 672(%rsp), %r12 | |
| callq *%r14 | |
| movq 728(%rsp), %rbx | |
| movq 736(%rsp), %rbp | |
| movq 600(%rsp), %rsi | |
| movq %rbp, %rax | |
| sarq $63, %rax | |
| andq %rbp, %rax | |
| movq %rax, 360(%rsp) | |
| imulq %r13, %rax | |
| leaq -1(%rsi), %rcx | |
| movq %rcx, 72(%rsp) | |
| movq %r13, %rcx | |
| notq %rcx | |
| cmpq $-2, %rcx | |
| movq $-1, %rdx | |
| cmovgq %rcx, %rdx | |
| leaq (%rdx,%r13), %rcx | |
| leaq 8(,%rcx,8), %rcx | |
| movq %rcx, 352(%rsp) | |
| leaq -1(%r12), %rcx | |
| movq %rcx, 136(%rsp) | |
| movl %esi, %ecx | |
| andl $3, %ecx | |
| movq %rcx, 80(%rsp) | |
| testq %rbp, %rbp | |
| setle %r8b | |
| testq %r13, %r13 | |
| setle %cl | |
| orb %r8b, %cl | |
| movb %cl, 103(%rsp) | |
| movl %r12d, %ebp | |
| andl $3, %ebp | |
| movq 656(%rsp), %r8 | |
| leaq 56(%r8), %rcx | |
| movq %rcx, 232(%rsp) | |
| leaq (,%rsi,8), %rcx | |
| movq %rcx, 88(%rsp) | |
| movq 584(%rsp), %rcx | |
| leaq 24(%rcx), %rcx | |
| movq %rcx, 200(%rsp) | |
| addq $-1, %rax | |
| subq %rdx, %rax | |
| leaq (%rbx,%rax,8), %rax | |
| movq %rax, 344(%rsp) | |
| leaq (,%r13,8), %rax | |
| movq %rax, 336(%rsp) | |
| leaq 56(%rbx), %rax | |
| movq %rax, 376(%rsp) | |
| movq %rsi, %rdx | |
| negq %rdx | |
| movabsq $.LCPI0_0, %rax | |
| vmovsd (%rax), %xmm0 | |
| vbroadcastsd (%rax), %ymm1 | |
| movabsq $.LCPI0_3, %rax | |
| vmovsd (%rax), %xmm2 | |
| movabsq $.LCPI0_1, %rax | |
| vmovsd (%rax), %xmm3 | |
| vbroadcastsd (%rax), %ymm4 | |
| movq %r12, %rsi | |
| negq %rsi | |
| vxorpd %xmm5, %xmm5, %xmm5 | |
| leaq (,%r12,8), %rax | |
| movq %rax, 152(%rsp) | |
| movq %r8, %rcx | |
| leaq 24(%rcx), %rax | |
| movq %rax, 368(%rsp) | |
| leaq 224(%rcx), %rax | |
| movq %rax, 208(%rsp) | |
| leaq 224(%rbx), %rax | |
| movq %rax, 320(%rsp) | |
| movq %r15, 168(%rsp) | |
| movq %rbp, 176(%rsp) | |
| movq %rdx, 240(%rsp) | |
| vmovsd %xmm0, 224(%rsp) | |
| vmovupd %ymm1, 448(%rsp) | |
| vmovsd %xmm2, 328(%rsp) | |
| vmovsd %xmm3, 216(%rsp) | |
| vmovupd %ymm4, 480(%rsp) | |
| movq %rsi, 392(%rsp) | |
| .LBB0_1: | |
| movq $1, 280(%rsp) | |
| movq $1, 288(%rsp) | |
| movq 664(%rsp), %rax | |
| movq %rax, %rcx | |
| movq %rcx, 432(%rsp) | |
| movq %r12, 440(%rsp) | |
| movq %r12, %rax | |
| cmpq $1, %rcx | |
| je .LBB0_3 | |
| movq 664(%rsp), %rax | |
| movq %rax, 280(%rsp) | |
| movq 440(%rsp), %rax | |
| movq 288(%rsp), %r13 | |
| cmpq $1, %r13 | |
| jne .LBB0_6 | |
| .LBB0_3: | |
| cmpq $1, %rax | |
| jne .LBB0_5 | |
| movl $1, %r13d | |
| jmp .LBB0_8 | |
| .p2align 4, 0x90 | |
| .LBB0_5: | |
| movq %rax, 288(%rsp) | |
| movq %rax, %r13 | |
| jmp .LBB0_8 | |
| .p2align 4, 0x90 | |
| .LBB0_6: | |
| cmpq $1, %rax | |
| je .LBB0_8 | |
| cmpq %r13, %rax | |
| jne .LBB0_454 | |
| .p2align 4, 0x90 | |
| .LBB0_8: | |
| vmovsd %xmm5, 384(%rsp) | |
| movq 280(%rsp), %rbx | |
| movq %rbx, %rdi | |
| imulq %r13, %rdi | |
| shlq $3, %rdi | |
| movl $32, %esi | |
| movabsq $NRT_MemInfo_alloc_safe_aligned, %rax | |
| vzeroupper | |
| callq *%rax | |
| vmovupd 448(%rsp), %ymm5 | |
| vmovsd 224(%rsp), %xmm4 | |
| movq %rax, 144(%rsp) | |
| movq 24(%rax), %r15 | |
| movq %rbx, 24(%rsp) | |
| testq %rbx, %rbx | |
| jle .LBB0_89 | |
| testq %r13, %r13 | |
| jle .LBB0_89 | |
| leaq -1(%r13), %rax | |
| movq %rax, 16(%rsp) | |
| movq 656(%rsp), %rax | |
| leaq (%rax,%r13,8), %rax | |
| movq %rax, 160(%rsp) | |
| leaq -16(%r13), %rax | |
| shrq $4, %rax | |
| movq %rax, 56(%rsp) | |
| leal 1(%rax), %eax | |
| movq %r13, %rsi | |
| andq $-16, %rsi | |
| andl $1, %eax | |
| movq %rax, 104(%rsp) | |
| leaq 96(%r15), %rcx | |
| leaq (,%r13,8), %rax | |
| movq %rax, 112(%rsp) | |
| movq %r15, %rdx | |
| subq $-128, %rdx | |
| movq %rsi, 64(%rsp) | |
| negq %rsi | |
| movq %rsi, 128(%rsp) | |
| leaq 224(%r15), %rax | |
| movl $7, %esi | |
| movq %rsi, 120(%rsp) | |
| xorl %esi, %esi | |
| movq %rsi, 8(%rsp) | |
| movl $16, %r10d | |
| movl $4, %r8d | |
| xorl %r11d, %r11d | |
| xorl %r9d, %r9d | |
| xorl %esi, %esi | |
| movq %rsi, 32(%rsp) | |
| xorl %esi, %esi | |
| movq %rsi, 48(%rsp) | |
| jmp .LBB0_37 | |
| .LBB0_11: | |
| cmpq $0, 104(%rsp) | |
| jne .LBB0_19 | |
| xorl %esi, %esi | |
| jmp .LBB0_20 | |
| .LBB0_13: | |
| cmpq $0, 104(%rsp) | |
| jne .LBB0_24 | |
| xorl %esi, %esi | |
| cmpq $0, 56(%rsp) | |
| jne .LBB0_25 | |
| jmp .LBB0_27 | |
| .LBB0_15: | |
| cmpq $0, 104(%rsp) | |
| jne .LBB0_28 | |
| xorl %edi, %edi | |
| cmpq $0, 56(%rsp) | |
| jne .LBB0_29 | |
| jmp .LBB0_31 | |
| .LBB0_17: | |
| cmpq $0, 104(%rsp) | |
| jne .LBB0_33 | |
| xorl %esi, %esi | |
| cmpq $0, 56(%rsp) | |
| jne .LBB0_34 | |
| jmp .LBB0_36 | |
| .LBB0_19: | |
| movq 656(%rsp), %rsi | |
| vmulpd (%rsi,%rax,8), %ymm5, %ymm0 | |
| vmulpd 32(%rsi,%rax,8), %ymm5, %ymm1 | |
| vmulpd 64(%rsi,%rax,8), %ymm5, %ymm2 | |
| vmulpd 96(%rsi,%rax,8), %ymm5, %ymm3 | |
| vmovupd %ymm0, (%r15,%rbp,8) | |
| vmovupd %ymm1, 32(%r15,%rbp,8) | |
| vmovupd %ymm2, 64(%r15,%rbp,8) | |
| vmovupd %ymm3, 96(%r15,%rbp,8) | |
| movl $16, %esi | |
| .LBB0_20: | |
| cmpq $0, 56(%rsp) | |
| movq 656(%rsp), %rbx | |
| je .LBB0_23 | |
| movq 128(%rsp), %rax | |
| leaq (%rax,%rsi), %rax | |
| leaq -96(,%rsi,8), %rdi | |
| .p2align 4, 0x90 | |
| .LBB0_22: | |
| leaq (%rbx,%rdi), %rsi | |
| vmulpd -32(%rsi,%r10,8), %ymm5, %ymm0 | |
| vmulpd (%rsi,%r10,8), %ymm5, %ymm1 | |
| vmulpd 32(%rsi,%r10,8), %ymm5, %ymm2 | |
| vmulpd 64(%rsi,%r10,8), %ymm5, %ymm3 | |
| vmovupd %ymm0, (%rdi,%rcx) | |
| vmovupd %ymm1, 32(%rdi,%rcx) | |
| vmovupd %ymm2, 64(%rdi,%rcx) | |
| vmovupd %ymm3, 96(%rdi,%rcx) | |
| vmulpd 96(%rsi,%r10,8), %ymm5, %ymm0 | |
| vmulpd 128(%rsi,%r10,8), %ymm5, %ymm1 | |
| vmulpd 160(%rsi,%r10,8), %ymm5, %ymm2 | |
| vmulpd 192(%rsi,%r10,8), %ymm5, %ymm3 | |
| vmovupd %ymm0, 128(%rdi,%rcx) | |
| vmovupd %ymm1, 160(%rdi,%rcx) | |
| vmovupd %ymm2, 192(%rdi,%rcx) | |
| vmovupd %ymm3, 224(%rdi,%rcx) | |
| addq $256, %rdi | |
| addq $32, %rax | |
| jne .LBB0_22 | |
| .LBB0_23: | |
| movq %r9, 48(%rsp) | |
| movq 16(%rsp), %rax | |
| movq %rax, 32(%rsp) | |
| movq 64(%rsp), %rax | |
| movq %rax, %rbx | |
| cmpq %rax, %r13 | |
| jne .LBB0_44 | |
| jmp .LBB0_88 | |
| .LBB0_24: | |
| movq 656(%rsp), %rax | |
| vmulpd (%rax,%rbx,8), %ymm5, %ymm0 | |
| vmulpd 32(%rax,%rbx,8), %ymm5, %ymm1 | |
| vmulpd 64(%rax,%rbx,8), %ymm5, %ymm2 | |
| vmulpd 96(%rax,%rbx,8), %ymm5, %ymm3 | |
| vmovupd %ymm0, (%r15,%rbp,8) | |
| vmovupd %ymm1, 32(%r15,%rbp,8) | |
| vmovupd %ymm2, 64(%r15,%rbp,8) | |
| vmovupd %ymm3, 96(%r15,%rbp,8) | |
| movl $16, %esi | |
| cmpq $0, 56(%rsp) | |
| je .LBB0_27 | |
| .LBB0_25: | |
| movq 128(%rsp), %rax | |
| leaq (%rax,%rsi), %rax | |
| movq 208(%rsp), %rdi | |
| leaq (%rdi,%rbx,8), %rdi | |
| leaq -96(,%rsi,8), %rbp | |
| .p2align 4, 0x90 | |
| .LBB0_26: | |
| vmulpd -128(%rdi,%rbp), %ymm5, %ymm0 | |
| vmulpd -96(%rdi,%rbp), %ymm5, %ymm1 | |
| vmulpd -64(%rdi,%rbp), %ymm5, %ymm2 | |
| vmulpd -32(%rdi,%rbp), %ymm5, %ymm3 | |
| vmovupd %ymm0, (%rbp,%rcx) | |
| vmovupd %ymm1, 32(%rbp,%rcx) | |
| vmovupd %ymm2, 64(%rbp,%rcx) | |
| vmovupd %ymm3, 96(%rbp,%rcx) | |
| vmulpd (%rdi,%rbp), %ymm5, %ymm0 | |
| vmulpd 32(%rdi,%rbp), %ymm5, %ymm1 | |
| vmulpd 64(%rdi,%rbp), %ymm5, %ymm2 | |
| vmulpd 96(%rdi,%rbp), %ymm5, %ymm3 | |
| vmovupd %ymm0, 128(%rbp,%rcx) | |
| vmovupd %ymm1, 160(%rbp,%rcx) | |
| vmovupd %ymm2, 192(%rbp,%rcx) | |
| vmovupd %ymm3, 224(%rbp,%rcx) | |
| addq $256, %rbp | |
| addq $32, %rax | |
| jne .LBB0_26 | |
| .LBB0_27: | |
| movq 64(%rsp), %rax | |
| movq %rax, %rdi | |
| cmpq %rax, %r13 | |
| jne .LBB0_57 | |
| jmp .LBB0_63 | |
| .LBB0_28: | |
| vbroadcastsd (%rbx), %ymm0 | |
| vmulpd %ymm5, %ymm0, %ymm0 | |
| vmovupd %ymm0, (%r15,%rbp,8) | |
| vmovupd %ymm0, 32(%r15,%rbp,8) | |
| vmovupd %ymm0, 64(%r15,%rbp,8) | |
| vmovupd %ymm0, 96(%r15,%rbp,8) | |
| movl $16, %edi | |
| cmpq $0, 56(%rsp) | |
| je .LBB0_31 | |
| .LBB0_29: | |
| vbroadcastsd (%rbx), %ymm0 | |
| vmulpd %ymm5, %ymm0, %ymm0 | |
| leaq 96(,%rdi,8), %rax | |
| movq 64(%rsp), %rsi | |
| subq %rdi, %rsi | |
| .p2align 4, 0x90 | |
| .LBB0_30: | |
| vmovupd %ymm0, -224(%rax,%rdx) | |
| vmovupd %ymm0, -192(%rax,%rdx) | |
| vmovupd %ymm0, -160(%rax,%rdx) | |
| vmovupd %ymm0, -128(%rax,%rdx) | |
| vmovupd %ymm0, -96(%rax,%rdx) | |
| vmovupd %ymm0, -64(%rax,%rdx) | |
| vmovupd %ymm0, -32(%rax,%rdx) | |
| vmovupd %ymm0, (%rax,%rdx) | |
| addq $256, %rax | |
| addq $-32, %rsi | |
| jne .LBB0_30 | |
| .LBB0_31: | |
| movq 64(%rsp), %rax | |
| movq %rax, %rdi | |
| cmpq %rax, %r13 | |
| jne .LBB0_69 | |
| jmp .LBB0_87 | |
| .LBB0_33: | |
| vbroadcastsd (%rbx), %ymm0 | |
| vmulpd %ymm5, %ymm0, %ymm0 | |
| vmovupd %ymm0, (%r15,%rbp,8) | |
| vmovupd %ymm0, 32(%r15,%rbp,8) | |
| vmovupd %ymm0, 64(%r15,%rbp,8) | |
| vmovupd %ymm0, 96(%r15,%rbp,8) | |
| movl $16, %esi | |
| cmpq $0, 56(%rsp) | |
| je .LBB0_36 | |
| .LBB0_34: | |
| vbroadcastsd (%rbx), %ymm0 | |
| vmulpd %ymm5, %ymm0, %ymm0 | |
| movq 128(%rsp), %rax | |
| leaq (%rax,%rsi), %rax | |
| movq 40(%rsp), %rdi | |
| leaq (%rdi,%rsi,8), %rsi | |
| .p2align 4, 0x90 | |
| .LBB0_35: | |
| vmovupd %ymm0, -224(%rsi) | |
| vmovupd %ymm0, -192(%rsi) | |
| vmovupd %ymm0, -160(%rsi) | |
| vmovupd %ymm0, -128(%rsi) | |
| vmovupd %ymm0, -96(%rsi) | |
| vmovupd %ymm0, -64(%rsi) | |
| vmovupd %ymm0, -32(%rsi) | |
| vmovupd %ymm0, (%rsi) | |
| addq $256, %rsi | |
| addq $32, %rax | |
| jne .LBB0_35 | |
| .LBB0_36: | |
| movq 64(%rsp), %rax | |
| movq %rax, %rdi | |
| cmpq %rax, %r13 | |
| jne .LBB0_79 | |
| jmp .LBB0_88 | |
| .p2align 4, 0x90 | |
| .LBB0_37: | |
| movq %rax, 40(%rsp) | |
| movq %r13, %rbp | |
| imulq %r9, %rbp | |
| leaq (%r13,%rbp), %rax | |
| leaq (%r15,%rbp,8), %rdi | |
| leaq (%r15,%rax,8), %r14 | |
| cmpq $1, 664(%rsp) | |
| jbe .LBB0_51 | |
| movq %r9, %rax | |
| imulq %r12, %rax | |
| cmpq $2, %r12 | |
| jb .LBB0_64 | |
| cmpq $15, %r13 | |
| jbe .LBB0_43 | |
| cmpq $0, 64(%rsp) | |
| je .LBB0_43 | |
| leaq (%r13,%rax), %rsi | |
| movq 656(%rsp), %rbx | |
| leaq (%rbx,%rsi,8), %rsi | |
| cmpq %rsi, %rdi | |
| jae .LBB0_11 | |
| leaq (%rbx,%rax,8), %rsi | |
| cmpq %r14, %rsi | |
| jae .LBB0_11 | |
| .p2align 4, 0x90 | |
| .LBB0_43: | |
| xorl %ebx, %ebx | |
| .LBB0_44: | |
| movl %r13d, %ebp | |
| subl %ebx, %ebp | |
| movq 16(%rsp), %r14 | |
| subq %rbx, %r14 | |
| andq $7, %rbp | |
| je .LBB0_47 | |
| leaq (%rbx,%r11), %rax | |
| leaq (%r15,%rax,8), %rax | |
| movq 8(%rsp), %rsi | |
| leaq (%rbx,%rsi), %rsi | |
| movq 656(%rsp), %rdi | |
| leaq (%rdi,%rsi,8), %rsi | |
| negq %rbp | |
| .p2align 4, 0x90 | |
| .LBB0_46: | |
| vmulsd (%rsi), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rax) | |
| addq $1, %rbx | |
| addq $8, %rax | |
| addq $8, %rsi | |
| addq $1, %rbp | |
| jne .LBB0_46 | |
| .LBB0_47: | |
| movq %r9, 48(%rsp) | |
| movq 16(%rsp), %rax | |
| movq %rax, 32(%rsp) | |
| cmpq $7, %r14 | |
| jb .LBB0_88 | |
| leaq (%rbx,%r8), %rax | |
| leaq (%r15,%rax,8), %rdi | |
| movq 120(%rsp), %rax | |
| leaq (%rbx,%rax), %rax | |
| movq 656(%rsp), %rsi | |
| leaq (%rsi,%rax,8), %rbp | |
| movq %r13, %rax | |
| subq %rbx, %rax | |
| .p2align 4, 0x90 | |
| .LBB0_49: | |
| vmulsd -56(%rbp), %xmm4, %xmm0 | |
| vmovsd %xmm0, -32(%rdi) | |
| vmulsd -48(%rbp), %xmm4, %xmm0 | |
| vmovsd %xmm0, -24(%rdi) | |
| vmulsd -40(%rbp), %xmm4, %xmm0 | |
| vmovsd %xmm0, -16(%rdi) | |
| vmulsd -32(%rbp), %xmm4, %xmm0 | |
| vmovsd %xmm0, -8(%rdi) | |
| vmulsd -24(%rbp), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rdi) | |
| vmulsd -16(%rbp), %xmm4, %xmm0 | |
| vmovsd %xmm0, 8(%rdi) | |
| vmulsd -8(%rbp), %xmm4, %xmm0 | |
| vmovsd %xmm0, 16(%rdi) | |
| vmulsd (%rbp), %xmm4, %xmm0 | |
| vmovsd %xmm0, 24(%rdi) | |
| addq $64, %rdi | |
| addq $64, %rbp | |
| addq $-8, %rax | |
| jne .LBB0_49 | |
| movq %r9, 48(%rsp) | |
| jmp .LBB0_63 | |
| .p2align 4, 0x90 | |
| .LBB0_51: | |
| movq 48(%rsp), %rbx | |
| imulq %r12, %rbx | |
| cmpq $2, %r12 | |
| jb .LBB0_74 | |
| cmpq $16, %r13 | |
| jb .LBB0_56 | |
| cmpq $0, 64(%rsp) | |
| je .LBB0_56 | |
| movq 160(%rsp), %rax | |
| leaq (%rax,%rbx,8), %rax | |
| cmpq %rax, %rdi | |
| jae .LBB0_13 | |
| movq 656(%rsp), %rax | |
| leaq (%rax,%rbx,8), %rax | |
| cmpq %r14, %rax | |
| jae .LBB0_13 | |
| .p2align 4, 0x90 | |
| .LBB0_56: | |
| xorl %edi, %edi | |
| .LBB0_57: | |
| movl %r13d, %eax | |
| subl %edi, %eax | |
| movq 16(%rsp), %r14 | |
| subq %rdi, %r14 | |
| andq $7, %rax | |
| je .LBB0_60 | |
| leaq (%rdi,%r11), %rsi | |
| leaq (%r15,%rsi,8), %rsi | |
| movq 656(%rsp), %rbp | |
| leaq (%rbp,%rbx,8), %rbp | |
| negq %rax | |
| .p2align 4, 0x90 | |
| .LBB0_59: | |
| vmulsd (%rbp,%rdi,8), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rsi) | |
| addq $1, %rdi | |
| addq $8, %rsi | |
| addq $1, %rax | |
| jne .LBB0_59 | |
| .LBB0_60: | |
| cmpq $7, %r14 | |
| jb .LBB0_63 | |
| leaq (%rdi,%r8), %rax | |
| leaq (%r15,%rax,8), %rbp | |
| addq %rdi, %rbx | |
| movq 232(%rsp), %rax | |
| leaq (%rax,%rbx,8), %rbx | |
| movq %r13, %rax | |
| subq %rdi, %rax | |
| .p2align 4, 0x90 | |
| .LBB0_62: | |
| vmulsd -56(%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, -32(%rbp) | |
| vmulsd -48(%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, -24(%rbp) | |
| vmulsd -40(%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, -16(%rbp) | |
| vmulsd -32(%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, -8(%rbp) | |
| vmulsd -24(%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rbp) | |
| vmulsd -16(%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, 8(%rbp) | |
| vmulsd -8(%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, 16(%rbp) | |
| vmulsd (%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, 24(%rbp) | |
| addq $64, %rbp | |
| addq $64, %rbx | |
| addq $-8, %rax | |
| jne .LBB0_62 | |
| .LBB0_63: | |
| movq 16(%rsp), %rax | |
| movq %rax, 32(%rsp) | |
| jmp .LBB0_88 | |
| .p2align 4, 0x90 | |
| .LBB0_64: | |
| addq 32(%rsp), %rax | |
| movq 656(%rsp), %rsi | |
| leaq (%rsi,%rax,8), %rbx | |
| cmpq $15, %r13 | |
| jbe .LBB0_68 | |
| cmpq $0, 64(%rsp) | |
| je .LBB0_68 | |
| cmpq %rbx, %rdi | |
| jae .LBB0_15 | |
| cmpq %r14, %rbx | |
| jae .LBB0_15 | |
| .p2align 4, 0x90 | |
| .LBB0_68: | |
| xorl %edi, %edi | |
| .LBB0_69: | |
| movl %r13d, %eax | |
| subl %edi, %eax | |
| movq 16(%rsp), %rbp | |
| subq %rdi, %rbp | |
| andq $7, %rax | |
| je .LBB0_72 | |
| leaq (%rdi,%r11), %rsi | |
| leaq (%r15,%rsi,8), %rsi | |
| negq %rax | |
| .p2align 4, 0x90 | |
| .LBB0_71: | |
| vmulsd (%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rsi) | |
| addq $1, %rdi | |
| addq $8, %rsi | |
| addq $1, %rax | |
| jne .LBB0_71 | |
| .LBB0_72: | |
| cmpq $7, %rbp | |
| jb .LBB0_87 | |
| leaq (%rdi,%r8), %rax | |
| leaq (%r15,%rax,8), %rax | |
| movq %r13, %rbp | |
| subq %rdi, %rbp | |
| .p2align 4, 0x90 | |
| .LBB0_86: | |
| vmulsd (%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, -32(%rax) | |
| vmulsd (%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, -24(%rax) | |
| vmulsd (%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, -16(%rax) | |
| vmulsd (%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, -8(%rax) | |
| vmulsd (%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rax) | |
| vmulsd (%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, 8(%rax) | |
| vmulsd (%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, 16(%rax) | |
| vmulsd (%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, 24(%rax) | |
| addq $64, %rax | |
| addq $-8, %rbp | |
| jne .LBB0_86 | |
| .LBB0_87: | |
| movq %r9, 48(%rsp) | |
| jmp .LBB0_88 | |
| .p2align 4, 0x90 | |
| .LBB0_74: | |
| addq 32(%rsp), %rbx | |
| movq 656(%rsp), %rax | |
| leaq (%rax,%rbx,8), %rbx | |
| cmpq $15, %r13 | |
| jbe .LBB0_78 | |
| cmpq $0, 64(%rsp) | |
| je .LBB0_78 | |
| cmpq %rbx, %rdi | |
| jae .LBB0_17 | |
| cmpq %r14, %rbx | |
| jae .LBB0_17 | |
| .p2align 4, 0x90 | |
| .LBB0_78: | |
| xorl %edi, %edi | |
| .LBB0_79: | |
| movl %r13d, %eax | |
| subl %edi, %eax | |
| movq 16(%rsp), %rbp | |
| subq %rdi, %rbp | |
| andq $7, %rax | |
| je .LBB0_82 | |
| leaq (%rdi,%r11), %rsi | |
| leaq (%r15,%rsi,8), %rsi | |
| negq %rax | |
| .p2align 4, 0x90 | |
| .LBB0_81: | |
| vmulsd (%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rsi) | |
| addq $1, %rdi | |
| addq $8, %rsi | |
| addq $1, %rax | |
| jne .LBB0_81 | |
| .LBB0_82: | |
| cmpq $7, %rbp | |
| jb .LBB0_88 | |
| leaq (%rdi,%r8), %rax | |
| leaq (%r15,%rax,8), %rax | |
| movq %r13, %rbp | |
| subq %rdi, %rbp | |
| .p2align 4, 0x90 | |
| .LBB0_84: | |
| vmulsd (%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, -32(%rax) | |
| vmulsd (%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, -24(%rax) | |
| vmulsd (%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, -16(%rax) | |
| vmulsd (%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, -8(%rax) | |
| vmulsd (%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rax) | |
| vmulsd (%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, 8(%rax) | |
| vmulsd (%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, 16(%rax) | |
| vmulsd (%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, 24(%rax) | |
| addq $64, %rax | |
| addq $-8, %rbp | |
| jne .LBB0_84 | |
| .p2align 4, 0x90 | |
| .LBB0_88: | |
| addq $1, %r9 | |
| movq 112(%rsp), %rax | |
| addq %rax, %rcx | |
| addq %r13, %r11 | |
| addq %r13, %r8 | |
| addq %r12, %r10 | |
| addq %r12, 8(%rsp) | |
| addq %r12, 120(%rsp) | |
| addq %rax, %rdx | |
| movq 40(%rsp), %rsi | |
| addq %rax, %rsi | |
| movq %rsi, %rax | |
| cmpq 24(%rsp), %r9 | |
| jne .LBB0_37 | |
| .LBB0_89: | |
| movq 168(%rsp), %rdi | |
| movabsq $NRT_incref, %rax | |
| vzeroupper | |
| callq *%rax | |
| cmpq $0, 592(%rsp) | |
| movq 600(%rsp), %r10 | |
| movq 584(%rsp), %r8 | |
| movq 72(%rsp), %rbx | |
| movq 80(%rsp), %rbp | |
| movq 88(%rsp), %rdi | |
| movq 24(%rsp), %r11 | |
| jle .LBB0_156 | |
| xorl %r12d, %r12d | |
| movq %r8, %r14 | |
| xorl %r9d, %r9d | |
| xorl %esi, %esi | |
| xorl %ecx, %ecx | |
| xorl %eax, %eax | |
| xorl %edx, %edx | |
| movq %rdx, 16(%rsp) | |
| .p2align 4, 0x90 | |
| .LBB0_91: | |
| testq %r10, %r10 | |
| jle .LBB0_155 | |
| cmpq $2, 592(%rsp) | |
| jb .LBB0_99 | |
| cmpq $2, %r10 | |
| jb .LBB0_104 | |
| xorl %edx, %edx | |
| testq %rbp, %rbp | |
| je .LBB0_96 | |
| .p2align 4, 0x90 | |
| .LBB0_95: | |
| cmpq $1, %r11 | |
| cmovaq %r9, %rcx | |
| cmpq $1, %r13 | |
| vmovsd (%r14,%rdx,8), %xmm0 | |
| cmovaq %rdx, %rsi | |
| movq %rcx, %rax | |
| imulq %r13, %rax | |
| addq %rsi, %rax | |
| vaddsd (%r15,%rax,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r14,%rdx,8) | |
| addq $1, %rdx | |
| cmpq %rdx, %rbp | |
| jne .LBB0_95 | |
| .LBB0_96: | |
| movq %r9, 16(%rsp) | |
| movq %rbx, %rax | |
| cmpq $3, %rbx | |
| jb .LBB0_155 | |
| .p2align 4, 0x90 | |
| .LBB0_97: | |
| movq %rsi, %rax | |
| leaq 3(%rdx), %rsi | |
| cmpq $1, %r13 | |
| cmovbeq %rax, %rsi | |
| leaq 2(%rdx), %rbp | |
| cmpq $1, %r13 | |
| cmovbeq %rax, %rbp | |
| leaq 1(%rdx), %rbx | |
| cmpq $1, %r13 | |
| cmovbeq %rax, %rbx | |
| cmovaq %rdx, %rax | |
| cmpq $1, %r11 | |
| vmovsd (%r14,%rdx,8), %xmm0 | |
| cmovaq %r9, %rcx | |
| movq %rcx, %rdi | |
| imulq %r13, %rdi | |
| addq %rdi, %rax | |
| vaddsd (%r15,%rax,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r14,%rdx,8) | |
| vmovsd 8(%r14,%rdx,8), %xmm0 | |
| addq %rdi, %rbx | |
| vaddsd (%r15,%rbx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 8(%r14,%rdx,8) | |
| vmovsd 16(%r14,%rdx,8), %xmm0 | |
| addq %rdi, %rbp | |
| vaddsd (%r15,%rbp,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 16(%r14,%rdx,8) | |
| vmovsd 24(%r14,%rdx,8), %xmm0 | |
| addq %rsi, %rdi | |
| vaddsd (%r15,%rdi,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 24(%r14,%rdx,8) | |
| addq $4, %rdx | |
| cmpq %r10, %rdx | |
| jne .LBB0_97 | |
| movq %r9, 16(%rsp) | |
| movq 72(%rsp), %rbx | |
| movq %rbx, %rax | |
| jmp .LBB0_148 | |
| .p2align 4, 0x90 | |
| .LBB0_99: | |
| movq %r9, 40(%rsp) | |
| movq 16(%rsp), %r9 | |
| imulq %r10, %r9 | |
| cmpq $2, %r10 | |
| jb .LBB0_109 | |
| cmpq $1, %r11 | |
| jbe .LBB0_114 | |
| movq 40(%rsp), %rdx | |
| imulq %r13, %rdx | |
| testq %rbp, %rbp | |
| je .LBB0_142 | |
| leaq (%r8,%r9,8), %rdi | |
| xorl %eax, %eax | |
| .p2align 4, 0x90 | |
| .LBB0_103: | |
| cmpq $1, %r13 | |
| vmovsd (%rdi,%rax,8), %xmm0 | |
| cmovaq %rax, %rsi | |
| leaq (%rsi,%rdx), %rcx | |
| vaddsd (%r15,%rcx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r14,%rax,8) | |
| addq $1, %rax | |
| cmpq %rax, %rbp | |
| jne .LBB0_103 | |
| jmp .LBB0_143 | |
| .LBB0_104: | |
| movq %r12, 48(%rsp) | |
| movq %r9, %r12 | |
| imulq %r10, %r12 | |
| addq %rax, %r12 | |
| cmpq $1, %r11 | |
| movq %rax, 8(%rsp) | |
| jbe .LBB0_117 | |
| movq %r9, %rdx | |
| imulq %r13, %r9 | |
| xorl %eax, %eax | |
| testq %rbp, %rbp | |
| je .LBB0_107 | |
| .p2align 4, 0x90 | |
| .LBB0_106: | |
| cmpq $1, %r13 | |
| vmovsd (%r8,%r12,8), %xmm0 | |
| cmovaq %rax, %rsi | |
| leaq (%rsi,%r9), %rcx | |
| vaddsd (%r15,%rcx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r14,%rax,8) | |
| addq $1, %rax | |
| cmpq %rax, %rbp | |
| jne .LBB0_106 | |
| .LBB0_107: | |
| cmpq $3, %rbx | |
| jae .LBB0_126 | |
| movq %rdx, %r9 | |
| movq %r9, 16(%rsp) | |
| movq %r9, %rcx | |
| movq 48(%rsp), %r12 | |
| jmp .LBB0_141 | |
| .LBB0_109: | |
| addq %rax, %r9 | |
| cmpq $1, %r11 | |
| movq %rax, 8(%rsp) | |
| jbe .LBB0_121 | |
| movq 40(%rsp), %rdx | |
| imulq %r13, %rdx | |
| xorl %eax, %eax | |
| testq %rbp, %rbp | |
| je .LBB0_112 | |
| .p2align 4, 0x90 | |
| .LBB0_111: | |
| cmpq $1, %r13 | |
| vmovsd (%r8,%r9,8), %xmm0 | |
| cmovaq %rax, %rsi | |
| leaq (%rsi,%rdx), %rcx | |
| vaddsd (%r15,%rcx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r14,%rax,8) | |
| addq $1, %rax | |
| cmpq %rax, %rbp | |
| jne .LBB0_111 | |
| .LBB0_112: | |
| cmpq $3, %rbx | |
| jae .LBB0_129 | |
| movq 40(%rsp), %r9 | |
| movq %r9, %rcx | |
| jmp .LBB0_141 | |
| .LBB0_114: | |
| movq %rcx, %r8 | |
| imulq %r13, %r8 | |
| testq %rbp, %rbp | |
| je .LBB0_149 | |
| movq 584(%rsp), %rax | |
| leaq (%rax,%r9,8), %rbp | |
| xorl %eax, %eax | |
| movq 80(%rsp), %rdx | |
| .p2align 4, 0x90 | |
| .LBB0_116: | |
| cmpq $1, %r13 | |
| vmovsd (%rbp,%rax,8), %xmm0 | |
| cmovaq %rax, %rsi | |
| leaq (%rsi,%r8), %rdi | |
| vaddsd (%r15,%rdi,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r14,%rax,8) | |
| addq $1, %rax | |
| cmpq %rax, %rdx | |
| jne .LBB0_116 | |
| jmp .LBB0_150 | |
| .LBB0_117: | |
| movq %r9, 40(%rsp) | |
| movq %rcx, %r9 | |
| imulq %r13, %r9 | |
| xorl %eax, %eax | |
| testq %rbp, %rbp | |
| je .LBB0_119 | |
| .p2align 4, 0x90 | |
| .LBB0_118: | |
| cmpq $1, %r13 | |
| vmovsd (%r8,%r12,8), %xmm0 | |
| cmovaq %rax, %rsi | |
| leaq (%rsi,%r9), %rdi | |
| vaddsd (%r15,%rdi,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r14,%rax,8) | |
| addq $1, %rax | |
| cmpq %rax, %rbp | |
| jne .LBB0_118 | |
| .LBB0_119: | |
| cmpq $3, %rbx | |
| jae .LBB0_132 | |
| movq 40(%rsp), %r9 | |
| movq %r9, 16(%rsp) | |
| jmp .LBB0_136 | |
| .LBB0_121: | |
| movq %rcx, %r8 | |
| imulq %r13, %r8 | |
| xorl %eax, %eax | |
| testq %rbp, %rbp | |
| je .LBB0_124 | |
| movq 584(%rsp), %rdx | |
| .p2align 4, 0x90 | |
| .LBB0_123: | |
| cmpq $1, %r13 | |
| vmovsd (%rdx,%r9,8), %xmm0 | |
| cmovaq %rax, %rsi | |
| leaq (%rsi,%r8), %rdi | |
| vaddsd (%r15,%rdi,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r14,%rax,8) | |
| addq $1, %rax | |
| cmpq %rax, %rbp | |
| jne .LBB0_123 | |
| .LBB0_124: | |
| cmpq $3, %rbx | |
| jae .LBB0_137 | |
| movq 584(%rsp), %r8 | |
| movq 88(%rsp), %rdi | |
| jmp .LBB0_140 | |
| .LBB0_126: | |
| movq 48(%rsp), %rcx | |
| leaq (%r8,%rcx), %rbp | |
| .p2align 4, 0x90 | |
| .LBB0_127: | |
| movq %rsi, %rcx | |
| leaq 3(%rax), %rsi | |
| cmpq $1, %r13 | |
| cmovbeq %rcx, %rsi | |
| leaq 2(%rax), %rbx | |
| cmpq $1, %r13 | |
| cmovbeq %rcx, %rbx | |
| leaq 1(%rax), %rdi | |
| cmpq $1, %r13 | |
| vmovsd (%r8,%r12,8), %xmm0 | |
| cmovbeq %rcx, %rdi | |
| cmovaq %rax, %rcx | |
| addq %r9, %rcx | |
| vaddsd (%r15,%rcx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%rbp,%rax,8) | |
| vmovsd (%r8,%r12,8), %xmm0 | |
| addq %r9, %rdi | |
| vaddsd (%r15,%rdi,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 8(%rbp,%rax,8) | |
| vmovsd (%r8,%r12,8), %xmm0 | |
| addq %r9, %rbx | |
| vaddsd (%r15,%rbx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 16(%rbp,%rax,8) | |
| vmovsd (%r8,%r12,8), %xmm0 | |
| leaq (%rsi,%r9), %rcx | |
| vaddsd (%r15,%rcx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 24(%rbp,%rax,8) | |
| addq $4, %rax | |
| cmpq %r10, %rax | |
| jne .LBB0_127 | |
| movq %rdx, %r9 | |
| movq %r9, 16(%rsp) | |
| movq %r9, %rcx | |
| jmp .LBB0_135 | |
| .LBB0_129: | |
| leaq (%r8,%r12), %rdi | |
| .p2align 4, 0x90 | |
| .LBB0_130: | |
| movq %rsi, %rcx | |
| leaq 3(%rax), %rsi | |
| cmpq $1, %r13 | |
| cmovbeq %rcx, %rsi | |
| leaq 2(%rax), %rbp | |
| cmpq $1, %r13 | |
| cmovbeq %rcx, %rbp | |
| leaq 1(%rax), %rbx | |
| cmpq $1, %r13 | |
| vmovsd (%r8,%r9,8), %xmm0 | |
| cmovbeq %rcx, %rbx | |
| cmovaq %rax, %rcx | |
| addq %rdx, %rcx | |
| vaddsd (%r15,%rcx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%rdi,%rax,8) | |
| vmovsd (%r8,%r9,8), %xmm0 | |
| addq %rdx, %rbx | |
| vaddsd (%r15,%rbx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 8(%rdi,%rax,8) | |
| vmovsd (%r8,%r9,8), %xmm0 | |
| addq %rdx, %rbp | |
| vaddsd (%r15,%rbp,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 16(%rdi,%rax,8) | |
| vmovsd (%r8,%r9,8), %xmm0 | |
| leaq (%rsi,%rdx), %rcx | |
| vaddsd (%r15,%rcx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 24(%rdi,%rax,8) | |
| addq $4, %rax | |
| cmpq %r10, %rax | |
| jne .LBB0_130 | |
| movq 40(%rsp), %r9 | |
| movq %r9, %rcx | |
| movq 72(%rsp), %rbx | |
| movq 80(%rsp), %rbp | |
| movq 88(%rsp), %rdi | |
| jmp .LBB0_141 | |
| .LBB0_132: | |
| movq 48(%rsp), %rdx | |
| leaq (%r8,%rdx), %rbp | |
| .p2align 4, 0x90 | |
| .LBB0_133: | |
| movq %rsi, %rdi | |
| leaq 3(%rax), %rsi | |
| cmpq $1, %r13 | |
| cmovbeq %rdi, %rsi | |
| leaq 2(%rax), %rbx | |
| cmpq $1, %r13 | |
| cmovbeq %rdi, %rbx | |
| leaq 1(%rax), %r8 | |
| cmpq $1, %r13 | |
| movq 584(%rsp), %rdx | |
| vmovsd (%rdx,%r12,8), %xmm0 | |
| cmovbeq %rdi, %r8 | |
| cmovaq %rax, %rdi | |
| addq %r9, %rdi | |
| vaddsd (%r15,%rdi,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%rbp,%rax,8) | |
| movq 584(%rsp), %rdx | |
| vmovsd (%rdx,%r12,8), %xmm0 | |
| addq %r9, %r8 | |
| vaddsd (%r15,%r8,8), %xmm0, %xmm0 | |
| movq 584(%rsp), %r8 | |
| vmovsd %xmm0, 8(%rbp,%rax,8) | |
| vmovsd (%r8,%r12,8), %xmm0 | |
| addq %r9, %rbx | |
| vaddsd (%r15,%rbx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 16(%rbp,%rax,8) | |
| vmovsd (%r8,%r12,8), %xmm0 | |
| leaq (%rsi,%r9), %rdi | |
| vaddsd (%r15,%rdi,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 24(%rbp,%rax,8) | |
| addq $4, %rax | |
| cmpq %r10, %rax | |
| jne .LBB0_133 | |
| movq 40(%rsp), %r9 | |
| movq %r9, 16(%rsp) | |
| .LBB0_135: | |
| movq 72(%rsp), %rbx | |
| movq 80(%rsp), %rbp | |
| .LBB0_136: | |
| movq 88(%rsp), %rdi | |
| movq 48(%rsp), %r12 | |
| jmp .LBB0_141 | |
| .LBB0_137: | |
| addq $3, %rax | |
| movq %r12, %rdx | |
| movq 584(%rsp), %r12 | |
| movq %rdx, 48(%rsp) | |
| leaq (%r12,%rdx), %rbp | |
| movq 240(%rsp), %r11 | |
| .p2align 4, 0x90 | |
| .LBB0_138: | |
| movq %rsi, %rdi | |
| cmpq $1, %r13 | |
| cmovaq %rax, %rsi | |
| leaq -1(%rax), %rbx | |
| cmpq $1, %r13 | |
| cmovbeq %rdi, %rbx | |
| leaq -2(%rax), %rdx | |
| cmpq $1, %r13 | |
| cmovbeq %rdi, %rdx | |
| leaq -3(%rax), %r10 | |
| cmpq $1, %r13 | |
| vmovsd (%r12,%r9,8), %xmm0 | |
| cmovbeq %rdi, %r10 | |
| addq %r8, %r10 | |
| vaddsd (%r15,%r10,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, -24(%rbp,%rax,8) | |
| vmovsd (%r12,%r9,8), %xmm0 | |
| addq %r8, %rdx | |
| vaddsd (%r15,%rdx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, -16(%rbp,%rax,8) | |
| vmovsd (%r12,%r9,8), %xmm0 | |
| addq %r8, %rbx | |
| vaddsd (%r15,%rbx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, -8(%rbp,%rax,8) | |
| vmovsd (%r12,%r9,8), %xmm0 | |
| leaq (%rsi,%r8), %rdx | |
| vaddsd (%r15,%rdx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%rbp,%rax,8) | |
| leaq 4(%r11,%rax), %rdx | |
| addq $4, %rax | |
| cmpq $3, %rdx | |
| jne .LBB0_138 | |
| movq 600(%rsp), %r10 | |
| movq %r12, %r8 | |
| movq 72(%rsp), %rbx | |
| movq 80(%rsp), %rbp | |
| movq 88(%rsp), %rdi | |
| movq 24(%rsp), %r11 | |
| movq 48(%rsp), %r12 | |
| .LBB0_140: | |
| movq 40(%rsp), %r9 | |
| .LBB0_141: | |
| movq 8(%rsp), %rax | |
| jmp .LBB0_155 | |
| .LBB0_142: | |
| xorl %eax, %eax | |
| .LBB0_143: | |
| cmpq $3, %rbx | |
| jae .LBB0_145 | |
| movq %rbx, %rax | |
| movq 40(%rsp), %r9 | |
| movq %r9, %rcx | |
| movq 88(%rsp), %rdi | |
| jmp .LBB0_155 | |
| .LBB0_145: | |
| movq 200(%rsp), %rcx | |
| leaq (%rcx,%r9,8), %rdi | |
| .p2align 4, 0x90 | |
| .LBB0_146: | |
| movq %rsi, %rcx | |
| leaq 3(%rax), %rsi | |
| cmpq $1, %r13 | |
| cmovbeq %rcx, %rsi | |
| leaq 2(%rax), %rbp | |
| cmpq $1, %r13 | |
| cmovbeq %rcx, %rbp | |
| leaq 1(%rax), %rbx | |
| cmpq $1, %r13 | |
| vmovsd -24(%rdi,%rax,8), %xmm0 | |
| cmovbeq %rcx, %rbx | |
| cmovaq %rax, %rcx | |
| addq %rdx, %rcx | |
| vaddsd (%r15,%rcx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r14,%rax,8) | |
| vmovsd -16(%rdi,%rax,8), %xmm0 | |
| addq %rdx, %rbx | |
| vaddsd (%r15,%rbx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 8(%r14,%rax,8) | |
| vmovsd -8(%rdi,%rax,8), %xmm0 | |
| addq %rdx, %rbp | |
| vaddsd (%r15,%rbp,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 16(%r14,%rax,8) | |
| vmovsd (%rdi,%rax,8), %xmm0 | |
| leaq (%rsi,%rdx), %rcx | |
| vaddsd (%r15,%rcx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 24(%r14,%rax,8) | |
| addq $4, %rax | |
| cmpq %r10, %rax | |
| jne .LBB0_146 | |
| movq 72(%rsp), %rbx | |
| movq %rbx, %rax | |
| movq 40(%rsp), %r9 | |
| movq %r9, %rcx | |
| .LBB0_148: | |
| movq 80(%rsp), %rbp | |
| movq 88(%rsp), %rdi | |
| jmp .LBB0_155 | |
| .LBB0_149: | |
| xorl %eax, %eax | |
| .LBB0_150: | |
| movq 72(%rsp), %rbx | |
| cmpq $3, %rbx | |
| jb .LBB0_154 | |
| movq 200(%rsp), %rdx | |
| leaq (%rdx,%r9,8), %rbp | |
| .p2align 4, 0x90 | |
| .LBB0_152: | |
| movq %rsi, %rdi | |
| leaq 3(%rax), %rsi | |
| cmpq $1, %r13 | |
| cmovbeq %rdi, %rsi | |
| leaq 2(%rax), %rbx | |
| cmpq $1, %r13 | |
| cmovbeq %rdi, %rbx | |
| leaq 1(%rax), %rdx | |
| cmpq $1, %r13 | |
| vmovsd -24(%rbp,%rax,8), %xmm0 | |
| cmovbeq %rdi, %rdx | |
| cmovaq %rax, %rdi | |
| addq %r8, %rdi | |
| vaddsd (%r15,%rdi,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r14,%rax,8) | |
| vmovsd -16(%rbp,%rax,8), %xmm0 | |
| addq %r8, %rdx | |
| vaddsd (%r15,%rdx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 8(%r14,%rax,8) | |
| vmovsd -8(%rbp,%rax,8), %xmm0 | |
| addq %r8, %rbx | |
| vaddsd (%r15,%rbx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 16(%r14,%rax,8) | |
| vmovsd (%rbp,%rax,8), %xmm0 | |
| leaq (%rsi,%r8), %rdx | |
| vaddsd (%r15,%rdx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 24(%r14,%rax,8) | |
| addq $4, %rax | |
| cmpq %r10, %rax | |
| jne .LBB0_152 | |
| movq 72(%rsp), %rbx | |
| .LBB0_154: | |
| movq %rbx, %rax | |
| movq 584(%rsp), %r8 | |
| movq 80(%rsp), %rbp | |
| movq 88(%rsp), %rdi | |
| movq 40(%rsp), %r9 | |
| .p2align 4, 0x90 | |
| .LBB0_155: | |
| addq $1, %r9 | |
| addq %rdi, %r14 | |
| addq %rdi, %r12 | |
| cmpq 592(%rsp), %r9 | |
| jne .LBB0_91 | |
| .LBB0_156: | |
| movq 144(%rsp), %rdi | |
| movabsq $NRT_decref, %rax | |
| movq %rax, %rbx | |
| callq *%rbx | |
| movq 168(%rsp), %rdi | |
| callq *%rbx | |
| movq 344(%rsp), %rbp | |
| movq 360(%rsp), %rbx | |
| cmpb $0, 103(%rsp) | |
| movq 736(%rsp), %r14 | |
| movq 352(%rsp), %r15 | |
| movq 336(%rsp), %r13 | |
| movabsq $memset, %r12 | |
| jne .LBB0_158 | |
| .p2align 4, 0x90 | |
| .LBB0_157: | |
| xorl %esi, %esi | |
| movq %rbp, %rdi | |
| movq %r15, %rdx | |
| callq *%r12 | |
| addq $1, %rbx | |
| addq %r13, %rbp | |
| cmpq %r14, %rbx | |
| jl .LBB0_157 | |
| .LBB0_158: | |
| xorl %r9d, %r9d | |
| movl $100, %r8d | |
| movq 808(%rsp), %r13 | |
| movq 88(%rsp), %rdi | |
| vmovsd 328(%rsp), %xmm6 | |
| vxorps %xmm7, %xmm7, %xmm7 | |
| movq 672(%rsp), %r12 | |
| .p2align 4, 0x90 | |
| .LBB0_159: | |
| movq %r8, %r10 | |
| movq %r9, %rsi | |
| leaq -1(%r10), %r8 | |
| movq %rsi, %r14 | |
| imulq 600(%rsp), %r14 | |
| movq %rsi, %r15 | |
| imulq 744(%rsp), %r15 | |
| leaq 1(%rsi), %r9 | |
| xorl %eax, %eax | |
| leaq -1(%rax), %rbx | |
| xorl %r11d, %r11d | |
| movq 584(%rsp), %rcx | |
| movq 800(%rsp), %rdx | |
| movl $100, %eax | |
| testq %rax, %rax | |
| jg .LBB0_162 | |
| jmp .LBB0_164 | |
| .LBB0_160: | |
| addq $1, %rbx | |
| vdivsd %xmm3, %xmm6, %xmm3 | |
| andq %r13, %rbp | |
| addq %r11, %rbp | |
| vmulsd -8(%rdx,%rbp,8), %xmm3, %xmm3 | |
| vmulsd %xmm3, %xmm2, %xmm2 | |
| movq 728(%rsp), %rcx | |
| vaddsd (%rcx,%r15,8), %xmm2, %xmm2 | |
| vmovsd %xmm2, (%rcx,%r15,8) | |
| vmulsd %xmm3, %xmm1, %xmm1 | |
| vaddsd 8(%rcx,%r15,8), %xmm1, %xmm1 | |
| vmovsd %xmm1, 8(%rcx,%r15,8) | |
| vmulsd %xmm3, %xmm0, %xmm0 | |
| vaddsd 16(%rcx,%r15,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 16(%rcx,%r15,8) | |
| movq %rbx, %r11 | |
| leaq -1(%rbx), %rbx | |
| movq %r12, %rcx | |
| movq 800(%rsp), %rdx | |
| movq 672(%rsp), %r12 | |
| movq 88(%rsp), %rdi | |
| .p2align 4, 0x90 | |
| .LBB0_161: | |
| testq %rax, %rax | |
| jle .LBB0_164 | |
| .LBB0_162: | |
| addq $-1, %rax | |
| addq $8, %rdx | |
| addq %rdi, %rcx | |
| addq $1, %rbx | |
| cmpq %rbx, %rsi | |
| je .LBB0_161 | |
| movq 584(%rsp), %r12 | |
| vmovsd (%r12,%r14,8), %xmm2 | |
| movq %rbx, %rbp | |
| sarq $63, %rbp | |
| movq %rbp, %rdi | |
| andq 592(%rsp), %rdi | |
| addq %r11, %rdi | |
| leaq -8(,%rdi,8), %rdi | |
| imulq 600(%rsp), %rdi | |
| vmovsd 8(%r12,%r14,8), %xmm0 | |
| vsubsd 8(%rdi,%rcx), %xmm0, %xmm1 | |
| vmovsd 16(%r12,%r14,8), %xmm0 | |
| vsubsd 16(%rdi,%rcx), %xmm0, %xmm0 | |
| vmulsd %xmm1, %xmm1, %xmm3 | |
| vmulsd %xmm0, %xmm0, %xmm4 | |
| vsubsd (%rdi,%rcx), %xmm2, %xmm2 | |
| vmulsd %xmm2, %xmm2, %xmm5 | |
| vaddsd %xmm3, %xmm5, %xmm3 | |
| vaddsd %xmm4, %xmm3, %xmm3 | |
| vsqrtsd %xmm3, %xmm3, %xmm3 | |
| vmulsd %xmm3, %xmm3, %xmm4 | |
| vmulsd %xmm4, %xmm3, %xmm3 | |
| vucomisd %xmm7, %xmm3 | |
| jne .LBB0_160 | |
| jp .LBB0_160 | |
| jmp .LBB0_451 | |
| .p2align 4, 0x90 | |
| .LBB0_164: | |
| cmpq $1, %r10 | |
| jg .LBB0_159 | |
| movq $1, 264(%rsp) | |
| movq $1, 272(%rsp) | |
| movq 736(%rsp), %rcx | |
| movq %rcx, 416(%rsp) | |
| movq 744(%rsp), %rax | |
| movq %rax, 424(%rsp) | |
| cmpq $1, %rcx | |
| je .LBB0_167 | |
| movq %rcx, 264(%rsp) | |
| movq 424(%rsp), %rax | |
| movq 272(%rsp), %r14 | |
| cmpq $1, %r14 | |
| jne .LBB0_170 | |
| .LBB0_167: | |
| cmpq $1, %rax | |
| jne .LBB0_169 | |
| movl $1, %r14d | |
| jmp .LBB0_172 | |
| .p2align 4, 0x90 | |
| .LBB0_169: | |
| movq %rax, 272(%rsp) | |
| movq %rax, %r14 | |
| jmp .LBB0_172 | |
| .LBB0_170: | |
| cmpq $1, %rax | |
| je .LBB0_172 | |
| cmpq %r14, %rax | |
| jne .LBB0_454 | |
| .p2align 4, 0x90 | |
| .LBB0_172: | |
| movq 264(%rsp), %r13 | |
| movq %r13, %rdi | |
| imulq %r14, %rdi | |
| shlq $3, %rdi | |
| movl $32, %esi | |
| movabsq $NRT_MemInfo_alloc_safe_aligned, %rax | |
| callq *%rax | |
| movq %rax, 160(%rsp) | |
| movq 24(%rax), %r15 | |
| testq %r13, %r13 | |
| movq 728(%rsp), %r8 | |
| vmovsd 216(%rsp), %xmm4 | |
| vmovupd 480(%rsp), %ymm5 | |
| jle .LBB0_255 | |
| testq %r14, %r14 | |
| jle .LBB0_255 | |
| leaq -1(%r14), %rax | |
| movq %rax, 8(%rsp) | |
| leaq (%r8,%r14,8), %rax | |
| movq %rax, 128(%rsp) | |
| leaq -16(%r14), %rax | |
| shrq $4, %rax | |
| movq %rax, 104(%rsp) | |
| leal 1(%rax), %eax | |
| movq %r14, %rsi | |
| andq $-16, %rsi | |
| andl $1, %eax | |
| movq %rax, 144(%rsp) | |
| leaq 96(%r15), %rcx | |
| leaq (,%r14,8), %rax | |
| movq %rax, 64(%rsp) | |
| movq %r15, %rdx | |
| subq $-128, %rdx | |
| movq %rsi, 56(%rsp) | |
| negq %rsi | |
| movq %rsi, 192(%rsp) | |
| leaq 224(%r15), %r9 | |
| movl $7, %eax | |
| movq %rax, 112(%rsp) | |
| xorl %eax, %eax | |
| movq %rax, 48(%rsp) | |
| movl $16, %r10d | |
| movl $4, %eax | |
| movq %rax, 24(%rsp) | |
| xorl %eax, %eax | |
| movq %rax, 16(%rsp) | |
| xorl %r11d, %r11d | |
| xorl %eax, %eax | |
| movq %rax, 120(%rsp) | |
| xorl %eax, %eax | |
| movq %rax, 32(%rsp) | |
| jmp .LBB0_203 | |
| .LBB0_175: | |
| cmpq $0, 144(%rsp) | |
| jne .LBB0_183 | |
| xorl %esi, %esi | |
| jmp .LBB0_184 | |
| .LBB0_177: | |
| cmpq $0, 144(%rsp) | |
| jne .LBB0_188 | |
| xorl %esi, %esi | |
| jmp .LBB0_189 | |
| .LBB0_179: | |
| cmpq $0, 144(%rsp) | |
| jne .LBB0_193 | |
| xorl %edi, %edi | |
| jmp .LBB0_194 | |
| .LBB0_181: | |
| cmpq $0, 144(%rsp) | |
| jne .LBB0_198 | |
| xorl %esi, %esi | |
| jmp .LBB0_199 | |
| .LBB0_183: | |
| vmulpd (%r8,%rdi,8), %ymm5, %ymm0 | |
| vmulpd 32(%r8,%rdi,8), %ymm5, %ymm1 | |
| vmulpd 64(%r8,%rdi,8), %ymm5, %ymm2 | |
| vmulpd 96(%r8,%rdi,8), %ymm5, %ymm3 | |
| vmovupd %ymm0, (%r15,%rax,8) | |
| vmovupd %ymm1, 32(%r15,%rax,8) | |
| vmovupd %ymm2, 64(%r15,%rax,8) | |
| vmovupd %ymm3, 96(%r15,%rax,8) | |
| movl $16, %esi | |
| .LBB0_184: | |
| movq %rbp, %r9 | |
| cmpq $0, 104(%rsp) | |
| je .LBB0_187 | |
| movq 192(%rsp), %rax | |
| leaq (%rax,%rsi), %rax | |
| leaq -96(,%rsi,8), %rdi | |
| .p2align 4, 0x90 | |
| .LBB0_186: | |
| leaq (%r8,%rdi), %rsi | |
| vmulpd -32(%rsi,%r10,8), %ymm5, %ymm0 | |
| vmulpd (%rsi,%r10,8), %ymm5, %ymm1 | |
| vmulpd 32(%rsi,%r10,8), %ymm5, %ymm2 | |
| vmulpd 64(%rsi,%r10,8), %ymm5, %ymm3 | |
| vmovupd %ymm0, (%rdi,%rcx) | |
| vmovupd %ymm1, 32(%rdi,%rcx) | |
| vmovupd %ymm2, 64(%rdi,%rcx) | |
| vmovupd %ymm3, 96(%rdi,%rcx) | |
| vmulpd 96(%rsi,%r10,8), %ymm5, %ymm0 | |
| vmulpd 128(%rsi,%r10,8), %ymm5, %ymm1 | |
| vmulpd 160(%rsi,%r10,8), %ymm5, %ymm2 | |
| vmulpd 192(%rsi,%r10,8), %ymm5, %ymm3 | |
| vmovupd %ymm0, 128(%rdi,%rcx) | |
| vmovupd %ymm1, 160(%rdi,%rcx) | |
| vmovupd %ymm2, 192(%rdi,%rcx) | |
| vmovupd %ymm3, 224(%rdi,%rcx) | |
| addq $256, %rdi | |
| addq $32, %rax | |
| jne .LBB0_186 | |
| .LBB0_187: | |
| movq %r11, 32(%rsp) | |
| movq 8(%rsp), %rax | |
| movq %rax, 120(%rsp) | |
| movq 56(%rsp), %rsi | |
| movq %rsi, %rax | |
| cmpq %rsi, %r14 | |
| jne .LBB0_210 | |
| jmp .LBB0_254 | |
| .LBB0_188: | |
| vmulpd (%r8,%rdi,8), %ymm5, %ymm0 | |
| vmulpd 32(%r8,%rdi,8), %ymm5, %ymm1 | |
| vmulpd 64(%r8,%rdi,8), %ymm5, %ymm2 | |
| vmulpd 96(%r8,%rdi,8), %ymm5, %ymm3 | |
| vmovupd %ymm0, (%r15,%rax,8) | |
| vmovupd %ymm1, 32(%r15,%rax,8) | |
| vmovupd %ymm2, 64(%r15,%rax,8) | |
| vmovupd %ymm3, 96(%r15,%rax,8) | |
| movl $16, %esi | |
| .LBB0_189: | |
| movq %rbp, %r9 | |
| cmpq $0, 104(%rsp) | |
| je .LBB0_192 | |
| movq 192(%rsp), %rax | |
| leaq (%rax,%rsi), %rax | |
| movq 320(%rsp), %rbp | |
| leaq (%rbp,%rdi,8), %rbp | |
| leaq -96(,%rsi,8), %rbx | |
| .p2align 4, 0x90 | |
| .LBB0_191: | |
| vmulpd -128(%rbp,%rbx), %ymm5, %ymm0 | |
| vmulpd -96(%rbp,%rbx), %ymm5, %ymm1 | |
| vmulpd -64(%rbp,%rbx), %ymm5, %ymm2 | |
| vmulpd -32(%rbp,%rbx), %ymm5, %ymm3 | |
| vmovupd %ymm0, (%rbx,%rcx) | |
| vmovupd %ymm1, 32(%rbx,%rcx) | |
| vmovupd %ymm2, 64(%rbx,%rcx) | |
| vmovupd %ymm3, 96(%rbx,%rcx) | |
| vmulpd (%rbp,%rbx), %ymm5, %ymm0 | |
| vmulpd 32(%rbp,%rbx), %ymm5, %ymm1 | |
| vmulpd 64(%rbp,%rbx), %ymm5, %ymm2 | |
| vmulpd 96(%rbp,%rbx), %ymm5, %ymm3 | |
| vmovupd %ymm0, 128(%rbx,%rcx) | |
| vmovupd %ymm1, 160(%rbx,%rcx) | |
| vmovupd %ymm2, 192(%rbx,%rcx) | |
| vmovupd %ymm3, 224(%rbx,%rcx) | |
| addq $256, %rbx | |
| addq $32, %rax | |
| jne .LBB0_191 | |
| .LBB0_192: | |
| movq 56(%rsp), %rsi | |
| movq %rsi, %rax | |
| cmpq %rsi, %r14 | |
| je .LBB0_217 | |
| jmp .LBB0_224 | |
| .LBB0_193: | |
| vbroadcastsd (%r8), %ymm0 | |
| vmulpd %ymm5, %ymm0, %ymm0 | |
| vmovupd %ymm0, (%r15,%rax,8) | |
| vmovupd %ymm0, 32(%r15,%rax,8) | |
| vmovupd %ymm0, 64(%r15,%rax,8) | |
| vmovupd %ymm0, 96(%r15,%rax,8) | |
| movl $16, %edi | |
| .LBB0_194: | |
| movq %rbp, %r9 | |
| cmpq $0, 104(%rsp) | |
| je .LBB0_197 | |
| vbroadcastsd (%r8), %ymm0 | |
| vmulpd %ymm5, %ymm0, %ymm0 | |
| leaq 96(,%rdi,8), %rax | |
| movq 56(%rsp), %rsi | |
| subq %rdi, %rsi | |
| .p2align 4, 0x90 | |
| .LBB0_196: | |
| vmovupd %ymm0, -224(%rax,%rdx) | |
| vmovupd %ymm0, -192(%rax,%rdx) | |
| vmovupd %ymm0, -160(%rax,%rdx) | |
| vmovupd %ymm0, -128(%rax,%rdx) | |
| vmovupd %ymm0, -96(%rax,%rdx) | |
| vmovupd %ymm0, -64(%rax,%rdx) | |
| vmovupd %ymm0, -32(%rax,%rdx) | |
| vmovupd %ymm0, (%rax,%rdx) | |
| addq $256, %rax | |
| addq $-32, %rsi | |
| jne .LBB0_196 | |
| .LBB0_197: | |
| movq 56(%rsp), %rsi | |
| movq %rsi, %rax | |
| cmpq %rsi, %r14 | |
| jne .LBB0_236 | |
| jmp .LBB0_242 | |
| .LBB0_198: | |
| vbroadcastsd (%rdi), %ymm0 | |
| vmulpd %ymm5, %ymm0, %ymm0 | |
| vmovupd %ymm0, (%r15,%rax,8) | |
| vmovupd %ymm0, 32(%r15,%rax,8) | |
| vmovupd %ymm0, 64(%r15,%rax,8) | |
| vmovupd %ymm0, 96(%r15,%rax,8) | |
| movl $16, %esi | |
| .LBB0_199: | |
| movq %rbp, %r9 | |
| cmpq $0, 104(%rsp) | |
| je .LBB0_202 | |
| vbroadcastsd (%rdi), %ymm0 | |
| vmulpd %ymm5, %ymm0, %ymm0 | |
| movq 192(%rsp), %rax | |
| leaq (%rax,%rsi), %rax | |
| leaq (%r9,%rsi,8), %rsi | |
| .p2align 4, 0x90 | |
| .LBB0_201: | |
| vmovupd %ymm0, -224(%rsi) | |
| vmovupd %ymm0, -192(%rsi) | |
| vmovupd %ymm0, -160(%rsi) | |
| vmovupd %ymm0, -128(%rsi) | |
| vmovupd %ymm0, -96(%rsi) | |
| vmovupd %ymm0, -64(%rsi) | |
| vmovupd %ymm0, -32(%rsi) | |
| vmovupd %ymm0, (%rsi) | |
| addq $256, %rsi | |
| addq $32, %rax | |
| jne .LBB0_201 | |
| .LBB0_202: | |
| movq 56(%rsp), %rsi | |
| movq %rsi, %rax | |
| cmpq %rsi, %r14 | |
| jne .LBB0_248 | |
| jmp .LBB0_254 | |
| .p2align 4, 0x90 | |
| .LBB0_203: | |
| movq %r9, %rbp | |
| movq %r14, %rax | |
| imulq %r11, %rax | |
| leaq (%r14,%rax), %rsi | |
| leaq (%r15,%rax,8), %r9 | |
| leaq (%r15,%rsi,8), %rbx | |
| cmpq $1, 736(%rsp) | |
| jbe .LBB0_218 | |
| movq %r11, %rdi | |
| movq 744(%rsp), %rsi | |
| imulq %rsi, %rdi | |
| cmpq $2, %rsi | |
| jb .LBB0_231 | |
| cmpq $15, %r14 | |
| jbe .LBB0_209 | |
| cmpq $0, 56(%rsp) | |
| je .LBB0_209 | |
| leaq (%r14,%rdi), %rsi | |
| leaq (%r8,%rsi,8), %rsi | |
| cmpq %rsi, %r9 | |
| jae .LBB0_175 | |
| leaq (%r8,%rdi,8), %rsi | |
| cmpq %rbx, %rsi | |
| jae .LBB0_175 | |
| .p2align 4, 0x90 | |
| .LBB0_209: | |
| xorl %eax, %eax | |
| movq %rbp, %r9 | |
| .LBB0_210: | |
| movl %r14d, %ebx | |
| subl %eax, %ebx | |
| movq 8(%rsp), %rdi | |
| subq %rax, %rdi | |
| andq $7, %rbx | |
| je .LBB0_213 | |
| movq 16(%rsp), %rsi | |
| leaq (%rax,%rsi), %rsi | |
| leaq (%r15,%rsi,8), %rbp | |
| movq 48(%rsp), %rsi | |
| leaq (%rax,%rsi), %rsi | |
| leaq (%r8,%rsi,8), %rsi | |
| negq %rbx | |
| .p2align 4, 0x90 | |
| .LBB0_212: | |
| vmulsd (%rsi), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rbp) | |
| addq $1, %rax | |
| addq $8, %rbp | |
| addq $8, %rsi | |
| addq $1, %rbx | |
| jne .LBB0_212 | |
| .LBB0_213: | |
| movq %r11, 32(%rsp) | |
| movq 8(%rsp), %rsi | |
| movq %rsi, 120(%rsp) | |
| cmpq $7, %rdi | |
| jb .LBB0_254 | |
| movq 24(%rsp), %rsi | |
| leaq (%rax,%rsi), %rsi | |
| leaq (%r15,%rsi,8), %rdi | |
| movq 112(%rsp), %rsi | |
| leaq (%rax,%rsi), %rsi | |
| leaq (%r8,%rsi,8), %rbx | |
| movq %r14, %rbp | |
| subq %rax, %rbp | |
| .p2align 4, 0x90 | |
| .LBB0_215: | |
| vmulsd -56(%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, -32(%rdi) | |
| vmulsd -48(%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, -24(%rdi) | |
| vmulsd -40(%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, -16(%rdi) | |
| vmulsd -32(%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, -8(%rdi) | |
| vmulsd -24(%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rdi) | |
| vmulsd -16(%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, 8(%rdi) | |
| vmulsd -8(%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, 16(%rdi) | |
| vmulsd (%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, 24(%rdi) | |
| addq $64, %rdi | |
| addq $64, %rbx | |
| addq $-8, %rbp | |
| jne .LBB0_215 | |
| movq %r11, 32(%rsp) | |
| .LBB0_217: | |
| movq 8(%rsp), %rax | |
| movq %rax, 120(%rsp) | |
| jmp .LBB0_254 | |
| .p2align 4, 0x90 | |
| .LBB0_218: | |
| movq 32(%rsp), %rdi | |
| movq 744(%rsp), %rsi | |
| imulq %rsi, %rdi | |
| cmpq $2, %rsi | |
| jb .LBB0_243 | |
| cmpq $16, %r14 | |
| jb .LBB0_223 | |
| cmpq $0, 56(%rsp) | |
| je .LBB0_223 | |
| movq 128(%rsp), %rsi | |
| leaq (%rsi,%rdi,8), %rsi | |
| cmpq %rsi, %r9 | |
| jae .LBB0_177 | |
| leaq (%r8,%rdi,8), %rsi | |
| cmpq %rbx, %rsi | |
| jae .LBB0_177 | |
| .p2align 4, 0x90 | |
| .LBB0_223: | |
| xorl %eax, %eax | |
| movq %rbp, %r9 | |
| .LBB0_224: | |
| movl %r14d, %ebp | |
| subl %eax, %ebp | |
| movq 8(%rsp), %r8 | |
| subq %rax, %r8 | |
| andq $7, %rbp | |
| je .LBB0_227 | |
| movq 16(%rsp), %rsi | |
| leaq (%rax,%rsi), %rsi | |
| leaq (%r15,%rsi,8), %rsi | |
| movq 728(%rsp), %rbx | |
| leaq (%rbx,%rdi,8), %rbx | |
| negq %rbp | |
| .p2align 4, 0x90 | |
| .LBB0_226: | |
| vmulsd (%rbx,%rax,8), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rsi) | |
| addq $1, %rax | |
| addq $8, %rsi | |
| addq $1, %rbp | |
| jne .LBB0_226 | |
| .LBB0_227: | |
| cmpq $7, %r8 | |
| jb .LBB0_230 | |
| movq 24(%rsp), %rsi | |
| leaq (%rax,%rsi), %rsi | |
| leaq (%r15,%rsi,8), %rbx | |
| addq %rax, %rdi | |
| movq 376(%rsp), %rsi | |
| leaq (%rsi,%rdi,8), %rdi | |
| movq %r14, %rbp | |
| subq %rax, %rbp | |
| .p2align 4, 0x90 | |
| .LBB0_229: | |
| vmulsd -56(%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, -32(%rbx) | |
| vmulsd -48(%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, -24(%rbx) | |
| vmulsd -40(%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, -16(%rbx) | |
| vmulsd -32(%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, -8(%rbx) | |
| vmulsd -24(%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rbx) | |
| vmulsd -16(%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, 8(%rbx) | |
| vmulsd -8(%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, 16(%rbx) | |
| vmulsd (%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, 24(%rbx) | |
| addq $64, %rbx | |
| addq $64, %rdi | |
| addq $-8, %rbp | |
| jne .LBB0_229 | |
| .LBB0_230: | |
| movq 8(%rsp), %rax | |
| movq %rax, 120(%rsp) | |
| movq 728(%rsp), %r8 | |
| jmp .LBB0_254 | |
| .p2align 4, 0x90 | |
| .LBB0_231: | |
| addq 120(%rsp), %rdi | |
| leaq (%r8,%rdi,8), %r8 | |
| cmpq $15, %r14 | |
| jbe .LBB0_235 | |
| cmpq $0, 56(%rsp) | |
| je .LBB0_235 | |
| cmpq %r8, %r9 | |
| jae .LBB0_179 | |
| cmpq %rbx, %r8 | |
| jae .LBB0_179 | |
| .p2align 4, 0x90 | |
| .LBB0_235: | |
| xorl %eax, %eax | |
| movq %rbp, %r9 | |
| .LBB0_236: | |
| movl %r14d, %ebp | |
| subl %eax, %ebp | |
| movq 8(%rsp), %rdi | |
| subq %rax, %rdi | |
| andq $7, %rbp | |
| je .LBB0_239 | |
| movq 16(%rsp), %rsi | |
| leaq (%rax,%rsi), %rsi | |
| leaq (%r15,%rsi,8), %rsi | |
| negq %rbp | |
| .p2align 4, 0x90 | |
| .LBB0_238: | |
| vmulsd (%r8), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rsi) | |
| addq $1, %rax | |
| addq $8, %rsi | |
| addq $1, %rbp | |
| jne .LBB0_238 | |
| .LBB0_239: | |
| cmpq $7, %rdi | |
| jb .LBB0_242 | |
| movq 24(%rsp), %rsi | |
| leaq (%rax,%rsi), %rsi | |
| leaq (%r15,%rsi,8), %rdi | |
| movq %r14, %rbp | |
| subq %rax, %rbp | |
| .p2align 4, 0x90 | |
| .LBB0_241: | |
| vmulsd (%r8), %xmm4, %xmm0 | |
| vmovsd %xmm0, -32(%rdi) | |
| vmulsd (%r8), %xmm4, %xmm0 | |
| vmovsd %xmm0, -24(%rdi) | |
| vmulsd (%r8), %xmm4, %xmm0 | |
| vmovsd %xmm0, -16(%rdi) | |
| vmulsd (%r8), %xmm4, %xmm0 | |
| vmovsd %xmm0, -8(%rdi) | |
| vmulsd (%r8), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rdi) | |
| vmulsd (%r8), %xmm4, %xmm0 | |
| vmovsd %xmm0, 8(%rdi) | |
| vmulsd (%r8), %xmm4, %xmm0 | |
| vmovsd %xmm0, 16(%rdi) | |
| vmulsd (%r8), %xmm4, %xmm0 | |
| vmovsd %xmm0, 24(%rdi) | |
| addq $64, %rdi | |
| addq $-8, %rbp | |
| jne .LBB0_241 | |
| .LBB0_242: | |
| movq %r11, 32(%rsp) | |
| movq 728(%rsp), %r8 | |
| jmp .LBB0_254 | |
| .p2align 4, 0x90 | |
| .LBB0_243: | |
| addq 120(%rsp), %rdi | |
| leaq (%r8,%rdi,8), %rdi | |
| cmpq $15, %r14 | |
| jbe .LBB0_247 | |
| cmpq $0, 56(%rsp) | |
| je .LBB0_247 | |
| cmpq %rdi, %r9 | |
| jae .LBB0_181 | |
| cmpq %rbx, %rdi | |
| jae .LBB0_181 | |
| .p2align 4, 0x90 | |
| .LBB0_247: | |
| xorl %eax, %eax | |
| movq %rbp, %r9 | |
| .LBB0_248: | |
| movl %r14d, %ebp | |
| subl %eax, %ebp | |
| movq 8(%rsp), %rbx | |
| subq %rax, %rbx | |
| andq $7, %rbp | |
| je .LBB0_251 | |
| movq 16(%rsp), %rsi | |
| leaq (%rax,%rsi), %rsi | |
| leaq (%r15,%rsi,8), %rsi | |
| negq %rbp | |
| .p2align 4, 0x90 | |
| .LBB0_250: | |
| vmulsd (%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rsi) | |
| addq $1, %rax | |
| addq $8, %rsi | |
| addq $1, %rbp | |
| jne .LBB0_250 | |
| .LBB0_251: | |
| cmpq $7, %rbx | |
| jb .LBB0_254 | |
| movq 24(%rsp), %rsi | |
| leaq (%rax,%rsi), %rsi | |
| leaq (%r15,%rsi,8), %rbp | |
| movq %r14, %rbx | |
| subq %rax, %rbx | |
| .p2align 4, 0x90 | |
| .LBB0_253: | |
| vmulsd (%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, -32(%rbp) | |
| vmulsd (%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, -24(%rbp) | |
| vmulsd (%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, -16(%rbp) | |
| vmulsd (%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, -8(%rbp) | |
| vmulsd (%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rbp) | |
| vmulsd (%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, 8(%rbp) | |
| vmulsd (%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, 16(%rbp) | |
| vmulsd (%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, 24(%rbp) | |
| addq $64, %rbp | |
| addq $-8, %rbx | |
| jne .LBB0_253 | |
| .p2align 4, 0x90 | |
| .LBB0_254: | |
| addq $1, %r11 | |
| movq 64(%rsp), %rsi | |
| addq %rsi, %rcx | |
| addq %r14, 16(%rsp) | |
| addq %r14, 24(%rsp) | |
| movq 744(%rsp), %rax | |
| addq %rax, %r10 | |
| addq %rax, 48(%rsp) | |
| addq %rax, 112(%rsp) | |
| addq %rsi, %rdx | |
| addq %rsi, %r9 | |
| cmpq %r13, %r11 | |
| jne .LBB0_203 | |
| .LBB0_255: | |
| movq 624(%rsp), %rdi | |
| movabsq $NRT_incref, %rax | |
| vzeroupper | |
| callq *%rax | |
| cmpq $0, 664(%rsp) | |
| movq 152(%rsp), %rcx | |
| movq 136(%rsp), %r8 | |
| jle .LBB0_306 | |
| xorl %ebp, %ebp | |
| movq 656(%rsp), %r9 | |
| xorl %r11d, %r11d | |
| xorl %esi, %esi | |
| xorl %r10d, %r10d | |
| xorl %ebx, %ebx | |
| xorl %edx, %edx | |
| movq %r13, 40(%rsp) | |
| .p2align 4, 0x90 | |
| .LBB0_257: | |
| testq %r12, %r12 | |
| movq %rbp, 16(%rsp) | |
| jle .LBB0_305 | |
| cmpq $2, 664(%rsp) | |
| jb .LBB0_265 | |
| cmpq $2, %r12 | |
| jb .LBB0_269 | |
| movq 176(%rsp), %rdi | |
| xorl %eax, %eax | |
| testq %rdi, %rdi | |
| je .LBB0_262 | |
| .p2align 4, 0x90 | |
| .LBB0_261: | |
| cmpq $1, %r13 | |
| cmovaq %r11, %r10 | |
| cmpq $1, %r14 | |
| vmovsd (%r9,%rax,8), %xmm0 | |
| cmovaq %rax, %rsi | |
| movq %r10, %rdx | |
| imulq %r14, %rdx | |
| addq %rsi, %rdx | |
| vaddsd (%r15,%rdx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r9,%rax,8) | |
| addq $1, %rax | |
| cmpq %rax, %rdi | |
| jne .LBB0_261 | |
| .LBB0_262: | |
| movq %r11, %rdx | |
| movq %r8, %rbx | |
| cmpq $3, %r8 | |
| jb .LBB0_305 | |
| .p2align 4, 0x90 | |
| .LBB0_263: | |
| movq %rsi, %rdx | |
| leaq 3(%rax), %rsi | |
| cmpq $1, %r14 | |
| cmovbeq %rdx, %rsi | |
| leaq 2(%rax), %rbp | |
| cmpq $1, %r14 | |
| cmovbeq %rdx, %rbp | |
| leaq 1(%rax), %rbx | |
| cmpq $1, %r14 | |
| cmovbeq %rdx, %rbx | |
| cmovaq %rax, %rdx | |
| cmpq $1, %r13 | |
| vmovsd (%r9,%rax,8), %xmm0 | |
| cmovaq %r11, %r10 | |
| movq %r10, %rdi | |
| imulq %r14, %rdi | |
| addq %rdi, %rdx | |
| vaddsd (%r15,%rdx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r9,%rax,8) | |
| vmovsd 8(%r9,%rax,8), %xmm0 | |
| addq %rdi, %rbx | |
| vaddsd (%r15,%rbx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 8(%r9,%rax,8) | |
| vmovsd 16(%r9,%rax,8), %xmm0 | |
| addq %rdi, %rbp | |
| vaddsd (%r15,%rbp,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 16(%r9,%rax,8) | |
| vmovsd 24(%r9,%rax,8), %xmm0 | |
| addq %rsi, %rdi | |
| vaddsd (%r15,%rdi,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 24(%r9,%rax,8) | |
| addq $4, %rax | |
| cmpq %r12, %rax | |
| jne .LBB0_263 | |
| movq %r11, %rdx | |
| movq %r8, %rbx | |
| jmp .LBB0_305 | |
| .p2align 4, 0x90 | |
| .LBB0_265: | |
| movq %rdx, %r8 | |
| imulq %r12, %r8 | |
| cmpq $2, %r12 | |
| movq %rdx, 8(%rsp) | |
| jb .LBB0_272 | |
| movq 176(%rsp), %rbp | |
| testq %rbp, %rbp | |
| je .LBB0_279 | |
| movq 656(%rsp), %rax | |
| leaq (%rax,%r8,8), %rdx | |
| xorl %ebx, %ebx | |
| .p2align 4, 0x90 | |
| .LBB0_268: | |
| cmpq $1, %r13 | |
| cmovaq %r11, %r10 | |
| cmpq $1, %r14 | |
| vmovsd (%rdx,%rbx,8), %xmm0 | |
| cmovaq %rbx, %rsi | |
| movq %r10, %rdi | |
| imulq %r14, %rdi | |
| addq %rsi, %rdi | |
| vaddsd (%r15,%rdi,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r9,%rbx,8) | |
| addq $1, %rbx | |
| cmpq %rbx, %rbp | |
| jne .LBB0_268 | |
| jmp .LBB0_280 | |
| .LBB0_269: | |
| movq %r11, %rax | |
| imulq %r12, %rax | |
| addq %rbx, %rax | |
| movq 176(%rsp), %rdi | |
| testq %rdi, %rdi | |
| movq %rbx, 24(%rsp) | |
| je .LBB0_285 | |
| xorl %ebx, %ebx | |
| movq 656(%rsp), %rcx | |
| .p2align 4, 0x90 | |
| .LBB0_271: | |
| cmpq $1, %r13 | |
| cmovaq %r11, %r10 | |
| cmpq $1, %r14 | |
| vmovsd (%rcx,%rax,8), %xmm0 | |
| cmovaq %rbx, %rsi | |
| movq %r10, %rdx | |
| imulq %r14, %rdx | |
| addq %rsi, %rdx | |
| vaddsd (%r15,%rdx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r9,%rbx,8) | |
| addq $1, %rbx | |
| cmpq %rbx, %rdi | |
| jne .LBB0_271 | |
| jmp .LBB0_286 | |
| .LBB0_272: | |
| addq %rbx, %r8 | |
| cmpq $1, %r13 | |
| jbe .LBB0_276 | |
| movq %r11, %rdi | |
| imulq %r14, %rdi | |
| movq 176(%rsp), %rbp | |
| testq %rbp, %rbp | |
| movq 656(%rsp), %rax | |
| je .LBB0_291 | |
| xorl %edx, %edx | |
| .p2align 4, 0x90 | |
| .LBB0_275: | |
| cmpq $1, %r14 | |
| vmovsd (%rax,%r8,8), %xmm0 | |
| cmovaq %rdx, %rsi | |
| leaq (%rsi,%rdi), %rcx | |
| vaddsd (%r15,%rcx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r9,%rdx,8) | |
| addq $1, %rdx | |
| cmpq %rdx, %rbp | |
| jne .LBB0_275 | |
| jmp .LBB0_292 | |
| .LBB0_276: | |
| movq %rbx, 24(%rsp) | |
| movq %r10, %rbx | |
| imulq %r14, %rbx | |
| movq 176(%rsp), %rdi | |
| testq %rdi, %rdi | |
| movq 656(%rsp), %rax | |
| je .LBB0_297 | |
| xorl %edx, %edx | |
| .p2align 4, 0x90 | |
| .LBB0_278: | |
| cmpq $1, %r14 | |
| vmovsd (%rax,%r8,8), %xmm0 | |
| cmovaq %rdx, %rsi | |
| leaq (%rsi,%rbx), %rcx | |
| vaddsd (%r15,%rcx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r9,%rdx,8) | |
| addq $1, %rdx | |
| cmpq %rdx, %rdi | |
| jne .LBB0_278 | |
| jmp .LBB0_298 | |
| .LBB0_279: | |
| xorl %ebx, %ebx | |
| .LBB0_280: | |
| movq 136(%rsp), %rax | |
| cmpq $3, %rax | |
| jae .LBB0_282 | |
| movq %rax, %rbx | |
| movq %rax, %r8 | |
| jmp .LBB0_304 | |
| .LBB0_282: | |
| movq 368(%rsp), %rax | |
| leaq (%rax,%r8,8), %rax | |
| .p2align 4, 0x90 | |
| .LBB0_283: | |
| movq %rsi, %rdx | |
| leaq 3(%rbx), %rsi | |
| cmpq $1, %r14 | |
| cmovbeq %rdx, %rsi | |
| leaq 2(%rbx), %rdi | |
| cmpq $1, %r14 | |
| cmovbeq %rdx, %rdi | |
| leaq 1(%rbx), %rbp | |
| cmpq $1, %r14 | |
| cmovbeq %rdx, %rbp | |
| cmovaq %rbx, %rdx | |
| cmpq $1, %r13 | |
| vmovsd -24(%rax,%rbx,8), %xmm0 | |
| cmovaq %r11, %r10 | |
| movq %r10, %rcx | |
| imulq %r14, %rcx | |
| addq %rcx, %rdx | |
| vaddsd (%r15,%rdx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r9,%rbx,8) | |
| vmovsd -16(%rax,%rbx,8), %xmm0 | |
| addq %rcx, %rbp | |
| vaddsd (%r15,%rbp,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 8(%r9,%rbx,8) | |
| vmovsd -8(%rax,%rbx,8), %xmm0 | |
| addq %rcx, %rdi | |
| vaddsd (%r15,%rdi,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 16(%r9,%rbx,8) | |
| vmovsd (%rax,%rbx,8), %xmm0 | |
| addq %rsi, %rcx | |
| vaddsd (%r15,%rcx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 24(%r9,%rbx,8) | |
| addq $4, %rbx | |
| cmpq %r12, %rbx | |
| jne .LBB0_283 | |
| movq 136(%rsp), %r8 | |
| movq %r8, %rbx | |
| movq 152(%rsp), %rcx | |
| jmp .LBB0_304 | |
| .LBB0_285: | |
| xorl %ebx, %ebx | |
| .LBB0_286: | |
| cmpq $3, %r8 | |
| jae .LBB0_288 | |
| movq %r11, %rdx | |
| movq 152(%rsp), %rcx | |
| movq 24(%rsp), %rbx | |
| jmp .LBB0_305 | |
| .LBB0_288: | |
| movq %r12, %r13 | |
| movq 656(%rsp), %r12 | |
| leaq (%r12,%rbp), %rdx | |
| .p2align 4, 0x90 | |
| .LBB0_289: | |
| movq %rsi, %rdi | |
| leaq 3(%rbx), %rsi | |
| cmpq $1, %r14 | |
| cmovbeq %rdi, %rsi | |
| leaq 2(%rbx), %rbp | |
| cmpq $1, %r14 | |
| cmovbeq %rdi, %rbp | |
| leaq 1(%rbx), %rcx | |
| cmpq $1, %r14 | |
| cmovbeq %rdi, %rcx | |
| cmovaq %rbx, %rdi | |
| cmpq $1, 40(%rsp) | |
| vmovsd (%r12,%rax,8), %xmm0 | |
| cmovaq %r11, %r10 | |
| movq %r10, %r8 | |
| imulq %r14, %r8 | |
| addq %r8, %rdi | |
| vaddsd (%r15,%rdi,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%rdx,%rbx,8) | |
| vmovsd (%r12,%rax,8), %xmm0 | |
| addq %r8, %rcx | |
| vaddsd (%r15,%rcx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 8(%rdx,%rbx,8) | |
| vmovsd (%r12,%rax,8), %xmm0 | |
| addq %r8, %rbp | |
| vaddsd (%r15,%rbp,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 16(%rdx,%rbx,8) | |
| vmovsd (%r12,%rax,8), %xmm0 | |
| addq %rsi, %r8 | |
| vaddsd (%r15,%r8,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 24(%rdx,%rbx,8) | |
| addq $4, %rbx | |
| cmpq %r13, %rbx | |
| jne .LBB0_289 | |
| movq %r11, %rdx | |
| movq 152(%rsp), %rcx | |
| movq %r13, %r12 | |
| movq 40(%rsp), %r13 | |
| movq 136(%rsp), %r8 | |
| movq 24(%rsp), %rbx | |
| jmp .LBB0_305 | |
| .LBB0_291: | |
| xorl %edx, %edx | |
| .LBB0_292: | |
| cmpq $3, 136(%rsp) | |
| jae .LBB0_294 | |
| movq %r11, %r10 | |
| movq 152(%rsp), %rcx | |
| movq 136(%rsp), %r8 | |
| jmp .LBB0_304 | |
| .LBB0_294: | |
| movq %rbx, 24(%rsp) | |
| movq 16(%rsp), %rcx | |
| leaq (%rax,%rcx), %rbx | |
| movq %rax, %r10 | |
| .p2align 4, 0x90 | |
| .LBB0_295: | |
| movq %rsi, %rcx | |
| leaq 3(%rdx), %rsi | |
| cmpq $1, %r14 | |
| cmovbeq %rcx, %rsi | |
| leaq 2(%rdx), %rbp | |
| cmpq $1, %r14 | |
| cmovbeq %rcx, %rbp | |
| leaq 1(%rdx), %rax | |
| cmpq $1, %r14 | |
| vmovsd (%r10,%r8,8), %xmm0 | |
| cmovbeq %rcx, %rax | |
| cmovaq %rdx, %rcx | |
| addq %rdi, %rcx | |
| vaddsd (%r15,%rcx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%rbx,%rdx,8) | |
| vmovsd (%r10,%r8,8), %xmm0 | |
| addq %rdi, %rax | |
| vaddsd (%r15,%rax,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 8(%rbx,%rdx,8) | |
| vmovsd (%r10,%r8,8), %xmm0 | |
| addq %rdi, %rbp | |
| vaddsd (%r15,%rbp,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 16(%rbx,%rdx,8) | |
| vmovsd (%r10,%r8,8), %xmm0 | |
| leaq (%rsi,%rdi), %rax | |
| vaddsd (%r15,%rax,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 24(%rbx,%rdx,8) | |
| addq $4, %rdx | |
| cmpq %r12, %rdx | |
| jne .LBB0_295 | |
| movq %r11, %r10 | |
| movq 152(%rsp), %rcx | |
| movq 136(%rsp), %r8 | |
| jmp .LBB0_303 | |
| .LBB0_297: | |
| xorl %edx, %edx | |
| .LBB0_298: | |
| movq 136(%rsp), %rdi | |
| cmpq $3, %rdi | |
| jae .LBB0_300 | |
| movq 152(%rsp), %rcx | |
| movq %rdi, %r8 | |
| jmp .LBB0_303 | |
| .LBB0_300: | |
| addq $3, %rdx | |
| leaq (%rax,%rbp), %rbp | |
| movq %rax, %r13 | |
| .p2align 4, 0x90 | |
| .LBB0_301: | |
| movq %rsi, %rax | |
| cmpq $1, %r14 | |
| cmovaq %rdx, %rsi | |
| leaq -1(%rdx), %rcx | |
| cmpq $1, %r14 | |
| cmovbeq %rax, %rcx | |
| leaq -2(%rdx), %rdi | |
| cmpq $1, %r14 | |
| cmovbeq %rax, %rdi | |
| leaq -3(%rdx), %r12 | |
| cmpq $1, %r14 | |
| vmovsd (%r13,%r8,8), %xmm0 | |
| cmovbeq %rax, %r12 | |
| addq %rbx, %r12 | |
| vaddsd (%r15,%r12,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, -24(%rbp,%rdx,8) | |
| vmovsd (%r13,%r8,8), %xmm0 | |
| addq %rbx, %rdi | |
| vaddsd (%r15,%rdi,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, -16(%rbp,%rdx,8) | |
| vmovsd (%r13,%r8,8), %xmm0 | |
| addq %rbx, %rcx | |
| vaddsd (%r15,%rcx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, -8(%rbp,%rdx,8) | |
| vmovsd (%r13,%r8,8), %xmm0 | |
| leaq (%rsi,%rbx), %rax | |
| vaddsd (%r15,%rax,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%rbp,%rdx,8) | |
| movq 392(%rsp), %rax | |
| leaq 4(%rax,%rdx), %rax | |
| addq $4, %rdx | |
| cmpq $3, %rax | |
| jne .LBB0_301 | |
| movq 672(%rsp), %r12 | |
| movq 136(%rsp), %r8 | |
| movq 152(%rsp), %rcx | |
| movq 40(%rsp), %r13 | |
| .LBB0_303: | |
| movq 24(%rsp), %rbx | |
| .LBB0_304: | |
| movq 8(%rsp), %rdx | |
| .LBB0_305: | |
| addq $1, %r11 | |
| addq %rcx, %r9 | |
| movq 16(%rsp), %rbp | |
| addq %rcx, %rbp | |
| cmpq 664(%rsp), %r11 | |
| jne .LBB0_257 | |
| .LBB0_306: | |
| movq 160(%rsp), %rdi | |
| movabsq $NRT_decref, %rax | |
| movq %rax, %rbx | |
| callq *%rbx | |
| movq 624(%rsp), %rdi | |
| callq *%rbx | |
| movq $1, 248(%rsp) | |
| movq $1, 256(%rsp) | |
| movq 664(%rsp), %rax | |
| movq %rax, %rcx | |
| movq %rcx, 400(%rsp) | |
| movq %r12, 408(%rsp) | |
| movq %r12, %rax | |
| cmpq $1, %rcx | |
| je .LBB0_308 | |
| movq 664(%rsp), %rax | |
| movq %rax, 248(%rsp) | |
| movq 408(%rsp), %rax | |
| movq 256(%rsp), %r14 | |
| cmpq $1, %r14 | |
| jne .LBB0_311 | |
| .LBB0_308: | |
| cmpq $1, %rax | |
| jne .LBB0_310 | |
| movl $1, %r14d | |
| jmp .LBB0_313 | |
| .p2align 4, 0x90 | |
| .LBB0_310: | |
| movq %rax, 256(%rsp) | |
| movq %rax, %r14 | |
| jmp .LBB0_313 | |
| .LBB0_311: | |
| cmpq $1, %rax | |
| je .LBB0_313 | |
| cmpq %r14, %rax | |
| jne .LBB0_454 | |
| .p2align 4, 0x90 | |
| .LBB0_313: | |
| movq 248(%rsp), %r13 | |
| movq %r13, %rdi | |
| imulq %r14, %rdi | |
| shlq $3, %rdi | |
| movl $32, %esi | |
| movabsq $NRT_MemInfo_alloc_safe_aligned, %rax | |
| callq *%rax | |
| movq %rax, 144(%rsp) | |
| movq 24(%rax), %r15 | |
| testq %r13, %r13 | |
| vmovsd 224(%rsp), %xmm4 | |
| vmovupd 448(%rsp), %ymm5 | |
| jle .LBB0_395 | |
| testq %r14, %r14 | |
| jle .LBB0_395 | |
| leaq -1(%r14), %rax | |
| movq %rax, 8(%rsp) | |
| movq 656(%rsp), %rax | |
| leaq (%rax,%r14,8), %rax | |
| movq %rax, 160(%rsp) | |
| leaq -16(%r14), %rax | |
| shrq $4, %rax | |
| movq %rax, 56(%rsp) | |
| leal 1(%rax), %eax | |
| movq %r14, %rsi | |
| andq $-16, %rsi | |
| andl $1, %eax | |
| movq %rax, 104(%rsp) | |
| leaq 96(%r15), %rcx | |
| leaq (,%r14,8), %rax | |
| movq %rax, 120(%rsp) | |
| movq %r15, %r11 | |
| subq $-128, %r11 | |
| movq %rsi, 112(%rsp) | |
| negq %rsi | |
| movq %rsi, 128(%rsp) | |
| leaq 224(%r15), %rdx | |
| movl $7, %r8d | |
| xorl %eax, %eax | |
| movq %rax, 48(%rsp) | |
| movl $16, %r10d | |
| movl $4, %eax | |
| movq %rax, 24(%rsp) | |
| xorl %eax, %eax | |
| movq %rax, 16(%rsp) | |
| xorl %r12d, %r12d | |
| xorl %eax, %eax | |
| movq %rax, 32(%rsp) | |
| xorl %ebp, %ebp | |
| jmp .LBB0_341 | |
| .LBB0_316: | |
| cmpq $0, 104(%rsp) | |
| jne .LBB0_324 | |
| xorl %esi, %esi | |
| jmp .LBB0_325 | |
| .LBB0_318: | |
| cmpq $0, 104(%rsp) | |
| jne .LBB0_329 | |
| movq %r8, %r9 | |
| xorl %esi, %esi | |
| cmpq $0, 56(%rsp) | |
| jne .LBB0_330 | |
| jmp .LBB0_332 | |
| .LBB0_320: | |
| cmpq $0, 104(%rsp) | |
| jne .LBB0_333 | |
| xorl %edi, %edi | |
| cmpq $0, 56(%rsp) | |
| jne .LBB0_334 | |
| jmp .LBB0_336 | |
| .LBB0_322: | |
| cmpq $0, 104(%rsp) | |
| jne .LBB0_337 | |
| xorl %esi, %esi | |
| cmpq $0, 56(%rsp) | |
| jne .LBB0_338 | |
| jmp .LBB0_340 | |
| .LBB0_324: | |
| movq 656(%rsp), %rsi | |
| vmulpd (%rsi,%rdi,8), %ymm5, %ymm0 | |
| vmulpd 32(%rsi,%rdi,8), %ymm5, %ymm1 | |
| vmulpd 64(%rsi,%rdi,8), %ymm5, %ymm2 | |
| vmulpd 96(%rsi,%rdi,8), %ymm5, %ymm3 | |
| vmovupd %ymm0, (%r15,%rax,8) | |
| vmovupd %ymm1, 32(%r15,%rax,8) | |
| vmovupd %ymm2, 64(%r15,%rax,8) | |
| vmovupd %ymm3, 96(%r15,%rax,8) | |
| movl $16, %esi | |
| .LBB0_325: | |
| cmpq $0, 56(%rsp) | |
| movq 656(%rsp), %rbp | |
| je .LBB0_328 | |
| movq 128(%rsp), %rax | |
| leaq (%rax,%rsi), %rax | |
| leaq -96(,%rsi,8), %rdi | |
| .p2align 4, 0x90 | |
| .LBB0_327: | |
| leaq (%rbp,%rdi), %rsi | |
| vmulpd -32(%rsi,%r10,8), %ymm5, %ymm0 | |
| vmulpd (%rsi,%r10,8), %ymm5, %ymm1 | |
| vmulpd 32(%rsi,%r10,8), %ymm5, %ymm2 | |
| vmulpd 64(%rsi,%r10,8), %ymm5, %ymm3 | |
| vmovupd %ymm0, (%rdi,%rcx) | |
| vmovupd %ymm1, 32(%rdi,%rcx) | |
| vmovupd %ymm2, 64(%rdi,%rcx) | |
| vmovupd %ymm3, 96(%rdi,%rcx) | |
| vmulpd 96(%rsi,%r10,8), %ymm5, %ymm0 | |
| vmulpd 128(%rsi,%r10,8), %ymm5, %ymm1 | |
| vmulpd 160(%rsi,%r10,8), %ymm5, %ymm2 | |
| vmulpd 192(%rsi,%r10,8), %ymm5, %ymm3 | |
| vmovupd %ymm0, 128(%rdi,%rcx) | |
| vmovupd %ymm1, 160(%rdi,%rcx) | |
| vmovupd %ymm2, 192(%rdi,%rcx) | |
| vmovupd %ymm3, 224(%rdi,%rcx) | |
| addq $256, %rdi | |
| addq $32, %rax | |
| jne .LBB0_327 | |
| .LBB0_328: | |
| movq %r12, %rbp | |
| movq 8(%rsp), %rax | |
| movq %rax, 32(%rsp) | |
| movq 112(%rsp), %rsi | |
| movq %rsi, %rax | |
| cmpq %rsi, %r14 | |
| jne .LBB0_348 | |
| jmp .LBB0_394 | |
| .LBB0_329: | |
| movq %r8, %r9 | |
| movq 656(%rsp), %rsi | |
| vmulpd (%rsi,%rdi,8), %ymm5, %ymm0 | |
| vmulpd 32(%rsi,%rdi,8), %ymm5, %ymm1 | |
| vmulpd 64(%rsi,%rdi,8), %ymm5, %ymm2 | |
| vmulpd 96(%rsi,%rdi,8), %ymm5, %ymm3 | |
| vmovupd %ymm0, (%r15,%rax,8) | |
| vmovupd %ymm1, 32(%r15,%rax,8) | |
| vmovupd %ymm2, 64(%r15,%rax,8) | |
| vmovupd %ymm3, 96(%r15,%rax,8) | |
| movl $16, %esi | |
| cmpq $0, 56(%rsp) | |
| je .LBB0_332 | |
| .LBB0_330: | |
| movq 128(%rsp), %rax | |
| leaq (%rax,%rsi), %rax | |
| movq 208(%rsp), %rbp | |
| leaq (%rbp,%rdi,8), %rbp | |
| leaq -96(,%rsi,8), %rbx | |
| .p2align 4, 0x90 | |
| .LBB0_331: | |
| vmulpd -128(%rbp,%rbx), %ymm5, %ymm0 | |
| vmulpd -96(%rbp,%rbx), %ymm5, %ymm1 | |
| vmulpd -64(%rbp,%rbx), %ymm5, %ymm2 | |
| vmulpd -32(%rbp,%rbx), %ymm5, %ymm3 | |
| vmovupd %ymm0, (%rbx,%rcx) | |
| vmovupd %ymm1, 32(%rbx,%rcx) | |
| vmovupd %ymm2, 64(%rbx,%rcx) | |
| vmovupd %ymm3, 96(%rbx,%rcx) | |
| vmulpd (%rbp,%rbx), %ymm5, %ymm0 | |
| vmulpd 32(%rbp,%rbx), %ymm5, %ymm1 | |
| vmulpd 64(%rbp,%rbx), %ymm5, %ymm2 | |
| vmulpd 96(%rbp,%rbx), %ymm5, %ymm3 | |
| vmovupd %ymm0, 128(%rbx,%rcx) | |
| vmovupd %ymm1, 160(%rbx,%rcx) | |
| vmovupd %ymm2, 192(%rbx,%rcx) | |
| vmovupd %ymm3, 224(%rbx,%rcx) | |
| addq $256, %rbx | |
| addq $32, %rax | |
| jne .LBB0_331 | |
| .LBB0_332: | |
| movq 112(%rsp), %rsi | |
| movq %rsi, %rax | |
| cmpq %rsi, %r14 | |
| jne .LBB0_361 | |
| jmp .LBB0_365 | |
| .LBB0_333: | |
| vbroadcastsd (%r8), %ymm0 | |
| vmulpd %ymm5, %ymm0, %ymm0 | |
| vmovupd %ymm0, (%r15,%rax,8) | |
| vmovupd %ymm0, 32(%r15,%rax,8) | |
| vmovupd %ymm0, 64(%r15,%rax,8) | |
| vmovupd %ymm0, 96(%r15,%rax,8) | |
| movl $16, %edi | |
| cmpq $0, 56(%rsp) | |
| je .LBB0_336 | |
| .LBB0_334: | |
| vbroadcastsd (%r8), %ymm0 | |
| vmulpd %ymm5, %ymm0, %ymm0 | |
| leaq 96(,%rdi,8), %rax | |
| movq 112(%rsp), %rsi | |
| subq %rdi, %rsi | |
| .p2align 4, 0x90 | |
| .LBB0_335: | |
| vmovupd %ymm0, -224(%rax,%r11) | |
| vmovupd %ymm0, -192(%rax,%r11) | |
| vmovupd %ymm0, -160(%rax,%r11) | |
| vmovupd %ymm0, -128(%rax,%r11) | |
| vmovupd %ymm0, -96(%rax,%r11) | |
| vmovupd %ymm0, -64(%rax,%r11) | |
| vmovupd %ymm0, -32(%rax,%r11) | |
| vmovupd %ymm0, (%rax,%r11) | |
| addq $256, %rax | |
| addq $-32, %rsi | |
| jne .LBB0_335 | |
| .LBB0_336: | |
| movq 112(%rsp), %rsi | |
| movq %rsi, %rax | |
| cmpq %rsi, %r14 | |
| jne .LBB0_371 | |
| jmp .LBB0_377 | |
| .LBB0_337: | |
| vbroadcastsd (%rdi), %ymm0 | |
| vmulpd %ymm5, %ymm0, %ymm0 | |
| vmovupd %ymm0, (%r15,%rax,8) | |
| vmovupd %ymm0, 32(%r15,%rax,8) | |
| vmovupd %ymm0, 64(%r15,%rax,8) | |
| vmovupd %ymm0, 96(%r15,%rax,8) | |
| movl $16, %esi | |
| cmpq $0, 56(%rsp) | |
| je .LBB0_340 | |
| .LBB0_338: | |
| vbroadcastsd (%rdi), %ymm0 | |
| vmulpd %ymm5, %ymm0, %ymm0 | |
| movq 128(%rsp), %rax | |
| leaq (%rax,%rsi), %rax | |
| leaq (%rdx,%rsi,8), %rsi | |
| .p2align 4, 0x90 | |
| .LBB0_339: | |
| vmovupd %ymm0, -224(%rsi) | |
| vmovupd %ymm0, -192(%rsi) | |
| vmovupd %ymm0, -160(%rsi) | |
| vmovupd %ymm0, -128(%rsi) | |
| vmovupd %ymm0, -96(%rsi) | |
| vmovupd %ymm0, -64(%rsi) | |
| vmovupd %ymm0, -32(%rsi) | |
| vmovupd %ymm0, (%rsi) | |
| addq $256, %rsi | |
| addq $32, %rax | |
| jne .LBB0_339 | |
| .LBB0_340: | |
| movq 112(%rsp), %rsi | |
| movq %rsi, %rax | |
| cmpq %rsi, %r14 | |
| jne .LBB0_383 | |
| jmp .LBB0_394 | |
| .p2align 4, 0x90 | |
| .LBB0_341: | |
| movq %r14, %rax | |
| imulq %r12, %rax | |
| leaq (%r14,%rax), %rsi | |
| leaq (%r15,%rax,8), %r9 | |
| leaq (%r15,%rsi,8), %rbx | |
| cmpq $1, 664(%rsp) | |
| jbe .LBB0_355 | |
| movq %r12, %rdi | |
| movq 672(%rsp), %rsi | |
| imulq %rsi, %rdi | |
| cmpq $2, %rsi | |
| jb .LBB0_366 | |
| cmpq $15, %r14 | |
| jbe .LBB0_347 | |
| cmpq $0, 112(%rsp) | |
| je .LBB0_347 | |
| leaq (%r14,%rdi), %rsi | |
| movq 656(%rsp), %rbp | |
| leaq (%rbp,%rsi,8), %rsi | |
| cmpq %rsi, %r9 | |
| jae .LBB0_316 | |
| leaq (%rbp,%rdi,8), %rsi | |
| cmpq %rbx, %rsi | |
| jae .LBB0_316 | |
| .p2align 4, 0x90 | |
| .LBB0_347: | |
| xorl %eax, %eax | |
| .LBB0_348: | |
| movq %r8, %r9 | |
| movl %r14d, %ebx | |
| subl %eax, %ebx | |
| movq 8(%rsp), %r8 | |
| subq %rax, %r8 | |
| andq $7, %rbx | |
| je .LBB0_351 | |
| movq 16(%rsp), %rsi | |
| leaq (%rax,%rsi), %rsi | |
| leaq (%r15,%rsi,8), %rbp | |
| movq 48(%rsp), %rsi | |
| leaq (%rax,%rsi), %rsi | |
| movq 656(%rsp), %rdi | |
| leaq (%rdi,%rsi,8), %rsi | |
| negq %rbx | |
| .p2align 4, 0x90 | |
| .LBB0_350: | |
| vmulsd (%rsi), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rbp) | |
| addq $1, %rax | |
| addq $8, %rbp | |
| addq $8, %rsi | |
| addq $1, %rbx | |
| jne .LBB0_350 | |
| .LBB0_351: | |
| movq %r12, %rbp | |
| movq 8(%rsp), %rsi | |
| movq %rsi, 32(%rsp) | |
| cmpq $7, %r8 | |
| movq %r9, %r8 | |
| jb .LBB0_394 | |
| movq 24(%rsp), %rsi | |
| leaq (%rax,%rsi), %rsi | |
| leaq (%r15,%rsi,8), %rdi | |
| leaq (%rax,%r8), %rsi | |
| movq 656(%rsp), %rbp | |
| leaq (%rbp,%rsi,8), %rbx | |
| movq %r14, %rbp | |
| subq %rax, %rbp | |
| .p2align 4, 0x90 | |
| .LBB0_353: | |
| vmulsd -56(%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, -32(%rdi) | |
| vmulsd -48(%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, -24(%rdi) | |
| vmulsd -40(%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, -16(%rdi) | |
| vmulsd -32(%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, -8(%rdi) | |
| vmulsd -24(%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rdi) | |
| vmulsd -16(%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, 8(%rdi) | |
| vmulsd -8(%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, 16(%rdi) | |
| vmulsd (%rbx), %xmm4, %xmm0 | |
| vmovsd %xmm0, 24(%rdi) | |
| addq $64, %rdi | |
| addq $64, %rbx | |
| addq $-8, %rbp | |
| jne .LBB0_353 | |
| movq %r12, %rbp | |
| movq 8(%rsp), %rax | |
| movq %rax, 32(%rsp) | |
| jmp .LBB0_394 | |
| .p2align 4, 0x90 | |
| .LBB0_355: | |
| movq %rbp, %rdi | |
| movq 672(%rsp), %rsi | |
| imulq %rsi, %rdi | |
| cmpq $2, %rsi | |
| jb .LBB0_378 | |
| cmpq $16, %r14 | |
| movq %rbp, 64(%rsp) | |
| jb .LBB0_360 | |
| cmpq $0, 112(%rsp) | |
| je .LBB0_360 | |
| movq 160(%rsp), %rsi | |
| leaq (%rsi,%rdi,8), %rsi | |
| cmpq %rsi, %r9 | |
| jae .LBB0_318 | |
| movq 656(%rsp), %rsi | |
| leaq (%rsi,%rdi,8), %rsi | |
| cmpq %rbx, %rsi | |
| jae .LBB0_318 | |
| .p2align 4, 0x90 | |
| .LBB0_360: | |
| movq %r8, %r9 | |
| xorl %eax, %eax | |
| .LBB0_361: | |
| movl %r14d, %ebp | |
| subl %eax, %ebp | |
| movq 8(%rsp), %r8 | |
| subq %rax, %r8 | |
| andq $7, %rbp | |
| je .LBB0_364 | |
| movq 16(%rsp), %rsi | |
| leaq (%rax,%rsi), %rsi | |
| leaq (%r15,%rsi,8), %rsi | |
| movq 656(%rsp), %rbx | |
| leaq (%rbx,%rdi,8), %rbx | |
| negq %rbp | |
| .p2align 4, 0x90 | |
| .LBB0_363: | |
| vmulsd (%rbx,%rax,8), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rsi) | |
| addq $1, %rax | |
| addq $8, %rsi | |
| addq $1, %rbp | |
| jne .LBB0_363 | |
| .LBB0_364: | |
| cmpq $7, %r8 | |
| jae .LBB0_390 | |
| .LBB0_365: | |
| movq 8(%rsp), %rax | |
| movq %rax, 32(%rsp) | |
| movq %r9, %r8 | |
| jmp .LBB0_393 | |
| .p2align 4, 0x90 | |
| .LBB0_366: | |
| movq %r8, 64(%rsp) | |
| addq 32(%rsp), %rdi | |
| movq 656(%rsp), %rsi | |
| leaq (%rsi,%rdi,8), %r8 | |
| cmpq $15, %r14 | |
| jbe .LBB0_370 | |
| cmpq $0, 112(%rsp) | |
| je .LBB0_370 | |
| cmpq %r8, %r9 | |
| jae .LBB0_320 | |
| cmpq %rbx, %r8 | |
| jae .LBB0_320 | |
| .p2align 4, 0x90 | |
| .LBB0_370: | |
| xorl %eax, %eax | |
| .LBB0_371: | |
| movl %r14d, %ebp | |
| subl %eax, %ebp | |
| movq 8(%rsp), %rdi | |
| subq %rax, %rdi | |
| andq $7, %rbp | |
| je .LBB0_374 | |
| movq 16(%rsp), %rsi | |
| leaq (%rax,%rsi), %rsi | |
| leaq (%r15,%rsi,8), %rsi | |
| negq %rbp | |
| .p2align 4, 0x90 | |
| .LBB0_373: | |
| vmulsd (%r8), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rsi) | |
| addq $1, %rax | |
| addq $8, %rsi | |
| addq $1, %rbp | |
| jne .LBB0_373 | |
| .LBB0_374: | |
| cmpq $7, %rdi | |
| jb .LBB0_377 | |
| movq 24(%rsp), %rsi | |
| leaq (%rax,%rsi), %rsi | |
| leaq (%r15,%rsi,8), %rdi | |
| movq %r14, %rbp | |
| subq %rax, %rbp | |
| .p2align 4, 0x90 | |
| .LBB0_376: | |
| vmulsd (%r8), %xmm4, %xmm0 | |
| vmovsd %xmm0, -32(%rdi) | |
| vmulsd (%r8), %xmm4, %xmm0 | |
| vmovsd %xmm0, -24(%rdi) | |
| vmulsd (%r8), %xmm4, %xmm0 | |
| vmovsd %xmm0, -16(%rdi) | |
| vmulsd (%r8), %xmm4, %xmm0 | |
| vmovsd %xmm0, -8(%rdi) | |
| vmulsd (%r8), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rdi) | |
| vmulsd (%r8), %xmm4, %xmm0 | |
| vmovsd %xmm0, 8(%rdi) | |
| vmulsd (%r8), %xmm4, %xmm0 | |
| vmovsd %xmm0, 16(%rdi) | |
| vmulsd (%r8), %xmm4, %xmm0 | |
| vmovsd %xmm0, 24(%rdi) | |
| addq $64, %rdi | |
| addq $-8, %rbp | |
| jne .LBB0_376 | |
| .LBB0_377: | |
| movq %r12, %rbp | |
| movq 64(%rsp), %r8 | |
| jmp .LBB0_394 | |
| .p2align 4, 0x90 | |
| .LBB0_378: | |
| addq 32(%rsp), %rdi | |
| movq 656(%rsp), %rsi | |
| leaq (%rsi,%rdi,8), %rdi | |
| cmpq $15, %r14 | |
| jbe .LBB0_382 | |
| cmpq $0, 112(%rsp) | |
| je .LBB0_382 | |
| cmpq %rdi, %r9 | |
| jae .LBB0_322 | |
| cmpq %rbx, %rdi | |
| jae .LBB0_322 | |
| .p2align 4, 0x90 | |
| .LBB0_382: | |
| xorl %eax, %eax | |
| .LBB0_383: | |
| movq %rbp, %r9 | |
| movl %r14d, %ebp | |
| subl %eax, %ebp | |
| movq 8(%rsp), %rbx | |
| subq %rax, %rbx | |
| andq $7, %rbp | |
| je .LBB0_386 | |
| movq 16(%rsp), %rsi | |
| leaq (%rax,%rsi), %rsi | |
| leaq (%r15,%rsi,8), %rsi | |
| negq %rbp | |
| .p2align 4, 0x90 | |
| .LBB0_385: | |
| vmulsd (%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rsi) | |
| addq $1, %rax | |
| addq $8, %rsi | |
| addq $1, %rbp | |
| jne .LBB0_385 | |
| .LBB0_386: | |
| cmpq $7, %rbx | |
| jb .LBB0_389 | |
| movq 24(%rsp), %rsi | |
| leaq (%rax,%rsi), %rsi | |
| leaq (%r15,%rsi,8), %rbp | |
| movq %r14, %rbx | |
| subq %rax, %rbx | |
| .p2align 4, 0x90 | |
| .LBB0_388: | |
| vmulsd (%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, -32(%rbp) | |
| vmulsd (%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, -24(%rbp) | |
| vmulsd (%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, -16(%rbp) | |
| vmulsd (%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, -8(%rbp) | |
| vmulsd (%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rbp) | |
| vmulsd (%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, 8(%rbp) | |
| vmulsd (%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, 16(%rbp) | |
| vmulsd (%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, 24(%rbp) | |
| addq $64, %rbp | |
| addq $-8, %rbx | |
| jne .LBB0_388 | |
| .LBB0_389: | |
| movq %r9, %rbp | |
| jmp .LBB0_394 | |
| .LBB0_390: | |
| movq 24(%rsp), %rsi | |
| leaq (%rax,%rsi), %rsi | |
| leaq (%r15,%rsi,8), %rbx | |
| addq %rax, %rdi | |
| movq 232(%rsp), %rsi | |
| leaq (%rsi,%rdi,8), %rdi | |
| movq %r14, %rbp | |
| subq %rax, %rbp | |
| movq %r9, %r8 | |
| .p2align 4, 0x90 | |
| .LBB0_391: | |
| vmulsd -56(%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, -32(%rbx) | |
| vmulsd -48(%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, -24(%rbx) | |
| vmulsd -40(%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, -16(%rbx) | |
| vmulsd -32(%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, -8(%rbx) | |
| vmulsd -24(%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, (%rbx) | |
| vmulsd -16(%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, 8(%rbx) | |
| vmulsd -8(%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, 16(%rbx) | |
| vmulsd (%rdi), %xmm4, %xmm0 | |
| vmovsd %xmm0, 24(%rbx) | |
| addq $64, %rbx | |
| addq $64, %rdi | |
| addq $-8, %rbp | |
| jne .LBB0_391 | |
| movq 8(%rsp), %rax | |
| movq %rax, 32(%rsp) | |
| .LBB0_393: | |
| movq 64(%rsp), %rbp | |
| .LBB0_394: | |
| addq $1, %r12 | |
| movq 120(%rsp), %rax | |
| addq %rax, %rcx | |
| addq %r14, 16(%rsp) | |
| addq %r14, 24(%rsp) | |
| movq 672(%rsp), %rsi | |
| addq %rsi, %r10 | |
| addq %rsi, 48(%rsp) | |
| addq %rsi, %r8 | |
| addq %rax, %r11 | |
| addq %rax, %rdx | |
| cmpq %r13, %r12 | |
| jne .LBB0_341 | |
| .LBB0_395: | |
| movq 168(%rsp), %rdi | |
| movabsq $NRT_incref, %rax | |
| vzeroupper | |
| callq *%rax | |
| cmpq $0, 592(%rsp) | |
| movq 600(%rsp), %r10 | |
| movq %r10, %r9 | |
| jle .LBB0_449 | |
| xorl %r10d, %r10d | |
| movq 584(%rsp), %r12 | |
| xorl %r11d, %r11d | |
| xorl %esi, %esi | |
| xorl %r8d, %r8d | |
| xorl %edx, %edx | |
| xorl %edi, %edi | |
| movq %r13, 40(%rsp) | |
| .p2align 4, 0x90 | |
| .LBB0_397: | |
| testq %r9, %r9 | |
| jle .LBB0_448 | |
| cmpq $2, 592(%rsp) | |
| jb .LBB0_403 | |
| cmpq $2, %r9 | |
| jb .LBB0_407 | |
| movq 80(%rsp), %rdi | |
| xorl %eax, %eax | |
| testq %rdi, %rdi | |
| je .LBB0_417 | |
| movq 72(%rsp), %rcx | |
| .p2align 4, 0x90 | |
| .LBB0_402: | |
| cmpq $1, %r13 | |
| cmovaq %r11, %r8 | |
| cmpq $1, %r14 | |
| vmovsd (%r12,%rax,8), %xmm0 | |
| cmovaq %rax, %rsi | |
| movq %r8, %rdx | |
| imulq %r14, %rdx | |
| addq %rsi, %rdx | |
| vaddsd (%r15,%rdx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r12,%rax,8) | |
| addq $1, %rax | |
| cmpq %rax, %rdi | |
| jne .LBB0_402 | |
| jmp .LBB0_418 | |
| .p2align 4, 0x90 | |
| .LBB0_403: | |
| movq %r9, %rax | |
| movq %rdi, %r9 | |
| imulq %rax, %r9 | |
| movq %rax, %rbp | |
| cmpq $2, %rax | |
| movq %rdi, 16(%rsp) | |
| jb .LBB0_410 | |
| movq 80(%rsp), %rdi | |
| testq %rdi, %rdi | |
| je .LBB0_421 | |
| movq 584(%rsp), %rax | |
| leaq (%rax,%r9,8), %rdx | |
| xorl %ebx, %ebx | |
| movq 72(%rsp), %rax | |
| .p2align 4, 0x90 | |
| .LBB0_406: | |
| cmpq $1, %r13 | |
| cmovaq %r11, %r8 | |
| cmpq $1, %r14 | |
| vmovsd (%rdx,%rbx,8), %xmm0 | |
| cmovaq %rbx, %rsi | |
| movq %r8, %rcx | |
| imulq %r14, %rcx | |
| addq %rsi, %rcx | |
| vaddsd (%r15,%rcx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r12,%rbx,8) | |
| addq $1, %rbx | |
| cmpq %rbx, %rdi | |
| jne .LBB0_406 | |
| jmp .LBB0_422 | |
| .LBB0_407: | |
| movq %r9, %rax | |
| movq %r11, %r9 | |
| movq %rax, %rbp | |
| imulq %rax, %r9 | |
| movq %rdx, 8(%rsp) | |
| addq %rdx, %r9 | |
| movq 80(%rsp), %rdi | |
| xorl %ebx, %ebx | |
| testq %rdi, %rdi | |
| je .LBB0_427 | |
| movq 584(%rsp), %rax | |
| movq 72(%rsp), %rcx | |
| .p2align 4, 0x90 | |
| .LBB0_409: | |
| cmpq $1, %r13 | |
| cmovaq %r11, %r8 | |
| cmpq $1, %r14 | |
| vmovsd (%rax,%r9,8), %xmm0 | |
| cmovaq %rbx, %rsi | |
| movq %r8, %rdx | |
| imulq %r14, %rdx | |
| addq %rsi, %rdx | |
| vaddsd (%r15,%rdx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r12,%rbx,8) | |
| addq $1, %rbx | |
| cmpq %rbx, %rdi | |
| jne .LBB0_409 | |
| jmp .LBB0_428 | |
| .LBB0_410: | |
| addq %rdx, %r9 | |
| cmpq $1, %r13 | |
| movq %rdx, 8(%rsp) | |
| jbe .LBB0_414 | |
| movq %r11, %r8 | |
| imulq %r14, %r8 | |
| movq 80(%rsp), %rbx | |
| testq %rbx, %rbx | |
| movq 72(%rsp), %rax | |
| je .LBB0_433 | |
| xorl %edx, %edx | |
| movq 584(%rsp), %rdi | |
| .p2align 4, 0x90 | |
| .LBB0_413: | |
| cmpq $1, %r14 | |
| vmovsd (%rdi,%r9,8), %xmm0 | |
| cmovaq %rdx, %rsi | |
| leaq (%rsi,%r8), %rcx | |
| vaddsd (%r15,%rcx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r12,%rdx,8) | |
| addq $1, %rdx | |
| cmpq %rdx, %rbx | |
| jne .LBB0_413 | |
| jmp .LBB0_434 | |
| .LBB0_414: | |
| movq %r8, %r13 | |
| imulq %r14, %r13 | |
| movq 80(%rsp), %rbx | |
| testq %rbx, %rbx | |
| movq 72(%rsp), %rdi | |
| je .LBB0_439 | |
| xorl %edx, %edx | |
| movq 584(%rsp), %rax | |
| .p2align 4, 0x90 | |
| .LBB0_416: | |
| cmpq $1, %r14 | |
| vmovsd (%rax,%r9,8), %xmm0 | |
| cmovaq %rdx, %rsi | |
| leaq (%rsi,%r13), %rcx | |
| vaddsd (%r15,%rcx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r12,%rdx,8) | |
| addq $1, %rdx | |
| cmpq %rdx, %rbx | |
| jne .LBB0_416 | |
| jmp .LBB0_440 | |
| .LBB0_417: | |
| movq 72(%rsp), %rcx | |
| .LBB0_418: | |
| movq %r11, %rdi | |
| movq %rcx, %rdx | |
| cmpq $3, %rcx | |
| jb .LBB0_448 | |
| .p2align 4, 0x90 | |
| .LBB0_419: | |
| movq %rsi, %rdx | |
| leaq 3(%rax), %rsi | |
| cmpq $1, %r14 | |
| cmovbeq %rdx, %rsi | |
| leaq 2(%rax), %rbp | |
| cmpq $1, %r14 | |
| cmovbeq %rdx, %rbp | |
| leaq 1(%rax), %rbx | |
| cmpq $1, %r14 | |
| cmovbeq %rdx, %rbx | |
| cmovaq %rax, %rdx | |
| cmpq $1, %r13 | |
| vmovsd (%r12,%rax,8), %xmm0 | |
| cmovaq %r11, %r8 | |
| movq %r8, %rdi | |
| imulq %r14, %rdi | |
| addq %rdi, %rdx | |
| vaddsd (%r15,%rdx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r12,%rax,8) | |
| vmovsd 8(%r12,%rax,8), %xmm0 | |
| addq %rdi, %rbx | |
| vaddsd (%r15,%rbx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 8(%r12,%rax,8) | |
| vmovsd 16(%r12,%rax,8), %xmm0 | |
| addq %rdi, %rbp | |
| vaddsd (%r15,%rbp,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 16(%r12,%rax,8) | |
| vmovsd 24(%r12,%rax,8), %xmm0 | |
| addq %rsi, %rdi | |
| vaddsd (%r15,%rdi,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 24(%r12,%rax,8) | |
| addq $4, %rax | |
| cmpq %r9, %rax | |
| jne .LBB0_419 | |
| movq %r11, %rdi | |
| movq 72(%rsp), %rdx | |
| jmp .LBB0_448 | |
| .LBB0_421: | |
| xorl %ebx, %ebx | |
| movq 72(%rsp), %rax | |
| .LBB0_422: | |
| cmpq $3, %rax | |
| jae .LBB0_424 | |
| movq %rax, %rdx | |
| movq %rbp, %r9 | |
| jmp .LBB0_447 | |
| .LBB0_424: | |
| movq 200(%rsp), %rax | |
| leaq (%rax,%r9,8), %rax | |
| movq %rbp, %r9 | |
| .p2align 4, 0x90 | |
| .LBB0_425: | |
| movq %rsi, %rcx | |
| leaq 3(%rbx), %rsi | |
| cmpq $1, %r14 | |
| cmovbeq %rcx, %rsi | |
| leaq 2(%rbx), %rdx | |
| cmpq $1, %r14 | |
| cmovbeq %rcx, %rdx | |
| leaq 1(%rbx), %rdi | |
| cmpq $1, %r14 | |
| cmovbeq %rcx, %rdi | |
| cmovaq %rbx, %rcx | |
| cmpq $1, %r13 | |
| vmovsd -24(%rax,%rbx,8), %xmm0 | |
| cmovaq %r11, %r8 | |
| movq %r8, %rbp | |
| imulq %r14, %rbp | |
| addq %rbp, %rcx | |
| vaddsd (%r15,%rcx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%r12,%rbx,8) | |
| vmovsd -16(%rax,%rbx,8), %xmm0 | |
| addq %rbp, %rdi | |
| vaddsd (%r15,%rdi,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 8(%r12,%rbx,8) | |
| vmovsd -8(%rax,%rbx,8), %xmm0 | |
| addq %rbp, %rdx | |
| vaddsd (%r15,%rdx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 16(%r12,%rbx,8) | |
| vmovsd (%rax,%rbx,8), %xmm0 | |
| addq %rsi, %rbp | |
| vaddsd (%r15,%rbp,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 24(%r12,%rbx,8) | |
| addq $4, %rbx | |
| cmpq %r9, %rbx | |
| jne .LBB0_425 | |
| movq 72(%rsp), %rdx | |
| jmp .LBB0_447 | |
| .LBB0_427: | |
| movq 72(%rsp), %rcx | |
| .LBB0_428: | |
| cmpq $3, %rcx | |
| jae .LBB0_430 | |
| movq %r11, %rdi | |
| movq %rbp, %r9 | |
| movq 8(%rsp), %rdx | |
| jmp .LBB0_448 | |
| .LBB0_430: | |
| movq %r10, %rax | |
| movq 584(%rsp), %r13 | |
| movq %rax, 24(%rsp) | |
| leaq (%r13,%rax), %rdx | |
| movq %rbp, %r10 | |
| .p2align 4, 0x90 | |
| .LBB0_431: | |
| movq %rsi, %rdi | |
| leaq 3(%rbx), %rsi | |
| cmpq $1, %r14 | |
| cmovbeq %rdi, %rsi | |
| leaq 2(%rbx), %rbp | |
| cmpq $1, %r14 | |
| cmovbeq %rdi, %rbp | |
| leaq 1(%rbx), %rax | |
| cmpq $1, %r14 | |
| cmovbeq %rdi, %rax | |
| cmovaq %rbx, %rdi | |
| cmpq $1, 40(%rsp) | |
| vmovsd (%r13,%r9,8), %xmm0 | |
| cmovaq %r11, %r8 | |
| movq %r8, %rcx | |
| imulq %r14, %rcx | |
| addq %rcx, %rdi | |
| vaddsd (%r15,%rdi,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%rdx,%rbx,8) | |
| vmovsd (%r13,%r9,8), %xmm0 | |
| addq %rcx, %rax | |
| vaddsd (%r15,%rax,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 8(%rdx,%rbx,8) | |
| vmovsd (%r13,%r9,8), %xmm0 | |
| addq %rcx, %rbp | |
| vaddsd (%r15,%rbp,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 16(%rdx,%rbx,8) | |
| vmovsd (%r13,%r9,8), %xmm0 | |
| addq %rsi, %rcx | |
| vaddsd (%r15,%rcx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 24(%rdx,%rbx,8) | |
| addq $4, %rbx | |
| cmpq %r10, %rbx | |
| jne .LBB0_431 | |
| movq %r11, %rdi | |
| movq 40(%rsp), %r13 | |
| movq 24(%rsp), %rax | |
| movq %r10, %r9 | |
| movq %rax, %r10 | |
| movq 8(%rsp), %rdx | |
| jmp .LBB0_448 | |
| .LBB0_433: | |
| xorl %edx, %edx | |
| movq 584(%rsp), %rdi | |
| .LBB0_434: | |
| cmpq $3, %rax | |
| jae .LBB0_436 | |
| movq %r11, %r8 | |
| movq %rbp, %r9 | |
| jmp .LBB0_446 | |
| .LBB0_436: | |
| movq %r10, 24(%rsp) | |
| leaq (%rdi,%r10), %rbx | |
| movq %rbp, %r10 | |
| .p2align 4, 0x90 | |
| .LBB0_437: | |
| movq %rsi, %rcx | |
| leaq 3(%rdx), %rsi | |
| cmpq $1, %r14 | |
| cmovbeq %rcx, %rsi | |
| leaq 2(%rdx), %rbp | |
| cmpq $1, %r14 | |
| cmovbeq %rcx, %rbp | |
| leaq 1(%rdx), %rax | |
| cmpq $1, %r14 | |
| vmovsd (%rdi,%r9,8), %xmm0 | |
| cmovbeq %rcx, %rax | |
| cmovaq %rdx, %rcx | |
| addq %r8, %rcx | |
| vaddsd (%r15,%rcx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%rbx,%rdx,8) | |
| vmovsd (%rdi,%r9,8), %xmm0 | |
| addq %r8, %rax | |
| vaddsd (%r15,%rax,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 8(%rbx,%rdx,8) | |
| vmovsd (%rdi,%r9,8), %xmm0 | |
| addq %r8, %rbp | |
| vaddsd (%r15,%rbp,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 16(%rbx,%rdx,8) | |
| vmovsd (%rdi,%r9,8), %xmm0 | |
| leaq (%rsi,%r8), %rax | |
| vaddsd (%r15,%rax,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, 24(%rbx,%rdx,8) | |
| addq $4, %rdx | |
| cmpq %r10, %rdx | |
| jne .LBB0_437 | |
| movq %r11, %r8 | |
| movq %r10, %r9 | |
| jmp .LBB0_445 | |
| .LBB0_439: | |
| xorl %edx, %edx | |
| .LBB0_440: | |
| cmpq $3, %rdi | |
| jae .LBB0_442 | |
| movq 40(%rsp), %r13 | |
| movq %rbp, %r9 | |
| jmp .LBB0_446 | |
| .LBB0_442: | |
| addq $3, %rdx | |
| movq 584(%rsp), %rbx | |
| movq %r10, 24(%rsp) | |
| leaq (%rbx,%r10), %rbp | |
| .p2align 4, 0x90 | |
| .LBB0_443: | |
| movq %rsi, %rax | |
| cmpq $1, %r14 | |
| cmovaq %rdx, %rsi | |
| leaq -1(%rdx), %rcx | |
| cmpq $1, %r14 | |
| cmovbeq %rax, %rcx | |
| leaq -2(%rdx), %rdi | |
| cmpq $1, %r14 | |
| cmovbeq %rax, %rdi | |
| leaq -3(%rdx), %r10 | |
| cmpq $1, %r14 | |
| vmovsd (%rbx,%r9,8), %xmm0 | |
| cmovbeq %rax, %r10 | |
| addq %r13, %r10 | |
| vaddsd (%r15,%r10,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, -24(%rbp,%rdx,8) | |
| vmovsd (%rbx,%r9,8), %xmm0 | |
| addq %r13, %rdi | |
| vaddsd (%r15,%rdi,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, -16(%rbp,%rdx,8) | |
| vmovsd (%rbx,%r9,8), %xmm0 | |
| addq %r13, %rcx | |
| vaddsd (%r15,%rcx,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, -8(%rbp,%rdx,8) | |
| vmovsd (%rbx,%r9,8), %xmm0 | |
| leaq (%rsi,%r13), %rax | |
| vaddsd (%r15,%rax,8), %xmm0, %xmm0 | |
| vmovsd %xmm0, (%rbp,%rdx,8) | |
| movq 240(%rsp), %rax | |
| leaq 4(%rax,%rdx), %rax | |
| addq $4, %rdx | |
| cmpq $3, %rax | |
| jne .LBB0_443 | |
| movq 600(%rsp), %r10 | |
| movq %r10, %r9 | |
| movq 40(%rsp), %r13 | |
| .LBB0_445: | |
| movq 24(%rsp), %r10 | |
| .LBB0_446: | |
| movq 8(%rsp), %rdx | |
| .LBB0_447: | |
| movq 16(%rsp), %rdi | |
| .LBB0_448: | |
| addq $1, %r11 | |
| movq 88(%rsp), %rax | |
| addq %rax, %r12 | |
| addq %rax, %r10 | |
| cmpq 592(%rsp), %r11 | |
| jne .LBB0_397 | |
| .LBB0_449: | |
| movq 144(%rsp), %rdi | |
| movabsq $NRT_decref, %rax | |
| movq %rax, %rbx | |
| callq *%rbx | |
| movq 168(%rsp), %rdi | |
| callq *%rbx | |
| movq 600(%rsp), %rax | |
| vmovsd 216(%rsp), %xmm3 | |
| vmovsd 384(%rsp), %xmm5 | |
| vaddsd %xmm3, %xmm5, %xmm5 | |
| movabsq $.LCPI0_2, %rcx | |
| vmovsd (%rcx), %xmm0 | |
| vucomisd %xmm5, %xmm0 | |
| movq 584(%rsp), %rcx | |
| movq 672(%rsp), %r12 | |
| ja .LBB0_1 | |
| movq 624(%rsp), %rdi | |
| movq %rax, %r14 | |
| movq %rcx, %rbp | |
| movabsq $NRT_decref, %rax | |
| movq %rax, %rbx | |
| callq *%rbx | |
| movq 768(%rsp), %rdi | |
| callq *%rbx | |
| movq 696(%rsp), %rdi | |
| callq *%rbx | |
| movq 312(%rsp), %rax | |
| movq 168(%rsp), %rcx | |
| movq %rcx, (%rax) | |
| movq 296(%rsp), %rcx | |
| movq %rcx, 8(%rax) | |
| movq 304(%rsp), %rcx | |
| movq %rcx, 16(%rax) | |
| movq 576(%rsp), %rcx | |
| movq %rcx, 24(%rax) | |
| movq %rbp, 32(%rax) | |
| movq 592(%rsp), %rcx | |
| movq %rcx, 40(%rax) | |
| movq %r14, 48(%rax) | |
| movq 608(%rsp), %rcx | |
| movq %rcx, 56(%rax) | |
| movq 616(%rsp), %rcx | |
| movq %rcx, 64(%rax) | |
| xorl %eax, %eax | |
| jmp .LBB0_453 | |
| .LBB0_451: | |
| movabsq $.const.picklebuf.139995429797704, %rax | |
| .LBB0_452: | |
| movq 184(%rsp), %rcx | |
| movq %rax, (%rcx) | |
| movl $1, %eax | |
| .LBB0_453: | |
| addq $520, %rsp | |
| popq %rbx | |
| popq %r12 | |
| popq %r13 | |
| popq %r14 | |
| popq %r15 | |
| popq %rbp | |
| vzeroupper | |
| retq | |
| .LBB0_454: | |
| movabsq $.const.picklebuf.139995429850312, %rax | |
| jmp .LBB0_452 | |
| .Lfunc_end0: | |
| .size _ZN8__main__17leapfrog_slow$241E5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi1E1C7mutable7alignedE, .Lfunc_end0-_ZN8__main__17leapfrog_slow$241E5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi1E1C7mutable7alignedE | |
| .cfi_endproc | |
| .globl _ZN7cpython8__main__17leapfrog_slow$241E5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi1E1C7mutable7alignedE | |
| .p2align 4, 0x90 | |
| .type _ZN7cpython8__main__17leapfrog_slow$241E5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi1E1C7mutable7alignedE,@function | |
| _ZN7cpython8__main__17leapfrog_slow$241E5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi1E1C7mutable7alignedE: | |
| .cfi_startproc | |
| pushq %rbp | |
| .Lcfi13: | |
| .cfi_def_cfa_offset 16 | |
| .Lcfi14: | |
| .cfi_offset %rbp, -16 | |
| movq %rsp, %rbp | |
| .Lcfi15: | |
| .cfi_def_cfa_register %rbp | |
| pushq %r15 | |
| pushq %r14 | |
| pushq %r13 | |
| pushq %r12 | |
| pushq %rbx | |
| andq $-32, %rsp | |
| subq $800, %rsp | |
| .Lcfi16: | |
| .cfi_offset %rbx, -56 | |
| .Lcfi17: | |
| .cfi_offset %r12, -48 | |
| .Lcfi18: | |
| .cfi_offset %r13, -40 | |
| .Lcfi19: | |
| .cfi_offset %r14, -32 | |
| .Lcfi20: | |
| .cfi_offset %r15, -24 | |
| movq %rdi, %rbx | |
| leaq 224(%rsp), %r11 | |
| leaq 232(%rsp), %r14 | |
| movabsq $.const.leapfrog_slow, %r10 | |
| movabsq $PyArg_UnpackTuple, %r15 | |
| leaq 248(%rsp), %r8 | |
| leaq 240(%rsp), %r9 | |
| movl $4, %edx | |
| movl $4, %ecx | |
| xorl %eax, %eax | |
| movq %rsi, %rdi | |
| movq %r10, %rsi | |
| pushq %r11 | |
| pushq %r14 | |
| callq *%r15 | |
| addq $16, %rsp | |
| vxorps %ymm0, %ymm0, %ymm0 | |
| vmovaps %ymm0, 576(%rsp) | |
| vmovaps %ymm0, 544(%rsp) | |
| movq $0, 608(%rsp) | |
| vmovaps %ymm0, 480(%rsp) | |
| vmovaps %ymm0, 448(%rsp) | |
| movq $0, 512(%rsp) | |
| vmovaps %ymm0, 384(%rsp) | |
| vmovaps %ymm0, 352(%rsp) | |
| movq $0, 416(%rsp) | |
| vmovups %ymm0, 664(%rsp) | |
| vmovaps %ymm0, 640(%rsp) | |
| testl %eax, %eax | |
| je .LBB1_24 | |
| testq %rbx, %rbx | |
| je .LBB1_26 | |
| movq 24(%rbx), %r12 | |
| testq %r12, %r12 | |
| je .LBB1_18 | |
| movq 248(%rsp), %rdi | |
| movabsq $NRT_adapt_ndarray_from_python, %r14 | |
| leaq 544(%rsp), %rsi | |
| vzeroupper | |
| callq *%r14 | |
| testl %eax, %eax | |
| jne .LBB1_24 | |
| movq 544(%rsp), %rax | |
| movq %rax, 72(%rsp) | |
| movq 552(%rsp), %rax | |
| movq %rax, 40(%rsp) | |
| movq 560(%rsp), %rax | |
| movq %rax, 32(%rsp) | |
| movq 568(%rsp), %r15 | |
| movq 576(%rsp), %r13 | |
| movq 584(%rsp), %rbx | |
| movq 592(%rsp), %rax | |
| movq %rax, 64(%rsp) | |
| movq 600(%rsp), %rax | |
| movq %rax, 56(%rsp) | |
| movq 608(%rsp), %rax | |
| movq %rax, 48(%rsp) | |
| movq 240(%rsp), %rdi | |
| leaq 448(%rsp), %rsi | |
| callq *%r14 | |
| testl %eax, %eax | |
| jne .LBB1_22 | |
| movq %rbx, 24(%rsp) | |
| movq 448(%rsp), %rbx | |
| movq 456(%rsp), %rax | |
| movq %rax, 16(%rsp) | |
| movq 464(%rsp), %rax | |
| movq %rax, 8(%rsp) | |
| movq 472(%rsp), %rax | |
| movq %rax, (%rsp) | |
| movq 480(%rsp), %rax | |
| movq %rax, 208(%rsp) | |
| movq 488(%rsp), %rax | |
| movq %rax, 200(%rsp) | |
| movq 496(%rsp), %rax | |
| movq %rax, 192(%rsp) | |
| movq 504(%rsp), %rax | |
| movq %rax, 184(%rsp) | |
| movq 512(%rsp), %rax | |
| movq %rax, 176(%rsp) | |
| movq 232(%rsp), %rdi | |
| leaq 352(%rsp), %rsi | |
| callq *%r14 | |
| testl %eax, %eax | |
| jne .LBB1_21 | |
| movq %r13, 160(%rsp) | |
| movq %r15, 168(%rsp) | |
| movq %rbx, 80(%rsp) | |
| movq 352(%rsp), %r15 | |
| movq %r14, %rax | |
| movq 360(%rsp), %r14 | |
| movq 368(%rsp), %rbx | |
| movq 376(%rsp), %r13 | |
| movq 384(%rsp), %rcx | |
| movq %rcx, 152(%rsp) | |
| movq 392(%rsp), %rcx | |
| movq %rcx, 144(%rsp) | |
| movq 400(%rsp), %rcx | |
| movq %rcx, 136(%rsp) | |
| movq 408(%rsp), %rcx | |
| movq %rcx, 128(%rsp) | |
| movq 416(%rsp), %rcx | |
| movq %rcx, 120(%rsp) | |
| movq 224(%rsp), %rdi | |
| leaq 640(%rsp), %rsi | |
| callq *%rax | |
| testl %eax, %eax | |
| jne .LBB1_20 | |
| movq %r12, 88(%rsp) | |
| movq %rbx, 104(%rsp) | |
| movq 640(%rsp), %rax | |
| movq %rax, 112(%rsp) | |
| movq 648(%rsp), %r11 | |
| movq %r13, 96(%rsp) | |
| movq 656(%rsp), %r13 | |
| movq 664(%rsp), %rbx | |
| movq 672(%rsp), %rdx | |
| movq 680(%rsp), %r10 | |
| movq 688(%rsp), %r12 | |
| vxorps %ymm0, %ymm0, %ymm0 | |
| vmovaps %ymm0, 288(%rsp) | |
| vmovaps %ymm0, 256(%rsp) | |
| movq $0, 320(%rsp) | |
| subq $8, %rsp | |
| leaq 264(%rsp), %rdi | |
| leaq 224(%rsp), %rsi | |
| movq 80(%rsp), %rcx | |
| movq 48(%rsp), %r8 | |
| movq 40(%rsp), %r9 | |
| pushq %r12 | |
| pushq %r10 | |
| pushq %rdx | |
| pushq %rbx | |
| pushq %r13 | |
| pushq %r11 | |
| pushq %rax | |
| pushq 184(%rsp) | |
| pushq 200(%rsp) | |
| pushq 216(%rsp) | |
| pushq 232(%rsp) | |
| pushq 248(%rsp) | |
| pushq 200(%rsp) | |
| pushq 216(%rsp) | |
| pushq %r14 | |
| pushq %r15 | |
| movq %r15, %r14 | |
| pushq 312(%rsp) | |
| pushq 328(%rsp) | |
| pushq 344(%rsp) | |
| pushq 360(%rsp) | |
| pushq 376(%rsp) | |
| pushq 176(%rsp) | |
| pushq 192(%rsp) | |
| pushq 208(%rsp) | |
| movq 280(%rsp), %r13 | |
| pushq %r13 | |
| pushq 256(%rsp) | |
| pushq 272(%rsp) | |
| pushq 288(%rsp) | |
| pushq 256(%rsp) | |
| pushq 400(%rsp) | |
| pushq 416(%rsp) | |
| movabsq $_ZN8__main__17leapfrog_slow$241E5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi1E1C7mutable7alignedE, %rax | |
| vzeroupper | |
| callq *%rax | |
| addq $256, %rsp | |
| movl %eax, %ebx | |
| movq 216(%rsp), %rax | |
| movq %rax, (%rsp) | |
| movq 256(%rsp), %r15 | |
| movq 264(%rsp), %rax | |
| movq %rax, 64(%rsp) | |
| movq 272(%rsp), %rax | |
| movq %rax, 56(%rsp) | |
| movq 280(%rsp), %rax | |
| movq %rax, 48(%rsp) | |
| movq 288(%rsp), %rax | |
| movq %rax, 40(%rsp) | |
| movq 296(%rsp), %rax | |
| movq %rax, 32(%rsp) | |
| movq 304(%rsp), %rax | |
| movq %rax, 24(%rsp) | |
| movq 312(%rsp), %rax | |
| movq %rax, 16(%rsp) | |
| movq 320(%rsp), %rax | |
| movq %rax, 8(%rsp) | |
| movabsq $NRT_decref, %r12 | |
| movq 72(%rsp), %rdi | |
| callq *%r12 | |
| movq %r13, %rdi | |
| callq *%r12 | |
| movq %r14, %rdi | |
| callq *%r12 | |
| movq 112(%rsp), %rdi | |
| callq *%r12 | |
| cmpl $-2, %ebx | |
| je .LBB1_10 | |
| testl %ebx, %ebx | |
| jne .LBB1_11 | |
| movq 88(%rsp), %rax | |
| movq 24(%rax), %rdi | |
| movabsq $PyList_GetItem, %rax | |
| xorl %esi, %esi | |
| callq *%rax | |
| movq %r15, 704(%rsp) | |
| movq 64(%rsp), %rcx | |
| movq %rcx, 712(%rsp) | |
| movq 56(%rsp), %rcx | |
| movq %rcx, 720(%rsp) | |
| movq 48(%rsp), %rcx | |
| movq %rcx, 728(%rsp) | |
| movq 40(%rsp), %rcx | |
| movq %rcx, 736(%rsp) | |
| movq 32(%rsp), %rcx | |
| movq %rcx, 744(%rsp) | |
| movq 24(%rsp), %rcx | |
| movq %rcx, 752(%rsp) | |
| movq 16(%rsp), %rcx | |
| movq %rcx, 760(%rsp) | |
| movq 8(%rsp), %rcx | |
| movq %rcx, 768(%rsp) | |
| movabsq $NRT_adapt_ndarray_to_python, %rbx | |
| leaq 704(%rsp), %rdi | |
| movl $2, %esi | |
| movl $1, %edx | |
| movq %rax, %rcx | |
| callq *%rbx | |
| jmp .LBB1_25 | |
| .LBB1_10: | |
| movabsq $_Py_NoneStruct, %rbx | |
| movabsq $Py_IncRef, %rax | |
| movq %rbx, %rdi | |
| callq *%rax | |
| movq %rbx, %rax | |
| jmp .LBB1_25 | |
| .LBB1_11: | |
| jle .LBB1_14 | |
| movabsq $PyErr_Clear, %rax | |
| callq *%rax | |
| movq (%rsp), %rax | |
| movl 8(%rax), %esi | |
| movq (%rax), %rdi | |
| movabsq $numba_unpickle, %rax | |
| callq *%rax | |
| testq %rax, %rax | |
| je .LBB1_24 | |
| movabsq $numba_do_raise, %rcx | |
| movq %rax, %rdi | |
| callq *%rcx | |
| jmp .LBB1_24 | |
| .LBB1_14: | |
| cmpl $-3, %ebx | |
| je .LBB1_17 | |
| cmpl $-1, %ebx | |
| je .LBB1_24 | |
| movabsq $PyExc_SystemError, %rdi | |
| movabsq $".const.unknown error when calling native function", %rsi | |
| jmp .LBB1_19 | |
| .LBB1_17: | |
| movabsq $PyExc_StopIteration, %rdi | |
| movabsq $PyErr_SetNone, %rax | |
| jmp .LBB1_23 | |
| .LBB1_18: | |
| movabsq $PyExc_RuntimeError, %rdi | |
| movabsq $".const.missing Environment", %rsi | |
| .LBB1_19: | |
| movabsq $PyErr_SetString, %rax | |
| vzeroupper | |
| callq *%rax | |
| jmp .LBB1_24 | |
| .LBB1_20: | |
| movabsq $NRT_decref, %rax | |
| movq %r15, %rdi | |
| callq *%rax | |
| movq 80(%rsp), %rbx | |
| .LBB1_21: | |
| movabsq $NRT_decref, %rax | |
| movq %rbx, %rdi | |
| callq *%rax | |
| .LBB1_22: | |
| movabsq $NRT_decref, %rax | |
| movq 72(%rsp), %rdi | |
| .LBB1_23: | |
| callq *%rax | |
| .LBB1_24: | |
| xorl %eax, %eax | |
| .LBB1_25: | |
| leaq -40(%rbp), %rsp | |
| popq %rbx | |
| popq %r12 | |
| popq %r13 | |
| popq %r14 | |
| popq %r15 | |
| popq %rbp | |
| vzeroupper | |
| retq | |
| .LBB1_26: | |
| movabsq $".const.Fatal error: missing _dynfunc.Closure", %rdi | |
| movabsq $puts, %rax | |
| vzeroupper | |
| callq *%rax | |
| .Lfunc_end1: | |
| .size _ZN7cpython8__main__17leapfrog_slow$241E5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi1E1C7mutable7alignedE, .Lfunc_end1-_ZN7cpython8__main__17leapfrog_slow$241E5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi2E1C7mutable7alignedE5ArrayIdLi1E1C7mutable7alignedE | |
| .cfi_endproc | |
| .weak NRT_incref | |
| .p2align 4, 0x90 | |
| .type NRT_incref,@function | |
| NRT_incref: | |
| testq %rdi, %rdi | |
| je .LBB2_1 | |
| lock addq $1, (%rdi) | |
| retq | |
| .LBB2_1: | |
| retq | |
| .Lfunc_end2: | |
| .size NRT_incref, .Lfunc_end2-NRT_incref | |
| .weak NRT_decref | |
| .p2align 4, 0x90 | |
| .type NRT_decref,@function | |
| NRT_decref: | |
| .cfi_startproc | |
| testq %rdi, %rdi | |
| je .LBB3_2 | |
| movq $-1, %rax | |
| lock xaddq %rax, (%rdi) | |
| cmpq $1, %rax | |
| je .LBB3_3 | |
| .LBB3_2: | |
| retq | |
| .LBB3_3: | |
| movabsq $NRT_MemInfo_call_dtor, %rax | |
| jmpq *%rax | |
| .Lfunc_end3: | |
| .size NRT_decref, .Lfunc_end3-NRT_decref | |
| .cfi_endproc | |
| .type .const.picklebuf.139995429850312,@object | |
| .section .rodata,"a",@progbits | |
| .p2align 3 | |
| .const.picklebuf.139995429850312: | |
| .quad .const.pickledata.139995429850312 | |
| .long 91 | |
| .zero 4 | |
| .size .const.picklebuf.139995429850312, 16 | |
| .type .const.picklebuf.139995429797704,@object | |
| .p2align 3 | |
| .const.picklebuf.139995429797704: | |
| .quad .const.pickledata.139995429797704 | |
| .long 68 | |
| .zero 4 | |
| .size .const.picklebuf.139995429797704, 16 | |
| .type .const.pickledata.139995429797704,@object | |
| .p2align 4 | |
| .const.pickledata.139995429797704: | |
| .ascii "\200\004\2259\000\000\000\000\000\000\000\214\bbuiltins\224\214\021ZeroDivisionError\224\223\224\214\020division by zero\224\205\224\206\224." | |
| .size .const.pickledata.139995429797704, 68 | |
| .type .const.pickledata.139995429850312,@object | |
| .p2align 4 | |
| .const.pickledata.139995429850312: | |
| .ascii "\200\004\225P\000\000\000\000\000\000\000\214\bbuiltins\224\214\nValueError\224\223\224\214.unable to broadcast argument 1 to output array\224\205\224\206\224." | |
| .size .const.pickledata.139995429850312, 91 | |
| .type .const.leapfrog_slow,@object | |
| .const.leapfrog_slow: | |
| .asciz "leapfrog_slow" | |
| .size .const.leapfrog_slow, 14 | |
| .type ".const.Fatal error: missing _dynfunc.Closure",@object | |
| .p2align 4 | |
| ".const.Fatal error: missing _dynfunc.Closure": | |
| .asciz "Fatal error: missing _dynfunc.Closure" | |
| .size ".const.Fatal error: missing _dynfunc.Closure", 38 | |
| .type ".const.missing Environment",@object | |
| .p2align 4 | |
| ".const.missing Environment": | |
| .asciz "missing Environment" | |
| .size ".const.missing Environment", 20 | |
| .type ".const.unknown error when calling native function",@object | |
| .p2align 4 | |
| ".const.unknown error when calling native function": | |
| .asciz "unknown error when calling native function" | |
| .size ".const.unknown error when calling native function", 43 | |
| .section ".note.GNU-stack","",@progbits | |
| ================================================================================ |
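For context: the mangled symbol sized at the end of the listing is the Numba-compiled entry point for a Python function named leapfrog_slow taking three 2-D and one 1-D contiguous float64 arrays. The original Python source is not part of this gist; the sketch below is only a hypothetical reconstruction, assuming a leapfrog-style update written with whole-array numpy operator overloading, which is the pattern that makes Numba allocate temporaries through NRT_MemInfo_alloc_safe_aligned and emit the scalar/AVX multiply-add loops seen above. The argument names, the timestep, and the body are guesses, not the author's code.

    import numpy as np
    from numba import njit

    @njit
    def leapfrog_slow(pos, vel, acc, steps):   # argument names are guesses
        dt = steps[0]                          # assumed: a scalar pulled from the 1-D argument
        # Whole-array numpy expressions like these are what Numba lowers into
        # NRT_MemInfo_alloc_safe_aligned temporaries and the vmulsd/vmulpd +
        # vaddsd loops visible in the listing.
        vel = vel + 0.5 * dt * acc
        pos = pos + dt * vel
        return pos

    # Example call (shapes are purely illustrative):
    # pos = leapfrog_slow(np.zeros((100, 3)), np.ones((100, 3)),
    #                     np.zeros((100, 3)), np.array([0.01]))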