Path: blob/main/sys/crypto/openssl/amd64/aesni-gcm-x86_64.S
/* Do not modify. This file is auto-generated from aesni-gcm-x86_64.pl. */
.text

.type _aesni_ctr32_ghash_6x,@function
.align 32
_aesni_ctr32_ghash_6x:
.cfi_startproc
vmovdqu 32(%r11),%xmm2
subq $6,%rdx
vpxor %xmm4,%xmm4,%xmm4
vmovdqu 0-128(%rcx),%xmm15
vpaddb %xmm2,%xmm1,%xmm10
vpaddb %xmm2,%xmm10,%xmm11
vpaddb %xmm2,%xmm11,%xmm12
vpaddb %xmm2,%xmm12,%xmm13
vpaddb %xmm2,%xmm13,%xmm14
vpxor %xmm15,%xmm1,%xmm9
vmovdqu %xmm4,16+8(%rsp)
jmp .Loop6x

.align 32
.Loop6x:
addl $100663296,%ebx
jc .Lhandle_ctr32
vmovdqu 0-32(%r9),%xmm3
vpaddb %xmm2,%xmm14,%xmm1
vpxor %xmm15,%xmm10,%xmm10
vpxor %xmm15,%xmm11,%xmm11

.Lresume_ctr32:
vmovdqu %xmm1,(%r8)
vpclmulqdq $0x10,%xmm3,%xmm7,%xmm5
vpxor %xmm15,%xmm12,%xmm12
vmovups 16-128(%rcx),%xmm2
vpclmulqdq $0x01,%xmm3,%xmm7,%xmm6
xorq %r12,%r12
cmpq %r14,%r15

vaesenc %xmm2,%xmm9,%xmm9
vmovdqu 48+8(%rsp),%xmm0
vpxor %xmm15,%xmm13,%xmm13
vpclmulqdq $0x00,%xmm3,%xmm7,%xmm1
vaesenc %xmm2,%xmm10,%xmm10
vpxor %xmm15,%xmm14,%xmm14
setnc %r12b
vpclmulqdq $0x11,%xmm3,%xmm7,%xmm7
vaesenc %xmm2,%xmm11,%xmm11
vmovdqu 16-32(%r9),%xmm3
negq %r12
vaesenc %xmm2,%xmm12,%xmm12
vpxor %xmm5,%xmm6,%xmm6
vpclmulqdq $0x00,%xmm3,%xmm0,%xmm5
vpxor %xmm4,%xmm8,%xmm8
vaesenc %xmm2,%xmm13,%xmm13
vpxor %xmm5,%xmm1,%xmm4
andq $0x60,%r12
vmovups 32-128(%rcx),%xmm15
vpclmulqdq $0x10,%xmm3,%xmm0,%xmm1
vaesenc %xmm2,%xmm14,%xmm14

vpclmulqdq $0x01,%xmm3,%xmm0,%xmm2
leaq (%r14,%r12,1),%r14
vaesenc %xmm15,%xmm9,%xmm9
vpxor 16+8(%rsp),%xmm8,%xmm8
vpclmulqdq $0x11,%xmm3,%xmm0,%xmm3
vmovdqu 64+8(%rsp),%xmm0
vaesenc %xmm15,%xmm10,%xmm10
movbeq 88(%r14),%r13
vaesenc %xmm15,%xmm11,%xmm11
movbeq 80(%r14),%r12
vaesenc %xmm15,%xmm12,%xmm12
movq %r13,32+8(%rsp)
vaesenc %xmm15,%xmm13,%xmm13
movq %r12,40+8(%rsp)
vmovdqu 48-32(%r9),%xmm5
vaesenc %xmm15,%xmm14,%xmm14

vmovups 48-128(%rcx),%xmm15
vpxor %xmm1,%xmm6,%xmm6
vpclmulqdq $0x00,%xmm5,%xmm0,%xmm1
vaesenc %xmm15,%xmm9,%xmm9
vpxor %xmm2,%xmm6,%xmm6
vpclmulqdq $0x10,%xmm5,%xmm0,%xmm2
vaesenc %xmm15,%xmm10,%xmm10
vpxor %xmm3,%xmm7,%xmm7
vpclmulqdq $0x01,%xmm5,%xmm0,%xmm3
vaesenc %xmm15,%xmm11,%xmm11
vpclmulqdq $0x11,%xmm5,%xmm0,%xmm5
vmovdqu 80+8(%rsp),%xmm0
vaesenc %xmm15,%xmm12,%xmm12
vaesenc %xmm15,%xmm13,%xmm13
vpxor %xmm1,%xmm4,%xmm4
vmovdqu 64-32(%r9),%xmm1
vaesenc %xmm15,%xmm14,%xmm14

vmovups 64-128(%rcx),%xmm15
vpxor %xmm2,%xmm6,%xmm6
vpclmulqdq $0x00,%xmm1,%xmm0,%xmm2
vaesenc %xmm15,%xmm9,%xmm9
vpxor %xmm3,%xmm6,%xmm6
vpclmulqdq $0x10,%xmm1,%xmm0,%xmm3
vaesenc %xmm15,%xmm10,%xmm10
movbeq 72(%r14),%r13
vpxor %xmm5,%xmm7,%xmm7
vpclmulqdq $0x01,%xmm1,%xmm0,%xmm5
vaesenc %xmm15,%xmm11,%xmm11
movbeq 64(%r14),%r12
vpclmulqdq $0x11,%xmm1,%xmm0,%xmm1
vmovdqu 96+8(%rsp),%xmm0
vaesenc %xmm15,%xmm12,%xmm12
movq %r13,48+8(%rsp)
vaesenc %xmm15,%xmm13,%xmm13
movq %r12,56+8(%rsp)
vpxor %xmm2,%xmm4,%xmm4
vmovdqu 96-32(%r9),%xmm2
vaesenc %xmm15,%xmm14,%xmm14

vmovups 80-128(%rcx),%xmm15
vpxor %xmm3,%xmm6,%xmm6
vpclmulqdq $0x00,%xmm2,%xmm0,%xmm3
vaesenc %xmm15,%xmm9,%xmm9
vpxor %xmm5,%xmm6,%xmm6
vpclmulqdq $0x10,%xmm2,%xmm0,%xmm5
vaesenc %xmm15,%xmm10,%xmm10
movbeq 56(%r14),%r13
vpxor %xmm1,%xmm7,%xmm7
vpclmulqdq $0x01,%xmm2,%xmm0,%xmm1
vpxor 112+8(%rsp),%xmm8,%xmm8
vaesenc %xmm15,%xmm11,%xmm11
movbeq 48(%r14),%r12
vpclmulqdq $0x11,%xmm2,%xmm0,%xmm2
vaesenc %xmm15,%xmm12,%xmm12
movq %r13,64+8(%rsp)
vaesenc %xmm15,%xmm13,%xmm13
movq %r12,72+8(%rsp)
vpxor %xmm3,%xmm4,%xmm4
vmovdqu 112-32(%r9),%xmm3
vaesenc %xmm15,%xmm14,%xmm14

vmovups 96-128(%rcx),%xmm15
vpxor %xmm5,%xmm6,%xmm6
vpclmulqdq $0x10,%xmm3,%xmm8,%xmm5
vaesenc %xmm15,%xmm9,%xmm9
vpxor %xmm1,%xmm6,%xmm6
vpclmulqdq $0x01,%xmm3,%xmm8,%xmm1
vaesenc %xmm15,%xmm10,%xmm10
movbeq 40(%r14),%r13
vpxor %xmm2,%xmm7,%xmm7
vpclmulqdq $0x00,%xmm3,%xmm8,%xmm2
vaesenc %xmm15,%xmm11,%xmm11
movbeq 32(%r14),%r12
vpclmulqdq $0x11,%xmm3,%xmm8,%xmm8
vaesenc %xmm15,%xmm12,%xmm12
movq %r13,80+8(%rsp)
vaesenc %xmm15,%xmm13,%xmm13
movq %r12,88+8(%rsp)
vpxor %xmm5,%xmm6,%xmm6
vaesenc %xmm15,%xmm14,%xmm14
vpxor %xmm1,%xmm6,%xmm6

vmovups 112-128(%rcx),%xmm15
vpslldq $8,%xmm6,%xmm5
vpxor %xmm2,%xmm4,%xmm4
vmovdqu 16(%r11),%xmm3

vaesenc %xmm15,%xmm9,%xmm9
vpxor %xmm8,%xmm7,%xmm7
vaesenc %xmm15,%xmm10,%xmm10
vpxor %xmm5,%xmm4,%xmm4
movbeq 24(%r14),%r13
vaesenc %xmm15,%xmm11,%xmm11
movbeq 16(%r14),%r12
vpalignr $8,%xmm4,%xmm4,%xmm0
vpclmulqdq $0x10,%xmm3,%xmm4,%xmm4
movq %r13,96+8(%rsp)
vaesenc %xmm15,%xmm12,%xmm12
movq %r12,104+8(%rsp)
vaesenc %xmm15,%xmm13,%xmm13
vmovups 128-128(%rcx),%xmm1
vaesenc %xmm15,%xmm14,%xmm14

vaesenc %xmm1,%xmm9,%xmm9
vmovups 144-128(%rcx),%xmm15
vaesenc %xmm1,%xmm10,%xmm10
vpsrldq $8,%xmm6,%xmm6
vaesenc %xmm1,%xmm11,%xmm11
vpxor %xmm6,%xmm7,%xmm7
vaesenc %xmm1,%xmm12,%xmm12
vpxor %xmm0,%xmm4,%xmm4
movbeq 8(%r14),%r13
vaesenc %xmm1,%xmm13,%xmm13
movbeq 0(%r14),%r12
vaesenc %xmm1,%xmm14,%xmm14
vmovups 160-128(%rcx),%xmm1
cmpl $11,%ebp
jb .Lenc_tail

vaesenc %xmm15,%xmm9,%xmm9
vaesenc %xmm15,%xmm10,%xmm10
vaesenc %xmm15,%xmm11,%xmm11
vaesenc %xmm15,%xmm12,%xmm12
vaesenc %xmm15,%xmm13,%xmm13
vaesenc %xmm15,%xmm14,%xmm14

vaesenc %xmm1,%xmm9,%xmm9
vaesenc %xmm1,%xmm10,%xmm10
vaesenc %xmm1,%xmm11,%xmm11
vaesenc %xmm1,%xmm12,%xmm12
vaesenc %xmm1,%xmm13,%xmm13
vmovups 176-128(%rcx),%xmm15
vaesenc %xmm1,%xmm14,%xmm14
vmovups 192-128(%rcx),%xmm1
je .Lenc_tail

vaesenc %xmm15,%xmm9,%xmm9
vaesenc %xmm15,%xmm10,%xmm10
vaesenc %xmm15,%xmm11,%xmm11
vaesenc %xmm15,%xmm12,%xmm12
vaesenc %xmm15,%xmm13,%xmm13
vaesenc %xmm15,%xmm14,%xmm14

vaesenc %xmm1,%xmm9,%xmm9
vaesenc %xmm1,%xmm10,%xmm10
vaesenc %xmm1,%xmm11,%xmm11
vaesenc %xmm1,%xmm12,%xmm12
vaesenc %xmm1,%xmm13,%xmm13
vmovups 208-128(%rcx),%xmm15
vaesenc %xmm1,%xmm14,%xmm14
vmovups 224-128(%rcx),%xmm1
jmp .Lenc_tail

.align 32
.Lhandle_ctr32:
vmovdqu (%r11),%xmm0
vpshufb %xmm0,%xmm1,%xmm6
vmovdqu 48(%r11),%xmm5
vpaddd 64(%r11),%xmm6,%xmm10
vpaddd %xmm5,%xmm6,%xmm11
vmovdqu 0-32(%r9),%xmm3
vpaddd %xmm5,%xmm10,%xmm12
vpshufb %xmm0,%xmm10,%xmm10
vpaddd %xmm5,%xmm11,%xmm13
vpshufb %xmm0,%xmm11,%xmm11
vpxor %xmm15,%xmm10,%xmm10
vpaddd %xmm5,%xmm12,%xmm14
vpshufb %xmm0,%xmm12,%xmm12
vpxor %xmm15,%xmm11,%xmm11
vpaddd %xmm5,%xmm13,%xmm1
vpshufb %xmm0,%xmm13,%xmm13
vpshufb %xmm0,%xmm14,%xmm14
vpshufb %xmm0,%xmm1,%xmm1
jmp .Lresume_ctr32

.align 32
.Lenc_tail:
vaesenc %xmm15,%xmm9,%xmm9
vmovdqu %xmm7,16+8(%rsp)
vpalignr $8,%xmm4,%xmm4,%xmm8
vaesenc %xmm15,%xmm10,%xmm10
vpclmulqdq $0x10,%xmm3,%xmm4,%xmm4
vpxor 0(%rdi),%xmm1,%xmm2
vaesenc %xmm15,%xmm11,%xmm11
vpxor 16(%rdi),%xmm1,%xmm0
vaesenc %xmm15,%xmm12,%xmm12
vpxor 32(%rdi),%xmm1,%xmm5
vaesenc %xmm15,%xmm13,%xmm13
vpxor 48(%rdi),%xmm1,%xmm6
vaesenc %xmm15,%xmm14,%xmm14
vpxor 64(%rdi),%xmm1,%xmm7
vpxor 80(%rdi),%xmm1,%xmm3
vmovdqu (%r8),%xmm1

vaesenclast %xmm2,%xmm9,%xmm9
vmovdqu 32(%r11),%xmm2
vaesenclast %xmm0,%xmm10,%xmm10
vpaddb %xmm2,%xmm1,%xmm0
movq %r13,112+8(%rsp)
leaq 96(%rdi),%rdi
vaesenclast %xmm5,%xmm11,%xmm11
vpaddb %xmm2,%xmm0,%xmm5
movq %r12,120+8(%rsp)
leaq 96(%rsi),%rsi
vmovdqu 0-128(%rcx),%xmm15
vaesenclast %xmm6,%xmm12,%xmm12
vpaddb %xmm2,%xmm5,%xmm6
vaesenclast %xmm7,%xmm13,%xmm13
vpaddb %xmm2,%xmm6,%xmm7
vaesenclast %xmm3,%xmm14,%xmm14
vpaddb %xmm2,%xmm7,%xmm3

addq $0x60,%r10
subq $0x6,%rdx
jc .L6x_done

vmovups %xmm9,-96(%rsi)
vpxor %xmm15,%xmm1,%xmm9
vmovups %xmm10,-80(%rsi)
vmovdqa %xmm0,%xmm10
vmovups %xmm11,-64(%rsi)
vmovdqa %xmm5,%xmm11
vmovups %xmm12,-48(%rsi)
vmovdqa %xmm6,%xmm12
vmovups %xmm13,-32(%rsi)
vmovdqa %xmm7,%xmm13
vmovups %xmm14,-16(%rsi)
vmovdqa %xmm3,%xmm14
vmovdqu 32+8(%rsp),%xmm7
jmp .Loop6x

.L6x_done:
vpxor 16+8(%rsp),%xmm8,%xmm8
vpxor %xmm4,%xmm8,%xmm8

.byte 0xf3,0xc3
.cfi_endproc
.size _aesni_ctr32_ghash_6x,.-_aesni_ctr32_ghash_6x
.globl aesni_gcm_decrypt
.type aesni_gcm_decrypt,@function
.align 32
aesni_gcm_decrypt:
.cfi_startproc
xorq %r10,%r10
cmpq $0x60,%rdx
jb .Lgcm_dec_abort

leaq (%rsp),%rax
.cfi_def_cfa_register %rax
pushq %rbx
.cfi_offset %rbx,-16
pushq %rbp
.cfi_offset %rbp,-24
pushq %r12
.cfi_offset %r12,-32
pushq %r13
.cfi_offset %r13,-40
pushq %r14
.cfi_offset %r14,-48
pushq %r15
.cfi_offset %r15,-56
vzeroupper

vmovdqu (%r8),%xmm1
addq $-128,%rsp
movl 12(%r8),%ebx
leaq .Lbswap_mask(%rip),%r11
leaq -128(%rcx),%r14
movq $0xf80,%r15
vmovdqu (%r9),%xmm8
andq $-128,%rsp
vmovdqu (%r11),%xmm0
leaq 128(%rcx),%rcx
leaq 32+32(%r9),%r9
movl 240-128(%rcx),%ebp
vpshufb %xmm0,%xmm8,%xmm8

andq %r15,%r14
andq %rsp,%r15
subq %r14,%r15
jc .Ldec_no_key_aliasing
cmpq $768,%r15
jnc .Ldec_no_key_aliasing
subq %r15,%rsp
.Ldec_no_key_aliasing:

vmovdqu 80(%rdi),%xmm7
leaq (%rdi),%r14
vmovdqu 64(%rdi),%xmm4
leaq -192(%rdi,%rdx,1),%r15
vmovdqu 48(%rdi),%xmm5
shrq $4,%rdx
xorq %r10,%r10
vmovdqu 32(%rdi),%xmm6
vpshufb %xmm0,%xmm7,%xmm7
vmovdqu 16(%rdi),%xmm2
vpshufb %xmm0,%xmm4,%xmm4
vmovdqu (%rdi),%xmm3
vpshufb %xmm0,%xmm5,%xmm5
vmovdqu %xmm4,48(%rsp)
vpshufb %xmm0,%xmm6,%xmm6
vmovdqu %xmm5,64(%rsp)
vpshufb %xmm0,%xmm2,%xmm2
vmovdqu %xmm6,80(%rsp)
vpshufb %xmm0,%xmm3,%xmm3
vmovdqu %xmm2,96(%rsp)
vmovdqu %xmm3,112(%rsp)

call _aesni_ctr32_ghash_6x

vmovups %xmm9,-96(%rsi)
vmovups %xmm10,-80(%rsi)
vmovups %xmm11,-64(%rsi)
vmovups %xmm12,-48(%rsi)
vmovups %xmm13,-32(%rsi)
vmovups %xmm14,-16(%rsi)

vpshufb (%r11),%xmm8,%xmm8
vmovdqu %xmm8,-64(%r9)

vzeroupper
movq -48(%rax),%r15
.cfi_restore %r15
movq -40(%rax),%r14
.cfi_restore %r14
movq -32(%rax),%r13
.cfi_restore %r13
movq -24(%rax),%r12
.cfi_restore %r12
movq -16(%rax),%rbp
.cfi_restore %rbp
movq -8(%rax),%rbx
.cfi_restore %rbx
leaq (%rax),%rsp
.cfi_def_cfa_register %rsp
.Lgcm_dec_abort:
movq %r10,%rax
.byte 0xf3,0xc3
.cfi_endproc
.size aesni_gcm_decrypt,.-aesni_gcm_decrypt
.type _aesni_ctr32_6x,@function
.align 32
_aesni_ctr32_6x:
.cfi_startproc
vmovdqu 0-128(%rcx),%xmm4
vmovdqu 32(%r11),%xmm2
leaq -1(%rbp),%r13
vmovups 16-128(%rcx),%xmm15
leaq 32-128(%rcx),%r12
vpxor %xmm4,%xmm1,%xmm9
addl $100663296,%ebx
jc .Lhandle_ctr32_2
vpaddb %xmm2,%xmm1,%xmm10
vpaddb %xmm2,%xmm10,%xmm11
vpxor %xmm4,%xmm10,%xmm10
vpaddb %xmm2,%xmm11,%xmm12
vpxor %xmm4,%xmm11,%xmm11
vpaddb %xmm2,%xmm12,%xmm13
vpxor %xmm4,%xmm12,%xmm12
vpaddb %xmm2,%xmm13,%xmm14
vpxor %xmm4,%xmm13,%xmm13
vpaddb %xmm2,%xmm14,%xmm1
vpxor %xmm4,%xmm14,%xmm14
jmp .Loop_ctr32

.align 16
.Loop_ctr32:
vaesenc %xmm15,%xmm9,%xmm9
vaesenc %xmm15,%xmm10,%xmm10
vaesenc %xmm15,%xmm11,%xmm11
vaesenc %xmm15,%xmm12,%xmm12
vaesenc %xmm15,%xmm13,%xmm13
vaesenc %xmm15,%xmm14,%xmm14
vmovups (%r12),%xmm15
leaq 16(%r12),%r12
decl %r13d
jnz .Loop_ctr32

vmovdqu (%r12),%xmm3
vaesenc %xmm15,%xmm9,%xmm9
vpxor 0(%rdi),%xmm3,%xmm4
vaesenc %xmm15,%xmm10,%xmm10
vpxor 16(%rdi),%xmm3,%xmm5
vaesenc %xmm15,%xmm11,%xmm11
vpxor 32(%rdi),%xmm3,%xmm6
vaesenc %xmm15,%xmm12,%xmm12
vpxor 48(%rdi),%xmm3,%xmm8
vaesenc %xmm15,%xmm13,%xmm13
vpxor 64(%rdi),%xmm3,%xmm2
vaesenc %xmm15,%xmm14,%xmm14
vpxor 80(%rdi),%xmm3,%xmm3
leaq 96(%rdi),%rdi

vaesenclast %xmm4,%xmm9,%xmm9
vaesenclast %xmm5,%xmm10,%xmm10
vaesenclast %xmm6,%xmm11,%xmm11
vaesenclast %xmm8,%xmm12,%xmm12
vaesenclast %xmm2,%xmm13,%xmm13
vaesenclast %xmm3,%xmm14,%xmm14
vmovups %xmm9,0(%rsi)
vmovups %xmm10,16(%rsi)
vmovups %xmm11,32(%rsi)
vmovups %xmm12,48(%rsi)
vmovups %xmm13,64(%rsi)
vmovups %xmm14,80(%rsi)
leaq 96(%rsi),%rsi

.byte 0xf3,0xc3
.align 32
.Lhandle_ctr32_2:
vpshufb %xmm0,%xmm1,%xmm6
vmovdqu 48(%r11),%xmm5
vpaddd 64(%r11),%xmm6,%xmm10
vpaddd %xmm5,%xmm6,%xmm11
vpaddd %xmm5,%xmm10,%xmm12
vpshufb %xmm0,%xmm10,%xmm10
vpaddd %xmm5,%xmm11,%xmm13
vpshufb %xmm0,%xmm11,%xmm11
vpxor %xmm4,%xmm10,%xmm10
vpaddd %xmm5,%xmm12,%xmm14
vpshufb %xmm0,%xmm12,%xmm12
vpxor %xmm4,%xmm11,%xmm11
vpaddd %xmm5,%xmm13,%xmm1
vpshufb %xmm0,%xmm13,%xmm13
vpxor %xmm4,%xmm12,%xmm12
vpshufb %xmm0,%xmm14,%xmm14
vpxor %xmm4,%xmm13,%xmm13
vpshufb %xmm0,%xmm1,%xmm1
vpxor %xmm4,%xmm14,%xmm14
jmp .Loop_ctr32
.cfi_endproc
.size _aesni_ctr32_6x,.-_aesni_ctr32_6x

.globl aesni_gcm_encrypt
.type aesni_gcm_encrypt,@function
.align 32
aesni_gcm_encrypt:
.cfi_startproc
xorq %r10,%r10
cmpq $288,%rdx
jb .Lgcm_enc_abort

leaq (%rsp),%rax
.cfi_def_cfa_register %rax
pushq %rbx
.cfi_offset %rbx,-16
pushq %rbp
.cfi_offset %rbp,-24
pushq %r12
.cfi_offset %r12,-32
pushq %r13
.cfi_offset %r13,-40
pushq %r14
.cfi_offset %r14,-48
pushq %r15
.cfi_offset %r15,-56
vzeroupper

vmovdqu (%r8),%xmm1
addq $-128,%rsp
movl 12(%r8),%ebx
leaq .Lbswap_mask(%rip),%r11
leaq -128(%rcx),%r14
movq $0xf80,%r15
leaq 128(%rcx),%rcx
vmovdqu (%r11),%xmm0
andq $-128,%rsp
movl 240-128(%rcx),%ebp

andq %r15,%r14
andq %rsp,%r15
subq %r14,%r15
jc .Lenc_no_key_aliasing
cmpq $768,%r15
jnc .Lenc_no_key_aliasing
subq %r15,%rsp
.Lenc_no_key_aliasing:

leaq (%rsi),%r14
leaq -192(%rsi,%rdx,1),%r15
shrq $4,%rdx

call _aesni_ctr32_6x
vpshufb %xmm0,%xmm9,%xmm8
vpshufb %xmm0,%xmm10,%xmm2
vmovdqu %xmm8,112(%rsp)
vpshufb %xmm0,%xmm11,%xmm4
vmovdqu %xmm2,96(%rsp)
vpshufb %xmm0,%xmm12,%xmm5
vmovdqu %xmm4,80(%rsp)
vpshufb %xmm0,%xmm13,%xmm6
vmovdqu %xmm5,64(%rsp)
vpshufb %xmm0,%xmm14,%xmm7
vmovdqu %xmm6,48(%rsp)

call _aesni_ctr32_6x

vmovdqu (%r9),%xmm8
leaq 32+32(%r9),%r9
subq $12,%rdx
movq $192,%r10
vpshufb %xmm0,%xmm8,%xmm8

call _aesni_ctr32_ghash_6x
vmovdqu 32(%rsp),%xmm7
vmovdqu (%r11),%xmm0
vmovdqu 0-32(%r9),%xmm3
vpunpckhqdq %xmm7,%xmm7,%xmm1
vmovdqu 32-32(%r9),%xmm15
vmovups %xmm9,-96(%rsi)
vpshufb %xmm0,%xmm9,%xmm9
vpxor %xmm7,%xmm1,%xmm1
vmovups %xmm10,-80(%rsi)
vpshufb %xmm0,%xmm10,%xmm10
vmovups %xmm11,-64(%rsi)
vpshufb %xmm0,%xmm11,%xmm11
vmovups %xmm12,-48(%rsi)
vpshufb %xmm0,%xmm12,%xmm12
vmovups %xmm13,-32(%rsi)
vpshufb %xmm0,%xmm13,%xmm13
vmovups %xmm14,-16(%rsi)
vpshufb %xmm0,%xmm14,%xmm14
vmovdqu %xmm9,16(%rsp)
vmovdqu 48(%rsp),%xmm6
vmovdqu 16-32(%r9),%xmm0
vpunpckhqdq %xmm6,%xmm6,%xmm2
vpclmulqdq $0x00,%xmm3,%xmm7,%xmm5
vpxor %xmm6,%xmm2,%xmm2
vpclmulqdq $0x11,%xmm3,%xmm7,%xmm7
vpclmulqdq $0x00,%xmm15,%xmm1,%xmm1

vmovdqu 64(%rsp),%xmm9
vpclmulqdq $0x00,%xmm0,%xmm6,%xmm4
vmovdqu 48-32(%r9),%xmm3
vpxor %xmm5,%xmm4,%xmm4
vpunpckhqdq %xmm9,%xmm9,%xmm5
vpclmulqdq $0x11,%xmm0,%xmm6,%xmm6
vpxor %xmm9,%xmm5,%xmm5
vpxor %xmm7,%xmm6,%xmm6
vpclmulqdq $0x10,%xmm15,%xmm2,%xmm2
vmovdqu 80-32(%r9),%xmm15
vpxor %xmm1,%xmm2,%xmm2

vmovdqu 80(%rsp),%xmm1
vpclmulqdq $0x00,%xmm3,%xmm9,%xmm7
vmovdqu 64-32(%r9),%xmm0
vpxor %xmm4,%xmm7,%xmm7
vpunpckhqdq %xmm1,%xmm1,%xmm4
vpclmulqdq $0x11,%xmm3,%xmm9,%xmm9
vpxor %xmm1,%xmm4,%xmm4
vpxor %xmm6,%xmm9,%xmm9
vpclmulqdq $0x00,%xmm15,%xmm5,%xmm5
vpxor %xmm2,%xmm5,%xmm5

vmovdqu 96(%rsp),%xmm2
vpclmulqdq $0x00,%xmm0,%xmm1,%xmm6
vmovdqu 96-32(%r9),%xmm3
vpxor %xmm7,%xmm6,%xmm6
vpunpckhqdq %xmm2,%xmm2,%xmm7
vpclmulqdq $0x11,%xmm0,%xmm1,%xmm1
vpxor %xmm2,%xmm7,%xmm7
vpxor %xmm9,%xmm1,%xmm1
vpclmulqdq $0x10,%xmm15,%xmm4,%xmm4
vmovdqu 128-32(%r9),%xmm15
vpxor %xmm5,%xmm4,%xmm4

vpxor 112(%rsp),%xmm8,%xmm8
vpclmulqdq $0x00,%xmm3,%xmm2,%xmm5
vmovdqu 112-32(%r9),%xmm0
vpunpckhqdq %xmm8,%xmm8,%xmm9
vpxor %xmm6,%xmm5,%xmm5
vpclmulqdq $0x11,%xmm3,%xmm2,%xmm2
vpxor %xmm8,%xmm9,%xmm9
vpxor %xmm1,%xmm2,%xmm2
vpclmulqdq $0x00,%xmm15,%xmm7,%xmm7
vpxor %xmm4,%xmm7,%xmm4

vpclmulqdq $0x00,%xmm0,%xmm8,%xmm6
vmovdqu 0-32(%r9),%xmm3
vpunpckhqdq %xmm14,%xmm14,%xmm1
vpclmulqdq $0x11,%xmm0,%xmm8,%xmm8
vpxor %xmm14,%xmm1,%xmm1
vpxor %xmm5,%xmm6,%xmm5
vpclmulqdq $0x10,%xmm15,%xmm9,%xmm9
vmovdqu 32-32(%r9),%xmm15
vpxor %xmm2,%xmm8,%xmm7
vpxor %xmm4,%xmm9,%xmm6

vmovdqu 16-32(%r9),%xmm0
vpxor %xmm5,%xmm7,%xmm9
vpclmulqdq $0x00,%xmm3,%xmm14,%xmm4
vpxor %xmm9,%xmm6,%xmm6
vpunpckhqdq %xmm13,%xmm13,%xmm2
vpclmulqdq $0x11,%xmm3,%xmm14,%xmm14
vpxor %xmm13,%xmm2,%xmm2
vpslldq $8,%xmm6,%xmm9
vpclmulqdq $0x00,%xmm15,%xmm1,%xmm1
vpxor %xmm9,%xmm5,%xmm8
vpsrldq $8,%xmm6,%xmm6
vpxor %xmm6,%xmm7,%xmm7

vpclmulqdq $0x00,%xmm0,%xmm13,%xmm5
vmovdqu 48-32(%r9),%xmm3
vpxor %xmm4,%xmm5,%xmm5
vpunpckhqdq %xmm12,%xmm12,%xmm9
vpclmulqdq $0x11,%xmm0,%xmm13,%xmm13
vpxor %xmm12,%xmm9,%xmm9
vpxor %xmm14,%xmm13,%xmm13
vpalignr $8,%xmm8,%xmm8,%xmm14
vpclmulqdq $0x10,%xmm15,%xmm2,%xmm2
vmovdqu 80-32(%r9),%xmm15
vpxor %xmm1,%xmm2,%xmm2

vpclmulqdq $0x00,%xmm3,%xmm12,%xmm4
vmovdqu 64-32(%r9),%xmm0
vpxor %xmm5,%xmm4,%xmm4
vpunpckhqdq %xmm11,%xmm11,%xmm1
vpclmulqdq $0x11,%xmm3,%xmm12,%xmm12
vpxor %xmm11,%xmm1,%xmm1
vpxor %xmm13,%xmm12,%xmm12
vxorps 16(%rsp),%xmm7,%xmm7
vpclmulqdq $0x00,%xmm15,%xmm9,%xmm9
vpxor %xmm2,%xmm9,%xmm9

vpclmulqdq $0x10,16(%r11),%xmm8,%xmm8
vxorps %xmm14,%xmm8,%xmm8

vpclmulqdq $0x00,%xmm0,%xmm11,%xmm5
vmovdqu 96-32(%r9),%xmm3
vpxor %xmm4,%xmm5,%xmm5
vpunpckhqdq %xmm10,%xmm10,%xmm2
vpclmulqdq $0x11,%xmm0,%xmm11,%xmm11
vpxor %xmm10,%xmm2,%xmm2
vpalignr $8,%xmm8,%xmm8,%xmm14
vpxor %xmm12,%xmm11,%xmm11
vpclmulqdq $0x10,%xmm15,%xmm1,%xmm1
vmovdqu 128-32(%r9),%xmm15
vpxor %xmm9,%xmm1,%xmm1

vxorps %xmm7,%xmm14,%xmm14
vpclmulqdq $0x10,16(%r11),%xmm8,%xmm8
vxorps %xmm14,%xmm8,%xmm8

vpclmulqdq $0x00,%xmm3,%xmm10,%xmm4
vmovdqu 112-32(%r9),%xmm0
vpxor %xmm5,%xmm4,%xmm4
vpunpckhqdq %xmm8,%xmm8,%xmm9
vpclmulqdq $0x11,%xmm3,%xmm10,%xmm10
vpxor %xmm8,%xmm9,%xmm9
vpxor %xmm11,%xmm10,%xmm10
vpclmulqdq $0x00,%xmm15,%xmm2,%xmm2
vpxor %xmm1,%xmm2,%xmm2

vpclmulqdq $0x00,%xmm0,%xmm8,%xmm5
vpclmulqdq $0x11,%xmm0,%xmm8,%xmm7
vpxor %xmm4,%xmm5,%xmm5
vpclmulqdq $0x10,%xmm15,%xmm9,%xmm6
vpxor %xmm10,%xmm7,%xmm7
vpxor %xmm2,%xmm6,%xmm6

vpxor %xmm5,%xmm7,%xmm4
vpxor %xmm4,%xmm6,%xmm6
vpslldq $8,%xmm6,%xmm1
vmovdqu 16(%r11),%xmm3
vpsrldq $8,%xmm6,%xmm6
vpxor %xmm1,%xmm5,%xmm8
vpxor %xmm6,%xmm7,%xmm7

vpalignr $8,%xmm8,%xmm8,%xmm2
vpclmulqdq $0x10,%xmm3,%xmm8,%xmm8
vpxor %xmm2,%xmm8,%xmm8

vpalignr $8,%xmm8,%xmm8,%xmm2
vpclmulqdq $0x10,%xmm3,%xmm8,%xmm8
vpxor %xmm7,%xmm2,%xmm2
vpxor %xmm2,%xmm8,%xmm8
vpshufb (%r11),%xmm8,%xmm8
vmovdqu %xmm8,-64(%r9)

vzeroupper
movq -48(%rax),%r15
.cfi_restore %r15
movq -40(%rax),%r14
.cfi_restore %r14
movq -32(%rax),%r13
.cfi_restore %r13
movq -24(%rax),%r12
.cfi_restore %r12
movq -16(%rax),%rbp
.cfi_restore %rbp
movq -8(%rax),%rbx
.cfi_restore %rbx
leaq (%rax),%rsp
.cfi_def_cfa_register %rsp
.Lgcm_enc_abort:
movq %r10,%rax
.byte 0xf3,0xc3
.cfi_endproc
.size aesni_gcm_encrypt,.-aesni_gcm_encrypt
.section .rodata
.align 64
.Lbswap_mask:
.byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
.Lpoly:
.byte 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0xc2
.Lone_msb:
.byte 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1
.Ltwo_lsb:
.byte 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
.Lone_lsb:
.byte 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
.byte 65,69,83,45,78,73,32,71,67,77,32,109,111,100,117,108,101,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.previous
.align 64
.section ".note.gnu.property", "a"
.p2align 3
.long 1f - 0f
.long 4f - 1f
.long 5
0:
# "GNU" encoded with .byte, since .asciz isn't supported
# on Solaris.
.byte 0x47
.byte 0x4e
.byte 0x55
.byte 0
1:
.p2align 3
.long 0xc0000002
.long 3f - 2f
2:
.long 3
3:
.p2align 3
4: