/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright 2008 Vitaly Mayatskikh <[email protected]>
 * Copyright 2002 Andi Kleen, SuSE Labs.
 *
 * Functions to copy from and to user space.
 */

#include <linux/export.h>
#include <linux/linkage.h>
#include <linux/cfi_types.h>
#include <linux/objtool.h>
#include <asm/cpufeatures.h>
#include <asm/alternative.h>
#include <asm/asm.h>

/*
 * rep_movs_alternative - memory copy with exception handling.
 * This version is for CPUs that don't have FSRM (Fast Short Rep Movs)
 *
 * Input:
 * rdi destination
 * rsi source
 * rcx count
 *
 * Output:
 * rcx uncopied bytes or 0 if successful.
 *
 * NOTE! The calling convention is very intentionally the same as
 * for 'rep movs', so that we can rewrite the function call with
 * just a plain 'rep movs' on machines that have FSRM. But to make
 * it simpler for us, we can clobber rsi/rdi and rax freely.
 * (See the illustrative caller sketch at the end of this file.)
 */
SYM_FUNC_START(rep_movs_alternative)
	ANNOTATE_NOENDBR
	cmpq $64,%rcx
	jae .Llarge

	cmp $8,%ecx
	jae .Lword

	testl %ecx,%ecx
	je .Lexit

.Lcopy_user_tail:
0:	movb (%rsi),%al
1:	movb %al,(%rdi)
	inc %rdi
	inc %rsi
	dec %rcx
	jne .Lcopy_user_tail
.Lexit:
	RET

	_ASM_EXTABLE_UA( 0b, .Lexit)
	_ASM_EXTABLE_UA( 1b, .Lexit)

	.p2align 4
.Lword:
2:	movq (%rsi),%rax
3:	movq %rax,(%rdi)
	addq $8,%rsi
	addq $8,%rdi
	sub $8,%ecx
	je .Lexit
	cmp $8,%ecx
	jae .Lword
	jmp .Lcopy_user_tail

	_ASM_EXTABLE_UA( 2b, .Lcopy_user_tail)
	_ASM_EXTABLE_UA( 3b, .Lcopy_user_tail)

.Llarge:
0:	ALTERNATIVE "jmp .Llarge_movsq", "rep movsb", X86_FEATURE_ERMS
1:	RET

	_ASM_EXTABLE_UA( 0b, 1b)

.Llarge_movsq:
	/* Do the first possibly unaligned word */
0:	movq (%rsi),%rax
1:	movq %rax,(%rdi)

	_ASM_EXTABLE_UA( 0b, .Lcopy_user_tail)
	_ASM_EXTABLE_UA( 1b, .Lcopy_user_tail)

	/* What would be the offset to the aligned destination? */
	leaq 8(%rdi),%rax
	andq $-8,%rax
	subq %rdi,%rax

	/* .. and update pointers and count to match */
	addq %rax,%rdi
	addq %rax,%rsi
	subq %rax,%rcx

	/* make %rcx contain the number of words, %rax the remainder */
	movq %rcx,%rax
	shrq $3,%rcx
	andl $7,%eax
0:	rep movsq
	movl %eax,%ecx
	testl %ecx,%ecx
	jne .Lcopy_user_tail
	RET

1:	leaq (%rax,%rcx,8),%rcx
	jmp .Lcopy_user_tail

	_ASM_EXTABLE_UA( 0b, 1b)
SYM_FUNC_END(rep_movs_alternative)
EXPORT_SYMBOL(rep_movs_alternative)
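
/*
 * Illustrative sketch only -- not part of the upstream file. The NOTE
 * above says the calling convention deliberately matches 'rep movs':
 * destination in %rdi, source in %rsi, count in %rcx, and the uncopied
 * byte count left in %rcx on return. That is what lets a caller patch
 * between a bare 'rep movsb' and a call to rep_movs_alternative at boot
 * via ALTERNATIVE. The C below is a hedged approximation modeled on
 * copy_user_generic() in arch/x86/include/asm/uaccess_64.h; the exact
 * constraints and clobbers are assumptions, not a verbatim copy:
 *
 *	static __always_inline unsigned long
 *	copy_user_generic(void *to, const void *from, unsigned long len)
 *	{
 *		stac();			// open user-space access (SMAP)
 *		asm volatile(
 *			"1:\n\t"
 *			ALTERNATIVE("rep movsb",
 *				    "call rep_movs_alternative",
 *				    ALT_NOT(X86_FEATURE_FSRM))
 *			"2:\n"
 *			_ASM_EXTABLE_UA(1b, 2b)
 *			: "+c" (len), "+D" (to), "+S" (from)
 *			: : "rax", "memory");
 *		clac();			// close user-space access
 *		return len;		// 0 on success, else bytes not copied
 *	}
 *
 * Note how the "+c"/"+D"/"+S" output constraints mirror the register
 * protocol documented above, and %rax is listed as clobbered because
 * this function is free to scribble on it.
 */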