GitHub Repository: awilliam/linux-vfio
Path: blob/master/arch/tile/lib/memcpy_user_64.c
/*
 * Copyright 2011 Tilera Corporation. All Rights Reserved.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, version 2.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE, GOOD TITLE or
 * NON INFRINGEMENT.  See the GNU General Public License for
 * more details.
 *
 * Do memcpy(), but trap and return "n" when a load or store faults.
 *
 * Note: this idiom only works when memcpy() compiles to a leaf function.
 * If "sp" is updated during memcpy, the "jrp lr" will be incorrect.
 *
 * Also note that we are capturing "n" from the containing scope here.
 */
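
/*
 * How the fault fixup works (explanatory note, not in the original
 * source): each _ST/_LD below emits one load or store at local label 1
 * and a fixup stub at label 2 in .coldtext.memcpy.  The __ex_table
 * entry ".quad 1b, 2b" tells the kernel's fault handler that a fault
 * at 1b should resume at 2b, where "{ move r0, %2; jrp lr }" puts the
 * captured "n" into r0 (the return-value register) and returns straight
 * to the caller via the link register.  Roughly, _ST(p, st1, v) expands
 * to the following (operand names stand in for whatever the compiler
 * substitutes for %0/%1/%2):
 *
 *   1:   st1 r_addr, r_val              // the store that may fault
 *        .pushsection .coldtext.memcpy,"ax"
 *   2:   { move r0, r_n; jrp lr }       // fixup: return "n" bytes uncopied
 *        .section __ex_table,"a"
 *        .quad 1b, 2b                   // map fault at 1b to fixup at 2b
 *        .popsection
 */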

#define _ST(p, inst, v)                                        \
        ({                                                     \
                asm("1: " #inst " %0, %1;"                     \
                    ".pushsection .coldtext.memcpy,\"ax\";"    \
                    "2: { move r0, %2; jrp lr };"              \
                    ".section __ex_table,\"a\";"               \
                    ".quad 1b, 2b;"                            \
                    ".popsection"                              \
                    : "=m" (*(p)) : "r" (v), "r" (n));         \
        })
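
/*
 * _LD mirrors _ST but is a statement expression that yields the loaded
 * value __v; on a faulting load it likewise hands "n" back to the
 * caller through the label-2 fixup.
 */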

#define _LD(p, inst)                                           \
        ({                                                     \
                unsigned long __v;                             \
                asm("1: " #inst " %0, %1;"                     \
                    ".pushsection .coldtext.memcpy,\"ax\";"    \
                    "2: { move r0, %2; jrp lr };"              \
                    ".section __ex_table,\"a\";"               \
                    ".quad 1b, 2b;"                            \
                    ".popsection"                              \
                    : "=r" (__v) : "m" (*(p)), "r" (n));       \
                __v;                                           \
        })
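
/*
 * Each #include "memcpy_64.c" below stamps out one copy routine, named
 * by USERCOPY_FUNC and built from the LDn/STn primitives defined just
 * above it.  A rough sketch of the pattern the included template is
 * expected to follow (simplified to a byte-at-a-time loop; the real
 * code copies larger, aligned units):
 *
 *   unsigned long USERCOPY_FUNC(void *to, const void *from, unsigned long n)
 *   {
 *           char *d = to;
 *           const char *s = from;
 *
 *           while (n) {
 *                   ST1(d++, LD1(s++));   // a fault here returns "n"
 *                   n--;
 *           }
 *           return 0;                     // no bytes left uncopied
 *   }
 *
 * This is why the _ST/_LD macros capture "n" from the containing scope:
 * it is the remaining byte count the fixup returns in r0.  memcpy_64.c
 * is presumably also responsible for #undef-ing these macros after each
 * use, since they are redefined before every include.
 */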

#define USERCOPY_FUNC __copy_to_user_inatomic
#define ST1(p, v) _ST((p), st1, (v))
#define ST2(p, v) _ST((p), st2, (v))
#define ST4(p, v) _ST((p), st4, (v))
#define ST8(p, v) _ST((p), st, (v))
#define LD1 LD
#define LD2 LD
#define LD4 LD
#define LD8 LD
#include "memcpy_64.c"
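
/*
 * For __copy_to_user_inatomic above, only the stores touch user memory,
 * so only the STn macros carry the fault fixup; loads read kernel memory
 * and fall back to the plain LD (presumably an ordinary, unprotected
 * load defined inside memcpy_64.c).  The next block is the mirror image.
 */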

#define USERCOPY_FUNC __copy_from_user_inatomic
#define ST1 ST
#define ST2 ST
#define ST4 ST
#define ST8 ST
#define LD1(p) _LD((p), ld1u)
#define LD2(p) _LD((p), ld2u)
#define LD4(p) _LD((p), ld4u)
#define LD8(p) _LD((p), ld)
#include "memcpy_64.c"
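
/*
 * __copy_in_user_inatomic copies user memory to user memory, so both
 * the loads and the stores can fault and both carry __ex_table fixups.
 */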

#define USERCOPY_FUNC __copy_in_user_inatomic
#define ST1(p, v) _ST((p), st1, (v))
#define ST2(p, v) _ST((p), st2, (v))
#define ST4(p, v) _ST((p), st4, (v))
#define ST8(p, v) _ST((p), st, (v))
#define LD1(p) _LD((p), ld1u)
#define LD2(p) _LD((p), ld2u)
#define LD4(p) _LD((p), ld4u)
#define LD8(p) _LD((p), ld)
#include "memcpy_64.c"
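
/*
 * Like __copy_from_user_inatomic, but zero-fill whatever part of the
 * kernel destination buffer could not be copied, so callers never see
 * stale kernel data after a partial copy.
 */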

unsigned long __copy_from_user_zeroing(void *to, const void __user *from,
                                       unsigned long n)
{
        unsigned long rc = __copy_from_user_inatomic(to, from, n);
        if (unlikely(rc))
                memset(to + n - rc, 0, rc);
        return rc;
}
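
/*
 * Hypothetical usage sketch (not part of this file): a caller copying a
 * fixed-size record from user space might do, for example:
 *
 *   struct foo_record rec;                       // hypothetical type
 *   unsigned long left;
 *
 *   left = __copy_from_user_zeroing(&rec, ubuf, sizeof(rec));
 *   // "left" bytes at the tail of rec were unreachable and are now
 *   // zeroed; many callers simply treat any nonzero "left" as -EFAULT.
 */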