GitHub Repository: torvalds/linux
Path: blob/master/arch/csky/abiv2/fpu.c

// SPDX-License-Identifier: GPL-2.0
// Copyright (C) 2018 Hangzhou C-SKY Microsystems co.,ltd.

#include <linux/ptrace.h>
#include <linux/uaccess.h>
#include <abi/reg_ops.h>

#define MTCR_MASK 0xFC00FFE0
#define MFCR_MASK 0xFC00FFE0
#define MTCR_DIST 0xC0006420
#define MFCR_DIST 0xC0006020

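/*
 * The masks above clear the three operand fields of a 32-bit mtcr/mfcr
 * opcode (bits 25:21, 20:16 and 4:0); the DIST values hold the remaining
 * fixed bits expected for FPU control-register accesses. The operand
 * fields themselves are decoded in fpu_libc_helper() below.
 */
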
/*
 * fpu_libc_helper() helps libc execute:
 *  - mfcr %a, cr<1, 2>
 *  - mfcr %a, cr<2, 2>
 *  - mtcr %a, cr<1, 2>
 *  - mtcr %a, cr<2, 2>
 */
int fpu_libc_helper(struct pt_regs *regs)
{
        int fault;
        unsigned long instrptr, regx = 0;
        unsigned long index = 0, tmp = 0;
        unsigned long tinstr = 0;
        u16 instr_hi, instr_low;

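        /* Instructions are halfword-aligned; an odd PC cannot hold a valid opcode. */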
        instrptr = instruction_pointer(regs);
        if (instrptr & 1)
                return 0;

        fault = __get_user(instr_low, (u16 *)instrptr);
        if (fault)
                return 0;

        fault = __get_user(instr_hi, (u16 *)(instrptr + 2));
        if (fault)
                return 0;

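        /* The halfword at the lower address holds the upper 16 bits of the opcode. */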
        tinstr = instr_hi | ((unsigned long)instr_low << 16);

        if (((tinstr >> 21) & 0x1F) != 2)
                return 0;

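        /*
         * mtcr %a, cr<n, 2>: bits 20:16 select GPR %a, bits 4:0 select
         * cr<n>. index must stay within the 14 GPR slots (a0-a3 plus
         * regs[0..9]) saved in struct pt_regs.
         */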
        if ((tinstr & MTCR_MASK) == MTCR_DIST) {
                index = (tinstr >> 16) & 0x1F;
                if (index > 13)
                        return 0;

                tmp = tinstr & 0x1F;
                if (tmp > 2)
                        return 0;

                regx = *(&regs->a0 + index);

                if (tmp == 1)
                        mtcr("cr<1, 2>", regx);
                else if (tmp == 2)
                        mtcr("cr<2, 2>", regx);
                else
                        return 0;

                regs->pc += 4;
                return 1;
        }

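        /* mfcr %a, cr<n, 2>: mirrored layout, bits 4:0 select GPR %a, bits 20:16 select cr<n>. */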
        if ((tinstr & MFCR_MASK) == MFCR_DIST) {
                index = tinstr & 0x1F;
                if (index > 13)
                        return 0;

                tmp = ((tinstr >> 16) & 0x1F);
                if (tmp > 2)
                        return 0;

                if (tmp == 1)
                        regx = mfcr("cr<1, 2>");
                else if (tmp == 2)
                        regx = mfcr("cr<2, 2>");
                else
                        return 0;

                *(&regs->a0 + index) = regx;

                regs->pc += 4;
                return 1;
        }

        return 0;
}

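/*
 * Turn an FPU exception into a signal: read the cause from the FESR
 * register (cr<2, 2>) and map it onto the closest SIGFPE/SIGILL si_code,
 * falling back to FPE_FLTUNK when no status bit matches.
 */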
void fpu_fpe(struct pt_regs *regs)
{
        int sig, code;
        unsigned int fesr;

        fesr = mfcr("cr<2, 2>");

        sig = SIGFPE;
        code = FPE_FLTUNK;

        if (fesr & FPE_ILLE) {
                sig = SIGILL;
                code = ILL_ILLOPC;
        } else if (fesr & FPE_IDC) {
                sig = SIGILL;
                code = ILL_ILLOPN;
        } else if (fesr & FPE_FEC) {
                sig = SIGFPE;
                if (fesr & FPE_IOC)
                        code = FPE_FLTINV;
                else if (fesr & FPE_DZC)
                        code = FPE_FLTDIV;
                else if (fesr & FPE_UFC)
                        code = FPE_FLTUND;
                else if (fesr & FPE_OFC)
                        code = FPE_FLTOVF;
                else if (fesr & FPE_IXC)
                        code = FPE_FLTRES;
        }

        force_sig_fault(sig, code, (void __user *)regs->pc);
}

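/*
 * Helpers for the non-FPUV2 paths below: each FMFVR/FMTVR pair moves two
 * vr registers through GPR operands %0-%3 as 32-bit low/high halves, and
 * STW/LDW spill or fill those halves at the given offsets from the base
 * address in %4. The offsets advance 16 bytes per vr register, so each
 * 64-bit register occupies the low half of a 16-byte vr slot.
 */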
#define FMFVR_FPU_REGS(vrx, vry) \
        "fmfvrl %0, "#vrx"\n" \
        "fmfvrh %1, "#vrx"\n" \
        "fmfvrl %2, "#vry"\n" \
        "fmfvrh %3, "#vry"\n"

#define FMTVR_FPU_REGS(vrx, vry) \
        "fmtvrl "#vrx", %0\n" \
        "fmtvrh "#vrx", %1\n" \
        "fmtvrl "#vry", %2\n" \
        "fmtvrh "#vry", %3\n"

#define STW_FPU_REGS(a, b, c, d) \
        "stw %0, (%4, "#a")\n" \
        "stw %1, (%4, "#b")\n" \
        "stw %2, (%4, "#c")\n" \
        "stw %3, (%4, "#d")\n"

#define LDW_FPU_REGS(a, b, c, d) \
        "ldw %0, (%4, "#a")\n" \
        "ldw %1, (%4, "#b")\n" \
        "ldw %2, (%4, "#c")\n" \
        "ldw %3, (%4, "#d")\n"

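/*
 * Save fcr, fesr and the vr file into a struct user_fp. Interrupts are
 * disabled for the duration so the register file cannot change mid-copy.
 */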
void save_to_user_fp(struct user_fp *user_fp)
{
        unsigned long flg;
        unsigned long tmp1, tmp2;
        unsigned long *fpregs;

        local_irq_save(flg);

        tmp1 = mfcr("cr<1, 2>");
        tmp2 = mfcr("cr<2, 2>");

        user_fp->fcr = tmp1;
        user_fp->fesr = tmp2;

        fpregs = &user_fp->vr[0];
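        /*
         * Three variants: FPUV2 with VDSP stores vr0-vr15 with vector
         * store-multiple plus vr16-vr31 with fstmu.64; plain FPUV2 stores
         * vr0-vr31 with fstmu.64; otherwise each register is moved through
         * GPRs using the FMFVR/STW macros above.
         */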
#ifdef CONFIG_CPU_HAS_FPUV2
#ifdef CONFIG_CPU_HAS_VDSP
        asm volatile(
                "vstmu.32 vr0-vr3, (%0)\n"
                "vstmu.32 vr4-vr7, (%0)\n"
                "vstmu.32 vr8-vr11, (%0)\n"
                "vstmu.32 vr12-vr15, (%0)\n"
                "fstmu.64 vr16-vr31, (%0)\n"
                : "+a"(fpregs)
                ::"memory");
#else
        asm volatile(
                "fstmu.64 vr0-vr31, (%0)\n"
                : "+a"(fpregs)
                ::"memory");
#endif
#else
        {
        unsigned long tmp3, tmp4;

        asm volatile(
                FMFVR_FPU_REGS(vr0, vr1)
                STW_FPU_REGS(0, 4, 16, 20)
                FMFVR_FPU_REGS(vr2, vr3)
                STW_FPU_REGS(32, 36, 48, 52)
                FMFVR_FPU_REGS(vr4, vr5)
                STW_FPU_REGS(64, 68, 80, 84)
                FMFVR_FPU_REGS(vr6, vr7)
                STW_FPU_REGS(96, 100, 112, 116)
                "addi %4, 128\n"
                FMFVR_FPU_REGS(vr8, vr9)
                STW_FPU_REGS(0, 4, 16, 20)
                FMFVR_FPU_REGS(vr10, vr11)
                STW_FPU_REGS(32, 36, 48, 52)
                FMFVR_FPU_REGS(vr12, vr13)
                STW_FPU_REGS(64, 68, 80, 84)
                FMFVR_FPU_REGS(vr14, vr15)
                STW_FPU_REGS(96, 100, 112, 116)
                : "=a"(tmp1), "=a"(tmp2), "=a"(tmp3),
                  "=a"(tmp4), "+a"(fpregs)
                ::"memory");
        }
#endif

        local_irq_restore(flg);
}

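/*
 * Restore fcr, fesr and the vr file from a struct user_fp, mirroring
 * save_to_user_fp().
 */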
void restore_from_user_fp(struct user_fp *user_fp)
{
        unsigned long flg;
        unsigned long tmp1, tmp2;
        unsigned long *fpregs;

        local_irq_save(flg);

        tmp1 = user_fp->fcr;
        tmp2 = user_fp->fesr;

        mtcr("cr<1, 2>", tmp1);
        mtcr("cr<2, 2>", tmp2);

        fpregs = &user_fp->vr[0];
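        /* Reload paths mirror the save paths: vldmu.32/fldmu.64 on FPUV2, LDW/FMTVR otherwise. */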
#ifdef CONFIG_CPU_HAS_FPUV2
#ifdef CONFIG_CPU_HAS_VDSP
        asm volatile(
                "vldmu.32 vr0-vr3, (%0)\n"
                "vldmu.32 vr4-vr7, (%0)\n"
                "vldmu.32 vr8-vr11, (%0)\n"
                "vldmu.32 vr12-vr15, (%0)\n"
                "fldmu.64 vr16-vr31, (%0)\n"
                : "+a"(fpregs)
                ::"memory");
#else
        asm volatile(
                "fldmu.64 vr0-vr31, (%0)\n"
                : "+a"(fpregs)
                ::"memory");
#endif
#else
        {
        unsigned long tmp3, tmp4;

        asm volatile(
                LDW_FPU_REGS(0, 4, 16, 20)
                FMTVR_FPU_REGS(vr0, vr1)
                LDW_FPU_REGS(32, 36, 48, 52)
                FMTVR_FPU_REGS(vr2, vr3)
                LDW_FPU_REGS(64, 68, 80, 84)
                FMTVR_FPU_REGS(vr4, vr5)
                LDW_FPU_REGS(96, 100, 112, 116)
                FMTVR_FPU_REGS(vr6, vr7)
                "addi %4, 128\n"
                LDW_FPU_REGS(0, 4, 16, 20)
                FMTVR_FPU_REGS(vr8, vr9)
                LDW_FPU_REGS(32, 36, 48, 52)
                FMTVR_FPU_REGS(vr10, vr11)
                LDW_FPU_REGS(64, 68, 80, 84)
                FMTVR_FPU_REGS(vr12, vr13)
                LDW_FPU_REGS(96, 100, 112, 116)
                FMTVR_FPU_REGS(vr14, vr15)
                : "=a"(tmp1), "=a"(tmp2), "=a"(tmp3),
                  "=a"(tmp4), "+a"(fpregs)
                ::"memory");
        }
#endif
        local_irq_restore(flg);
}