/* Source: arch/loongarch/kernel/fpu.S (Linux kernel, torvalds/linux, master) */
1
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Author: Lu Zeng <zenglu@loongson.cn>
 *         Pei Huang <huangpei@loongson.cn>
 *         Huacai Chen <chenhuacai@loongson.cn>
 *
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#include <linux/export.h>
#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/asm-extable.h>
#include <asm/asm-offsets.h>
#include <asm/errno.h>
#include <asm/fpregdef.h>
#include <asm/loongarch.h>
#include <asm/regdef.h>
#include <asm/unwind_hints.h>

/* Size in bytes of one saved register slot, per register file */
#define FPU_REG_WIDTH		8	/* scalar FP registers are 64-bit  */
#define LSX_REG_WIDTH		16	/* LSX vector registers are 128-bit */
#define LASX_REG_WIDTH		32	/* LASX vector registers are 256-bit */
.macro EX insn, reg, src, offs
25
.ex\@: \insn \reg, \src, \offs
26
_asm_extable .ex\@, .L_fpu_fault
27
.endm
28
29
.macro sc_save_fp base
30
EX fst.d $f0, \base, (0 * FPU_REG_WIDTH)
31
EX fst.d $f1, \base, (1 * FPU_REG_WIDTH)
32
EX fst.d $f2, \base, (2 * FPU_REG_WIDTH)
33
EX fst.d $f3, \base, (3 * FPU_REG_WIDTH)
34
EX fst.d $f4, \base, (4 * FPU_REG_WIDTH)
35
EX fst.d $f5, \base, (5 * FPU_REG_WIDTH)
36
EX fst.d $f6, \base, (6 * FPU_REG_WIDTH)
37
EX fst.d $f7, \base, (7 * FPU_REG_WIDTH)
38
EX fst.d $f8, \base, (8 * FPU_REG_WIDTH)
39
EX fst.d $f9, \base, (9 * FPU_REG_WIDTH)
40
EX fst.d $f10, \base, (10 * FPU_REG_WIDTH)
41
EX fst.d $f11, \base, (11 * FPU_REG_WIDTH)
42
EX fst.d $f12, \base, (12 * FPU_REG_WIDTH)
43
EX fst.d $f13, \base, (13 * FPU_REG_WIDTH)
44
EX fst.d $f14, \base, (14 * FPU_REG_WIDTH)
45
EX fst.d $f15, \base, (15 * FPU_REG_WIDTH)
46
EX fst.d $f16, \base, (16 * FPU_REG_WIDTH)
47
EX fst.d $f17, \base, (17 * FPU_REG_WIDTH)
48
EX fst.d $f18, \base, (18 * FPU_REG_WIDTH)
49
EX fst.d $f19, \base, (19 * FPU_REG_WIDTH)
50
EX fst.d $f20, \base, (20 * FPU_REG_WIDTH)
51
EX fst.d $f21, \base, (21 * FPU_REG_WIDTH)
52
EX fst.d $f22, \base, (22 * FPU_REG_WIDTH)
53
EX fst.d $f23, \base, (23 * FPU_REG_WIDTH)
54
EX fst.d $f24, \base, (24 * FPU_REG_WIDTH)
55
EX fst.d $f25, \base, (25 * FPU_REG_WIDTH)
56
EX fst.d $f26, \base, (26 * FPU_REG_WIDTH)
57
EX fst.d $f27, \base, (27 * FPU_REG_WIDTH)
58
EX fst.d $f28, \base, (28 * FPU_REG_WIDTH)
59
EX fst.d $f29, \base, (29 * FPU_REG_WIDTH)
60
EX fst.d $f30, \base, (30 * FPU_REG_WIDTH)
61
EX fst.d $f31, \base, (31 * FPU_REG_WIDTH)
62
.endm
63
64
.macro sc_restore_fp base
65
EX fld.d $f0, \base, (0 * FPU_REG_WIDTH)
66
EX fld.d $f1, \base, (1 * FPU_REG_WIDTH)
67
EX fld.d $f2, \base, (2 * FPU_REG_WIDTH)
68
EX fld.d $f3, \base, (3 * FPU_REG_WIDTH)
69
EX fld.d $f4, \base, (4 * FPU_REG_WIDTH)
70
EX fld.d $f5, \base, (5 * FPU_REG_WIDTH)
71
EX fld.d $f6, \base, (6 * FPU_REG_WIDTH)
72
EX fld.d $f7, \base, (7 * FPU_REG_WIDTH)
73
EX fld.d $f8, \base, (8 * FPU_REG_WIDTH)
74
EX fld.d $f9, \base, (9 * FPU_REG_WIDTH)
75
EX fld.d $f10, \base, (10 * FPU_REG_WIDTH)
76
EX fld.d $f11, \base, (11 * FPU_REG_WIDTH)
77
EX fld.d $f12, \base, (12 * FPU_REG_WIDTH)
78
EX fld.d $f13, \base, (13 * FPU_REG_WIDTH)
79
EX fld.d $f14, \base, (14 * FPU_REG_WIDTH)
80
EX fld.d $f15, \base, (15 * FPU_REG_WIDTH)
81
EX fld.d $f16, \base, (16 * FPU_REG_WIDTH)
82
EX fld.d $f17, \base, (17 * FPU_REG_WIDTH)
83
EX fld.d $f18, \base, (18 * FPU_REG_WIDTH)
84
EX fld.d $f19, \base, (19 * FPU_REG_WIDTH)
85
EX fld.d $f20, \base, (20 * FPU_REG_WIDTH)
86
EX fld.d $f21, \base, (21 * FPU_REG_WIDTH)
87
EX fld.d $f22, \base, (22 * FPU_REG_WIDTH)
88
EX fld.d $f23, \base, (23 * FPU_REG_WIDTH)
89
EX fld.d $f24, \base, (24 * FPU_REG_WIDTH)
90
EX fld.d $f25, \base, (25 * FPU_REG_WIDTH)
91
EX fld.d $f26, \base, (26 * FPU_REG_WIDTH)
92
EX fld.d $f27, \base, (27 * FPU_REG_WIDTH)
93
EX fld.d $f28, \base, (28 * FPU_REG_WIDTH)
94
EX fld.d $f29, \base, (29 * FPU_REG_WIDTH)
95
EX fld.d $f30, \base, (30 * FPU_REG_WIDTH)
96
EX fld.d $f31, \base, (31 * FPU_REG_WIDTH)
97
.endm
98
99
.macro sc_save_fcc base, tmp0, tmp1
100
movcf2gr \tmp0, $fcc0
101
move \tmp1, \tmp0
102
movcf2gr \tmp0, $fcc1
103
bstrins.d \tmp1, \tmp0, 15, 8
104
movcf2gr \tmp0, $fcc2
105
bstrins.d \tmp1, \tmp0, 23, 16
106
movcf2gr \tmp0, $fcc3
107
bstrins.d \tmp1, \tmp0, 31, 24
108
movcf2gr \tmp0, $fcc4
109
bstrins.d \tmp1, \tmp0, 39, 32
110
movcf2gr \tmp0, $fcc5
111
bstrins.d \tmp1, \tmp0, 47, 40
112
movcf2gr \tmp0, $fcc6
113
bstrins.d \tmp1, \tmp0, 55, 48
114
movcf2gr \tmp0, $fcc7
115
bstrins.d \tmp1, \tmp0, 63, 56
116
EX st.d \tmp1, \base, 0
117
.endm
118
119
.macro sc_restore_fcc base, tmp0, tmp1
120
EX ld.d \tmp0, \base, 0
121
bstrpick.d \tmp1, \tmp0, 7, 0
122
movgr2cf $fcc0, \tmp1
123
bstrpick.d \tmp1, \tmp0, 15, 8
124
movgr2cf $fcc1, \tmp1
125
bstrpick.d \tmp1, \tmp0, 23, 16
126
movgr2cf $fcc2, \tmp1
127
bstrpick.d \tmp1, \tmp0, 31, 24
128
movgr2cf $fcc3, \tmp1
129
bstrpick.d \tmp1, \tmp0, 39, 32
130
movgr2cf $fcc4, \tmp1
131
bstrpick.d \tmp1, \tmp0, 47, 40
132
movgr2cf $fcc5, \tmp1
133
bstrpick.d \tmp1, \tmp0, 55, 48
134
movgr2cf $fcc6, \tmp1
135
bstrpick.d \tmp1, \tmp0, 63, 56
136
movgr2cf $fcc7, \tmp1
137
.endm
138
139
.macro sc_save_fcsr base, tmp0
140
movfcsr2gr \tmp0, fcsr0
141
EX st.w \tmp0, \base, 0
142
#if defined(CONFIG_CPU_HAS_LBT)
143
/* TM bit is always 0 if LBT not supported */
144
andi \tmp0, \tmp0, FPU_CSR_TM
145
beqz \tmp0, 1f
146
x86clrtm
147
1:
148
#endif
149
.endm
150
151
.macro sc_restore_fcsr base, tmp0
152
EX ld.w \tmp0, \base, 0
153
movgr2fcsr fcsr0, \tmp0
154
.endm
155
156
.macro sc_save_lsx base
157
#ifdef CONFIG_CPU_HAS_LSX
158
EX vst $vr0, \base, (0 * LSX_REG_WIDTH)
159
EX vst $vr1, \base, (1 * LSX_REG_WIDTH)
160
EX vst $vr2, \base, (2 * LSX_REG_WIDTH)
161
EX vst $vr3, \base, (3 * LSX_REG_WIDTH)
162
EX vst $vr4, \base, (4 * LSX_REG_WIDTH)
163
EX vst $vr5, \base, (5 * LSX_REG_WIDTH)
164
EX vst $vr6, \base, (6 * LSX_REG_WIDTH)
165
EX vst $vr7, \base, (7 * LSX_REG_WIDTH)
166
EX vst $vr8, \base, (8 * LSX_REG_WIDTH)
167
EX vst $vr9, \base, (9 * LSX_REG_WIDTH)
168
EX vst $vr10, \base, (10 * LSX_REG_WIDTH)
169
EX vst $vr11, \base, (11 * LSX_REG_WIDTH)
170
EX vst $vr12, \base, (12 * LSX_REG_WIDTH)
171
EX vst $vr13, \base, (13 * LSX_REG_WIDTH)
172
EX vst $vr14, \base, (14 * LSX_REG_WIDTH)
173
EX vst $vr15, \base, (15 * LSX_REG_WIDTH)
174
EX vst $vr16, \base, (16 * LSX_REG_WIDTH)
175
EX vst $vr17, \base, (17 * LSX_REG_WIDTH)
176
EX vst $vr18, \base, (18 * LSX_REG_WIDTH)
177
EX vst $vr19, \base, (19 * LSX_REG_WIDTH)
178
EX vst $vr20, \base, (20 * LSX_REG_WIDTH)
179
EX vst $vr21, \base, (21 * LSX_REG_WIDTH)
180
EX vst $vr22, \base, (22 * LSX_REG_WIDTH)
181
EX vst $vr23, \base, (23 * LSX_REG_WIDTH)
182
EX vst $vr24, \base, (24 * LSX_REG_WIDTH)
183
EX vst $vr25, \base, (25 * LSX_REG_WIDTH)
184
EX vst $vr26, \base, (26 * LSX_REG_WIDTH)
185
EX vst $vr27, \base, (27 * LSX_REG_WIDTH)
186
EX vst $vr28, \base, (28 * LSX_REG_WIDTH)
187
EX vst $vr29, \base, (29 * LSX_REG_WIDTH)
188
EX vst $vr30, \base, (30 * LSX_REG_WIDTH)
189
EX vst $vr31, \base, (31 * LSX_REG_WIDTH)
190
#endif
191
.endm
192
193
.macro sc_restore_lsx base
194
#ifdef CONFIG_CPU_HAS_LSX
195
EX vld $vr0, \base, (0 * LSX_REG_WIDTH)
196
EX vld $vr1, \base, (1 * LSX_REG_WIDTH)
197
EX vld $vr2, \base, (2 * LSX_REG_WIDTH)
198
EX vld $vr3, \base, (3 * LSX_REG_WIDTH)
199
EX vld $vr4, \base, (4 * LSX_REG_WIDTH)
200
EX vld $vr5, \base, (5 * LSX_REG_WIDTH)
201
EX vld $vr6, \base, (6 * LSX_REG_WIDTH)
202
EX vld $vr7, \base, (7 * LSX_REG_WIDTH)
203
EX vld $vr8, \base, (8 * LSX_REG_WIDTH)
204
EX vld $vr9, \base, (9 * LSX_REG_WIDTH)
205
EX vld $vr10, \base, (10 * LSX_REG_WIDTH)
206
EX vld $vr11, \base, (11 * LSX_REG_WIDTH)
207
EX vld $vr12, \base, (12 * LSX_REG_WIDTH)
208
EX vld $vr13, \base, (13 * LSX_REG_WIDTH)
209
EX vld $vr14, \base, (14 * LSX_REG_WIDTH)
210
EX vld $vr15, \base, (15 * LSX_REG_WIDTH)
211
EX vld $vr16, \base, (16 * LSX_REG_WIDTH)
212
EX vld $vr17, \base, (17 * LSX_REG_WIDTH)
213
EX vld $vr18, \base, (18 * LSX_REG_WIDTH)
214
EX vld $vr19, \base, (19 * LSX_REG_WIDTH)
215
EX vld $vr20, \base, (20 * LSX_REG_WIDTH)
216
EX vld $vr21, \base, (21 * LSX_REG_WIDTH)
217
EX vld $vr22, \base, (22 * LSX_REG_WIDTH)
218
EX vld $vr23, \base, (23 * LSX_REG_WIDTH)
219
EX vld $vr24, \base, (24 * LSX_REG_WIDTH)
220
EX vld $vr25, \base, (25 * LSX_REG_WIDTH)
221
EX vld $vr26, \base, (26 * LSX_REG_WIDTH)
222
EX vld $vr27, \base, (27 * LSX_REG_WIDTH)
223
EX vld $vr28, \base, (28 * LSX_REG_WIDTH)
224
EX vld $vr29, \base, (29 * LSX_REG_WIDTH)
225
EX vld $vr30, \base, (30 * LSX_REG_WIDTH)
226
EX vld $vr31, \base, (31 * LSX_REG_WIDTH)
227
#endif
228
.endm
229
230
.macro sc_save_lasx base
231
#ifdef CONFIG_CPU_HAS_LASX
232
EX xvst $xr0, \base, (0 * LASX_REG_WIDTH)
233
EX xvst $xr1, \base, (1 * LASX_REG_WIDTH)
234
EX xvst $xr2, \base, (2 * LASX_REG_WIDTH)
235
EX xvst $xr3, \base, (3 * LASX_REG_WIDTH)
236
EX xvst $xr4, \base, (4 * LASX_REG_WIDTH)
237
EX xvst $xr5, \base, (5 * LASX_REG_WIDTH)
238
EX xvst $xr6, \base, (6 * LASX_REG_WIDTH)
239
EX xvst $xr7, \base, (7 * LASX_REG_WIDTH)
240
EX xvst $xr8, \base, (8 * LASX_REG_WIDTH)
241
EX xvst $xr9, \base, (9 * LASX_REG_WIDTH)
242
EX xvst $xr10, \base, (10 * LASX_REG_WIDTH)
243
EX xvst $xr11, \base, (11 * LASX_REG_WIDTH)
244
EX xvst $xr12, \base, (12 * LASX_REG_WIDTH)
245
EX xvst $xr13, \base, (13 * LASX_REG_WIDTH)
246
EX xvst $xr14, \base, (14 * LASX_REG_WIDTH)
247
EX xvst $xr15, \base, (15 * LASX_REG_WIDTH)
248
EX xvst $xr16, \base, (16 * LASX_REG_WIDTH)
249
EX xvst $xr17, \base, (17 * LASX_REG_WIDTH)
250
EX xvst $xr18, \base, (18 * LASX_REG_WIDTH)
251
EX xvst $xr19, \base, (19 * LASX_REG_WIDTH)
252
EX xvst $xr20, \base, (20 * LASX_REG_WIDTH)
253
EX xvst $xr21, \base, (21 * LASX_REG_WIDTH)
254
EX xvst $xr22, \base, (22 * LASX_REG_WIDTH)
255
EX xvst $xr23, \base, (23 * LASX_REG_WIDTH)
256
EX xvst $xr24, \base, (24 * LASX_REG_WIDTH)
257
EX xvst $xr25, \base, (25 * LASX_REG_WIDTH)
258
EX xvst $xr26, \base, (26 * LASX_REG_WIDTH)
259
EX xvst $xr27, \base, (27 * LASX_REG_WIDTH)
260
EX xvst $xr28, \base, (28 * LASX_REG_WIDTH)
261
EX xvst $xr29, \base, (29 * LASX_REG_WIDTH)
262
EX xvst $xr30, \base, (30 * LASX_REG_WIDTH)
263
EX xvst $xr31, \base, (31 * LASX_REG_WIDTH)
264
#endif
265
.endm
266
267
.macro sc_restore_lasx base
268
#ifdef CONFIG_CPU_HAS_LASX
269
EX xvld $xr0, \base, (0 * LASX_REG_WIDTH)
270
EX xvld $xr1, \base, (1 * LASX_REG_WIDTH)
271
EX xvld $xr2, \base, (2 * LASX_REG_WIDTH)
272
EX xvld $xr3, \base, (3 * LASX_REG_WIDTH)
273
EX xvld $xr4, \base, (4 * LASX_REG_WIDTH)
274
EX xvld $xr5, \base, (5 * LASX_REG_WIDTH)
275
EX xvld $xr6, \base, (6 * LASX_REG_WIDTH)
276
EX xvld $xr7, \base, (7 * LASX_REG_WIDTH)
277
EX xvld $xr8, \base, (8 * LASX_REG_WIDTH)
278
EX xvld $xr9, \base, (9 * LASX_REG_WIDTH)
279
EX xvld $xr10, \base, (10 * LASX_REG_WIDTH)
280
EX xvld $xr11, \base, (11 * LASX_REG_WIDTH)
281
EX xvld $xr12, \base, (12 * LASX_REG_WIDTH)
282
EX xvld $xr13, \base, (13 * LASX_REG_WIDTH)
283
EX xvld $xr14, \base, (14 * LASX_REG_WIDTH)
284
EX xvld $xr15, \base, (15 * LASX_REG_WIDTH)
285
EX xvld $xr16, \base, (16 * LASX_REG_WIDTH)
286
EX xvld $xr17, \base, (17 * LASX_REG_WIDTH)
287
EX xvld $xr18, \base, (18 * LASX_REG_WIDTH)
288
EX xvld $xr19, \base, (19 * LASX_REG_WIDTH)
289
EX xvld $xr20, \base, (20 * LASX_REG_WIDTH)
290
EX xvld $xr21, \base, (21 * LASX_REG_WIDTH)
291
EX xvld $xr22, \base, (22 * LASX_REG_WIDTH)
292
EX xvld $xr23, \base, (23 * LASX_REG_WIDTH)
293
EX xvld $xr24, \base, (24 * LASX_REG_WIDTH)
294
EX xvld $xr25, \base, (25 * LASX_REG_WIDTH)
295
EX xvld $xr26, \base, (26 * LASX_REG_WIDTH)
296
EX xvld $xr27, \base, (27 * LASX_REG_WIDTH)
297
EX xvld $xr28, \base, (28 * LASX_REG_WIDTH)
298
EX xvld $xr29, \base, (29 * LASX_REG_WIDTH)
299
EX xvld $xr30, \base, (30 * LASX_REG_WIDTH)
300
EX xvld $xr31, \base, (31 * LASX_REG_WIDTH)
301
#endif
302
.endm
303
304
/*
 * Save a thread's fp context.
 * a0: base of the FPU save area (kernel pointer — no fault handling here).
 */
SYM_FUNC_START(_save_fp)
	fpu_save_csr	a0 t1
	fpu_save_double	a0 t1		# clobbers t1
	fpu_save_cc	a0 t1 t2	# clobbers t1, t2
	jr	ra
SYM_FUNC_END(_save_fp)
EXPORT_SYMBOL(_save_fp)
/*
 * Restore a thread's fp context.
 * a0: base of the FPU save area (kernel pointer — no fault handling here).
 */
SYM_FUNC_START(_restore_fp)
	fpu_restore_double	a0 t1		# clobbers t1
	fpu_restore_csr		a0 t1 t2
	fpu_restore_cc		a0 t1 t2	# clobbers t1, t2
	jr	ra
SYM_FUNC_END(_restore_fp)
#ifdef CONFIG_CPU_HAS_LSX

/*
 * Save a thread's LSX vector context.
 */
SYM_FUNC_START(_save_lsx)
	lsx_save_all	a0 t1 t2
	jr	ra
SYM_FUNC_END(_save_lsx)
EXPORT_SYMBOL(_save_lsx)

/*
 * Restore a thread's LSX vector context.
 */
SYM_FUNC_START(_restore_lsx)
	lsx_restore_all	a0 t1 t2
	jr	ra
SYM_FUNC_END(_restore_lsx)

/* Save only the upper halves of the LSX registers (scalar FP part untouched) */
SYM_FUNC_START(_save_lsx_upper)
	lsx_save_all_upper a0 t0 t1
	jr	ra
SYM_FUNC_END(_save_lsx_upper)

/* Restore only the upper halves of the LSX registers */
SYM_FUNC_START(_restore_lsx_upper)
	lsx_restore_all_upper a0 t0 t1
	jr	ra
SYM_FUNC_END(_restore_lsx_upper)
EXPORT_SYMBOL(_restore_lsx_upper)

/* Initialize the upper halves of the LSX registers */
SYM_FUNC_START(_init_lsx_upper)
	lsx_init_all_upper t1
	jr	ra
SYM_FUNC_END(_init_lsx_upper)
#endif
#ifdef CONFIG_CPU_HAS_LASX

/*
 * Save a thread's LASX vector context.
 */
SYM_FUNC_START(_save_lasx)
	lasx_save_all	a0 t1 t2
	jr	ra
SYM_FUNC_END(_save_lasx)
EXPORT_SYMBOL(_save_lasx)

/*
 * Restore a thread's LASX vector context.
 */
SYM_FUNC_START(_restore_lasx)
	lasx_restore_all	a0 t1 t2
	jr	ra
SYM_FUNC_END(_restore_lasx)

/* Save only the upper halves of the LASX registers (LSX part untouched) */
SYM_FUNC_START(_save_lasx_upper)
	lasx_save_all_upper a0 t0 t1
	jr	ra
SYM_FUNC_END(_save_lasx_upper)

/* Restore only the upper halves of the LASX registers */
SYM_FUNC_START(_restore_lasx_upper)
	lasx_restore_all_upper a0 t0 t1
	jr	ra
SYM_FUNC_END(_restore_lasx_upper)
EXPORT_SYMBOL(_restore_lasx_upper)

/* Initialize the upper halves of the LASX registers */
SYM_FUNC_START(_init_lasx_upper)
	lasx_init_all_upper t1
	jr	ra
SYM_FUNC_END(_init_lasx_upper)
#endif
/*
 * Load the FPU with signalling NANS.  This bit pattern we're using has
 * the property that no matter whether considered as single or as double
 * precision represents signaling NANS.
 *
 * The value to initialize fcsr0 to comes in $a0.
 */

SYM_FUNC_START(_init_fpu)
	li.w	t1, CSR_EUEN_FPEN
	csrxchg	t1, t1, LOONGARCH_CSR_EUEN	# enable FP before touching FP regs

	movgr2fcsr	fcsr0, a0

	li.w	t1, -1				# SNaN

	movgr2fr.d	$f0, t1
	movgr2fr.d	$f1, t1
	movgr2fr.d	$f2, t1
	movgr2fr.d	$f3, t1
	movgr2fr.d	$f4, t1
	movgr2fr.d	$f5, t1
	movgr2fr.d	$f6, t1
	movgr2fr.d	$f7, t1
	movgr2fr.d	$f8, t1
	movgr2fr.d	$f9, t1
	movgr2fr.d	$f10, t1
	movgr2fr.d	$f11, t1
	movgr2fr.d	$f12, t1
	movgr2fr.d	$f13, t1
	movgr2fr.d	$f14, t1
	movgr2fr.d	$f15, t1
	movgr2fr.d	$f16, t1
	movgr2fr.d	$f17, t1
	movgr2fr.d	$f18, t1
	movgr2fr.d	$f19, t1
	movgr2fr.d	$f20, t1
	movgr2fr.d	$f21, t1
	movgr2fr.d	$f22, t1
	movgr2fr.d	$f23, t1
	movgr2fr.d	$f24, t1
	movgr2fr.d	$f25, t1
	movgr2fr.d	$f26, t1
	movgr2fr.d	$f27, t1
	movgr2fr.d	$f28, t1
	movgr2fr.d	$f29, t1
	movgr2fr.d	$f30, t1
	movgr2fr.d	$f31, t1

	jr	ra
SYM_FUNC_END(_init_fpu)
/*
 * Save scalar FP state to user-supplied buffers.
 * a0: fpregs
 * a1: fcc
 * a2: fcsr
 * Returns 0 on success, -EFAULT (via .L_fpu_fault) if a user access faults.
 */
SYM_FUNC_START(_save_fp_context)
	sc_save_fcc	a1 t1 t2
	sc_save_fcsr	a2 t1
	sc_save_fp	a0
	li.w	a0, 0				# success
	jr	ra
SYM_FUNC_END(_save_fp_context)
EXPORT_SYMBOL_GPL(_save_fp_context)
/*
 * Restore scalar FP state from user-supplied buffers.
 * a0: fpregs
 * a1: fcc
 * a2: fcsr
 * Returns 0 on success, -EFAULT (via .L_fpu_fault) if a user access faults.
 */
SYM_FUNC_START(_restore_fp_context)
	sc_restore_fp	a0
	sc_restore_fcc	a1 t1 t2
	sc_restore_fcsr	a2 t1
	li.w	a0, 0				# success
	jr	ra
SYM_FUNC_END(_restore_fp_context)
EXPORT_SYMBOL_GPL(_restore_fp_context)
/*
 * Save LSX state to user-supplied buffers.
 * a0: fpregs
 * a1: fcc
 * a2: fcsr
 * Returns 0 on success, -EFAULT (via .L_fpu_fault) if a user access faults.
 */
SYM_FUNC_START(_save_lsx_context)
	sc_save_fcc a1, t0, t1
	sc_save_fcsr a2, t0
	sc_save_lsx a0
	li.w	a0, 0				# success
	jr	ra
SYM_FUNC_END(_save_lsx_context)
EXPORT_SYMBOL_GPL(_save_lsx_context)
/*
 * Restore LSX state from user-supplied buffers.
 * a0: fpregs
 * a1: fcc
 * a2: fcsr
 * Returns 0 on success, -EFAULT (via .L_fpu_fault) if a user access faults.
 */
SYM_FUNC_START(_restore_lsx_context)
	sc_restore_lsx a0
	sc_restore_fcc a1, t1, t2
	sc_restore_fcsr a2, t1
	li.w	a0, 0				# success
	jr	ra
SYM_FUNC_END(_restore_lsx_context)
EXPORT_SYMBOL_GPL(_restore_lsx_context)
/*
 * Save LASX state to user-supplied buffers.
 * a0: fpregs
 * a1: fcc
 * a2: fcsr
 * Returns 0 on success, -EFAULT (via .L_fpu_fault) if a user access faults.
 */
SYM_FUNC_START(_save_lasx_context)
	sc_save_fcc a1, t0, t1
	sc_save_fcsr a2, t0
	sc_save_lasx a0
	li.w	a0, 0				# success
	jr	ra
SYM_FUNC_END(_save_lasx_context)
EXPORT_SYMBOL_GPL(_save_lasx_context)
/*
 * Restore LASX state from user-supplied buffers.
 * a0: fpregs
 * a1: fcc
 * a2: fcsr
 * Returns 0 on success, -EFAULT (via .L_fpu_fault) if a user access faults.
 */
SYM_FUNC_START(_restore_lasx_context)
	sc_restore_lasx a0
	sc_restore_fcc a1, t1, t2
	sc_restore_fcsr a2, t1
	li.w	a0, 0				# success
	jr	ra
SYM_FUNC_END(_restore_lasx_context)
EXPORT_SYMBOL_GPL(_restore_lasx_context)
/* Common exception-table landing pad for every EX access above */
.L_fpu_fault:
	li.w	a0, -EFAULT			# failure
	jr	ra
#ifdef CONFIG_CPU_HAS_LBT
/*
 * NOTE(review): with LBT configured these restore paths are marked so
 * objtool skips stack-frame validation for them — confirm against the
 * commit that introduced these annotations before removing.
 */
STACK_FRAME_NON_STANDARD _restore_fp
#ifdef CONFIG_CPU_HAS_LSX
STACK_FRAME_NON_STANDARD _restore_lsx
#endif
#ifdef CONFIG_CPU_HAS_LASX
STACK_FRAME_NON_STANDARD _restore_lasx
#endif
#endif