/* SPDX-License-Identifier: GPL-2.0
 *
 * arch/sh/kernel/cpu/sh4a/sleep-sh_mobile.S
 *
 * Sleep mode and Standby modes support for SuperH Mobile
 *
 * Copyright (C) 2009 Magnus Damm
 */

#include <linux/sys.h>
#include <linux/errno.h>
#include <linux/linkage.h>
#include <asm/asm-offsets.h>
#include <asm/suspend.h>

/*
 * Kernel mode register usage, see entry.S:
 *	k0	scratch
 *	k1	scratch
 */
#define k0	r0
#define k1	r1

/* manage self-refresh and enter standby mode. must be self-contained.
 * this code will be copied to on-chip memory and executed from there.
 */
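/* on entry: r4 = requested mode flags (SUSP_SH_*), r5 = base of the
 * on-chip data area (installed as vbr below).
 */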
	.balign 4
ENTRY(sh_mobile_sleep_enter_start)

	/* save mode flags */
	mov.l	r4, @(SH_SLEEP_MODE, r5)

	/* save original vbr */
	stc	vbr, r0
	mov.l	r0, @(SH_SLEEP_VBR, r5)

	/* point vbr to our on-chip memory page */
	ldc	r5, vbr

	/* save return address */
	sts	pr, r0
	mov.l	r0, @(SH_SLEEP_SPC, r5)

	/* save sr */
	stc	sr, r0
	mov.l	r0, @(SH_SLEEP_SR, r5)

	/* save general purpose registers to stack if needed */
	mov.l	@(SH_SLEEP_MODE, r5), r0
	tst	#SUSP_SH_REGS, r0
	bt	skip_regs_save

	sts.l	pr, @-r15
	mov.l	r14, @-r15
	mov.l	r13, @-r15
	mov.l	r12, @-r15
	mov.l	r11, @-r15
	mov.l	r10, @-r15
	mov.l	r9, @-r15
	mov.l	r8, @-r15

	/* make sure bank0 is selected, save low registers */
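	/* set_sr expects r9 = AND mask and r10 = OR value for sr; the mov
	 * in each bsr delay slot below executes before set_sr itself runs.
	 */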
	mov.l	rb_bit, r9
	not	r9, r9
	bsr	set_sr
	mov	#0, r10

	bsr	save_low_regs
	nop

	/* switch to bank 1, save low registers */
	mov.l	rb_bit, r10
	bsr	set_sr
	mov	#-1, r9

	bsr	save_low_regs
	nop

	/* switch back to bank 0 */
	mov.l	rb_bit, r9
	not	r9, r9
	bsr	set_sr
	mov	#0, r10

skip_regs_save:

	/* save sp, also set to internal ram */
	mov.l	r15, @(SH_SLEEP_SP, r5)
	mov	r5, r15

	/* save stbcr */
	bsr	save_register
	mov	#SH_SLEEP_REG_STBCR, r0

	/* save mmu and cache context if needed */
	mov.l	@(SH_SLEEP_MODE, r5), r0
	tst	#SUSP_SH_MMU, r0
	bt	skip_mmu_save_disable

	/* save mmu state */
	bsr	save_register
	mov	#SH_SLEEP_REG_PTEH, r0

	bsr	save_register
	mov	#SH_SLEEP_REG_PTEL, r0

	bsr	save_register
	mov	#SH_SLEEP_REG_TTB, r0

	bsr	save_register
	mov	#SH_SLEEP_REG_TEA, r0

	bsr	save_register
	mov	#SH_SLEEP_REG_MMUCR, r0

	bsr	save_register
	mov	#SH_SLEEP_REG_PTEA, r0

	bsr	save_register
	mov	#SH_SLEEP_REG_PASCR, r0

	bsr	save_register
	mov	#SH_SLEEP_REG_IRMCR, r0

	/* invalidate TLBs and disable the MMU */
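	/* (the value 4 written below sets MMUCR.TI, the TLB invalidate bit,
	 * with MMUCR.AT, address translation enable, left cleared)
	 */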
	bsr	get_register
	mov	#SH_SLEEP_REG_MMUCR, r0
	mov	#4, r1
	mov.l	r1, @r0
	icbi	@r0

	/* save cache registers and disable caches */
	bsr	save_register
	mov	#SH_SLEEP_REG_CCR, r0

	bsr	save_register
	mov	#SH_SLEEP_REG_RAMCR, r0

	bsr	get_register
	mov	#SH_SLEEP_REG_CCR, r0
	mov	#0, r1
	mov.l	r1, @r0
	icbi	@r0

skip_mmu_save_disable:
	/* call self-refresh entering code if needed */
	mov.l	@(SH_SLEEP_MODE, r5), r0
	tst	#SUSP_SH_SF, r0
	bt	skip_set_sf

	mov.l	@(SH_SLEEP_SF_PRE, r5), r0
	jsr	@r0
	nop

skip_set_sf:
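	/* pick the STBCR mode value: the mov in each bra delay slot below
	 * loads r1 before do_sleep writes it to STBCR.
	 */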
	mov.l	@(SH_SLEEP_MODE, r5), r0
	tst	#SUSP_SH_STANDBY, r0
	bt	test_rstandby

	/* set mode to "software standby mode" */
	bra	do_sleep
	mov	#0x80, r1

test_rstandby:
	tst	#SUSP_SH_RSTANDBY, r0
	bt	test_ustandby

	/* setup BAR register */
	bsr	get_register
	mov	#SH_SLEEP_REG_BAR, r0
	mov.l	@(SH_SLEEP_RESUME, r5), r1
	mov.l	r1, @r0

	/* set mode to "r-standby mode" */
	bra	do_sleep
	mov	#0x20, r1

test_ustandby:
	tst	#SUSP_SH_USTANDBY, r0
	bt	force_sleep

	/* set mode to "u-standby mode" */
	bra	do_sleep
	mov	#0x10, r1

force_sleep:

	/* set mode to "sleep mode" */
	mov	#0x00, r1

do_sleep:
	/* setup and enter selected standby mode */
	bsr	get_register
	mov	#SH_SLEEP_REG_STBCR, r0
	mov.l	r1, @r0
again:
	sleep
	bra	again
	nop
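
/* save_register: r0 = register slot offset, r5 = on-chip data area.
 * The word at @(SH_SLEEP_BASE_ADDR + r0, r5) holds the hardware register
 * address; its current value is copied to @(SH_SLEEP_BASE_DATA + r0, r5).
 * get_register only returns the hardware register address in r0.
 */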
save_register:
	add	#SH_SLEEP_BASE_ADDR, r0
	mov.l	@(r0, r5), r1
	add	#-SH_SLEEP_BASE_ADDR, r0
	mov.l	@r1, r1
	add	#SH_SLEEP_BASE_DATA, r0
	mov.l	r1, @(r0, r5)
	add	#-SH_SLEEP_BASE_DATA, r0
	rts
	nop

get_register:
	add	#SH_SLEEP_BASE_ADDR, r0
	mov.l	@(r0, r5), r0
	rts
	nop

set_sr:
	stc	sr, r8
	and	r9, r8
	or	r10, r8
	ldc	r8, sr
	rts
	nop
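
/* save_low_regs: push r0-r7; the final store executes in the rts delay
 * slot.  restore_low_regs in the resume code below is the mirror image.
 */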
save_low_regs:
	mov.l	r7, @-r15
	mov.l	r6, @-r15
	mov.l	r5, @-r15
	mov.l	r4, @-r15
	mov.l	r3, @-r15
	mov.l	r2, @-r15
	mov.l	r1, @-r15
	rts
	mov.l	r0, @-r15

	.balign 4
rb_bit:	.long	0x20000000 ! RB=1

ENTRY(sh_mobile_sleep_enter_end)

	.balign 4
ENTRY(sh_mobile_sleep_resume_start)

	/* figure out start address */
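	/* bsr loads pr with the address of the instruction after its delay
	 * slot; masking with 1f (~0x7ff) rounds that down to the base of the
	 * on-chip memory page, i.e. the data area the enter code kept in vbr.
	 */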
	bsr	0f
	nop
0:
	sts	pr, k1
	mov.l	1f, k0
	and	k0, k1

	/* store pointer to data area in VBR */
	ldc	k1, vbr

	/* setup sr with saved sr */
	mov.l	@(SH_SLEEP_SR, k1), k0
	ldc	k0, sr

	/* now: user register set! */
	stc	vbr, r5

	/* setup spc with return address to c code */
	mov.l	@(SH_SLEEP_SPC, r5), r0
	ldc	r0, spc

	/* restore vbr */
	mov.l	@(SH_SLEEP_VBR, r5), r0
	ldc	r0, vbr

	/* setup ssr with saved sr */
	mov.l	@(SH_SLEEP_SR, r5), r0
	ldc	r0, ssr

	/* restore sp */
	mov.l	@(SH_SLEEP_SP, r5), r15

	/* restore sleep mode register */
	bsr	restore_register
	mov	#SH_SLEEP_REG_STBCR, r0

	/* call self-refresh resume code if needed */
	mov.l	@(SH_SLEEP_MODE, r5), r0
	tst	#SUSP_SH_SF, r0
	bt	skip_restore_sf

	mov.l	@(SH_SLEEP_SF_POST, r5), r0
	jsr	@r0
	nop

skip_restore_sf:
	/* restore mmu and cache state if needed */
	mov.l	@(SH_SLEEP_MODE, r5), r0
	tst	#SUSP_SH_MMU, r0
	bt	skip_restore_mmu

	/* restore mmu state */
	bsr	restore_register
	mov	#SH_SLEEP_REG_PTEH, r0

	bsr	restore_register
	mov	#SH_SLEEP_REG_PTEL, r0

	bsr	restore_register
	mov	#SH_SLEEP_REG_TTB, r0

	bsr	restore_register
	mov	#SH_SLEEP_REG_TEA, r0

	bsr	restore_register
	mov	#SH_SLEEP_REG_PTEA, r0

	bsr	restore_register
	mov	#SH_SLEEP_REG_PASCR, r0

	bsr	restore_register
	mov	#SH_SLEEP_REG_IRMCR, r0

	bsr	restore_register
	mov	#SH_SLEEP_REG_MMUCR, r0
	icbi	@r0

	/* restore cache settings */
	bsr	restore_register
	mov	#SH_SLEEP_REG_RAMCR, r0
	icbi	@r0

	bsr	restore_register
	mov	#SH_SLEEP_REG_CCR, r0
	icbi	@r0

skip_restore_mmu:

	/* restore general purpose registers if needed */
	mov.l	@(SH_SLEEP_MODE, r5), r0
	tst	#SUSP_SH_REGS, r0
	bt	skip_restore_regs

	/* switch to bank 1, restore low registers */
	mov.l	_rb_bit, r10
	bsr	_set_sr
	mov	#-1, r9

	bsr	restore_low_regs
	nop

	/* switch to bank0, restore low registers */
	mov.l	_rb_bit, r9
	not	r9, r9
	bsr	_set_sr
	mov	#0, r10

	bsr	restore_low_regs
	nop

	/* restore the rest of the registers */
	mov.l	@r15+, r8
	mov.l	@r15+, r9
	mov.l	@r15+, r10
	mov.l	@r15+, r11
	mov.l	@r15+, r12
	mov.l	@r15+, r13
	mov.l	@r15+, r14
	lds.l	@r15+, pr

skip_restore_regs:
	rte
	nop
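
/* restore_register: write the value saved at @(SH_SLEEP_BASE_DATA + r0, r5)
 * back to the hardware register whose address sits at
 * @(SH_SLEEP_BASE_ADDR + r0, r5).
 */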
restore_register:
	add	#SH_SLEEP_BASE_DATA, r0
	mov.l	@(r0, r5), r1
	add	#-SH_SLEEP_BASE_DATA, r0
	add	#SH_SLEEP_BASE_ADDR, r0
	mov.l	@(r0, r5), r0
	mov.l	r1, @r0
	rts
	nop

_set_sr:
	stc	sr, r8
	and	r9, r8
	or	r10, r8
	ldc	r8, sr
	rts
	nop

restore_low_regs:
	mov.l	@r15+, r0
	mov.l	@r15+, r1
	mov.l	@r15+, r2
	mov.l	@r15+, r3
	mov.l	@r15+, r4
	mov.l	@r15+, r5
	mov.l	@r15+, r6
	rts
	mov.l	@r15+, r7

	.balign 4
_rb_bit:	.long	0x20000000 ! RB=1
1:	.long	~0x7ff
ENTRY(sh_mobile_sleep_resume_end)