Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
torvalds
GitHub Repository: torvalds/linux
Path: blob/master/arch/powerpc/kernel/cpu_setup_6xx.S
26424 views
1
/* SPDX-License-Identifier: GPL-2.0-or-later */
2
/*
3
* This file contains low level CPU setup functions.
4
* Copyright (C) 2003 Benjamin Herrenschmidt ([email protected])
5
*/
6
7
#include <linux/linkage.h>
8
9
#include <asm/processor.h>
10
#include <asm/page.h>
11
#include <asm/cputable.h>
12
#include <asm/ppc_asm.h>
13
#include <asm/asm-offsets.h>
14
#include <asm/cache.h>
15
#include <asm/mmu.h>
16
#include <asm/feature-fixups.h>
17
18
/* 603 family: optional SW TLB LRU init, FPU init, caches, G2_LE high BATs */
_GLOBAL(__setup_cpu_603)
	mflr	r5			/* save return address: the bl calls below clobber LR */
BEGIN_MMU_FTR_SECTION
	li	r10,0
	mtspr	SPRN_SPRG_603_LRU,r10	/* init SW LRU tracking */
END_MMU_FTR_SECTION_IFSET(MMU_FTR_NEED_DTLB_SW_LRU)

BEGIN_FTR_SECTION
	bl	__init_fpu_registers	/* only when an FPU is present (IFCLR below) */
END_FTR_SECTION_IFCLR(CPU_FTR_FPU_UNAVAILABLE)
	bl	setup_common_caches

	/*
	 * This assumes that all cores using __setup_cpu_603 with
	 * MMU_FTR_USE_HIGH_BATS are G2_LE compatible
	 */
BEGIN_MMU_FTR_SECTION
	bl	setup_g2_le_hid2
END_MMU_FTR_SECTION_IFSET(MMU_FTR_USE_HIGH_BATS)

	mtlr	r5			/* restore caller's return address */
	blr
40
/* 604 family: enable caches, then 604-specific HID0 setup */
_GLOBAL(__setup_cpu_604)
	mflr	r5			/* save return address across helper calls */
	bl	setup_common_caches
	bl	setup_604_hid0
	mtlr	r5
	blr
46
/* 750: FPU-init errata workaround, caches, 750/7400 HID0 setup */
_GLOBAL(__setup_cpu_750)
	mflr	r5			/* save return address across helper calls */
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r5
	blr
53
/* 750CX: as 750, plus the 750CX NAP/PLL quirk handling */
_GLOBAL(__setup_cpu_750cx)
	mflr	r5			/* save return address across helper calls */
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750cx
	mtlr	r5
	blr
61
/* 750FX: as 750, plus the (currently empty) 750FX hook */
_GLOBAL(__setup_cpu_750fx)
	mflr	r5			/* save return address across helper calls */
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750fx
	mtlr	r5
	blr
69
/* 7400: FPU init, MSSSR0 errata workarounds (rev <= 2.7), caches, HID0 */
_GLOBAL(__setup_cpu_7400)
	mflr	r5			/* save return address across helper calls */
	bl	__init_fpu_registers
	bl	setup_7400_workarounds
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r5
	blr
77
/* 7410: like 7400 (rev 1.0 errata workarounds) and additionally clear L2CR2 */
_GLOBAL(__setup_cpu_7410)
	mflr	r5			/* save return address across helper calls */
	bl	__init_fpu_registers
	bl	setup_7410_workarounds
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	li	r3,0
	mtspr	SPRN_L2CR2,r3		/* clear 7410 L2CR2 */
	mtlr	r5
	blr
87
/* 745x (7450/7455/7457/7447A/7448): caches plus 745x-specific setup */
_GLOBAL(__setup_cpu_745x)
	mflr	r5			/* save return address across helper calls */
	bl	setup_common_caches
	bl	setup_745x_specifics
	mtlr	r5
	blr
93
94
/* Enable caches for 603's, 604, 750 & 7400 */
95
/* Enable caches for 603's, 604, 750 & 7400 */
SYM_FUNC_START_LOCAL(setup_common_caches)
	mfspr	r11,SPRN_HID0
	andi.	r0,r11,HID0_DCE		/* was the D-cache already enabled? */
	ori	r11,r11,HID0_ICE|HID0_DCE	/* r11 = final value: both caches on */
	ori	r8,r11,HID0_ICFI	/* r8 = transient value: also invalidate I-cache */
	bne	1f			/* don't invalidate the D-cache */
	ori	r8,r8,HID0_DCI		/* unless it wasn't enabled */
1:	sync
	mtspr	SPRN_HID0,r8		/* enable and invalidate caches */
	sync
	mtspr	SPRN_HID0,r11		/* enable caches (invalidate bits dropped) */
	sync
	isync
	blr
SYM_FUNC_END(setup_common_caches)
110
111
/* 604, 604e, 604ev, ...
112
* Enable superscalar execution & branch history table
113
*/
114
/* 604, 604e, 604ev, ...
 * Enable superscalar execution & branch history table
 */
SYM_FUNC_START_LOCAL(setup_604_hid0)
	mfspr	r11,SPRN_HID0
	ori	r11,r11,HID0_SIED|HID0_BHTE	/* r11 = final HID0 value */
	ori	r8,r11,HID0_BTCD	/* r8 = transient value with BTCD also set */
	sync
	mtspr	SPRN_HID0,r8		/* flush branch target address cache */
	sync				/* on 604e/604r */
	mtspr	SPRN_HID0,r11		/* final value, BTCD cleared again */
	sync
	isync
	blr
SYM_FUNC_END(setup_604_hid0)
126
127
/* Enable high BATs for G2_LE and derivatives like e300cX */
128
/* Enable high BATs for G2_LE and derivatives like e300cX */
SYM_FUNC_START_LOCAL(setup_g2_le_hid2)
	mfspr	r11,SPRN_HID2_G2_LE
	oris	r11,r11,HID2_G2_LE_HBE@h	/* set High BAT Enable */
	mtspr	SPRN_HID2_G2_LE,r11
	sync
	isync
	blr
SYM_FUNC_END(setup_g2_le_hid2)
136
137
/* 7400 rev <= 2.7 and 7410 rev 1.0 suffer from some
 * errata that we work around here.
 * Moto MPC710CE.pdf describes them, those are errata
 * #3, #4 and #5
 * Note that we assume the firmware didn't choose to
 * apply other workarounds (there are other ones documented
 * in the .pdf). It appears that Apple firmware only works
 * around #3 and with the same fix we use. We may want to
 * check if the CPU is using 60x bus mode in which case
 * the workaround for errata #4 is useless. Also, we may
 * want to explicitly clear HID0_NOPDST as this is not
 * needed once we have applied workaround #5 (though it's
 * not set by Apple's firmware at least).
 */
151
/* NOTE: on the "apply" path this deliberately falls through (via "ble 1f")
 * into the shared errata fix-up at label 1: inside setup_7410_workarounds
 * just below -- the two functions must stay adjacent.
 */
SYM_FUNC_START_LOCAL(setup_7400_workarounds)
	mfpvr	r3
	rlwinm	r3,r3,0,20,31		/* keep the PVR revision field (low 12 bits) */
	cmpwi	0,r3,0x0207
	ble	1f			/* rev <= 2.7: apply the MSSSR0 workarounds */
	blr				/* newer revision: nothing to do */
SYM_FUNC_END(setup_7400_workarounds)
158
/* 7410: apply the MSSSR0 errata workarounds on rev 1.0 only.
 * Label 1: is also the fall-through target of setup_7400_workarounds above.
 */
SYM_FUNC_START_LOCAL(setup_7410_workarounds)
	mfpvr	r3
	rlwinm	r3,r3,0,20,31		/* keep the PVR revision field (low 12 bits) */
	cmpwi	0,r3,0x0100
	bnelr				/* only rev 1.0 needs the workarounds */
1:
	mfspr	r11,SPRN_MSSSR0
	/* Errata #3: Set L1OPQ_SIZE to 0x10 */
	rlwinm	r11,r11,0,9,6		/* clear bits 7:8 (IBM bit order) */
	oris	r11,r11,0x0100
	/* Errata #4: Set L2MQ_SIZE to 1 (check for MPX mode first ?) */
	oris	r11,r11,0x0002
	/* Errata #5: Set DRLT_SIZE to 0x01 */
	rlwinm	r11,r11,0,5,2		/* clear bits 3:4 (IBM bit order) */
	oris	r11,r11,0x0800
	sync
	mtspr	SPRN_MSSSR0,r11
	sync
	isync
	blr
SYM_FUNC_END(setup_7410_workarounds)
179
180
/* 740/750/7400/7410
181
* Enable Store Gathering (SGE), Address Broadcast (ABE),
182
* Branch History Table (BHTE), Branch Target ICache (BTIC)
183
* Dynamic Power Management (DPM), Speculative (SPD)
184
* Clear Instruction cache throttling (ICTC)
185
*/
186
/* 740/750/7400/7410
 * Enable Store Gathering (SGE), Address Broadcast (ABE),
 * Branch History Table (BHTE), Branch Target ICache (BTIC)
 * Dynamic Power Management (DPM), Speculative (SPD)
 * Clear Instruction cache throttling (ICTC)
 */
SYM_FUNC_START_LOCAL(setup_750_7400_hid0)
	mfspr	r11,SPRN_HID0
	ori	r11,r11,HID0_SGE | HID0_ABE | HID0_BHTE | HID0_BTIC
	oris	r11,r11,HID0_DPM@h
BEGIN_FTR_SECTION
	xori	r11,r11,HID0_BTIC	/* undo BTIC (set above) on CPUs where it is broken */
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
	xoris	r11,r11,HID0_DPM@h	/* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)
	li	r3,HID0_SPD
	andc	r11,r11,r3		/* clear SPD: enable speculative */
	li	r3,0
	mtspr	SPRN_ICTC,r3		/* Instruction Cache Throttling off */
	isync
	mtspr	SPRN_HID0,r11
	sync
	isync
	blr
SYM_FUNC_END(setup_750_7400_hid0)
206
207
/* 750cx specific
208
* Looks like we have to disable NAP feature for some PLL settings...
209
* (waiting for confirmation)
210
*/
211
/* 750cx specific
 * Looks like we have to disable NAP feature for some PLL settings...
 * (waiting for confirmation)
 */
SYM_FUNC_START_LOCAL(setup_750cx)
	mfspr	r10, SPRN_HID1
	rlwinm	r10,r10,4,28,31		/* r10 = top nibble of HID1 (PLL config) */
	cmpwi	cr0,r10,7
	cmpwi	cr1,r10,9
	cmpwi	cr2,r10,11
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq	/* cr0.eq = (r10 == 7 || 9 || 11) */
	cror	4*cr0+eq,4*cr0+eq,4*cr2+eq
	bnelr				/* other PLL settings: leave NAP alone */
	/* r4 is expected to point at the cpu_spec (per the CPU_SPEC_FEATURES
	 * offset use) -- set up by the caller */
	lwz	r6,CPU_SPEC_FEATURES(r4)
	li	r7,CPU_FTR_CAN_NAP
	andc	r6,r6,r7		/* clear CPU_FTR_CAN_NAP */
	stw	r6,CPU_SPEC_FEATURES(r4)
	blr
SYM_FUNC_END(setup_750cx)
226
227
/* 750fx specific
228
*/
229
SYM_FUNC_START_LOCAL(setup_750fx)
	blr				/* nothing 750FX-specific needed at setup time */
SYM_FUNC_END(setup_750fx)
232
233
/* MPC 745x
234
* Enable Store Gathering (SGE), Branch Folding (FOLD)
235
* Branch History Table (BHTE), Branch Target ICache (BTIC)
236
* Dynamic Power Management (DPM), Speculative (SPD)
237
* Ensure our data cache instructions really operate.
238
* Timebase has to be running or we wouldn't have made it here,
239
* just ensure we don't disable it.
240
* Clear Instruction cache throttling (ICTC)
241
* Enable L2 HW prefetch
242
*/
243
SYM_FUNC_START_LOCAL(setup_745x_specifics)
	/* We check for the presence of an L3 cache setup by
	 * the firmware. If any, we disable NAP capability as
	 * it's known to be bogus on rev 2.1 and earlier
	 */
BEGIN_FTR_SECTION
	mfspr	r11,SPRN_L3CR
	andis.	r11,r11,L3CR_L3E@h	/* L3 enabled by firmware? */
	beq	1f			/* no L3: keep NAP */
END_MMU_FTR_SECTION_IFSET(CPU_FTR_L3CR)
	/* r4 is expected to point at the cpu_spec (per the CPU_SPEC_FEATURES
	 * offset use) -- set up by the caller */
	lwz	r6,CPU_SPEC_FEATURES(r4)
	andis.	r0,r6,CPU_FTR_L3_DISABLE_NAP@h
	beq	1f			/* this CPU doesn't need the quirk */
	li	r7,CPU_FTR_CAN_NAP
	andc	r6,r6,r7		/* clear CPU_FTR_CAN_NAP */
	stw	r6,CPU_SPEC_FEATURES(r4)
1:
	mfspr	r11,SPRN_HID0

	/* All of the bits we have to set.....
	 */
	ori	r11,r11,HID0_SGE | HID0_FOLD | HID0_BHTE
	ori	r11,r11,HID0_LRSTK | HID0_BTIC
	oris	r11,r11,HID0_DPM@h
BEGIN_MMU_FTR_SECTION
	oris	r11,r11,HID0_HIGH_BAT@h
END_MMU_FTR_SECTION_IFSET(MMU_FTR_USE_HIGH_BATS)
BEGIN_FTR_SECTION
	xori	r11,r11,HID0_BTIC	/* undo BTIC (set above) on CPUs where it is broken */
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
	xoris	r11,r11,HID0_DPM@h	/* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)

	/* All of the bits we have to clear....
	 */
	li	r3,HID0_SPD | HID0_NOPDST | HID0_NOPTI
	andc	r11,r11,r3		/* clear SPD: enable speculative */
	li	r3,0

	mtspr	SPRN_ICTC,r3		/* Instruction Cache Throttling off */
	isync
	mtspr	SPRN_HID0,r11
	sync
	isync

	/* Enable L2 HW prefetch, if L2 is enabled
	 */
	mfspr	r3,SPRN_L2CR
	andis.	r3,r3,L2CR_L2E@h
	beqlr				/* L2 disabled: skip prefetch enable */
	mfspr	r3,SPRN_MSSCR0
	ori	r3,r3,3			/* presumably the L2 prefetch enable bits --
					 * TODO confirm against the 745x manual */
	sync
	mtspr	SPRN_MSSCR0,r3
	sync
	isync
	blr
SYM_FUNC_END(setup_745x_specifics)
302
303
/*
304
* Initialize the FPU registers. This is needed to work around an errata
305
* in some 750 cpus where using a not yet initialized FPU register after
306
* power on reset may hang the CPU
307
*/
308
/*
 * Initialize the FPU registers. This is needed to work around an errata
 * in some 750 cpus where using a not yet initialized FPU register after
 * power on reset may hang the CPU
 */
_GLOBAL(__init_fpu_registers)
	mfmsr	r10			/* remember original MSR */
	ori	r11,r10,MSR_FP		/* temporarily enable the FPU */
	mtmsr	r11
	isync
	/* Load all 32 FPRs from empty_zero_page. r3 is folded into the
	 * address as an offset -- NOTE(review): presumably 0 (or a
	 * phys/virt offset) supplied by the caller; confirm at call sites.
	 */
	addis	r9,r3,empty_zero_page@ha
	addi	r9,r9,empty_zero_page@l
	REST_32FPRS(0,r9)
	sync
	mtmsr	r10			/* restore original MSR (FP state as before) */
	isync
	blr
_ASM_NOKPROBE_SYMBOL(__init_fpu_registers)
321
322
323
/* Definitions for the table used to save CPU states
 * (byte offsets into cpu_state_storage, used by
 * __save_cpu_setup / __restore_cpu_setup)
 */
#define CS_HID0		0
#define CS_HID1		4
#define CS_HID2		8
#define CS_MSSCR0	12
#define CS_MSSSR0	16
#define CS_ICTRL	20
#define CS_LDSTCR	24
#define CS_LDSTDB	28
#define CS_SIZE		32

	.data
	.balign	L1_CACHE_BYTES
cpu_state_storage:			/* CS_SIZE bytes of saved special registers */
	.space	CS_SIZE
	.balign	L1_CACHE_BYTES,0
	.text
340
341
/* Called in normal context to backup CPU 0 state. This
342
* does not include cache settings. This function is also
343
* called for machine sleep. This does not include the MMU
344
* setup, BATs, etc... but rather the "special" registers
345
* like HID0, HID1, MSSCR0, etc...
346
*/
347
_GLOBAL(__save_cpu_setup)
	/* Some CR fields are volatile, we back them all up */
	mfcr	r7

	/* Get storage ptr */
	lis	r5,cpu_state_storage@h
	ori	r5,r5,cpu_state_storage@l

	/* Save HID0 (common to all CONFIG_PPC_BOOK3S_32 cpus) */
	mfspr	r3,SPRN_HID0
	stw	r3,CS_HID0(r5)

	/* Now deal with CPU type dependent registers */
	mfspr	r3,SPRN_PVR
	srwi	r3,r3,16		/* keep the PVR version (high half) only */
	cmplwi	cr0,r3,0x8000		/* 7450 */
	cmplwi	cr1,r3,0x000c		/* 7400 */
	cmplwi	cr2,r3,0x800c		/* 7410 */
	cmplwi	cr3,r3,0x8001		/* 7455 */
	cmplwi	cr4,r3,0x8002		/* 7457 */
	cmplwi	cr5,r3,0x8003		/* 7447A */
	cmplwi	cr6,r3,0x7000		/* 750FX */
	cmplwi	cr7,r3,0x8004		/* 7448 */
	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr5+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr7+eq
	bne	1f			/* not a 74xx: go check for 750FX */
	/* Backup 74xx specific regs */
	mfspr	r4,SPRN_MSSCR0
	stw	r4,CS_MSSCR0(r5)
	mfspr	r4,SPRN_MSSSR0
	stw	r4,CS_MSSSR0(r5)
	beq	cr1,1f			/* 7400/7410: no 745x-only registers */
	/* Backup 745x specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	mfspr	r4,SPRN_ICTRL
	stw	r4,CS_ICTRL(r5)
	mfspr	r4,SPRN_LDSTCR
	stw	r4,CS_LDSTCR(r5)
	mfspr	r4,SPRN_LDSTDB
	stw	r4,CS_LDSTDB(r5)
1:
	bne	cr6,1f			/* not a 750FX: done */
	/* Backup 750FX specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	/* If rev 2.x, backup HID2 */
	mfspr	r3,SPRN_PVR
	andi.	r3,r3,0xff00		/* major revision byte of the PVR */
	cmpwi	cr0,r3,0x0200
	bne	1f
	mfspr	r4,SPRN_HID2_750FX
	stw	r4,CS_HID2(r5)
1:
	mtcr	r7			/* restore caller's CR fields */
	blr
409
410
/* Called with no MMU context (typically MSR:IR/DR off) to
 * restore CPU state as backed up by the previous
 * function. This does not include cache settings.
 */
414
_GLOBAL(__restore_cpu_setup)
	/* Some CR fields are volatile, we back them all up */
	mfcr	r7

	/* Get storage ptr. The high half has KERNELBASE subtracted so the
	 * access works with the MMU off (physical address); the low half is
	 * taken as-is -- NOTE(review): this presumes the subtraction only
	 * affects the upper 16 bits (KERNELBASE 64KiB-aligned); confirm.
	 */
	lis	r5,(cpu_state_storage-KERNELBASE)@h
	ori	r5,r5,cpu_state_storage@l

	/* Restore HID0 */
	lwz	r3,CS_HID0(r5)
	sync
	isync
	mtspr	SPRN_HID0,r3
	sync
	isync

	/* Now deal with CPU type dependent registers */
	mfspr	r3,SPRN_PVR
	srwi	r3,r3,16		/* keep the PVR version (high half) only */
	cmplwi	cr0,r3,0x8000		/* 7450 */
	cmplwi	cr1,r3,0x000c		/* 7400 */
	cmplwi	cr2,r3,0x800c		/* 7410 */
	cmplwi	cr3,r3,0x8001		/* 7455 */
	cmplwi	cr4,r3,0x8002		/* 7457 */
	cmplwi	cr5,r3,0x8003		/* 7447A */
	cmplwi	cr6,r3,0x7000		/* 750FX */
	cmplwi	cr7,r3,0x8004		/* 7448 */
	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr5+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr7+eq
	bne	2f			/* not a 74xx: go check for 750FX */
	/* Restore 74xx specific regs */
	lwz	r4,CS_MSSCR0(r5)
	sync
	mtspr	SPRN_MSSCR0,r4
	sync
	isync
	lwz	r4,CS_MSSSR0(r5)
	sync
	mtspr	SPRN_MSSSR0,r4
	sync
	isync
	bne	cr2,1f			/* not a 7410 */
	/* Clear 7410 L2CR2 */
	li	r4,0
	mtspr	SPRN_L2CR2,r4
1:	beq	cr1,2f			/* 7400/7410: no 745x-only registers */
	/* Restore 745x specific registers */
	lwz	r4,CS_HID1(r5)
	sync
	mtspr	SPRN_HID1,r4
	isync
	sync
	lwz	r4,CS_ICTRL(r5)
	sync
	mtspr	SPRN_ICTRL,r4
	isync
	sync
	lwz	r4,CS_LDSTCR(r5)
	sync
	mtspr	SPRN_LDSTCR,r4
	isync
	sync
	lwz	r4,CS_LDSTDB(r5)
	sync
	mtspr	SPRN_LDSTDB,r4
	isync
	sync
2:	bne	cr6,1f			/* not a 750FX: done */
	/* Restore 750FX specific registers
	 * that is restore HID2 on rev 2.x and PLL config & switch
	 * to PLL 0 on all
	 */
	/* If rev 2.x, restore HID2 with low voltage bit cleared */
	mfspr	r3,SPRN_PVR
	andi.	r3,r3,0xff00		/* major revision byte of the PVR */
	cmpwi	cr0,r3,0x0200
	bne	4f
	lwz	r4,CS_HID2(r5)
	rlwinm	r4,r4,0,19,17		/* clear bit 18 (low voltage bit, per above) */
	mtspr	SPRN_HID2_750FX,r4
	sync
4:
	lwz	r4,CS_HID1(r5)
	rlwinm	r5,r4,0,16,14		/* saved HID1 with bit 15 cleared (PLL 0);
					 * r5 reused -- storage ptr no longer needed */
	mtspr	SPRN_HID1,r5
	/* Wait for PLL to stabilize */
	mftbl	r5
3:	mftbl	r6
	sub	r6,r6,r5		/* elapsed timebase ticks */
	cmplwi	cr0,r6,10000
	ble	3b			/* spin for > 10000 ticks */
	/* Setup final PLL */
	mtspr	SPRN_HID1,r4
1:
	mtcr	r7			/* restore caller's CR fields */
	blr
_ASM_NOKPROBE_SYMBOL(__restore_cpu_setup)
517
518