/*
 * This file contains low level CPU setup functions.
 * Copyright (C) 2003 Benjamin Herrenschmidt ([email protected])
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 *
 */

#include <asm/processor.h>
#include <asm/page.h>
#include <asm/cputable.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/cache.h>
#include <asm/mmu.h>

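/* Note: each __setup_cpu_* entry point below saves LR in r5 around its
 * bl calls (bl clobbers LR) rather than using a stack frame. As used
 * in this file, r4 points at the current cpu_spec entry (see the
 * CPU_SPEC_FEATURES accesses below), and r3 appears to carry an address
 * offset for pre-relocation execution, as in the empty_zero_page
 * computation in __init_fpu_registers.
 */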
_GLOBAL(__setup_cpu_603)
	mflr	r5
BEGIN_MMU_FTR_SECTION
	li	r10,0
	mtspr	SPRN_SPRG_603_LRU,r10	/* init SW LRU tracking */
END_MMU_FTR_SECTION_IFSET(MMU_FTR_NEED_DTLB_SW_LRU)
BEGIN_FTR_SECTION
	bl	__init_fpu_registers
END_FTR_SECTION_IFCLR(CPU_FTR_FPU_UNAVAILABLE)
	bl	setup_common_caches
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_604)
	mflr	r5
	bl	setup_common_caches
	bl	setup_604_hid0
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_750)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_750cx)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750cx
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_750fx)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750fx
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_7400)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_7400_workarounds
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_7410)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_7410_workarounds
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	li	r3,0
	mtspr	SPRN_L2CR2,r3
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_745x)
	mflr	r5
	bl	setup_common_caches
	bl	setup_745x_specifics
	mtlr	r5
	blr

/* Enable caches for 603s, 604, 750 & 7400 */
setup_common_caches:
	mfspr	r11,SPRN_HID0
	andi.	r0,r11,HID0_DCE
	ori	r11,r11,HID0_ICE|HID0_DCE
	ori	r8,r11,HID0_ICFI
	bne	1f			/* don't invalidate the D-cache */
	ori	r8,r8,HID0_DCI		/* unless it wasn't enabled */
1:	sync
	mtspr	SPRN_HID0,r8		/* enable and invalidate caches */
	sync
	mtspr	SPRN_HID0,r11		/* enable caches */
	sync
	isync
	blr

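/* A note on the sequence above: r8 is HID0 with the flash-invalidate
 * bits set on top of ICE/DCE (ICFI always, DCI only when the D-cache
 * was previously disabled, since an enabled D-cache may hold modified
 * data). Writing r8 first enables and invalidates the caches; the
 * second write of r11 then drops the invalidate bits, which should not
 * be left set. The sync/isync around the HID0 writes follow the usual
 * 60x/750 ordering requirements for HID0 updates.
 */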
/* 604, 604e, 604ev, ...
 * Enable superscalar execution & branch history table
 */
setup_604_hid0:
	mfspr	r11,SPRN_HID0
	ori	r11,r11,HID0_SIED|HID0_BHTE
	ori	r8,r11,HID0_BTCD
	sync
	mtspr	SPRN_HID0,r8	/* flush branch target address cache */
	sync			/* on 604e/604r */
	mtspr	SPRN_HID0,r11
	sync
	isync
	blr

/* 7400 <= rev 2.7 and 7410 rev = 1.0 suffer from some
 * errata that we work around here.
 * Moto MPC710CE.pdf describes them, those are errata
 * #3, #4 and #5
 * Note that we assume the firmware didn't choose to
 * apply other workarounds (there are other ones documented
 * in the .pdf). It appears that Apple firmware only works
 * around #3 and with the same fix we use. We may want to
 * check if the CPU is using 60x bus mode in which case
 * the workaround for errata #4 is useless. Also, we may
 * want to explicitly clear HID0_NOPDST as this is not
 * needed once we have applied workaround #5 (though it's
 * not set by Apple's firmware at least).
 */
setup_7400_workarounds:
	mfpvr	r3
	rlwinm	r3,r3,0,20,31
	cmpwi	0,r3,0x0207
	ble	1f
	blr
setup_7410_workarounds:
	mfpvr	r3
	rlwinm	r3,r3,0,20,31
	cmpwi	0,r3,0x0100
	bnelr
1:
	mfspr	r11,SPRN_MSSSR0
	/* Errata #3: Set L1OPQ_SIZE to 0x10 */
	rlwinm	r11,r11,0,9,6
	oris	r11,r11,0x0100
	/* Errata #4: Set L2MQ_SIZE to 1 (check for MPX mode first ?) */
	oris	r11,r11,0x0002
	/* Errata #5: Set DRLT_SIZE to 0x01 */
	rlwinm	r11,r11,0,5,2
	oris	r11,r11,0x0800
	sync
	mtspr	SPRN_MSSSR0,r11
	sync
	isync
	blr

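/* A note on the rlwinm idioms above (IBM bit numbering, bit 0 = MSB):
 * rlwinm rD,rS,0,20,31 rotates by zero and masks to bits 20-31, i.e.
 * the low 12 bits of the PVR, which hold the revision being compared
 * (0x0207 = rev 2.7). When the mask begin is greater than the mask
 * end, as in rlwinm r11,r11,0,9,6, the mask wraps around and keeps
 * everything except bits 7-8: a clear of a two-bit field before the
 * following oris writes its new value.
 */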
/* 740/750/7400/7410
 * Enable Store Gathering (SGE), Address Broadcast (ABE),
 * Branch History Table (BHTE), Branch Target ICache (BTIC)
 * Dynamic Power Management (DPM), Speculative (SPD)
 * Clear Instruction cache throttling (ICTC)
 */
setup_750_7400_hid0:
	mfspr	r11,SPRN_HID0
	ori	r11,r11,HID0_SGE | HID0_ABE | HID0_BHTE | HID0_BTIC
	oris	r11,r11,HID0_DPM@h
BEGIN_FTR_SECTION
	xori	r11,r11,HID0_BTIC
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
	xoris	r11,r11,HID0_DPM@h	/* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)
	li	r3,HID0_SPD
	andc	r11,r11,r3		/* clear SPD: enable speculative */
	li	r3,0
	mtspr	SPRN_ICTC,r3		/* Instruction Cache Throttling off */
	isync
	mtspr	SPRN_HID0,r11
	sync
	isync
	blr

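/* The BEGIN_FTR_SECTION/END_FTR_SECTION_IFSET pairs above are boot-time
 * code patching: the enclosed instructions are only kept when the named
 * CPU_FTR_* bit is set for the running CPU, and are patched out
 * otherwise. Since BTIC and DPM were unconditionally ORed into r11
 * first, the conditional xori/xoris simply toggle those bits back off
 * on parts flagged CPU_FTR_NO_BTIC / CPU_FTR_NO_DPM.
 */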
/* 750cx specific
 * Looks like we have to disable the NAP feature for some PLL settings...
 * (waiting for confirmation)
 */
setup_750cx:
	mfspr	r10, SPRN_HID1
	rlwinm	r10,r10,4,28,31
	cmpwi	cr0,r10,7
	cmpwi	cr1,r10,9
	cmpwi	cr2,r10,11
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr2+eq
	bnelr
	lwz	r6,CPU_SPEC_FEATURES(r4)
	li	r7,CPU_FTR_CAN_NAP
	andc	r6,r6,r7
	stw	r6,CPU_SPEC_FEATURES(r4)
	blr

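/* How the check above works: rlwinm r10,r10,4,28,31 rotates HID1 left
 * by 4 and masks to bits 28-31, moving the top nibble of HID1 (the PLL
 * configuration, per the comment above) into the low nibble of r10.
 * The cmpwi/cror pairs OR the three eq bits together, so bnelr returns
 * unless the PLL setting is 7, 9 or 11; for those, CPU_FTR_CAN_NAP is
 * cleared in the cpu_spec feature word pointed to by r4.
 */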
/* 750fx specific
 */
setup_750fx:
	blr

/* MPC 745x
 * Enable Store Gathering (SGE), Branch Folding (FOLD)
 * Branch History Table (BHTE), Branch Target ICache (BTIC)
 * Dynamic Power Management (DPM), Speculative (SPD)
 * Ensure our data cache instructions really operate.
 * Timebase has to be running or we wouldn't have made it here,
 * just ensure we don't disable it.
 * Clear Instruction cache throttling (ICTC)
 * Enable L2 HW prefetch
 */
setup_745x_specifics:
	/* We check for the presence of an L3 cache set up by
	 * the firmware. If present, we disable the NAP capability,
	 * as it's known to be bogus on rev 2.1 and earlier.
	 */
BEGIN_FTR_SECTION
	mfspr	r11,SPRN_L3CR
	andis.	r11,r11,L3CR_L3E@h
	beq	1f
END_FTR_SECTION_IFSET(CPU_FTR_L3CR)
	lwz	r6,CPU_SPEC_FEATURES(r4)
	andi.	r0,r6,CPU_FTR_L3_DISABLE_NAP
	beq	1f
	li	r7,CPU_FTR_CAN_NAP
	andc	r6,r6,r7
	stw	r6,CPU_SPEC_FEATURES(r4)
1:
	mfspr	r11,SPRN_HID0

	/* All of the bits we have to set.....
	 */
	ori	r11,r11,HID0_SGE | HID0_FOLD | HID0_BHTE
	ori	r11,r11,HID0_LRSTK | HID0_BTIC
	oris	r11,r11,HID0_DPM@h
BEGIN_MMU_FTR_SECTION
	oris	r11,r11,HID0_HIGH_BAT@h
END_MMU_FTR_SECTION_IFSET(MMU_FTR_USE_HIGH_BATS)
BEGIN_FTR_SECTION
	xori	r11,r11,HID0_BTIC
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
	xoris	r11,r11,HID0_DPM@h	/* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)

	/* All of the bits we have to clear....
	 */
	li	r3,HID0_SPD | HID0_NOPDST | HID0_NOPTI
	andc	r11,r11,r3		/* clear SPD: enable speculative */
	li	r3,0

	mtspr	SPRN_ICTC,r3		/* Instruction Cache Throttling off */
	isync
	mtspr	SPRN_HID0,r11
	sync
	isync

	/* Enable L2 HW prefetch, if L2 is enabled
	 */
	mfspr	r3,SPRN_L2CR
	andis.	r3,r3,L2CR_L2E@h
	beqlr
	mfspr	r3,SPRN_MSSCR0
	ori	r3,r3,3
	sync
	mtspr	SPRN_MSSCR0,r3
	sync
	isync
	blr

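/* The ori r3,r3,3 above sets the two low-order bits of MSSCR0, which
 * on 745x parts presumably form the L2 hardware prefetch enable field
 * (hence the "Enable L2 HW prefetch" comment). It is skipped via beqlr
 * when L2CR[L2E] shows the L2 is disabled, as prefetching into a
 * disabled L2 would be meaningless.
 */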
/*
 * Initialize the FPU registers. This is needed to work around an erratum
 * in some 750 CPUs where using a not yet initialized FPU register after
 * power on reset may hang the CPU.
 */
_GLOBAL(__init_fpu_registers)
	mfmsr	r10
	ori	r11,r10,MSR_FP
	mtmsr	r11
	isync
	addis	r9,r3,empty_zero_page@ha
	addi	r9,r9,empty_zero_page@l
	REST_32FPRS(0,r9)
	sync
	mtmsr	r10
	isync
	blr

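/* The routine above briefly sets MSR[FP], uses the REST_32FPRS macro
 * to load all 32 FPRs from empty_zero_page (a kernel page of zeroes),
 * then restores the original MSR. Adding r3 into the address
 * computation presumably keeps this working before relocation, with r3
 * holding the physical address offset.
 */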
/* Definitions for the table used to save CPU states */
#define CS_HID0		0
#define CS_HID1		4
#define CS_HID2		8
#define CS_MSSCR0	12
#define CS_MSSSR0	16
#define CS_ICTRL	20
#define CS_LDSTCR	24
#define CS_LDSTDB	28
#define CS_SIZE		32

	.data
	.balign	L1_CACHE_BYTES
cpu_state_storage:
	.space	CS_SIZE
	.balign	L1_CACHE_BYTES,0
	.text

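/* cpu_state_storage is a single cache-line-aligned 32-byte scratch
 * area, one word per saved SPR at the CS_* offsets above. Only CPU 0's
 * state is kept here, as described below.
 */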
/* Called in normal context to back up CPU 0 state. This
 * does not include cache settings. This function is also
 * called for machine sleep. This does not include the MMU
 * setup, BATs, etc... but rather the "special" registers
 * like HID0, HID1, MSSCR0, etc...
 */
_GLOBAL(__save_cpu_setup)
	/* Some CR fields are volatile, so we back them all up */
	mfcr	r7

	/* Get storage ptr */
	lis	r5,cpu_state_storage@h
	ori	r5,r5,cpu_state_storage@l

	/* Save HID0 (common to all CONFIG_6xx cpus) */
	mfspr	r3,SPRN_HID0
	stw	r3,CS_HID0(r5)

	/* Now deal with CPU type dependent registers */
	mfspr	r3,SPRN_PVR
	srwi	r3,r3,16
	cmplwi	cr0,r3,0x8000	/* 7450 */
	cmplwi	cr1,r3,0x000c	/* 7400 */
	cmplwi	cr2,r3,0x800c	/* 7410 */
	cmplwi	cr3,r3,0x8001	/* 7455 */
	cmplwi	cr4,r3,0x8002	/* 7457 */
	cmplwi	cr5,r3,0x8003	/* 7447A */
	cmplwi	cr6,r3,0x7000	/* 750FX */
	cmplwi	cr7,r3,0x8004	/* 7448 */
	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr5+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr7+eq
	bne	1f
	/* Backup 74xx specific regs */
	mfspr	r4,SPRN_MSSCR0
	stw	r4,CS_MSSCR0(r5)
	mfspr	r4,SPRN_MSSSR0
	stw	r4,CS_MSSSR0(r5)
	beq	cr1,1f
	/* Backup 745x specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	mfspr	r4,SPRN_ICTRL
	stw	r4,CS_ICTRL(r5)
	mfspr	r4,SPRN_LDSTCR
	stw	r4,CS_LDSTCR(r5)
	mfspr	r4,SPRN_LDSTDB
	stw	r4,CS_LDSTDB(r5)
1:
	bne	cr6,1f
	/* Backup 750FX specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	/* If rev 2.x, backup HID2 */
	mfspr	r3,SPRN_PVR
	andi.	r3,r3,0xff00
	cmpwi	cr0,r3,0x0200
	bne	1f
	mfspr	r4,SPRN_HID2
	stw	r4,CS_HID2(r5)
1:
	mtcr	r7
	blr

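/* Dispatch logic above: srwi r3,r3,16 keeps only the PVR version
 * field, which is compared against each known part, one cmplwi per CR
 * field. The cror cascade then ORs the individual eq bits so that
 * cr0.eq means "some 74xx part" and cr1.eq means "7400 or 7410", while
 * cr6 still holds the standalone 750FX compare tested after the 1:
 * label.
 */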
/* Called with no MMU context (typically MSR:IR/DR off) to
 * restore CPU state as backed up by the previous
 * function. This does not include cache settings.
 */
_GLOBAL(__restore_cpu_setup)
	/* Some CR fields are volatile, so we back them all up */
	mfcr	r7

	/* Get storage ptr */
	lis	r5,(cpu_state_storage-KERNELBASE)@h
	ori	r5,r5,cpu_state_storage@l

	/* Restore HID0 */
	lwz	r3,CS_HID0(r5)
	sync
	isync
	mtspr	SPRN_HID0,r3
	sync
	isync

	/* Now deal with CPU type dependent registers */
	mfspr	r3,SPRN_PVR
	srwi	r3,r3,16
	cmplwi	cr0,r3,0x8000	/* 7450 */
	cmplwi	cr1,r3,0x000c	/* 7400 */
	cmplwi	cr2,r3,0x800c	/* 7410 */
	cmplwi	cr3,r3,0x8001	/* 7455 */
	cmplwi	cr4,r3,0x8002	/* 7457 */
	cmplwi	cr5,r3,0x8003	/* 7447A */
	cmplwi	cr6,r3,0x7000	/* 750FX */
	cmplwi	cr7,r3,0x8004	/* 7448 */
	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr5+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr7+eq
	bne	2f
	/* Restore 74xx specific regs */
	lwz	r4,CS_MSSCR0(r5)
	sync
	mtspr	SPRN_MSSCR0,r4
	sync
	isync
	lwz	r4,CS_MSSSR0(r5)
	sync
	mtspr	SPRN_MSSSR0,r4
	sync
	isync
	bne	cr2,1f
	/* Clear 7410 L2CR2 */
	li	r4,0
	mtspr	SPRN_L2CR2,r4
1:	beq	cr1,2f
	/* Restore 745x specific registers */
	lwz	r4,CS_HID1(r5)
	sync
	mtspr	SPRN_HID1,r4
	isync
	sync
	lwz	r4,CS_ICTRL(r5)
	sync
	mtspr	SPRN_ICTRL,r4
	isync
	sync
	lwz	r4,CS_LDSTCR(r5)
	sync
	mtspr	SPRN_LDSTCR,r4
	isync
	sync
	lwz	r4,CS_LDSTDB(r5)
	sync
	mtspr	SPRN_LDSTDB,r4
	isync
	sync
2:	bne	cr6,1f
	/* Restore 750FX specific registers,
	 * that is, restore HID2 on rev 2.x, and the PLL config & switch
	 * to PLL 0 on all.
	 */
	/* If rev 2.x, restore HID2 with low voltage bit cleared */
	mfspr	r3,SPRN_PVR
	andi.	r3,r3,0xff00
	cmpwi	cr0,r3,0x0200
	bne	4f
	lwz	r4,CS_HID2(r5)
	rlwinm	r4,r4,0,19,17
	mtspr	SPRN_HID2,r4
	sync
4:
	lwz	r4,CS_HID1(r5)
	rlwinm	r5,r4,0,16,14
	mtspr	SPRN_HID1,r5
	/* Wait for PLL to stabilize */
	mftbl	r5
3:	mftbl	r6
	sub	r6,r6,r5
	cmplwi	cr0,r6,10000
	ble	3b
	/* Setup final PLL */
	mtspr	SPRN_HID1,r4
1:
	mtcr	r7
	blr

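/* The 750FX PLL switch, spelled out: rlwinm r5,r4,0,16,14 is a
 * wrap-around mask clearing just bit 15 of the saved HID1, which
 * presumably deselects the alternate PLL so the first mtspr parks the
 * core on PLL0. The mftbl loop then busy-waits for at least 10000
 * timebase ticks for the PLL to stabilize before the saved HID1 is
 * written back. Similarly, rlwinm r4,r4,0,19,17 clears bit 18 of HID2,
 * the "low voltage" bit mentioned in the comment above.
 */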