/*
 * This file contains low level CPU setup functions.
 *    Copyright (C) 2003 Benjamin Herrenschmidt (benh@kernel.crashing.org)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 *
 */

#include <asm/processor.h>
#include <asm/page.h>
#include <asm/cputable.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/cache.h>
#include <asm/mmu.h>
#include <asm/feature-fixups.h>
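
/* Calling convention of the __setup_cpu_* entry points, as used in this
 * file (descriptive note, inferred from the code below): r3 holds an
 * address offset that __init_fpu_registers adds when locating
 * empty_zero_page (so the code also works when called before relocation),
 * r4 points to the CPU's cpu_spec (its feature word is updated through
 * CPU_SPEC_FEATURES(r4)), and r5 is only a scratch register used to
 * preserve LR across the helper calls.
 */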
_GLOBAL(__setup_cpu_603)
	mflr	r5
BEGIN_MMU_FTR_SECTION
	li	r10,0
	mtspr	SPRN_SPRG_603_LRU,r10		/* init SW LRU tracking */
END_MMU_FTR_SECTION_IFSET(MMU_FTR_NEED_DTLB_SW_LRU)
BEGIN_FTR_SECTION
	bl	__init_fpu_registers
END_FTR_SECTION_IFCLR(CPU_FTR_FPU_UNAVAILABLE)
	bl	setup_common_caches
	mtlr	r5
	blr

_GLOBAL(__setup_cpu_604)
	mflr	r5
	bl	setup_common_caches
	bl	setup_604_hid0
	mtlr	r5
	blr

_GLOBAL(__setup_cpu_750)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r5
	blr

_GLOBAL(__setup_cpu_750cx)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750cx
	mtlr	r5
	blr

_GLOBAL(__setup_cpu_750fx)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750fx
	mtlr	r5
	blr

_GLOBAL(__setup_cpu_7400)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_7400_workarounds
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r5
	blr

_GLOBAL(__setup_cpu_7410)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_7410_workarounds
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	li	r3,0
	mtspr	SPRN_L2CR2,r3
	mtlr	r5
	blr

_GLOBAL(__setup_cpu_745x)
	mflr	r5
	bl	setup_common_caches
	bl	setup_745x_specifics
	mtlr	r5
	blr

/* Enable caches for 603's, 604, 750 & 7400 */
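/* Roughly, in illustrative C (comment only, not part of the build):
 *
 *	unsigned long hid0 = mfspr(SPRN_HID0);
 *	unsigned long on   = hid0 | HID0_ICE | HID0_DCE;
 *	unsigned long inv  = on | HID0_ICFI;	// always flash-invalidate the I-cache
 *	if (!(hid0 & HID0_DCE))			// invalidate the D-cache only if it
 *		inv |= HID0_DCI;		// was not already enabled
 *	mtspr(SPRN_HID0, inv);			// enable + invalidate
 *	mtspr(SPRN_HID0, on);			// leave caches enabled
 *
 * The sync/isync instructions below order the two HID0 updates.
 */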
setup_common_caches:
	mfspr	r11,SPRN_HID0
	andi.	r0,r11,HID0_DCE
	ori	r11,r11,HID0_ICE|HID0_DCE
	ori	r8,r11,HID0_ICFI
	bne	1f			/* don't invalidate the D-cache */
	ori	r8,r8,HID0_DCI		/* unless it wasn't enabled */
1:	sync
	mtspr	SPRN_HID0,r8		/* enable and invalidate caches */
	sync
	mtspr	SPRN_HID0,r11		/* enable caches */
	sync
	isync
	blr
/* 604, 604e, 604ev, ...
 * Enable superscalar execution & branch history table
 */
setup_604_hid0:
	mfspr	r11,SPRN_HID0
	ori	r11,r11,HID0_SIED|HID0_BHTE
	ori	r8,r11,HID0_BTCD
	sync
	mtspr	SPRN_HID0,r8	/* flush branch target address cache */
	sync			/* on 604e/604r */
	mtspr	SPRN_HID0,r11
	sync
	isync
	blr
/* 7400 rev <= 2.7 and 7410 rev 1.0 suffer from some
 * errata that we work around here.
 * Moto MPC710CE.pdf describes them; they are errata
 * #3, #4 and #5.
 * Note that we assume the firmware didn't choose to
 * apply other workarounds (there are other ones documented
 * in the .pdf). It appears that Apple firmware only works
 * around #3, with the same fix we use. We may want to
 * check if the CPU is using 60x bus mode, in which case
 * the workaround for erratum #4 is useless. Also, we may
 * want to explicitly clear HID0_NOPDST as it is not
 * needed once workaround #5 has been applied (though it is
 * not set by Apple's firmware at least).
 */
setup_7400_workarounds:
	mfpvr	r3
	rlwinm	r3,r3,0,20,31
	cmpwi	0,r3,0x0207
	ble	1f
	blr
setup_7410_workarounds:
	mfpvr	r3
	rlwinm	r3,r3,0,20,31
	cmpwi	0,r3,0x0100
	bnelr
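	/* Common MSSSR0 fixups, reached by fall-through from
	 * setup_7410_workarounds (rev 1.0 only) or by the branch above in
	 * setup_7400_workarounds (rev <= 2.7); other revisions return early.
	 */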
1:
	mfspr	r11,SPRN_MSSSR0
	/* Errata #3: Set L1OPQ_SIZE to 0x10 */
	rlwinm	r11,r11,0,9,6
	oris	r11,r11,0x0100
	/* Errata #4: Set L2MQ_SIZE to 1 (check for MPX mode first ?) */
	oris	r11,r11,0x0002
	/* Errata #5: Set DRLT_SIZE to 0x01 */
	rlwinm	r11,r11,0,5,2
	oris	r11,r11,0x0800
	sync
	mtspr	SPRN_MSSSR0,r11
	sync
	isync
	blr
/* 740/750/7400/7410
 * Enable Store Gathering (SGE), Address Broadcast (ABE),
 * Branch History Table (BHTE), Branch Target ICache (BTIC)
 * Dynamic Power Management (DPM), Speculative (SPD)
 * Clear Instruction cache throttling (ICTC)
 */
setup_750_7400_hid0:
	mfspr	r11,SPRN_HID0
	ori	r11,r11,HID0_SGE | HID0_ABE | HID0_BHTE | HID0_BTIC
	oris	r11,r11,HID0_DPM@h
BEGIN_FTR_SECTION
	xori	r11,r11,HID0_BTIC
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
	xoris	r11,r11,HID0_DPM@h	/* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)
	li	r3,HID0_SPD
	andc	r11,r11,r3		/* clear SPD: enable speculative */
	li	r3,0
	mtspr	SPRN_ICTC,r3		/* Instruction Cache Throttling off */
	isync
	mtspr	SPRN_HID0,r11
	sync
	isync
	blr
/* 750cx specific
 * Looks like we have to disable NAP feature for some PLL settings...
 * (waiting for confirmation)
 */
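/* Illustrative C equivalent of the check below (comment only, not built),
 * where "spec" stands for the cpu_spec that r4 points at:
 *
 *	unsigned int pll = mfspr(SPRN_HID1) >> 28;	// PLL config in the top nibble
 *	if (pll == 7 || pll == 9 || pll == 11)
 *		spec->cpu_features &= ~CPU_FTR_CAN_NAP;
 */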
setup_750cx:
	mfspr	r10, SPRN_HID1
	rlwinm	r10,r10,4,28,31
	cmpwi	cr0,r10,7
	cmpwi	cr1,r10,9
	cmpwi	cr2,r10,11
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr2+eq
	bnelr
	lwz	r6,CPU_SPEC_FEATURES(r4)
	li	r7,CPU_FTR_CAN_NAP
	andc	r6,r6,r7
	stw	r6,CPU_SPEC_FEATURES(r4)
	blr
/* 750fx specific
 */
setup_750fx:
	blr
/* MPC 745x
 * Enable Store Gathering (SGE), Branch Folding (FOLD)
 * Branch History Table (BHTE), Branch Target ICache (BTIC)
 * Dynamic Power Management (DPM), Speculative (SPD)
 * Ensure our data cache instructions really operate.
 * Timebase has to be running or we wouldn't have made it here,
 * just ensure we don't disable it.
 * Clear Instruction cache throttling (ICTC)
 * Enable L2 HW prefetch
 */
setup_745x_specifics:
	/* We check for the presence of an L3 cache set up by
	 * the firmware. If one is present, we disable the NAP
	 * capability, as it's known to be bogus on rev 2.1 and earlier.
	 */
BEGIN_FTR_SECTION
	mfspr	r11,SPRN_L3CR
	andis.	r11,r11,L3CR_L3E@h
	beq	1f
END_FTR_SECTION_IFSET(CPU_FTR_L3CR)
	lwz	r6,CPU_SPEC_FEATURES(r4)
	andis.	r0,r6,CPU_FTR_L3_DISABLE_NAP@h
	beq	1f
	li	r7,CPU_FTR_CAN_NAP
	andc	r6,r6,r7
	stw	r6,CPU_SPEC_FEATURES(r4)
1:
	mfspr	r11,SPRN_HID0

	/* All of the bits we have to set.....
	 */
	ori	r11,r11,HID0_SGE | HID0_FOLD | HID0_BHTE
	ori	r11,r11,HID0_LRSTK | HID0_BTIC
	oris	r11,r11,HID0_DPM@h
BEGIN_MMU_FTR_SECTION
	oris	r11,r11,HID0_HIGH_BAT@h
END_MMU_FTR_SECTION_IFSET(MMU_FTR_USE_HIGH_BATS)
BEGIN_FTR_SECTION
	xori	r11,r11,HID0_BTIC
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
	xoris	r11,r11,HID0_DPM@h	/* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)

	/* All of the bits we have to clear....
	 */
	li	r3,HID0_SPD | HID0_NOPDST | HID0_NOPTI
	andc	r11,r11,r3		/* clear SPD: enable speculative */
	li	r3,0
	mtspr	SPRN_ICTC,r3		/* Instruction Cache Throttling off */
	isync
	mtspr	SPRN_HID0,r11
	sync
	isync

	/* Enable L2 HW prefetch, if L2 is enabled
	 */
	mfspr	r3,SPRN_L2CR
	andis.	r3,r3,L2CR_L2E@h
	beqlr
	mfspr	r3,SPRN_MSSCR0
	ori	r3,r3,3
	sync
	mtspr	SPRN_MSSCR0,r3
	sync
	isync
	blr
/*
 * Initialize the FPU registers. This is needed to work around an erratum
 * in some 750 CPUs where using a not-yet-initialized FPU register after
 * power-on reset may hang the CPU.
 */
_GLOBAL(__init_fpu_registers)
	mfmsr	r10
	ori	r11,r10,MSR_FP
	mtmsr	r11
	isync
	addis	r9,r3,empty_zero_page@ha
	addi	r9,r9,empty_zero_page@l
	REST_32FPRS(0,r9)
	sync
	mtmsr	r10
	isync
	blr
/* Definitions for the table used to save CPU state */
#define CS_HID0		0
#define CS_HID1		4
#define CS_HID2		8
#define CS_MSSCR0	12
#define CS_MSSSR0	16
#define CS_ICTRL	20
#define CS_LDSTCR	24
#define CS_LDSTDB	28
#define CS_SIZE		32
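/* Illustrative layout of the save area (comment only, not a struct the
 * build actually uses):
 *
 *	struct cpu_state_save {
 *		u32 hid0;	// CS_HID0   =  0
 *		u32 hid1;	// CS_HID1   =  4
 *		u32 hid2;	// CS_HID2   =  8
 *		u32 msscr0;	// CS_MSSCR0 = 12
 *		u32 msssr0;	// CS_MSSSR0 = 16
 *		u32 ictrl;	// CS_ICTRL  = 20
 *		u32 ldstcr;	// CS_LDSTCR = 24
 *		u32 ldstdb;	// CS_LDSTDB = 28
 *	};			// CS_SIZE   = 32 bytes
 */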
	.data
	.balign	L1_CACHE_BYTES
cpu_state_storage:
	.space	CS_SIZE
	.balign	L1_CACHE_BYTES,0
	.text
/* Called in normal context to back up CPU 0 state. This
 * does not include cache settings. This function is also
 * called for machine sleep. It does not cover the MMU
 * setup, BATs, etc... but rather the "special" registers
 * like HID0, HID1, MSSCR0, etc...
 */
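/* Descriptive note: both the save and restore paths shift the PVR right by
 * 16 bits so that only the processor version is compared; the cror chains
 * then fold the individual matches into "is a 7400/7410" (cr1) and "is any
 * 74xx" (cr0), which the rest of each routine branches on.
 */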
_GLOBAL(__save_cpu_setup)
	/* Some CR fields are volatile, so back up the whole CR */
	mfcr	r7

	/* Get storage ptr */
	lis	r5,cpu_state_storage@h
	ori	r5,r5,cpu_state_storage@l

	/* Save HID0 (common to all CONFIG_6xx cpus) */
	mfspr	r3,SPRN_HID0
	stw	r3,CS_HID0(r5)

	/* Now deal with CPU type dependent registers */
	mfspr	r3,SPRN_PVR
	srwi	r3,r3,16
	cmplwi	cr0,r3,0x8000	/* 7450 */
	cmplwi	cr1,r3,0x000c	/* 7400 */
	cmplwi	cr2,r3,0x800c	/* 7410 */
	cmplwi	cr3,r3,0x8001	/* 7455 */
	cmplwi	cr4,r3,0x8002	/* 7457 */
	cmplwi	cr5,r3,0x8003	/* 7447A */
	cmplwi	cr6,r3,0x7000	/* 750FX */
	cmplwi	cr7,r3,0x8004	/* 7448 */
	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr5+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr7+eq
	bne	1f
	/* Backup 74xx specific regs */
	mfspr	r4,SPRN_MSSCR0
	stw	r4,CS_MSSCR0(r5)
	mfspr	r4,SPRN_MSSSR0
	stw	r4,CS_MSSSR0(r5)
	beq	cr1,1f
	/* Backup 745x specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	mfspr	r4,SPRN_ICTRL
	stw	r4,CS_ICTRL(r5)
	mfspr	r4,SPRN_LDSTCR
	stw	r4,CS_LDSTCR(r5)
	mfspr	r4,SPRN_LDSTDB
	stw	r4,CS_LDSTDB(r5)
1:
	bne	cr6,1f
	/* Backup 750FX specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	/* If rev 2.x, backup HID2 */
	mfspr	r3,SPRN_PVR
	andi.	r3,r3,0xff00
	cmpwi	cr0,r3,0x0200
	bne	1f
	mfspr	r4,SPRN_HID2
	stw	r4,CS_HID2(r5)
1:
	mtcr	r7
	blr
/* Called with no MMU context (typically MSR:IR/DR off) to
 * restore CPU state as backed up by the previous
 * function. This does not include cache settings.
 */
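/* Descriptive note: because this can run before translation is enabled,
 * the storage pointer is built from the physical address of
 * cpu_state_storage (KERNELBASE is subtracted from the high half of the
 * symbol's link address below).
 */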
_GLOBAL(__restore_cpu_setup)
	/* Some CR fields are volatile, so back up the whole CR */
	mfcr	r7

	/* Get storage ptr */
	lis	r5,(cpu_state_storage-KERNELBASE)@h
	ori	r5,r5,cpu_state_storage@l

	/* Restore HID0 */
	lwz	r3,CS_HID0(r5)
	sync
	isync
	mtspr	SPRN_HID0,r3
	sync
	isync

	/* Now deal with CPU type dependent registers */
	mfspr	r3,SPRN_PVR
	srwi	r3,r3,16
	cmplwi	cr0,r3,0x8000	/* 7450 */
	cmplwi	cr1,r3,0x000c	/* 7400 */
	cmplwi	cr2,r3,0x800c	/* 7410 */
	cmplwi	cr3,r3,0x8001	/* 7455 */
	cmplwi	cr4,r3,0x8002	/* 7457 */
	cmplwi	cr5,r3,0x8003	/* 7447A */
	cmplwi	cr6,r3,0x7000	/* 750FX */
	cmplwi	cr7,r3,0x8004	/* 7448 */
	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr5+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr7+eq
	bne	2f
	/* Restore 74xx specific regs */
	lwz	r4,CS_MSSCR0(r5)
	sync
	mtspr	SPRN_MSSCR0,r4
	sync
	isync
	lwz	r4,CS_MSSSR0(r5)
	sync
	mtspr	SPRN_MSSSR0,r4
	sync
	isync
	bne	cr2,1f
	/* Clear 7410 L2CR2 */
	li	r4,0
	mtspr	SPRN_L2CR2,r4
1:	beq	cr1,2f
	/* Restore 745x specific registers */
	lwz	r4,CS_HID1(r5)
	sync
	mtspr	SPRN_HID1,r4
	isync
	sync
	lwz	r4,CS_ICTRL(r5)
	sync
	mtspr	SPRN_ICTRL,r4
	isync
	sync
	lwz	r4,CS_LDSTCR(r5)
	sync
	mtspr	SPRN_LDSTCR,r4
	isync
	sync
	lwz	r4,CS_LDSTDB(r5)
	sync
	mtspr	SPRN_LDSTDB,r4
	isync
	sync
2:	bne	cr6,1f
	/* Restore 750FX specific registers:
	 * that is, restore HID2 on rev 2.x, and the PLL config
	 * (switching to PLL 0 first) on all revisions.
	 */
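	/* Descriptive note on the sequence below: it first writes the saved
	 * HID1 with its PLL-select bit cleared (forcing PLL 0, per the
	 * comment above), waits roughly 10000 timebase ticks for the PLL to
	 * stabilize, and only then writes back the full saved HID1.
	 */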
	/* If rev 2.x, restore HID2 with low voltage bit cleared */
	mfspr	r3,SPRN_PVR
	andi.	r3,r3,0xff00
	cmpwi	cr0,r3,0x0200
	bne	4f
	lwz	r4,CS_HID2(r5)
	rlwinm	r4,r4,0,19,17
	mtspr	SPRN_HID2,r4
	sync
4:
	lwz	r4,CS_HID1(r5)
	rlwinm	r5,r4,0,16,14
	mtspr	SPRN_HID1,r5
	/* Wait for PLL to stabilize */
	mftbl	r5
3:	mftbl	r6
	sub	r6,r6,r5
	cmplwi	cr0,r6,10000
	ble	3b
	/* Setup final PLL */
	mtspr	SPRN_HID1,r4
1:
	mtcr	r7
	blr