/*
 * PARISC TLB and cache flushing support
 * Copyright (C) 2000-2001 Hewlett-Packard (John Marvin)
 * Copyright (C) 2001 Matthew Wilcox (willy at parisc-linux.org)
 * Copyright (C) 2002 Richard Hirst (rhirst with parisc-linux.org)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2, or (at your option)
 * any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */

/*
 * NOTE: fdc, fic, and pdc instructions that use base register modification
 *	should only use index and base registers that are not shadowed,
 *	so that the fast path emulation in the non-access miss handler
 *	can be used.
 */
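/*
 * For instance (an illustrative sketch only; it mirrors the flush loops
 * further down in this file), the base-modifying form below keeps both
 * the index (%r23) and the base (%r26) in non-shadowed registers:
 *
 *	fdc,m	%r23(%r26)	- flush one line, then advance %r26 by %r23
 */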
#ifdef CONFIG_64BIT
	.level	2.0w
#else
	.level	2.0
#endif

#include <asm/psw.h>
#include <asm/assembly.h>
#include <asm/pgtable.h>
#include <asm/cache.h>
#include <asm/ldcw.h>
#include <asm/alternative.h>
#include <linux/linkage.h>
#include <linux/init.h>

	.section .text.hot
	.align	16
ENTRY_CFI(flush_tlb_all_local)
	/*
	 * The pitlbe and pdtlbe instructions should only be used to
	 * flush the entire tlb.  Also, there needs to be no intervening
	 * tlb operations, e.g. tlb misses, so the operation needs
	 * to happen in real mode with all interruptions disabled.
	 */

	/* pcxt_ssm_bug	- relied upon translation! PA 2.0 Arch. F-4 and F-5 */
	rsm	PSW_SM_I, %r19		/* save I-bit state */
	load32	PA(1f), %r1
	nop
	nop
	nop
	nop
	nop

	rsm	PSW_SM_Q, %r0		/* prep to load iia queue */
	mtctl	%r0, %cr17		/* Clear IIASQ tail */
	mtctl	%r0, %cr17		/* Clear IIASQ head */
	mtctl	%r1, %cr18		/* IIAOQ head */
	ldo	4(%r1), %r1
	mtctl	%r1, %cr18		/* IIAOQ tail */
	load32	REAL_MODE_PSW, %r1
	mtctl	%r1, %ipsw
	rfi
	nop

1:	load32	PA(cache_info), %r1

	/* Flush Instruction Tlb */

	LDREG	ITLB_SID_BASE(%r1), %r20
	LDREG	ITLB_SID_STRIDE(%r1), %r21
	LDREG	ITLB_SID_COUNT(%r1), %r22
	LDREG	ITLB_OFF_BASE(%r1), %arg0
	LDREG	ITLB_OFF_STRIDE(%r1), %arg1
	LDREG	ITLB_OFF_COUNT(%r1), %arg2
	LDREG	ITLB_LOOP(%r1), %arg3

	addib,COND(=)	-1, %arg3, fitoneloop	/* Preadjust and test */
	movb,<,n	%arg3, %r31, fitdone	/* If loop < 0, skip */
	copy		%arg0, %r28		/* Init base addr */

fitmanyloop:					/* Loop if LOOP >= 2 */
	mtsp		%r20, %sr1
	add		%r21, %r20, %r20	/* increment space */
	copy		%arg2, %r29		/* Init middle loop count */

fitmanymiddle:					/* Loop if LOOP >= 2 */
	addib,COND(>)	-1, %r31, fitmanymiddle	/* Adjusted inner loop decr */
	pitlbe		%r0(%sr1, %r28)
	pitlbe,m	%arg1(%sr1, %r28)	/* Last pitlbe and addr adjust */
	addib,COND(>)	-1, %r29, fitmanymiddle	/* Middle loop decr */
	copy		%arg3, %r31		/* Re-init inner loop count */

	movb,tr		%arg0, %r28, fitmanyloop /* Re-init base addr */
	addib,COND(<=),n -1, %r22, fitdone	/* Outer loop count decr */

fitoneloop:					/* Loop if LOOP = 1 */
	mtsp		%r20, %sr1
	copy		%arg0, %r28		/* init base addr */
	copy		%arg2, %r29		/* init middle loop count */

fitonemiddle:					/* Loop if LOOP = 1 */
	addib,COND(>)	-1, %r29, fitonemiddle	/* Middle loop count decr */
	pitlbe,m	%arg1(%sr1, %r28)	/* pitlbe for one loop */

	addib,COND(>)	-1, %r22, fitoneloop	/* Outer loop count decr */
	add		%r21, %r20, %r20	/* increment space */

fitdone:
	/* Flush Data Tlb */

	LDREG	DTLB_SID_BASE(%r1), %r20
	LDREG	DTLB_SID_STRIDE(%r1), %r21
	LDREG	DTLB_SID_COUNT(%r1), %r22
	LDREG	DTLB_OFF_BASE(%r1), %arg0
	LDREG	DTLB_OFF_STRIDE(%r1), %arg1
	LDREG	DTLB_OFF_COUNT(%r1), %arg2
	LDREG	DTLB_LOOP(%r1), %arg3

	addib,COND(=)	-1, %arg3, fdtoneloop	/* Preadjust and test */
	movb,<,n	%arg3, %r31, fdtdone	/* If loop < 0, skip */
	copy		%arg0, %r28		/* Init base addr */

fdtmanyloop:					/* Loop if LOOP >= 2 */
	mtsp		%r20, %sr1
	add		%r21, %r20, %r20	/* increment space */
	copy		%arg2, %r29		/* Init middle loop count */

fdtmanymiddle:					/* Loop if LOOP >= 2 */
	addib,COND(>)	-1, %r31, fdtmanymiddle	/* Adjusted inner loop decr */
	pdtlbe		%r0(%sr1, %r28)
	pdtlbe,m	%arg1(%sr1, %r28)	/* Last pdtlbe and addr adjust */
	addib,COND(>)	-1, %r29, fdtmanymiddle	/* Middle loop decr */
	copy		%arg3, %r31		/* Re-init inner loop count */

	movb,tr		%arg0, %r28, fdtmanyloop /* Re-init base addr */
	addib,COND(<=),n -1, %r22, fdtdone	/* Outer loop count decr */

fdtoneloop:					/* Loop if LOOP = 1 */
	mtsp		%r20, %sr1
	copy		%arg0, %r28		/* init base addr */
	copy		%arg2, %r29		/* init middle loop count */

fdtonemiddle:					/* Loop if LOOP = 1 */
	addib,COND(>)	-1, %r29, fdtonemiddle	/* Middle loop count decr */
	pdtlbe,m	%arg1(%sr1, %r28)	/* pdtlbe for one loop */

	addib,COND(>)	-1, %r22, fdtoneloop	/* Outer loop count decr */
	add		%r21, %r20, %r20	/* increment space */

fdtdone:
	/*
	 * Switch back to virtual mode
	 */
	/* pcxt_ssm_bug */
	rsm	PSW_SM_I, %r0
	load32	2f, %r1
	nop
	nop
	nop
	nop
	nop

	rsm	PSW_SM_Q, %r0		/* prep to load iia queue */
	mtctl	%r0, %cr17		/* Clear IIASQ tail */
	mtctl	%r0, %cr17		/* Clear IIASQ head */
	mtctl	%r1, %cr18		/* IIAOQ head */
	ldo	4(%r1), %r1
	mtctl	%r1, %cr18		/* IIAOQ tail */
	load32	KERNEL_PSW, %r1
	or	%r1, %r19, %r1		/* I-bit to state on entry */
	mtctl	%r1, %ipsw		/* restore I-bit (entire PSW) */
	rfi
	nop

2:	bv	%r0(%r2)
	nop
ENDPROC_CFI(flush_tlb_all_local)
	.import cache_info,data

ENTRY_CFI(flush_instruction_cache_local)
88:	load32	cache_info, %r1

	/* Flush Instruction Cache */

	LDREG	ICACHE_BASE(%r1), %arg0
	LDREG	ICACHE_STRIDE(%r1), %arg1
	LDREG	ICACHE_COUNT(%r1), %arg2
	LDREG	ICACHE_LOOP(%r1), %arg3
	rsm	PSW_SM_I, %r22		/* No mmgt ops during loop */
	mtsp	%r0, %sr1
	addib,COND(=)	-1, %arg3, fioneloop	/* Preadjust and test */
	movb,<,n	%arg3, %r31, fisync	/* If loop < 0, do sync */

fimanyloop:					/* Loop if LOOP >= 2 */
	addib,COND(>)	-1, %r31, fimanyloop	/* Adjusted inner loop decr */
	fice		%r0(%sr1, %arg0)
	fice,m		%arg1(%sr1, %arg0)	/* Last fice and addr adjust */
	movb,tr		%arg3, %r31, fimanyloop	/* Re-init inner loop count */
	addib,COND(<=),n -1, %arg2, fisync	/* Outer loop decr */

fioneloop:					/* Loop if LOOP = 1 */
	/* Some implementations may flush with a single fice instruction */
	cmpib,COND(>>=),n 15, %arg2, fioneloop2

fioneloop1:
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	addib,COND(>)	-16, %arg2, fioneloop1
	fice,m	%arg1(%sr1, %arg0)

	/* Check if done */
	cmpb,COND(=),n	%arg2, %r0, fisync	/* Predict branch taken */

fioneloop2:
	addib,COND(>)	-1, %arg2, fioneloop2	/* Outer loop count decr */
	fice,m		%arg1(%sr1, %arg0)	/* Fice for one loop */

fisync:
	sync
	mtsm	%r22			/* restore I-bit */
89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_ICACHE, INSN_NOP)
	bv	%r0(%r2)
	nop
ENDPROC_CFI(flush_instruction_cache_local)
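/*
 * A note on the 88:/89: label pairs used throughout: they bracket a
 * range handed to the boot-time alternative-patching code.  When the
 * condition (here ALT_COND_NO_ICACHE) holds on the running machine,
 * the instructions from 88b to 89b are patched out with nops, turning
 * the whole flush into a no-op.
 */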
	.import cache_info, data

ENTRY_CFI(flush_data_cache_local)
88:	load32	cache_info, %r1

	/* Flush Data Cache */

	LDREG	DCACHE_BASE(%r1), %arg0
	LDREG	DCACHE_STRIDE(%r1), %arg1
	LDREG	DCACHE_COUNT(%r1), %arg2
	LDREG	DCACHE_LOOP(%r1), %arg3
	rsm	PSW_SM_I, %r22		/* No mmgt ops during loop */
	mtsp	%r0, %sr1
	addib,COND(=)	-1, %arg3, fdoneloop	/* Preadjust and test */
	movb,<,n	%arg3, %r31, fdsync	/* If loop < 0, do sync */

fdmanyloop:					/* Loop if LOOP >= 2 */
	addib,COND(>)	-1, %r31, fdmanyloop	/* Adjusted inner loop decr */
	fdce		%r0(%sr1, %arg0)
	fdce,m		%arg1(%sr1, %arg0)	/* Last fdce and addr adjust */
	movb,tr		%arg3, %r31, fdmanyloop	/* Re-init inner loop count */
	addib,COND(<=),n -1, %arg2, fdsync	/* Outer loop decr */

fdoneloop:					/* Loop if LOOP = 1 */
	/* Some implementations may flush with a single fdce instruction */
	cmpib,COND(>>=),n 15, %arg2, fdoneloop2

fdoneloop1:
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	addib,COND(>)	-16, %arg2, fdoneloop1
	fdce,m	%arg1(%sr1, %arg0)

	/* Check if done */
	cmpb,COND(=),n	%arg2, %r0, fdsync	/* Predict branch taken */

fdoneloop2:
	addib,COND(>)	-1, %arg2, fdoneloop2	/* Outer loop count decr */
	fdce,m		%arg1(%sr1, %arg0)	/* Fdce for one loop */

fdsync:
	syncdma
	sync
	mtsm	%r22			/* restore I-bit */
89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
	bv	%r0(%r2)
	nop
ENDPROC_CFI(flush_data_cache_local)
/* Macros to serialize TLB purge operations on SMP.  */

	.macro	tlb_lock	la,flags,tmp
#ifdef CONFIG_SMP
98:
#if __PA_LDCW_ALIGNMENT > 4
	load32	pa_tlb_lock + __PA_LDCW_ALIGNMENT-1, \la
	depi	0,31,__PA_LDCW_ALIGN_ORDER, \la
#else
	load32	pa_tlb_lock, \la
#endif
	rsm	PSW_SM_I,\flags
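	/*
	 * ldcw zeroes the word it reads, so a nonzero old value means
	 * the lock was free and is now held.  On failure, spin on a
	 * plain ldw (cheaper than hammering the line with ldcw) until
	 * the word goes nonzero, then retry the ldcw.
	 */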
1:	LDCW		0(\la),\tmp
	cmpib,<>,n	0,\tmp,3f
2:	ldw		0(\la),\tmp
	cmpb,<>		%r0,\tmp,1b
	nop
	b,n		2b
3:
99:	ALTERNATIVE(98b, 99b, ALT_COND_NO_SMP, INSN_NOP)
#endif
	.endm

	.macro	tlb_unlock	la,flags,tmp
#ifdef CONFIG_SMP
98:	ldi		1,\tmp
	sync
	stw		\tmp,0(\la)
	mtsm		\flags
99:	ALTERNATIVE(98b, 99b, ALT_COND_NO_SMP, INSN_NOP)
#endif
	.endm
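/*
 * Usage sketch (the same lock/purge/unlock pattern appears verbatim in
 * the tmpalias routines below; on PA2.0 the lock is skipped entirely in
 * favour of a local pdtlb,l):
 *
 *	tlb_lock	%r20,%r21,%r22
 *	pdtlb		%r0(%r28)
 *	tlb_unlock	%r20,%r21,%r22
 */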
/* Clear page using kernel mapping.  */

ENTRY_CFI(clear_page_asm)
#ifdef CONFIG_64BIT

	/* Unroll the loop.  */
	ldi	(PAGE_SIZE / 128), %r1

1:
	std	%r0, 0(%r26)
	std	%r0, 8(%r26)
	std	%r0, 16(%r26)
	std	%r0, 24(%r26)
	std	%r0, 32(%r26)
	std	%r0, 40(%r26)
	std	%r0, 48(%r26)
	std	%r0, 56(%r26)
	std	%r0, 64(%r26)
	std	%r0, 72(%r26)
	std	%r0, 80(%r26)
	std	%r0, 88(%r26)
	std	%r0, 96(%r26)
	std	%r0, 104(%r26)
	std	%r0, 112(%r26)
	std	%r0, 120(%r26)

	/* Note reverse branch hint for addib is taken.  */
	addib,COND(>),n	-1, %r1, 1b
	ldo	128(%r26), %r26

#else
	/*
	 * Note that until (if) we start saving the full 64-bit register
	 * values on interrupt, we can't use std on a 32 bit kernel.
	 */
	ldi	(PAGE_SIZE / 64), %r1

1:
	stw	%r0, 0(%r26)
	stw	%r0, 4(%r26)
	stw	%r0, 8(%r26)
	stw	%r0, 12(%r26)
	stw	%r0, 16(%r26)
	stw	%r0, 20(%r26)
	stw	%r0, 24(%r26)
	stw	%r0, 28(%r26)
	stw	%r0, 32(%r26)
	stw	%r0, 36(%r26)
	stw	%r0, 40(%r26)
	stw	%r0, 44(%r26)
	stw	%r0, 48(%r26)
	stw	%r0, 52(%r26)
	stw	%r0, 56(%r26)
	stw	%r0, 60(%r26)
	addib,COND(>),n	-1, %r1, 1b
	ldo	64(%r26), %r26
#endif
	bv	%r0(%r2)
	nop
ENDPROC_CFI(clear_page_asm)
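/*
 * C-side sketch (an assumption here, based on the usual parisc calling
 * convention with the first argument arriving in %r26):
 *
 *	void clear_page_asm(void *page);
 */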
/* Copy page using kernel mapping.  */

ENTRY_CFI(copy_page_asm)
#ifdef CONFIG_64BIT
	/* PA8x00 CPUs can consume 2 loads or 1 store per cycle.
	 * Unroll the loop by hand and arrange insn appropriately.
	 * Prefetch doesn't improve performance on rp3440.
	 * GCC probably can do this just as well...
	 */
	ldi	(PAGE_SIZE / 128), %r1

1:	ldd	0(%r25), %r19
	ldd	8(%r25), %r20

	ldd	16(%r25), %r21
	ldd	24(%r25), %r22
	std	%r19, 0(%r26)
	std	%r20, 8(%r26)

	ldd	32(%r25), %r19
	ldd	40(%r25), %r20
	std	%r21, 16(%r26)
	std	%r22, 24(%r26)

	ldd	48(%r25), %r21
	ldd	56(%r25), %r22
	std	%r19, 32(%r26)
	std	%r20, 40(%r26)

	ldd	64(%r25), %r19
	ldd	72(%r25), %r20
	std	%r21, 48(%r26)
	std	%r22, 56(%r26)

	ldd	80(%r25), %r21
	ldd	88(%r25), %r22
	std	%r19, 64(%r26)
	std	%r20, 72(%r26)

	ldd	96(%r25), %r19
	ldd	104(%r25), %r20
	std	%r21, 80(%r26)
	std	%r22, 88(%r26)

	ldd	112(%r25), %r21
	ldd	120(%r25), %r22
	ldo	128(%r25), %r25
	std	%r19, 96(%r26)
	std	%r20, 104(%r26)

	std	%r21, 112(%r26)
	std	%r22, 120(%r26)

	/* Note reverse branch hint for addib is taken.  */
	addib,COND(>),n	-1, %r1, 1b
	ldo	128(%r26), %r26

#else
	/*
	 * This loop is optimized for PCXL/PCXL2 ldw/ldw and stw/stw
	 * bundles (very restricted rules for bundling).
	 * Note that until (if) we start saving
	 * the full 64-bit register values on interrupt, we can't
	 * use ldd/std on a 32 bit kernel.
	 */
	ldw	0(%r25), %r19
	ldi	(PAGE_SIZE / 64), %r1

1:
	ldw	4(%r25), %r20
	ldw	8(%r25), %r21
	ldw	12(%r25), %r22
	stw	%r19, 0(%r26)
	stw	%r20, 4(%r26)
	stw	%r21, 8(%r26)
	stw	%r22, 12(%r26)
	ldw	16(%r25), %r19
	ldw	20(%r25), %r20
	ldw	24(%r25), %r21
	ldw	28(%r25), %r22
	stw	%r19, 16(%r26)
	stw	%r20, 20(%r26)
	stw	%r21, 24(%r26)
	stw	%r22, 28(%r26)
	ldw	32(%r25), %r19
	ldw	36(%r25), %r20
	ldw	40(%r25), %r21
	ldw	44(%r25), %r22
	stw	%r19, 32(%r26)
	stw	%r20, 36(%r26)
	stw	%r21, 40(%r26)
	stw	%r22, 44(%r26)
	ldw	48(%r25), %r19
	ldw	52(%r25), %r20
	ldw	56(%r25), %r21
	ldw	60(%r25), %r22
	stw	%r19, 48(%r26)
	stw	%r20, 52(%r26)
	ldo	64(%r25), %r25
	stw	%r21, 56(%r26)
	stw	%r22, 60(%r26)
	ldo	64(%r26), %r26
	addib,COND(>),n	-1, %r1, 1b
	ldw	0(%r25), %r19
#endif
	bv	%r0(%r2)
	nop
ENDPROC_CFI(copy_page_asm)
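/*
 * Likewise a sketch, assuming the first two arguments arrive in
 * %r26/%r25 (the registers the loop above reads from and writes to):
 *
 *	void copy_page_asm(void *to, void *from);
 */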
/*
 * NOTE: Code in clear_user_page has a hard coded dependency on the
 *	maximum alias boundary being 4 MB. We've been assured by the
 *	parisc chip designers that there will not ever be a parisc
 *	chip with a larger alias boundary (Never say never :-) ).
 *
 *	Subtle: the dtlb miss handlers support the temp alias region by
 *	"knowing" that if a dtlb miss happens within the temp alias
 *	region it must have occurred while in clear_user_page. Since
 *	this routine makes use of processor local translations, we
 *	don't want to insert them into the kernel page table. Instead,
 *	we load up some general registers (they need to be registers
 *	which aren't shadowed) with the physical page numbers (preshifted
 *	for tlb insertion) needed to insert the translations. When we
 *	miss on the translation, the dtlb miss handler inserts the
 *	translation into the tlb using these values:
 *
 *	%r26 physical page (shifted for tlb insert) of "to" translation
 *	%r23 physical page (shifted for tlb insert) of "from" translation
 */

	/* Drop prot bits and convert to page addr for iitlbt and idtlbt */
	#define PAGE_ADD_SHIFT	(PAGE_SHIFT-12)

	.macro	convert_phys_for_tlb_insert20	phys
	extrd,u	\phys, 56-PAGE_ADD_SHIFT, 32-PAGE_ADD_SHIFT, \phys
#if _PAGE_SIZE_ENCODING_DEFAULT
	depdi	_PAGE_SIZE_ENCODING_DEFAULT, 63, (63-58), \phys
#endif
	.endm
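/*
 * With 4 kB pages, PAGE_ADD_SHIFT is 0 and (ignoring the page-size
 * encoding) the macro reduces to
 *
 *	extrd,u	\phys, 56, 32, \phys
 *
 * which drops the protection/offset bits and leaves the physical page
 * number in the format iitlbt/idtlbt expect.
 */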
/*
 * copy_user_page_asm() performs a page copy using mappings
 * equivalent to the user page mappings.  It can be used to
 * implement copy_user_page() but unfortunately both the `from'
 * and `to' pages need to be flushed through mappings equivalent
 * to the user mappings after the copy because the kernel accesses
 * the `from' page through the kmap kernel mapping and the `to'
 * page needs to be flushed since code can be copied.  As a
 * result, this implementation is less efficient than the simpler
 * copy using the kernel mapping.  It only needs the `from' page
 * to be flushed via the user mapping.  The kunmap routines handle
 * the flushes needed for the kernel mapping.
 *
 * I'm still keeping this around because it may be possible to
 * use it if more information is passed into copy_user_page().
 * Have to do some measurements to see if it is worthwhile to
 * lobby for such a change.
 */
ENTRY_CFI(copy_user_page_asm)
	/* Convert virtual `to' and `from' addresses to physical addresses.
	   Move `from' physical address to non-shadowed register.  */
	ldil	L%(__PAGE_OFFSET), %r1
	sub	%r26, %r1, %r26
	sub	%r25, %r1, %r23

	ldil	L%(TMPALIAS_MAP_START), %r28
#ifdef CONFIG_64BIT
#if (TMPALIAS_MAP_START >= 0x80000000)
	depdi	0, 31,32, %r28		/* clear any sign extension */
#endif
	convert_phys_for_tlb_insert20 %r26	/* convert phys addr to tlb insert format */
	convert_phys_for_tlb_insert20 %r23	/* convert phys addr to tlb insert format */
	depd	%r24,63,22, %r28	/* Form aliased virtual address 'to' */
	depdi	0, 63,PAGE_SHIFT, %r28	/* Clear any offset bits */
	copy	%r28, %r29
	depdi	1, 41,1, %r29		/* Form aliased virtual address 'from' */
#else
	extrw,u	%r26, 24,25, %r26	/* convert phys addr to tlb insert format */
	extrw,u	%r23, 24,25, %r23	/* convert phys addr to tlb insert format */
	depw	%r24, 31,22, %r28	/* Form aliased virtual address 'to' */
	depwi	0, 31,PAGE_SHIFT, %r28	/* Clear any offset bits */
	copy	%r28, %r29
	depwi	1, 9,1, %r29		/* Form aliased virtual address 'from' */
#endif

	/* Purge any old translations */

#ifdef CONFIG_PA20
	pdtlb,l	%r0(%r28)
	pdtlb,l	%r0(%r29)
#else
	tlb_lock	%r20,%r21,%r22
0:	pdtlb		%r0(%r28)
1:	pdtlb		%r0(%r29)
	tlb_unlock	%r20,%r21,%r22
	ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
	ALTERNATIVE(1b, 1b+4, ALT_COND_NO_SMP, INSN_PxTLB)
#endif

#ifdef CONFIG_64BIT
	/* PA8x00 CPUs can consume 2 loads or 1 store per cycle.
	 * Unroll the loop by hand and arrange insn appropriately.
	 * GCC probably can do this just as well.
	 */
	ldd	0(%r29), %r19
	ldi	(PAGE_SIZE / 128), %r1

1:	ldd	8(%r29), %r20

	ldd	16(%r29), %r21
	ldd	24(%r29), %r22
	std	%r19, 0(%r28)
	std	%r20, 8(%r28)

	ldd	32(%r29), %r19
	ldd	40(%r29), %r20
	std	%r21, 16(%r28)
	std	%r22, 24(%r28)

	ldd	48(%r29), %r21
	ldd	56(%r29), %r22
	std	%r19, 32(%r28)
	std	%r20, 40(%r28)

	ldd	64(%r29), %r19
	ldd	72(%r29), %r20
	std	%r21, 48(%r28)
	std	%r22, 56(%r28)

	ldd	80(%r29), %r21
	ldd	88(%r29), %r22
	std	%r19, 64(%r28)
	std	%r20, 72(%r28)

	ldd	96(%r29), %r19
	ldd	104(%r29), %r20
	std	%r21, 80(%r28)
	std	%r22, 88(%r28)

	ldd	112(%r29), %r21
	ldd	120(%r29), %r22
	std	%r19, 96(%r28)
	std	%r20, 104(%r28)
	ldo	128(%r29), %r29

	std	%r21, 112(%r28)
	std	%r22, 120(%r28)
	ldo	128(%r28), %r28

	/* conditional branches nullify on forward taken branch, and on
	 * non-taken backward branch.  Note that .+4 is a backwards branch.
	 * The ldd should only get executed if the branch is taken.
	 */
	addib,COND(>),n	-1, %r1, 1b	/* bundle 10 */
	ldd	0(%r29), %r19		/* start next loads */

#else
	ldi	(PAGE_SIZE / 64), %r1

	/*
	 * This loop is optimized for PCXL/PCXL2 ldw/ldw and stw/stw
	 * bundles (very restricted rules for bundling). It probably
	 * does OK on PCXU and better, but we could do better with
	 * ldd/std instructions. Note that until (if) we start saving
	 * the full 64-bit register values on interrupt, we can't
	 * use ldd/std on a 32 bit kernel.
	 */
1:	ldw	0(%r29), %r19
	ldw	4(%r29), %r20
	ldw	8(%r29), %r21
	ldw	12(%r29), %r22
	stw	%r19, 0(%r28)
	stw	%r20, 4(%r28)
	stw	%r21, 8(%r28)
	stw	%r22, 12(%r28)
	ldw	16(%r29), %r19
	ldw	20(%r29), %r20
	ldw	24(%r29), %r21
	ldw	28(%r29), %r22
	stw	%r19, 16(%r28)
	stw	%r20, 20(%r28)
	stw	%r21, 24(%r28)
	stw	%r22, 28(%r28)
	ldw	32(%r29), %r19
	ldw	36(%r29), %r20
	ldw	40(%r29), %r21
	ldw	44(%r29), %r22
	stw	%r19, 32(%r28)
	stw	%r20, 36(%r28)
	stw	%r21, 40(%r28)
	stw	%r22, 44(%r28)
	ldw	48(%r29), %r19
	ldw	52(%r29), %r20
	ldw	56(%r29), %r21
	ldw	60(%r29), %r22
	stw	%r19, 48(%r28)
	stw	%r20, 52(%r28)
	stw	%r21, 56(%r28)
	stw	%r22, 60(%r28)
	ldo	64(%r28), %r28

	addib,COND(>)	-1, %r1, 1b
	ldo	64(%r29), %r29
#endif

	bv	%r0(%r2)
	nop
ENDPROC_CFI(copy_user_page_asm)
ENTRY_CFI(clear_user_page_asm)
	tophys_r1	%r26

	ldil	L%(TMPALIAS_MAP_START), %r28
#ifdef CONFIG_64BIT
#if (TMPALIAS_MAP_START >= 0x80000000)
	depdi	0, 31,32, %r28		/* clear any sign extension */
#endif
	convert_phys_for_tlb_insert20 %r26	/* convert phys addr to tlb insert format */
	depd	%r25, 63,22, %r28	/* Form aliased virtual address 'to' */
	depdi	0, 63,PAGE_SHIFT, %r28	/* Clear any offset bits */
#else
	extrw,u	%r26, 24,25, %r26	/* convert phys addr to tlb insert format */
	depw	%r25, 31,22, %r28	/* Form aliased virtual address 'to' */
	depwi	0, 31,PAGE_SHIFT, %r28	/* Clear any offset bits */
#endif

	/* Purge any old translation */

#ifdef CONFIG_PA20
	pdtlb,l	%r0(%r28)
#else
	tlb_lock	%r20,%r21,%r22
0:	pdtlb		%r0(%r28)
	tlb_unlock	%r20,%r21,%r22
	ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
#endif

#ifdef CONFIG_64BIT
	ldi	(PAGE_SIZE / 128), %r1

	/* PREFETCH (Write) has not (yet) been proven to help here */
	/* #define PREFETCHW_OP	ldd 256(%0), %r0 */

1:	std	%r0, 0(%r28)
	std	%r0, 8(%r28)
	std	%r0, 16(%r28)
	std	%r0, 24(%r28)
	std	%r0, 32(%r28)
	std	%r0, 40(%r28)
	std	%r0, 48(%r28)
	std	%r0, 56(%r28)
	std	%r0, 64(%r28)
	std	%r0, 72(%r28)
	std	%r0, 80(%r28)
	std	%r0, 88(%r28)
	std	%r0, 96(%r28)
	std	%r0, 104(%r28)
	std	%r0, 112(%r28)
	std	%r0, 120(%r28)
	addib,COND(>)	-1, %r1, 1b
	ldo	128(%r28), %r28

#else	/* ! CONFIG_64BIT */
	ldi	(PAGE_SIZE / 64), %r1

1:	stw	%r0, 0(%r28)
	stw	%r0, 4(%r28)
	stw	%r0, 8(%r28)
	stw	%r0, 12(%r28)
	stw	%r0, 16(%r28)
	stw	%r0, 20(%r28)
	stw	%r0, 24(%r28)
	stw	%r0, 28(%r28)
	stw	%r0, 32(%r28)
	stw	%r0, 36(%r28)
	stw	%r0, 40(%r28)
	stw	%r0, 44(%r28)
	stw	%r0, 48(%r28)
	stw	%r0, 52(%r28)
	stw	%r0, 56(%r28)
	stw	%r0, 60(%r28)
	addib,COND(>)	-1, %r1, 1b
	ldo	64(%r28), %r28
#endif	/* CONFIG_64BIT */

	bv	%r0(%r2)
	nop
ENDPROC_CFI(clear_user_page_asm)
ENTRY_CFI(flush_dcache_page_asm)
	ldil	L%(TMPALIAS_MAP_START), %r28
#ifdef CONFIG_64BIT
#if (TMPALIAS_MAP_START >= 0x80000000)
	depdi	0, 31,32, %r28		/* clear any sign extension */
#endif
	convert_phys_for_tlb_insert20 %r26	/* convert phys addr to tlb insert format */
	depd	%r25, 63,22, %r28	/* Form aliased virtual address 'to' */
	depdi	0, 63,PAGE_SHIFT, %r28	/* Clear any offset bits */
#else
	extrw,u	%r26, 24,25, %r26	/* convert phys addr to tlb insert format */
	depw	%r25, 31,22, %r28	/* Form aliased virtual address 'to' */
	depwi	0, 31,PAGE_SHIFT, %r28	/* Clear any offset bits */
#endif

	/* Purge any old translation */

#ifdef CONFIG_PA20
	pdtlb,l	%r0(%r28)
#else
	tlb_lock	%r20,%r21,%r22
0:	pdtlb		%r0(%r28)
	tlb_unlock	%r20,%r21,%r22
	ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
#endif

88:	ldil	L%dcache_stride, %r1
	ldw	R%dcache_stride(%r1), %r31

#ifdef CONFIG_64BIT
	depdi,z	1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z	1, 31-PAGE_SHIFT,1, %r25
#endif
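	/* %r25 = PAGE_SIZE: the deposit above plants a 1 at bit
	   (63|31)-PAGE_SHIFT, i.e. 1 << PAGE_SHIFT.  The next two
	   instructions form end-of-page minus one stride, the last
	   line address for the cmpb termination test below. */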
	add	%r28, %r25, %r25
	sub	%r25, %r31, %r25

1:	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	cmpb,COND(>>)	%r25, %r28, 1b	/* predict taken */
	fdc,m	%r31(%r28)

89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
	sync
	bv	%r0(%r2)
	nop
ENDPROC_CFI(flush_dcache_page_asm)
ENTRY_CFI(purge_dcache_page_asm)
	ldil	L%(TMPALIAS_MAP_START), %r28
#ifdef CONFIG_64BIT
#if (TMPALIAS_MAP_START >= 0x80000000)
	depdi	0, 31,32, %r28		/* clear any sign extension */
#endif
	convert_phys_for_tlb_insert20 %r26	/* convert phys addr to tlb insert format */
	depd	%r25, 63,22, %r28	/* Form aliased virtual address 'to' */
	depdi	0, 63,PAGE_SHIFT, %r28	/* Clear any offset bits */
#else
	extrw,u	%r26, 24,25, %r26	/* convert phys addr to tlb insert format */
	depw	%r25, 31,22, %r28	/* Form aliased virtual address 'to' */
	depwi	0, 31,PAGE_SHIFT, %r28	/* Clear any offset bits */
#endif

	/* Purge any old translation */

#ifdef CONFIG_PA20
	pdtlb,l	%r0(%r28)
#else
	tlb_lock	%r20,%r21,%r22
0:	pdtlb		%r0(%r28)
	tlb_unlock	%r20,%r21,%r22
	ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
#endif

88:	ldil	L%dcache_stride, %r1
	ldw	R%dcache_stride(%r1), %r31

#ifdef CONFIG_64BIT
	depdi,z	1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z	1, 31-PAGE_SHIFT,1, %r25
#endif
	add	%r28, %r25, %r25
	sub	%r25, %r31, %r25

1:	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	cmpb,COND(>>)	%r25, %r28, 1b	/* predict taken */
	pdc,m	%r31(%r28)

89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
	sync
	bv	%r0(%r2)
	nop
ENDPROC_CFI(purge_dcache_page_asm)
ENTRY_CFI(flush_icache_page_asm)
	ldil	L%(TMPALIAS_MAP_START), %r28
#ifdef CONFIG_64BIT
#if (TMPALIAS_MAP_START >= 0x80000000)
	depdi	0, 31,32, %r28		/* clear any sign extension */
#endif
	convert_phys_for_tlb_insert20 %r26	/* convert phys addr to tlb insert format */
	depd	%r25, 63,22, %r28	/* Form aliased virtual address 'to' */
	depdi	0, 63,PAGE_SHIFT, %r28	/* Clear any offset bits */
#else
	extrw,u	%r26, 24,25, %r26	/* convert phys addr to tlb insert format */
	depw	%r25, 31,22, %r28	/* Form aliased virtual address 'to' */
	depwi	0, 31,PAGE_SHIFT, %r28	/* Clear any offset bits */
#endif

	/* Purge any old translation.  Note that the FIC instruction
	 * may use either the instruction or data TLB.  Given that we
	 * have a flat address space, it's not clear which TLB will be
	 * used.  So, we purge both entries.  */

#ifdef CONFIG_PA20
	pdtlb,l	%r0(%r28)
1:	pitlb,l	%r0(%sr4,%r28)
	ALTERNATIVE(1b, 1b+4, ALT_COND_NO_SPLIT_TLB, INSN_NOP)
#else
	tlb_lock	%r20,%r21,%r22
0:	pdtlb		%r0(%r28)
1:	pitlb		%r0(%sr4,%r28)
	tlb_unlock	%r20,%r21,%r22
	ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
	ALTERNATIVE(1b, 1b+4, ALT_COND_NO_SMP, INSN_PxTLB)
	ALTERNATIVE(1b, 1b+4, ALT_COND_NO_SPLIT_TLB, INSN_NOP)
#endif

88:	ldil	L%icache_stride, %r1
	ldw	R%icache_stride(%r1), %r31

#ifdef CONFIG_64BIT
	depdi,z	1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z	1, 31-PAGE_SHIFT,1, %r25
#endif
	add	%r28, %r25, %r25
	sub	%r25, %r31, %r25

	/* fic only has the type 26 form on PA1.1, requiring an
	 * explicit space specification, so use %sr4 */
1:	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	cmpb,COND(>>)	%r25, %r28, 1b	/* predict taken */
	fic,m	%r31(%sr4,%r28)

89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_ICACHE, INSN_NOP)
	sync
	bv	%r0(%r2)
	nop
ENDPROC_CFI(flush_icache_page_asm)
ENTRY_CFI(flush_kernel_dcache_page_asm)
88:	ldil	L%dcache_stride, %r1
	ldw	R%dcache_stride(%r1), %r23

#ifdef CONFIG_64BIT
	depdi,z	1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z	1, 31-PAGE_SHIFT,1, %r25
#endif
	add	%r26, %r25, %r25
	sub	%r25, %r23, %r25

1:	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	cmpb,COND(>>)	%r25, %r26, 1b	/* predict taken */
	fdc,m	%r23(%r26)

89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
	sync
	bv	%r0(%r2)
	nop
ENDPROC_CFI(flush_kernel_dcache_page_asm)
ENTRY_CFI(purge_kernel_dcache_page_asm)
88:	ldil	L%dcache_stride, %r1
	ldw	R%dcache_stride(%r1), %r23

#ifdef CONFIG_64BIT
	depdi,z	1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z	1, 31-PAGE_SHIFT,1, %r25
#endif
	add	%r26, %r25, %r25
	sub	%r25, %r23, %r25

1:	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	cmpb,COND(>>)	%r25, %r26, 1b	/* predict taken */
	pdc,m	%r23(%r26)

89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
	sync
	bv	%r0(%r2)
	nop
ENDPROC_CFI(purge_kernel_dcache_page_asm)
ENTRY_CFI(flush_user_dcache_range_asm)
88:	ldil	L%dcache_stride, %r1
	ldw	R%dcache_stride(%r1), %r23
	ldo	-1(%r23), %r21
	ANDCM	%r26, %r21, %r26

#ifdef CONFIG_64BIT
	depd,z	%r23, 59, 60, %r21
#else
	depw,z	%r23, 27, 28, %r21
#endif
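	/* The deposit above shifts the stride left by 4, so
	   %r21 = 16 * stride: the bytes covered by one fully unrolled
	   iteration of the loop below. */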
	add	%r26, %r21, %r22
	cmpb,COND(>>),n	%r22, %r25, 2f	/* predict not taken */
1:	add	%r22, %r21, %r22
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	cmpb,COND(<<=)	%r22, %r25, 1b	/* predict taken */
	fdc,m	%r23(%sr3, %r26)

2:	cmpb,COND(>>),n	%r25, %r26, 2b
	fdc,m	%r23(%sr3, %r26)

89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
	sync
	bv	%r0(%r2)
	nop
ENDPROC_CFI(flush_user_dcache_range_asm)
ENTRY_CFI(flush_kernel_dcache_range_asm)
88:	ldil	L%dcache_stride, %r1
	ldw	R%dcache_stride(%r1), %r23
	ldo	-1(%r23), %r21
	ANDCM	%r26, %r21, %r26

#ifdef CONFIG_64BIT
	depd,z	%r23, 59, 60, %r21
#else
	depw,z	%r23, 27, 28, %r21
#endif
	add	%r26, %r21, %r22
	cmpb,COND(>>),n	%r22, %r25, 2f	/* predict not taken */
1:	add	%r22, %r21, %r22
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	cmpb,COND(<<=)	%r22, %r25, 1b	/* predict taken */
	fdc,m	%r23(%r26)

2:	cmpb,COND(>>),n	%r25, %r26, 2b	/* predict taken */
	fdc,m	%r23(%r26)

	sync
89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
	syncdma
	bv	%r0(%r2)
	nop
ENDPROC_CFI(flush_kernel_dcache_range_asm)
ENTRY_CFI(purge_kernel_dcache_range_asm)
88:	ldil	L%dcache_stride, %r1
	ldw	R%dcache_stride(%r1), %r23
	ldo	-1(%r23), %r21
	ANDCM	%r26, %r21, %r26

#ifdef CONFIG_64BIT
	depd,z	%r23, 59, 60, %r21
#else
	depw,z	%r23, 27, 28, %r21
#endif
	add	%r26, %r21, %r22
	cmpb,COND(>>),n	%r22, %r25, 2f	/* predict not taken */
1:	add	%r22, %r21, %r22
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	cmpb,COND(<<=)	%r22, %r25, 1b	/* predict taken */
	pdc,m	%r23(%r26)

2:	cmpb,COND(>>),n	%r25, %r26, 2b	/* predict taken */
	pdc,m	%r23(%r26)

	sync
89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
	syncdma
	bv	%r0(%r2)
	nop
ENDPROC_CFI(purge_kernel_dcache_range_asm)
ENTRY_CFI(flush_user_icache_range_asm)
88:	ldil	L%icache_stride, %r1
	ldw	R%icache_stride(%r1), %r23
	ldo	-1(%r23), %r21
	ANDCM	%r26, %r21, %r26

#ifdef CONFIG_64BIT
	depd,z	%r23, 59, 60, %r21
#else
	depw,z	%r23, 27, 28, %r21
#endif
	add	%r26, %r21, %r22
	cmpb,COND(>>),n	%r22, %r25, 2f	/* predict not taken */
1:	add	%r22, %r21, %r22
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	cmpb,COND(<<=)	%r22, %r25, 1b	/* predict taken */
	fic,m	%r23(%sr3, %r26)

2:	cmpb,COND(>>),n	%r25, %r26, 2b
	fic,m	%r23(%sr3, %r26)

89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_ICACHE, INSN_NOP)
	sync
	bv	%r0(%r2)
	nop
ENDPROC_CFI(flush_user_icache_range_asm)
ENTRY_CFI(flush_kernel_icache_page)
88:	ldil	L%icache_stride, %r1
	ldw	R%icache_stride(%r1), %r23

#ifdef CONFIG_64BIT
	depdi,z	1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z	1, 31-PAGE_SHIFT,1, %r25
#endif
	add	%r26, %r25, %r25
	sub	%r25, %r23, %r25

1:	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	cmpb,COND(>>)	%r25, %r26, 1b	/* predict taken */
	fic,m	%r23(%sr4, %r26)

89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_ICACHE, INSN_NOP)
	sync
	bv	%r0(%r2)
	nop
ENDPROC_CFI(flush_kernel_icache_page)
ENTRY_CFI(flush_kernel_icache_range_asm)
88:	ldil	L%icache_stride, %r1
	ldw	R%icache_stride(%r1), %r23
	ldo	-1(%r23), %r21
	ANDCM	%r26, %r21, %r26

#ifdef CONFIG_64BIT
	depd,z	%r23, 59, 60, %r21
#else
	depw,z	%r23, 27, 28, %r21
#endif
	add	%r26, %r21, %r22
	cmpb,COND(>>),n	%r22, %r25, 2f	/* predict not taken */
1:	add	%r22, %r21, %r22
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	cmpb,COND(<<=)	%r22, %r25, 1b	/* predict taken */
	fic,m	%r23(%sr4, %r26)

2:	cmpb,COND(>>),n	%r25, %r26, 2b	/* predict taken */
	fic,m	%r23(%sr4, %r26)

89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_ICACHE, INSN_NOP)
	sync
	bv	%r0(%r2)
	nop
ENDPROC_CFI(flush_kernel_icache_range_asm)
__INIT

	/* align should cover use of rfi in disable_sr_hashing_asm and
	 * srdis_done.
	 */
	.align	256
ENTRY_CFI(disable_sr_hashing_asm)
	/*
	 * Switch to real mode
	 */
	/* pcxt_ssm_bug */
	rsm	PSW_SM_I, %r0
	load32	PA(1f), %r1
	nop
	nop
	nop
	nop
	nop

	rsm	PSW_SM_Q, %r0		/* prep to load iia queue */
	mtctl	%r0, %cr17		/* Clear IIASQ tail */
	mtctl	%r0, %cr17		/* Clear IIASQ head */
	mtctl	%r1, %cr18		/* IIAOQ head */
	ldo	4(%r1), %r1
	mtctl	%r1, %cr18		/* IIAOQ tail */
	load32	REAL_MODE_PSW, %r1
	mtctl	%r1, %ipsw
	rfi
	nop

1:	cmpib,=,n	SRHASH_PCXST, %r26, srdis_pcxs
	cmpib,=,n	SRHASH_PCXL, %r26, srdis_pcxl
	cmpib,=,n	SRHASH_PA20, %r26, srdis_pa20
	b,n		srdis_done
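/*
 * The hand-encoded .word values below stand in for mfdiag/mtdiag;
 * these are diagnose instructions that (at least historically) the
 * assembler does not accept as mnemonics, hence the raw encodings
 * with the intended instruction noted alongside each one.
 */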
srdis_pcxs:

	/* Disable Space Register Hashing for PCXS,PCXT,PCXT' */

	.word	0x141c1a00		/* mfdiag %dr0, %r28 */
	.word	0x141c1a00		/* must issue twice */
	depwi	0,18,1, %r28		/* Clear DHE (dcache hash enable) */
	depwi	0,20,1, %r28		/* Clear IHE (icache hash enable) */
	.word	0x141c1600		/* mtdiag %r28, %dr0 */
	.word	0x141c1600		/* must issue twice */
	b,n	srdis_done

srdis_pcxl:

	/* Disable Space Register Hashing for PCXL */

	.word	0x141c0600		/* mfdiag %dr0, %r28 */
	depwi	0,28,2, %r28		/* Clear DHASH_EN & IHASH_EN */
	.word	0x141c0240		/* mtdiag %r28, %dr0 */
	b,n	srdis_done

srdis_pa20:

	/* Disable Space Register Hashing for PCXU,PCXU+,PCXW,PCXW+,PCXW2 */

	.word	0x144008bc		/* mfdiag %dr2, %r28 */
	depdi	0, 54,1, %r28		/* clear DIAG_SPHASH_ENAB (bit 54) */
	.word	0x145c1840		/* mtdiag %r28, %dr2 */

srdis_done:
	/* Switch back to virtual mode */
	rsm	PSW_SM_I, %r0		/* prep to load iia queue */
	load32	2f, %r1
	nop
	nop
	nop
	nop
	nop

	rsm	PSW_SM_Q, %r0		/* prep to load iia queue */
	mtctl	%r0, %cr17		/* Clear IIASQ tail */
	mtctl	%r0, %cr17		/* Clear IIASQ head */
	mtctl	%r1, %cr18		/* IIAOQ head */
	ldo	4(%r1), %r1
	mtctl	%r1, %cr18		/* IIAOQ tail */
	load32	KERNEL_PSW, %r1
	mtctl	%r1, %ipsw
	rfi
	nop

2:	bv	%r0(%r2)
	nop
ENDPROC_CFI(disable_sr_hashing_asm)

	.end