/* stackframe.h */

/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 95, 96, 99, 2001 Ralf Baechle
 * Copyright (C) 1994, 1995, 1996 Paul M. Antoine.
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) 2007 Maciej W. Rozycki
 */
  11. #ifndef _ASM_STACKFRAME_H
  12. #define _ASM_STACKFRAME_H
  13. #include <linux/threads.h>
  14. #include <asm/asm.h>
  15. #include <asm/asmmacro.h>
  16. #include <asm/mipsregs.h>
  17. #include <asm/asm-offsets.h>
  18. #include <asm/thread_info.h>
  19. #if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)
  20. #define STATMASK 0x3f
  21. #else
  22. #define STATMASK 0x1f
  23. #endif
  24. .macro SAVE_AT
  25. .set push
  26. .set noat
  27. LONG_S $1, PT_R1(sp)
  28. .set pop
  29. .endm
  30. .macro SAVE_TEMP
  31. #ifdef CONFIG_CPU_HAS_SMARTMIPS
  32. mflhxu v1
  33. LONG_S v1, PT_LO(sp)
  34. mflhxu v1
  35. LONG_S v1, PT_HI(sp)
  36. mflhxu v1
  37. LONG_S v1, PT_ACX(sp)
  38. #else
  39. mfhi v1
  40. #endif
  41. #ifdef CONFIG_32BIT
  42. LONG_S $8, PT_R8(sp)
  43. LONG_S $9, PT_R9(sp)
  44. #endif
  45. LONG_S $10, PT_R10(sp)
  46. LONG_S $11, PT_R11(sp)
  47. LONG_S $12, PT_R12(sp)
  48. #ifndef CONFIG_CPU_HAS_SMARTMIPS
  49. LONG_S v1, PT_HI(sp)
  50. mflo v1
  51. #endif
  52. LONG_S $13, PT_R13(sp)
  53. LONG_S $14, PT_R14(sp)
  54. LONG_S $15, PT_R15(sp)
  55. LONG_S $24, PT_R24(sp)
  56. #ifndef CONFIG_CPU_HAS_SMARTMIPS
  57. LONG_S v1, PT_LO(sp)
  58. #endif
  59. #ifdef CONFIG_CPU_CAVIUM_OCTEON
  60. /*
  61. * The Octeon multiplier state is affected by general
  62. * multiply instructions. It must be saved before and
  63. * kernel code might corrupt it
  64. */
  65. jal octeon_mult_save
  66. #endif
  67. .endm
  68. .macro SAVE_STATIC
  69. LONG_S $16, PT_R16(sp)
  70. LONG_S $17, PT_R17(sp)
  71. LONG_S $18, PT_R18(sp)
  72. LONG_S $19, PT_R19(sp)
  73. LONG_S $20, PT_R20(sp)
  74. LONG_S $21, PT_R21(sp)
  75. LONG_S $22, PT_R22(sp)
  76. LONG_S $23, PT_R23(sp)
  77. LONG_S $30, PT_R30(sp)
  78. .endm
  79. #ifdef CONFIG_SMP
  80. .macro get_saved_sp /* SMP variation */
  81. ASM_CPUID_MFC0 k0, ASM_SMP_CPUID_REG
  82. #if defined(CONFIG_32BIT) || defined(KBUILD_64BIT_SYM32)
  83. lui k1, %hi(kernelsp)
  84. #else
  85. lui k1, %highest(kernelsp)
  86. daddiu k1, %higher(kernelsp)
  87. dsll k1, 16
  88. daddiu k1, %hi(kernelsp)
  89. dsll k1, 16
  90. #endif
  91. LONG_SRL k0, SMP_CPUID_PTRSHIFT
  92. LONG_ADDU k1, k0
  93. LONG_L k1, %lo(kernelsp)(k1)
  94. .endm
  95. .macro set_saved_sp stackp temp temp2
  96. ASM_CPUID_MFC0 \temp, ASM_SMP_CPUID_REG
  97. LONG_SRL \temp, SMP_CPUID_PTRSHIFT
  98. LONG_S \stackp, kernelsp(\temp)
  99. .endm
  100. #else /* !CONFIG_SMP */
  101. .macro get_saved_sp /* Uniprocessor variation */
  102. #ifdef CONFIG_CPU_JUMP_WORKAROUNDS
  103. /*
  104. * Clear BTB (branch target buffer), forbid RAS (return address
  105. * stack) to workaround the Out-of-order Issue in Loongson2F
  106. * via its diagnostic register.
  107. */
  108. move k0, ra
  109. jal 1f
  110. nop
  111. 1: jal 1f
  112. nop
  113. 1: jal 1f
  114. nop
  115. 1: jal 1f
  116. nop
  117. 1: move ra, k0
  118. li k0, 3
  119. mtc0 k0, $22
  120. #endif /* CONFIG_CPU_JUMP_WORKAROUNDS */
  121. #if defined(CONFIG_32BIT) || defined(KBUILD_64BIT_SYM32)
  122. lui k1, %hi(kernelsp)
  123. #else
  124. lui k1, %highest(kernelsp)
  125. daddiu k1, %higher(kernelsp)
  126. dsll k1, k1, 16
  127. daddiu k1, %hi(kernelsp)
  128. dsll k1, k1, 16
  129. #endif
  130. LONG_L k1, %lo(kernelsp)(k1)
  131. .endm
  132. .macro set_saved_sp stackp temp temp2
  133. LONG_S \stackp, kernelsp
  134. .endm
  135. #endif
  136. .macro SAVE_SOME
  137. .set push
  138. .set noat
  139. .set reorder
  140. mfc0 k0, CP0_STATUS
  141. sll k0, 3 /* extract cu0 bit */
  142. .set noreorder
  143. bltz k0, 8f
  144. move k1, sp
  145. .set reorder
  146. /* Called from user mode, new stack. */
  147. get_saved_sp
  148. #ifndef CONFIG_CPU_DADDI_WORKAROUNDS
  149. 8: move k0, sp
  150. PTR_SUBU sp, k1, PT_SIZE
  151. #else
  152. .set at=k0
  153. 8: PTR_SUBU k1, PT_SIZE
  154. .set noat
  155. move k0, sp
  156. move sp, k1
  157. #endif
  158. LONG_S k0, PT_R29(sp)
  159. LONG_S $3, PT_R3(sp)
  160. /*
  161. * You might think that you don't need to save $0,
  162. * but the FPU emulator and gdb remote debug stub
  163. * need it to operate correctly
  164. */
  165. LONG_S $0, PT_R0(sp)
  166. mfc0 v1, CP0_STATUS
  167. LONG_S $2, PT_R2(sp)
  168. LONG_S v1, PT_STATUS(sp)
  169. LONG_S $4, PT_R4(sp)
  170. mfc0 v1, CP0_CAUSE
  171. LONG_S $5, PT_R5(sp)
  172. LONG_S v1, PT_CAUSE(sp)
  173. LONG_S $6, PT_R6(sp)
  174. MFC0 v1, CP0_EPC
  175. LONG_S $7, PT_R7(sp)
  176. #ifdef CONFIG_64BIT
  177. LONG_S $8, PT_R8(sp)
  178. LONG_S $9, PT_R9(sp)
  179. #endif
  180. LONG_S v1, PT_EPC(sp)
  181. LONG_S $25, PT_R25(sp)
  182. LONG_S $28, PT_R28(sp)
  183. LONG_S $31, PT_R31(sp)
  184. ori $28, sp, _THREAD_MASK
  185. xori $28, _THREAD_MASK
  186. #ifdef CONFIG_CPU_CAVIUM_OCTEON
  187. .set mips64
  188. pref 0, 0($28) /* Prefetch the current pointer */
  189. #endif
  190. .set pop
  191. .endm
  192. .macro SAVE_ALL
  193. SAVE_SOME
  194. SAVE_AT
  195. SAVE_TEMP
  196. SAVE_STATIC
  197. .endm
  198. .macro RESTORE_AT
  199. .set push
  200. .set noat
  201. LONG_L $1, PT_R1(sp)
  202. .set pop
  203. .endm
  204. .macro RESTORE_TEMP
  205. #ifdef CONFIG_CPU_CAVIUM_OCTEON
  206. /* Restore the Octeon multiplier state */
  207. jal octeon_mult_restore
  208. #endif
  209. #ifdef CONFIG_CPU_HAS_SMARTMIPS
  210. LONG_L $24, PT_ACX(sp)
  211. mtlhx $24
  212. LONG_L $24, PT_HI(sp)
  213. mtlhx $24
  214. LONG_L $24, PT_LO(sp)
  215. mtlhx $24
  216. #else
  217. LONG_L $24, PT_LO(sp)
  218. mtlo $24
  219. LONG_L $24, PT_HI(sp)
  220. mthi $24
  221. #endif
  222. #ifdef CONFIG_32BIT
  223. LONG_L $8, PT_R8(sp)
  224. LONG_L $9, PT_R9(sp)
  225. #endif
  226. LONG_L $10, PT_R10(sp)
  227. LONG_L $11, PT_R11(sp)
  228. LONG_L $12, PT_R12(sp)
  229. LONG_L $13, PT_R13(sp)
  230. LONG_L $14, PT_R14(sp)
  231. LONG_L $15, PT_R15(sp)
  232. LONG_L $24, PT_R24(sp)
  233. .endm
  234. .macro RESTORE_STATIC
  235. LONG_L $16, PT_R16(sp)
  236. LONG_L $17, PT_R17(sp)
  237. LONG_L $18, PT_R18(sp)
  238. LONG_L $19, PT_R19(sp)
  239. LONG_L $20, PT_R20(sp)
  240. LONG_L $21, PT_R21(sp)
  241. LONG_L $22, PT_R22(sp)
  242. LONG_L $23, PT_R23(sp)
  243. LONG_L $30, PT_R30(sp)
  244. .endm
  245. #if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)
  246. .macro RESTORE_SOME
  247. .set push
  248. .set reorder
  249. .set noat
  250. mfc0 a0, CP0_STATUS
  251. li v1, 0xff00
  252. ori a0, STATMASK
  253. xori a0, STATMASK
  254. mtc0 a0, CP0_STATUS
  255. and a0, v1
  256. LONG_L v0, PT_STATUS(sp)
  257. nor v1, $0, v1
  258. and v0, v1
  259. or v0, a0
  260. mtc0 v0, CP0_STATUS
  261. LONG_L $31, PT_R31(sp)
  262. LONG_L $28, PT_R28(sp)
  263. LONG_L $25, PT_R25(sp)
  264. LONG_L $7, PT_R7(sp)
  265. LONG_L $6, PT_R6(sp)
  266. LONG_L $5, PT_R5(sp)
  267. LONG_L $4, PT_R4(sp)
  268. LONG_L $3, PT_R3(sp)
  269. LONG_L $2, PT_R2(sp)
  270. .set pop
  271. .endm
  272. .macro RESTORE_SP_AND_RET
  273. .set push
  274. .set noreorder
  275. LONG_L k0, PT_EPC(sp)
  276. LONG_L sp, PT_R29(sp)
  277. jr k0
  278. rfe
  279. .set pop
  280. .endm
  281. #else
  282. .macro RESTORE_SOME
  283. .set push
  284. .set reorder
  285. .set noat
  286. mfc0 a0, CP0_STATUS
  287. ori a0, STATMASK
  288. xori a0, STATMASK
  289. mtc0 a0, CP0_STATUS
  290. li v1, 0xff00
  291. and a0, v1
  292. LONG_L v0, PT_STATUS(sp)
  293. nor v1, $0, v1
  294. and v0, v1
  295. or v0, a0
  296. mtc0 v0, CP0_STATUS
  297. LONG_L v1, PT_EPC(sp)
  298. MTC0 v1, CP0_EPC
  299. LONG_L $31, PT_R31(sp)
  300. LONG_L $28, PT_R28(sp)
  301. LONG_L $25, PT_R25(sp)
  302. #ifdef CONFIG_64BIT
  303. LONG_L $8, PT_R8(sp)
  304. LONG_L $9, PT_R9(sp)
  305. #endif
  306. LONG_L $7, PT_R7(sp)
  307. LONG_L $6, PT_R6(sp)
  308. LONG_L $5, PT_R5(sp)
  309. LONG_L $4, PT_R4(sp)
  310. LONG_L $3, PT_R3(sp)
  311. LONG_L $2, PT_R2(sp)
  312. .set pop
  313. .endm
  314. .macro RESTORE_SP_AND_RET
  315. LONG_L sp, PT_R29(sp)
  316. .set arch=r4000
  317. eret
  318. .set mips0
  319. .endm
  320. #endif
  321. .macro RESTORE_SP
  322. LONG_L sp, PT_R29(sp)
  323. .endm
  324. .macro RESTORE_ALL
  325. RESTORE_TEMP
  326. RESTORE_STATIC
  327. RESTORE_AT
  328. RESTORE_SOME
  329. RESTORE_SP
  330. .endm
  331. .macro RESTORE_ALL_AND_RET
  332. RESTORE_TEMP
  333. RESTORE_STATIC
  334. RESTORE_AT
  335. RESTORE_SOME
  336. RESTORE_SP_AND_RET
  337. .endm
  338. /*
  339. * Move to kernel mode and disable interrupts.
  340. * Set cp0 enable bit as sign that we're running on the kernel stack
  341. */
  342. .macro CLI
  343. mfc0 t0, CP0_STATUS
  344. li t1, ST0_CU0 | STATMASK
  345. or t0, t1
  346. xori t0, STATMASK
  347. mtc0 t0, CP0_STATUS
  348. irq_disable_hazard
  349. .endm
  350. /*
  351. * Move to kernel mode and enable interrupts.
  352. * Set cp0 enable bit as sign that we're running on the kernel stack
  353. */
  354. .macro STI
  355. mfc0 t0, CP0_STATUS
  356. li t1, ST0_CU0 | STATMASK
  357. or t0, t1
  358. xori t0, STATMASK & ~1
  359. mtc0 t0, CP0_STATUS
  360. irq_enable_hazard
  361. .endm
  362. /*
  363. * Just move to kernel mode and leave interrupts as they are. Note
  364. * for the R3000 this means copying the previous enable from IEp.
  365. * Set cp0 enable bit as sign that we're running on the kernel stack
  366. */
  367. .macro KMODE
  368. mfc0 t0, CP0_STATUS
  369. li t1, ST0_CU0 | (STATMASK & ~1)
  370. #if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)
  371. andi t2, t0, ST0_IEP
  372. srl t2, 2
  373. or t0, t2
  374. #endif
  375. or t0, t1
  376. xori t0, STATMASK & ~1
  377. mtc0 t0, CP0_STATUS
  378. irq_disable_hazard
  379. .endm
  380. #endif /* _ASM_STACKFRAME_H */