r4k_switch.S

/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 1995, 1996, 1998, 1999, 2002, 2003 Ralf Baechle
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 * Copyright (C) 1994, 1995, 1996, by Andreas Busse
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) 2000 MIPS Technologies, Inc.
 *    written by Carsten Langgaard, carstenl@mips.com
 */
#include <asm/asm.h>
#include <asm/cachectl.h>
#include <asm/fpregdef.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/pgtable-bits.h>
#include <asm/regdef.h>
#include <asm/stackframe.h>
#include <asm/thread_info.h>

#include <asm/asmmacro.h>

/*
 * Offset to the current process status flags, the first 32 bytes of the
 * stack are not used.
 */
#define ST_OFF (_THREAD_SIZE - 32 - PT_SIZE + PT_STATUS)
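
/*
 * Note (added): ST_OFF appears to locate the saved CP0 Status slot inside
 * the struct pt_regs that sits at the top of the task's kernel stack,
 * addressed relative to the thread_info base (see the uses via t3 below).
 */
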
#ifndef USE_ALTERNATE_RESUME_IMPL
/*
 * task_struct *resume(task_struct *prev, task_struct *next,
 *                     struct thread_info *next_ti, s32 fp_save)
 */
        .align  5
        LEAF(resume)
        mfc0    t1, CP0_STATUS
        LONG_S  t1, THREAD_STATUS(a0)
        cpu_save_nonscratch a0
        LONG_S  ra, THREAD_REG31(a0)

        /*
         * Check whether we need to save any FP context. FP context is saved
         * iff the process has used the context with the scalar FPU or the MSA
         * ASE in the current time slice, as indicated by _TIF_USEDFPU and
         * _TIF_USEDMSA respectively. switch_to will have set fp_save
         * accordingly to an FP_SAVE_ enum value.
         */
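        /*
         * Note (added): fp_save is assumed to use the FP_SAVE_* encoding
         * from asm/switch_to.h: 0 = nothing to save, a positive value =
         * scalar FP only, a negative value = full MSA vector context.
         * That matches the beqz (== 0) and bgtz (> 0) tests below.
         */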
        beqz    a3, 2f

        /*
         * We do. Clear the saved CU1 bit for prev, such that next time it is
         * scheduled it will start in userland with the FPU disabled. If the
         * task uses the FPU then it will be enabled again via the do_cpu trap.
         * This allows us to lazily restore the FP context.
         */
        PTR_L   t3, TASK_THREAD_INFO(a0)
        LONG_L  t0, ST_OFF(t3)
        li      t1, ~ST0_CU1
        and     t0, t0, t1
        LONG_S  t0, ST_OFF(t3)

        /* Check whether we're saving scalar or vector context. */
        bgtz    a3, 1f

        /* Save 128b MSA vector context + scalar FP control & status. */
        cfc1    t1, fcr31
        msa_save_all    a0
        sw      t1, THREAD_FCR31(a0)
        b       2f

1:      /* Save 32b/64b scalar FP context. */
        fpu_save_double a0 t0 t1        # c0_status passed in t0
                                        # clobbers t1
2:

#if defined(CONFIG_CC_STACKPROTECTOR) && !defined(CONFIG_SMP)
        PTR_LA  t8, __stack_chk_guard
        LONG_L  t9, TASK_STACK_CANARY(a1)
        LONG_S  t9, 0(t8)
#endif
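
        /*
         * Note (added): with CONFIG_CC_STACKPROTECTOR on a UP kernel the
         * compiler-visible guard is the single global __stack_chk_guard,
         * so it presumably has to be reloaded here with the incoming
         * task's per-task canary; on SMP the guard is not switched per
         * task and this copy is skipped.
         */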

        /*
         * The order of restoring the registers takes care of the race
         * updating $28, $29 and kernelsp without disabling ints.
         */
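        /*
         * Note (added): presumably the point is that an exception taken
         * mid-switch must never see a $28/thread_info, $29/stack pointer
         * and saved kernelsp that belong to different tasks; restoring
         * them in this order keeps them consistent without an irq-off
         * section.
         */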
        move    $28, a2
        cpu_restore_nonscratch a1

        PTR_ADDU        t0, $28, _THREAD_SIZE - 32
        set_saved_sp    t0, t1, t2
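
        /*
         * Note (added): 0xff01 covers the interrupt mask bits IM7..IM0
         * (bits 15:8) plus IE (bit 0).  The merge below keeps those bits
         * from the live CP0 Status and takes everything else (CU bits,
         * KSU, ...) from next's saved thread status.
         */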
        mfc0    t1, CP0_STATUS          /* Do we really need this? */
        li      a3, 0xff01
        and     t1, a3
        LONG_L  a2, THREAD_STATUS(a1)
        nor     a3, $0, a3
        and     a2, a3
        or      a2, t1
        mtc0    a2, CP0_STATUS
        move    v0, a0
        jr      ra
        END(resume)

#endif /* USE_ALTERNATE_RESUME_IMPL */

/*
 * Save a thread's fp context.
 */
LEAF(_save_fp)
#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)
        mfc0    t0, CP0_STATUS
#endif
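        /*
         * Note (added): the Status value is assumed to be needed so that
         * fpu_save_double can test Status.FR and save either 16 or 32
         * double precision registers.
         */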
        fpu_save_double a0 t0 t1                # clobbers t1
        jr      ra
        END(_save_fp)

/*
 * Restore a thread's fp context.
 */
LEAF(_restore_fp)
#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)
        mfc0    t0, CP0_STATUS
#endif
        fpu_restore_double a0 t0 t1             # clobbers t1
        jr      ra
        END(_restore_fp)

#ifdef CONFIG_CPU_HAS_MSA

/*
 * Save a thread's MSA vector context.
 */
LEAF(_save_msa)
        msa_save_all    a0
        jr      ra
        END(_save_msa)

/*
 * Restore a thread's MSA vector context.
 */
LEAF(_restore_msa)
        msa_restore_all a0
        jr      ra
        END(_restore_msa)
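
/*
 * Note (added): presumably initialises only the upper 64 bits of each
 * 128-bit MSA vector register, for a task whose lower halves already
 * contain live scalar FP state.
 */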
LEAF(_init_msa_upper)
        msa_init_all_upper
        jr      ra
        END(_init_msa_upper)

#endif

/*
 * Load the FPU with signaling NaNs.  The bit pattern we use has the
 * property that it represents a signaling NaN no matter whether it is
 * interpreted as single or as double precision.
 *
 * We initialize fcr31 to rounding to nearest, no exceptions.
 */

#define FPU_DEFAULT  0x00000000

LEAF(_init_fpu)
        mfc0    t0, CP0_STATUS
        li      t1, ST0_CU1
        or      t0, t1
        mtc0    t0, CP0_STATUS
        enable_fpu_hazard

        li      t1, FPU_DEFAULT
        ctc1    t1, fcr31

        li      t1, -1                          # SNaN

#ifdef CONFIG_64BIT
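        /*
         * Note (added): shifting Status left by 5 moves ST0_FR (bit 26)
         * into the sign bit, so the bgez below skips the odd-numbered
         * registers when FR=0 (16 double registers); the even-numbered
         * ones are filled in by the r4000 block at the end.
         */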
        sll     t0, t0, 5
        bgez    t0, 1f                          # 16 / 32 register mode?

        dmtc1   t1, $f1
        dmtc1   t1, $f3
        dmtc1   t1, $f5
        dmtc1   t1, $f7
        dmtc1   t1, $f9
        dmtc1   t1, $f11
        dmtc1   t1, $f13
        dmtc1   t1, $f15
        dmtc1   t1, $f17
        dmtc1   t1, $f19
        dmtc1   t1, $f21
        dmtc1   t1, $f23
        dmtc1   t1, $f25
        dmtc1   t1, $f27
        dmtc1   t1, $f29
        dmtc1   t1, $f31
1:
#endif

#ifdef CONFIG_CPU_MIPS32
        mtc1    t1, $f0
        mtc1    t1, $f1
        mtc1    t1, $f2
        mtc1    t1, $f3
        mtc1    t1, $f4
        mtc1    t1, $f5
        mtc1    t1, $f6
        mtc1    t1, $f7
        mtc1    t1, $f8
        mtc1    t1, $f9
        mtc1    t1, $f10
        mtc1    t1, $f11
        mtc1    t1, $f12
        mtc1    t1, $f13
        mtc1    t1, $f14
        mtc1    t1, $f15
        mtc1    t1, $f16
        mtc1    t1, $f17
        mtc1    t1, $f18
        mtc1    t1, $f19
        mtc1    t1, $f20
        mtc1    t1, $f21
        mtc1    t1, $f22
        mtc1    t1, $f23
        mtc1    t1, $f24
        mtc1    t1, $f25
        mtc1    t1, $f26
        mtc1    t1, $f27
        mtc1    t1, $f28
        mtc1    t1, $f29
        mtc1    t1, $f30
        mtc1    t1, $f31

#ifdef CONFIG_CPU_MIPS32_R2
        .set    push
        .set    mips64r2
        sll     t0, t0, 5                       # is Status.FR set?
        bgez    t0, 1f                          # no: skip setting upper 32b

        mthc1   t1, $f0
        mthc1   t1, $f1
        mthc1   t1, $f2
        mthc1   t1, $f3
        mthc1   t1, $f4
        mthc1   t1, $f5
        mthc1   t1, $f6
        mthc1   t1, $f7
        mthc1   t1, $f8
        mthc1   t1, $f9
        mthc1   t1, $f10
        mthc1   t1, $f11
        mthc1   t1, $f12
        mthc1   t1, $f13
        mthc1   t1, $f14
        mthc1   t1, $f15
        mthc1   t1, $f16
        mthc1   t1, $f17
        mthc1   t1, $f18
        mthc1   t1, $f19
        mthc1   t1, $f20
        mthc1   t1, $f21
        mthc1   t1, $f22
        mthc1   t1, $f23
        mthc1   t1, $f24
        mthc1   t1, $f25
        mthc1   t1, $f26
        mthc1   t1, $f27
        mthc1   t1, $f28
        mthc1   t1, $f29
        mthc1   t1, $f30
        mthc1   t1, $f31

1:      .set    pop
#endif /* CONFIG_CPU_MIPS32_R2 */
#else
        .set    arch=r4000
        dmtc1   t1, $f0
        dmtc1   t1, $f2
        dmtc1   t1, $f4
        dmtc1   t1, $f6
        dmtc1   t1, $f8
        dmtc1   t1, $f10
        dmtc1   t1, $f12
        dmtc1   t1, $f14
        dmtc1   t1, $f16
        dmtc1   t1, $f18
        dmtc1   t1, $f20
        dmtc1   t1, $f22
        dmtc1   t1, $f24
        dmtc1   t1, $f26
        dmtc1   t1, $f28
        dmtc1   t1, $f30
#endif
        jr      ra
        END(_init_fpu)