/* arch/arm/mach-exynos/sleep.S */
  1. /*
  2. * Copyright (c) 2013 Samsung Electronics Co., Ltd.
  3. * http://www.samsung.com
  4. *
  5. * Exynos low-level resume code
  6. *
  7. * This program is free software; you can redistribute it and/or modify
  8. * it under the terms of the GNU General Public License as published by
  9. * the Free Software Foundation; either version 2 of the License, or
  10. * (at your option) any later version.
  11. *
  12. * This program is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  15. * GNU General Public License for more details.
  16. */
  17. #include <linux/linkage.h>
  18. #include <asm/asm-offsets.h>
  19. #include <asm/hardware/cache-l2x0.h>
  20. #include "smc.h"
/*
 * MIDR (Main ID Register) comparison mask: keeps the implementer,
 * architecture and primary part number fields, discards the variant
 * and revision fields so any silicon revision of a part matches.
 */
#define CPU_MASK	0xff0ffff0
/* MIDR value of an ARM Cortex-A9 core (with revision bits masked off) */
#define CPU_CORTEX_A9	0x410fc090
/*
 * The following code is located into the .data section. This is to
 * allow l2x0_regs_phys to be accessed with a relative load while we
 * can't rely on any MMU translation. We could have put l2x0_regs_phys
 * in the .text section as well, but some setups might insist on it to
 * be truly read-only. (Reference from: arch/arm/kernel/sleep.S)
 */
	.data
	.align

/*
 * sleep magic, to allow the bootloader to check for an valid
 * image to resume to. Must be the first word before the
 * exynos_cpu_resume entry.
 */
	.word	0x2bedf00d		@ magic checked by the bootloader on resume
/*
 * exynos_cpu_resume
 *
 * Resume code entry for the bootloader to call after suspend.
 * Runs before the MMU is enabled. On Cortex-A9 the external L2C-310
 * cache controller must be restored early, before handing over to
 * the generic ARM cpu_resume path.
 */
ENTRY(exynos_cpu_resume)
#ifdef CONFIG_CACHE_L2X0
	mrc	p15, 0, r0, c0, c0, 0	@ r0 = MIDR (Main ID Register)
	ldr	r1, =CPU_MASK
	and	r0, r0, r1		@ drop variant/revision fields
	ldr	r1, =CPU_CORTEX_A9
	cmp	r0, r1
	bleq	l2c310_early_resume	@ Cortex-A9 only: re-enable outer L2 cache
#endif
	b	cpu_resume		@ generic ARM resume (arch/arm/kernel/sleep.S)
ENDPROC(exynos_cpu_resume)
	.align
/*
 * exynos_cpu_resume_ns
 *
 * Resume entry used when the kernel runs in non-secure state: the
 * cp15 diagnostic/power-control registers and the L2C-310 outer
 * cache are secure-only, so they are restored through SMC calls to
 * the secure firmware before falling through to cpu_resume.
 *
 * NOTE(review): each smc may corrupt r0-r3, hence the reloads of the
 * l2x0_saved_regs pointer between calls.
 */
ENTRY(exynos_cpu_resume_ns)
	mrc	p15, 0, r0, c0, c0, 0	@ r0 = MIDR
	ldr	r1, =CPU_MASK
	and	r0, r0, r1		@ drop variant/revision fields
	ldr	r1, =CPU_CORTEX_A9
	cmp	r0, r1
	bne	skip_cp15		@ cp15 restore is Cortex-A9 specific
	adr	r0, cp15_save_power
	ldr	r1, [r0]		@ r1 = saved cp15 power control value
	adr	r0, cp15_save_diag
	ldr	r2, [r0]		@ r2 = saved cp15 diagnostic value
	mov	r0, #SMC_CMD_C15RESUME
	dsb				@ drain memory accesses before entering firmware
	smc	#0			@ firmware writes the secure cp15 registers
#ifdef CONFIG_CACHE_L2X0
	/* r0 = &l2x0_saved_regs, computed PC-relative (MMU may be off) */
	adr	r0, 1f
	ldr	r2, [r0]
	add	r0, r2, r0

	/* Check that the address has been initialised. */
	ldr	r1, [r0, #L2X0_R_PHY_BASE]
	teq	r1, #0
	beq	skip_l2x0

	/* Check if controller has been enabled. */
	ldr	r2, [r1, #L2X0_CTRL]
	tst	r2, #0x1
	bne	skip_l2x0		@ already on: nothing to restore

	/* Hand the saved latency/prefetch settings to secure firmware. */
	ldr	r1, [r0, #L2X0_R_TAG_LATENCY]
	ldr	r2, [r0, #L2X0_R_DATA_LATENCY]
	ldr	r3, [r0, #L2X0_R_PREFETCH_CTRL]
	mov	r0, #SMC_CMD_L2X0SETUP1
	smc	#0

	/* Reload saved regs pointer because smc corrupts registers. */
	adr	r0, 1f
	ldr	r2, [r0]
	add	r0, r2, r0

	ldr	r1, [r0, #L2X0_R_PWR_CTRL]
	ldr	r2, [r0, #L2X0_R_AUX_CTRL]
	mov	r0, #SMC_CMD_L2X0SETUP2
	smc	#0

	mov	r0, #SMC_CMD_L2X0INVALL	@ invalidate all L2 lines before enabling
	smc	#0

	mov	r1, #1			@ r1 = 1 -> enable
	mov	r0, #SMC_CMD_L2X0CTRL	@ turn the L2 cache controller on
	smc	#0
skip_l2x0:
#endif /* CONFIG_CACHE_L2X0 */
skip_cp15:
	b	cpu_resume		@ continue in the generic ARM resume path
ENDPROC(exynos_cpu_resume_ns)
	/*
	 * Storage for the secure cp15 register values saved at suspend
	 * time and replayed via SMC_CMD_C15RESUME above. Kept in .data
	 * so they are reachable with PC-relative loads before the MMU
	 * is enabled.
	 */
	.globl cp15_save_diag
cp15_save_diag:
	.long	0	@ cp15 diagnostic
	.globl cp15_save_power
cp15_save_power:
	.long	0	@ cp15 power control
#ifdef CONFIG_CACHE_L2X0
	.align
	/*
	 * PC-relative offset to l2x0_saved_regs: the resume code reads
	 * this word and adds it to its own address ("add r0, r2, r0"),
	 * so the structure can be located without MMU translation.
	 */
1:	.long l2x0_saved_regs - .
#endif /* CONFIG_CACHE_L2X0 */