hibernate_asm_32.S 2.3 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110
  1. /* SPDX-License-Identifier: GPL-2.0 */
  2. /*
  3. * This may not use any stack, nor any variable that is not "NoSave":
  4. *
  5. * Its rewriting one kernel image with another. What is stack in "old"
  6. * image could very well be data page in "new" image, and overwriting
  7. * your own stack under you is bad idea.
  8. */
  9. #include <linux/linkage.h>
  10. #include <asm/segment.h>
  11. #include <asm/page_types.h>
  12. #include <asm/asm-offsets.h>
  13. #include <asm/processor-flags.h>
  14. #include <asm/frame.h>
  15. .text
  16. ENTRY(swsusp_arch_suspend)
  17. movl %esp, saved_context_esp
  18. movl %ebx, saved_context_ebx
  19. movl %ebp, saved_context_ebp
  20. movl %esi, saved_context_esi
  21. movl %edi, saved_context_edi
  22. pushfl
  23. popl saved_context_eflags
  24. /* save cr3 */
  25. movl %cr3, %eax
  26. movl %eax, restore_cr3
  27. FRAME_BEGIN
  28. call swsusp_save
  29. FRAME_END
  30. ret
  31. ENDPROC(swsusp_arch_suspend)
  32. ENTRY(restore_image)
  33. /* prepare to jump to the image kernel */
  34. movl restore_jump_address, %ebx
  35. movl restore_cr3, %ebp
  36. movl mmu_cr4_features, %ecx
  37. /* jump to relocated restore code */
  38. movl relocated_restore_code, %eax
  39. jmpl *%eax
  40. /* code below has been relocated to a safe page */
  41. ENTRY(core_restore_code)
  42. movl temp_pgt, %eax
  43. movl %eax, %cr3
  44. jecxz 1f # cr4 Pentium and higher, skip if zero
  45. andl $~(X86_CR4_PGE), %ecx
  46. movl %ecx, %cr4; # turn off PGE
  47. movl %cr3, %eax; # flush TLB
  48. movl %eax, %cr3
  49. 1:
  50. movl restore_pblist, %edx
  51. .p2align 4,,7
  52. copy_loop:
  53. testl %edx, %edx
  54. jz done
  55. movl pbe_address(%edx), %esi
  56. movl pbe_orig_address(%edx), %edi
  57. movl $(PAGE_SIZE >> 2), %ecx
  58. rep
  59. movsl
  60. movl pbe_next(%edx), %edx
  61. jmp copy_loop
  62. .p2align 4,,7
  63. done:
  64. jmpl *%ebx
  65. /* code below belongs to the image kernel */
  66. .align PAGE_SIZE
  67. ENTRY(restore_registers)
  68. /* go back to the original page tables */
  69. movl %ebp, %cr3
  70. movl mmu_cr4_features, %ecx
  71. jecxz 1f # cr4 Pentium and higher, skip if zero
  72. movl %ecx, %cr4; # turn PGE back on
  73. 1:
  74. movl saved_context_esp, %esp
  75. movl saved_context_ebp, %ebp
  76. movl saved_context_ebx, %ebx
  77. movl saved_context_esi, %esi
  78. movl saved_context_edi, %edi
  79. pushl saved_context_eflags
  80. popfl
  81. /* Saved in save_processor_state. */
  82. movl $saved_context, %eax
  83. lgdt saved_context_gdt_desc(%eax)
  84. xorl %eax, %eax
  85. /* tell the hibernation core that we've just restored the memory */
  86. movl %eax, in_suspend
  87. ret
  88. ENDPROC(restore_registers)