/* efi_stub_64.S */
/*
 * Function calling ABI conversion from Linux to EFI for x86_64
 *
 * Copyright (C) 2007 Intel Corp
 *	Bibo Mao <bibo.mao@intel.com>
 *	Huang Ying <ying.huang@intel.com>
 */
  8. #include <linux/linkage.h>
  9. #define SAVE_XMM \
  10. mov %rsp, %rax; \
  11. subq $0x70, %rsp; \
  12. and $~0xf, %rsp; \
  13. mov %rax, (%rsp); \
  14. mov %cr0, %rax; \
  15. clts; \
  16. mov %rax, 0x8(%rsp); \
  17. movaps %xmm0, 0x60(%rsp); \
  18. movaps %xmm1, 0x50(%rsp); \
  19. movaps %xmm2, 0x40(%rsp); \
  20. movaps %xmm3, 0x30(%rsp); \
  21. movaps %xmm4, 0x20(%rsp); \
  22. movaps %xmm5, 0x10(%rsp)
  23. #define RESTORE_XMM \
  24. movaps 0x60(%rsp), %xmm0; \
  25. movaps 0x50(%rsp), %xmm1; \
  26. movaps 0x40(%rsp), %xmm2; \
  27. movaps 0x30(%rsp), %xmm3; \
  28. movaps 0x20(%rsp), %xmm4; \
  29. movaps 0x10(%rsp), %xmm5; \
  30. mov 0x8(%rsp), %rsi; \
  31. mov %rsi, %cr0; \
  32. mov (%rsp), %rsp
  33. /* stolen from gcc */
  34. .macro FLUSH_TLB_ALL
  35. movq %r15, efi_scratch(%rip)
  36. movq %r14, efi_scratch+8(%rip)
  37. movq %cr4, %r15
  38. movq %r15, %r14
  39. andb $0x7f, %r14b
  40. movq %r14, %cr4
  41. movq %r15, %cr4
  42. movq efi_scratch+8(%rip), %r14
  43. movq efi_scratch(%rip), %r15
  44. .endm
  45. .macro SWITCH_PGT
  46. cmpb $0, efi_scratch+24(%rip)
  47. je 1f
  48. movq %r15, efi_scratch(%rip) # r15
  49. # save previous CR3
  50. movq %cr3, %r15
  51. movq %r15, efi_scratch+8(%rip) # prev_cr3
  52. movq efi_scratch+16(%rip), %r15 # EFI pgt
  53. movq %r15, %cr3
  54. 1:
  55. .endm
  56. .macro RESTORE_PGT
  57. cmpb $0, efi_scratch+24(%rip)
  58. je 2f
  59. movq efi_scratch+8(%rip), %r15
  60. movq %r15, %cr3
  61. movq efi_scratch(%rip), %r15
  62. FLUSH_TLB_ALL
  63. 2:
  64. .endm
  65. ENTRY(efi_call0)
  66. SAVE_XMM
  67. subq $32, %rsp
  68. SWITCH_PGT
  69. call *%rdi
  70. RESTORE_PGT
  71. addq $32, %rsp
  72. RESTORE_XMM
  73. ret
  74. ENDPROC(efi_call0)
  75. ENTRY(efi_call1)
  76. SAVE_XMM
  77. subq $32, %rsp
  78. mov %rsi, %rcx
  79. SWITCH_PGT
  80. call *%rdi
  81. RESTORE_PGT
  82. addq $32, %rsp
  83. RESTORE_XMM
  84. ret
  85. ENDPROC(efi_call1)
  86. ENTRY(efi_call2)
  87. SAVE_XMM
  88. subq $32, %rsp
  89. mov %rsi, %rcx
  90. SWITCH_PGT
  91. call *%rdi
  92. RESTORE_PGT
  93. addq $32, %rsp
  94. RESTORE_XMM
  95. ret
  96. ENDPROC(efi_call2)
  97. ENTRY(efi_call3)
  98. SAVE_XMM
  99. subq $32, %rsp
  100. mov %rcx, %r8
  101. mov %rsi, %rcx
  102. SWITCH_PGT
  103. call *%rdi
  104. RESTORE_PGT
  105. addq $32, %rsp
  106. RESTORE_XMM
  107. ret
  108. ENDPROC(efi_call3)
  109. ENTRY(efi_call4)
  110. SAVE_XMM
  111. subq $32, %rsp
  112. mov %r8, %r9
  113. mov %rcx, %r8
  114. mov %rsi, %rcx
  115. SWITCH_PGT
  116. call *%rdi
  117. RESTORE_PGT
  118. addq $32, %rsp
  119. RESTORE_XMM
  120. ret
  121. ENDPROC(efi_call4)
  122. ENTRY(efi_call5)
  123. SAVE_XMM
  124. subq $48, %rsp
  125. mov %r9, 32(%rsp)
  126. mov %r8, %r9
  127. mov %rcx, %r8
  128. mov %rsi, %rcx
  129. SWITCH_PGT
  130. call *%rdi
  131. RESTORE_PGT
  132. addq $48, %rsp
  133. RESTORE_XMM
  134. ret
  135. ENDPROC(efi_call5)
  136. ENTRY(efi_call6)
  137. SAVE_XMM
  138. mov (%rsp), %rax
  139. mov 8(%rax), %rax
  140. subq $48, %rsp
  141. mov %r9, 32(%rsp)
  142. mov %rax, 40(%rsp)
  143. mov %r8, %r9
  144. mov %rcx, %r8
  145. mov %rsi, %rcx
  146. SWITCH_PGT
  147. call *%rdi
  148. RESTORE_PGT
  149. addq $48, %rsp
  150. RESTORE_XMM
  151. ret
  152. ENDPROC(efi_call6)
  153. .data
  154. ENTRY(efi_scratch)
  155. .fill 3,8,0
  156. .byte 0