/* efi_stub.S */
/*
 * Function calling ABI conversion from Linux to EFI for x86_64
 *
 * Copyright (C) 2007 Intel Corp
 * Bibo Mao <[email protected]>
 * Huang Ying <[email protected]>
 * Copyright (C) 2012 Felipe Contreras <[email protected]>
 */
  9. #if !defined(HAVE_USE_MS_ABI)
/*
 * EFI calling conventions are documented at:
 * http://msdn.microsoft.com/en-us/library/ms235286%28v=vs.80%29.aspx
 * ELF calling conventions are documented at:
 * http://www.x86-64.org/documentation/abi.pdf
 *
 * Basically here are the conversion rules:
 * a) our function pointer is in %rdi
 * b) rsi through r8 (elf) aka rcx through r9 (ms) require stack space
 * on the MS side even though it's not getting used at all.
 * c) 8(%rsp) is always aligned to 16 in ELF, so %rsp is shifted 8 bytes extra
 * d) arguments are as follows: (elf -> ms)
 * 1) rdi -> rcx (32 saved)
 * 2) rsi -> rdx (32 saved)
 * 3) rdx -> r8 (32 saved)
 * 4) rcx -> r9 (32 saved)
 * 5) r8 -> 32(%rsp) (32 saved)
 * 6) r9 -> 40(%rsp) (48 saved)
 * 7) 8(%rsp) -> 48(%rsp) (48 saved)
 * 8) 16(%rsp) -> 56(%rsp) (64 saved)
 * 9) 24(%rsp) -> 64(%rsp) (64 saved)
 * 10) 32(%rsp) -> 72(%rsp) (80 saved)
 * e) because the first argument we receive in a thunker is actually the
 * function to be called, arguments are offset as such:
 * 0) rdi -> caller
 * 1) rsi -> rcx (32 saved)
 * 2) rdx -> rdx (32 saved)
 * 3) rcx -> r8 (32 saved)
 * 4) r8 -> r9 (32 saved)
 * 5) r9 -> 32(%rsp) (32 saved)
 * 6) 8(%rsp) -> 40(%rsp) (48 saved)
 * 7) 16(%rsp) -> 48(%rsp) (48 saved)
 * 8) 24(%rsp) -> 56(%rsp) (64 saved)
 * 9) 32(%rsp) -> 64(%rsp) (64 saved)
 * 10) 40(%rsp) -> 72(%rsp) (80 saved)
 * f) arguments need to be moved in opposite order to avoid clobbering
 */
  47. #define ENTRY(name) \
  48. .globl name; \
  49. name:
  50. ENTRY(efi_call0)
  51. subq $40, %rsp
  52. call *%rdi
  53. addq $40, %rsp
  54. ret
  55. ENTRY(efi_call1)
  56. subq $40, %rsp
  57. mov %rsi, %rcx
  58. call *%rdi
  59. addq $40, %rsp
  60. ret
  61. ENTRY(efi_call2)
  62. subq $40, %rsp
  63. /* mov %rdx, %rdx */
  64. mov %rsi, %rcx
  65. call *%rdi
  66. addq $40, %rsp
  67. ret
  68. ENTRY(efi_call3)
  69. subq $40, %rsp
  70. mov %rcx, %r8
  71. /* mov %rdx, %rdx */
  72. mov %rsi, %rcx
  73. call *%rdi
  74. addq $40, %rsp
  75. ret
  76. ENTRY(efi_call4)
  77. subq $40, %rsp
  78. mov %r8, %r9
  79. mov %rcx, %r8
  80. /* mov %rdx, %rdx */
  81. mov %rsi, %rcx
  82. call *%rdi
  83. addq $40, %rsp
  84. ret
  85. ENTRY(efi_call5)
  86. subq $40, %rsp
  87. mov %r9, 32(%rsp)
  88. mov %r8, %r9
  89. mov %rcx, %r8
  90. /* mov %rdx, %rdx */
  91. mov %rsi, %rcx
  92. call *%rdi
  93. addq $40, %rsp
  94. ret
  95. ENTRY(efi_call6)
  96. subq $56, %rsp
  97. mov 56+8(%rsp), %rax
  98. mov %rax, 40(%rsp)
  99. mov %r9, 32(%rsp)
  100. mov %r8, %r9
  101. mov %rcx, %r8
  102. /* mov %rdx, %rdx */
  103. mov %rsi, %rcx
  104. call *%rdi
  105. addq $56, %rsp
  106. ret
  107. ENTRY(efi_call7)
  108. subq $56, %rsp
  109. mov 56+16(%rsp), %rax
  110. mov %rax, 48(%rsp)
  111. mov 56+8(%rsp), %rax
  112. mov %rax, 40(%rsp)
  113. mov %r9, 32(%rsp)
  114. mov %r8, %r9
  115. mov %rcx, %r8
  116. /* mov %rdx, %rdx */
  117. mov %rsi, %rcx
  118. call *%rdi
  119. addq $56, %rsp
  120. ret
  121. ENTRY(efi_call8)
  122. subq $72, %rsp
  123. mov 72+24(%rsp), %rax
  124. mov %rax, 56(%rsp)
  125. mov 72+16(%rsp), %rax
  126. mov %rax, 48(%rsp)
  127. mov 72+8(%rsp), %rax
  128. mov %rax, 40(%rsp)
  129. mov %r9, 32(%rsp)
  130. mov %r8, %r9
  131. mov %rcx, %r8
  132. /* mov %rdx, %rdx */
  133. mov %rsi, %rcx
  134. call *%rdi
  135. addq $72, %rsp
  136. ret
  137. ENTRY(efi_call9)
  138. subq $72, %rsp
  139. mov 72+32(%rsp), %rax
  140. mov %rax, 64(%rsp)
  141. mov 72+24(%rsp), %rax
  142. mov %rax, 56(%rsp)
  143. mov 72+16(%rsp), %rax
  144. mov %rax, 48(%rsp)
  145. mov 72+8(%rsp), %rax
  146. mov %rax, 40(%rsp)
  147. mov %r9, 32(%rsp)
  148. mov %r8, %r9
  149. mov %rcx, %r8
  150. /* mov %rdx, %rdx */
  151. mov %rsi, %rcx
  152. call *%rdi
  153. addq $72, %rsp
  154. ret
  155. ENTRY(efi_call10)
  156. subq $88, %rsp
  157. mov 88+40(%rsp), %rax
  158. mov %rax, 72(%rsp)
  159. mov 88+32(%rsp), %rax
  160. mov %rax, 64(%rsp)
  161. mov 88+24(%rsp), %rax
  162. mov %rax, 56(%rsp)
  163. mov 88+16(%rsp), %rax
  164. mov %rax, 48(%rsp)
  165. mov 88+8(%rsp), %rax
  166. mov %rax, 40(%rsp)
  167. mov %r9, 32(%rsp)
  168. mov %r8, %r9
  169. mov %rcx, %r8
  170. /* mov %rdx, %rdx */
  171. mov %rsi, %rcx
  172. call *%rdi
  173. addq $88, %rsp
  174. ret
  175. #endif
  176. #if defined(__ELF__) && defined(__linux__)
  177. .section .note.GNU-stack,"",%progbits
  178. #endif