/*
 * exceptions.S — AArch64 EL1 exception vector table and the macros used to
 * save/restore volatile register state around calls into the Rust handlers.
 */
  1. /**
  2. * Saves the volatile registers onto the stack. This currently takes 14
  3. * instructions, so it can be used in exception handlers with 18 instructions
  4. * left.
  5. *
  6. * On return, x0 and x1 are initialised to elr_el2 and spsr_el2 respectively,
  7. * which can be used as the first and second arguments of a subsequent call.
  8. */
  9. .macro save_volatile_to_stack
  10. /* Reserve stack space and save registers x0-x18, x29 & x30. */
  11. stp x0, x1, [sp, #-(8 * 24)]!
  12. stp x2, x3, [sp, #8 * 2]
  13. stp x4, x5, [sp, #8 * 4]
  14. stp x6, x7, [sp, #8 * 6]
  15. stp x8, x9, [sp, #8 * 8]
  16. stp x10, x11, [sp, #8 * 10]
  17. stp x12, x13, [sp, #8 * 12]
  18. stp x14, x15, [sp, #8 * 14]
  19. stp x16, x17, [sp, #8 * 16]
  20. str x18, [sp, #8 * 18]
  21. stp x29, x30, [sp, #8 * 20]
  22. /*
  23. * Save elr_el1 & spsr_el1. This such that we can take nested exception
  24. * and still be able to unwind.
  25. */
  26. mrs x0, elr_el1
  27. mrs x1, spsr_el1
  28. stp x0, x1, [sp, #8 * 22]
  29. .endm
  30. /**
  31. * Restores the volatile registers from the stack. This currently takes 14
  32. * instructions, so it can be used in exception handlers while still leaving 18
  33. * instructions left; if paired with save_volatile_to_stack, there are 4
  34. * instructions to spare.
  35. */
  36. .macro restore_volatile_from_stack
  37. /* Restore registers x2-x18, x29 & x30. */
  38. ldp x2, x3, [sp, #8 * 2]
  39. ldp x4, x5, [sp, #8 * 4]
  40. ldp x6, x7, [sp, #8 * 6]
  41. ldp x8, x9, [sp, #8 * 8]
  42. ldp x10, x11, [sp, #8 * 10]
  43. ldp x12, x13, [sp, #8 * 12]
  44. ldp x14, x15, [sp, #8 * 14]
  45. ldp x16, x17, [sp, #8 * 16]
  46. ldr x18, [sp, #8 * 18]
  47. ldp x29, x30, [sp, #8 * 20]
  48. /* Restore registers elr_el1 & spsr_el1, using x0 & x1 as scratch. */
  49. ldp x0, x1, [sp, #8 * 22]
  50. msr elr_el1, x0
  51. msr spsr_el1, x1
  52. /* Restore x0 & x1, and release stack space. */
  53. ldp x0, x1, [sp], #8 * 24
  54. .endm
  55. /**
  56. * This is a generic handler for exceptions taken at the current EL while using
  57. * SP0. It behaves similarly to the SPx case by first switching to SPx, doing
  58. * the work, then switching back to SP0 before returning.
  59. *
  60. * Switching to SPx and calling the Rust handler takes 16 instructions. To
  61. * restore and return we need an additional 16 instructions, so we can implement
  62. * the whole handler within the allotted 32 instructions.
  63. */
  64. .macro current_exception_sp0 handler:req
  65. msr spsel, #1
  66. save_volatile_to_stack
  67. bl \handler
  68. restore_volatile_from_stack
  69. msr spsel, #0
  70. eret
  71. .endm
  72. /**
  73. * This is a generic handler for exceptions taken at the current EL while using
  74. * SPx. It saves volatile registers, calls the Rust handler, restores volatile
  75. * registers, then returns.
  76. *
  77. * This also works for exceptions taken from EL0, if we don't care about
  78. * non-volatile registers.
  79. *
  80. * Saving state and jumping to the Rust handler takes 15 instructions, and
  81. * restoring and returning also takes 15 instructions, so we can fit the whole
  82. * handler in 30 instructions, under the limit of 32.
  83. */
  84. .macro current_exception_spx handler:req
  85. save_volatile_to_stack
  86. bl \handler
  87. restore_volatile_from_stack
  88. eret
  89. .endm
  90. .section .text.vector_table_el1, "ax"
  91. .global vector_table_el1
  92. .balign 0x800
  93. vector_table_el1:
  94. sync_cur_sp0:
  95. current_exception_sp0 sync_exception_current
  96. .balign 0x80
  97. irq_cur_sp0:
  98. current_exception_sp0 irq_current
  99. .balign 0x80
  100. fiq_cur_sp0:
  101. current_exception_sp0 fiq_current
  102. .balign 0x80
  103. serr_cur_sp0:
  104. current_exception_sp0 serr_current
  105. .balign 0x80
  106. sync_cur_spx:
  107. current_exception_spx sync_exception_current
  108. .balign 0x80
  109. irq_cur_spx:
  110. current_exception_spx irq_current
  111. .balign 0x80
  112. fiq_cur_spx:
  113. current_exception_spx fiq_current
  114. .balign 0x80
  115. serr_cur_spx:
  116. current_exception_spx serr_current
  117. .balign 0x80
  118. sync_lower_64:
  119. current_exception_spx sync_lower
  120. .balign 0x80
  121. irq_lower_64:
  122. current_exception_spx irq_lower
  123. .balign 0x80
  124. fiq_lower_64:
  125. current_exception_spx fiq_lower
  126. .balign 0x80
  127. serr_lower_64:
  128. current_exception_spx serr_lower
  129. .balign 0x80
  130. sync_lower_32:
  131. current_exception_spx sync_lower
  132. .balign 0x80
  133. irq_lower_32:
  134. current_exception_spx irq_lower
  135. .balign 0x80
  136. fiq_lower_32:
  137. current_exception_spx fiq_lower
  138. .balign 0x80
  139. serr_lower_32:
  140. current_exception_spx serr_lower