//! x86.rs — x86 (i386) register context capture and restore.
//! (Copy artifact removed: file-size note and pasted line-number gutter.)
  1. use core::arch::asm;
  2. use core::fmt;
  3. use core::ops;
  4. use gimli::{Register, X86};
// Match DWARF_FRAME_REGISTERS in libgcc: the number of DWARF register rules
// tracked per frame on this target (8 GPRs + RA + control registers, padded).
pub const MAX_REG_RULES: usize = 17;
/// Snapshot of the x86 machine state used while unwinding.
///
/// NOTE(review): the `#[repr(C)]` field order is load-bearing — the inline
/// assembly in `save_context`/`restore_context` addresses these fields by
/// fixed byte offsets (`registers` at +0, `ra` at +32, `mcxsr` at +36,
/// `fcw` at +40). Do not reorder or resize fields without updating the asm.
#[repr(C)]
#[derive(Clone, Default)]
pub struct Context {
    // General-purpose registers indexed by DWARF number 0..=7
    // (EAX, ECX, EDX, EBX, ESP, EBP, ESI, EDI per gimli's X86 numbering).
    pub registers: [usize; 8],
    // Return address (DWARF register X86::RA).
    pub ra: usize,
    // SSE control/status register (X86::MXCSR). Field name `mcxsr` is kept
    // as-is — it is part of the public interface, renaming would break callers.
    pub mcxsr: usize,
    // x87 FPU control word, saved/restored via fnstcw/fldcw below.
    pub fcw: usize,
}
  15. impl fmt::Debug for Context {
  16. fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
  17. let mut fmt = fmt.debug_struct("Context");
  18. for i in 0..=7 {
  19. fmt.field(
  20. X86::register_name(Register(i as _)).unwrap(),
  21. &self.registers[i],
  22. );
  23. }
  24. fmt.field("ra", &self.ra)
  25. .field("mcxsr", &self.mcxsr)
  26. .field("fcw", &self.fcw)
  27. .finish()
  28. }
  29. }
  30. impl ops::Index<Register> for Context {
  31. type Output = usize;
  32. fn index(&self, reg: Register) -> &usize {
  33. match reg {
  34. Register(0..=7) => &self.registers[reg.0 as usize],
  35. X86::RA => &self.ra,
  36. X86::MXCSR => &self.mcxsr,
  37. _ => unimplemented!(),
  38. }
  39. }
  40. }
  41. impl ops::IndexMut<gimli::Register> for Context {
  42. fn index_mut(&mut self, reg: Register) -> &mut usize {
  43. match reg {
  44. Register(0..=7) => &mut self.registers[reg.0 as usize],
  45. X86::RA => &mut self.ra,
  46. X86::MXCSR => &mut self.mcxsr,
  47. _ => unimplemented!(),
  48. }
  49. }
  50. }
/// Captures the current machine state into a stack-allocated [`Context`] and
/// calls `f(&mut ctx, ptr)` with it, then returns normally to the caller.
///
/// Implemented as a naked function: the asm builds a 52-byte frame that it
/// fills with the callee-saved GPRs, the caller's ESP and return address,
/// MXCSR, and the x87 control word, then passes a pointer to that frame
/// (the `Context`) plus `ptr` to `f` via the cdecl stack convention.
///
/// NOTE(review): `registers[0]` (the EAX slot at [esp + 0]) is never written
/// here — EAX is used as scratch — so that slot holds stack garbage when `f`
/// observes it.
#[naked]
pub extern "C-unwind" fn save_context(f: extern "C" fn(&mut Context, *mut ()), ptr: *mut ()) {
    // No need to save caller-saved registers here.
    //
    // Frame offsets (relative to esp after `sub esp, 52`):
    //   +4..=+28 : ECX, EDX, EBX, saved-ESP, EBP, ESI, EDI (Context.registers[1..=7])
    //   +32      : return address (Context.ra)
    //   +36      : MXCSR (Context.mcxsr), +40: FCW (Context.fcw)
    //   +52      : our return address; +56: arg `f`; +60: arg `ptr`.
    // After the two pushes, `f` sits at [esp + 64]; `add esp, 60` pops the
    // two args and the 52-byte frame before `ret`.
    unsafe {
        asm!(
            "
            sub esp, 52
            mov [esp + 4], ecx
            mov [esp + 8], edx
            mov [esp + 12], ebx
            /* Adjust the stack to account for the return address */
            lea eax, [esp + 56]
            mov [esp + 16], eax
            mov [esp + 20], ebp
            mov [esp + 24], esi
            mov [esp + 28], edi
            /* Return address */
            mov eax, [esp + 52]
            mov [esp + 32], eax
            stmxcsr [esp + 36]
            fnstcw [esp + 40]
            mov eax, [esp + 60]
            mov ecx, esp
            push eax
            push ecx
            call [esp + 64]
            add esp, 60
            ret
            ",
            options(noreturn)
        );
    }
}
/// Restores the full machine state from `ctx` and jumps to `ctx.ra`.
/// Never returns to the caller.
///
/// The context pointer is pinned in EDX (`in("edx") ctx`); EDX itself is
/// therefore restored last, from `registers[2]` at offset +8, after every
/// other field has been read. The return address is pushed onto the
/// just-restored stack and reached via `ret`.
///
/// # Safety
/// NOTE(review): inferred contract — `ctx` must describe a coherent machine
/// state: `registers[4]` (ESP, read from offset +16) must point to a valid,
/// live stack and `ra` must be a valid code address, since execution resumes
/// there with these register values. Confirm against callers.
pub unsafe fn restore_context(ctx: &Context) -> ! {
    unsafe {
        asm!(
            "
            /* Restore stack */
            mov esp, [edx + 16]
            /* Restore callee-saved control registers */
            ldmxcsr [edx + 36]
            fldcw [edx + 40]
            /* Restore return address */
            mov eax, [edx + 32]
            push eax
            /*
             * Restore general-purpose registers. Non-callee-saved registers are
             * also restored because sometimes it's used to pass unwind arguments.
             */
            mov eax, [edx + 0]
            mov ecx, [edx + 4]
            mov ebx, [edx + 12]
            mov ebp, [edx + 20]
            mov esi, [edx + 24]
            mov edi, [edx + 28]
            /* EDX restored last */
            mov edx, [edx + 8]
            ret
            ",
            in("edx") ctx,
            options(noreturn)
        );
    }
}