/* entry.S — AArch64 bare-metal image entry point. */
/*
 * adr_l: load the address of \sym into \reg, PC-relatively.
 * adrp resolves the 4 KiB page (+/-4 GiB reach), :lo12: adds the offset
 * within the page — unlike plain `adr`, which only reaches +/-1 MiB.
 */
.macro adr_l, reg:req, sym:req
	adrp \reg, \sym
	add \reg, \reg, :lo12:\sym
.endm
/*
 * mov_i: load an arbitrary 64-bit immediate \imm into \reg.
 * movz writes the top 16-bit group and zeroes the rest; the movk
 * instructions insert the remaining groups (_nc = no overflow check,
 * since each group is only a 16-bit slice of the full value).
 */
.macro mov_i, reg:req, imm:req
	movz \reg, :abs_g3:\imm
	movk \reg, :abs_g2_nc:\imm
	movk \reg, :abs_g1_nc:\imm
	movk \reg, :abs_g0_nc:\imm
.endm
/*
 * MAIR_EL1: attr0 = Device-nGnRE (0x04), attr1 = Normal memory,
 * inner/outer write-back non-transient read/write-allocate (0xff).
 */
.set .L_MAIR_DEV_nGnRE, 0x04
.set .L_MAIR_MEM_WBWA, 0xff
.set .Lmairval, .L_MAIR_DEV_nGnRE | (.L_MAIR_MEM_WBWA << 8)

/* 4 KiB granule size for TTBR0_EL1. */
.set .L_TCR_TG0_4KB, 0x0 << 14
/* 4 KiB granule size for TTBR1_EL1. */
.set .L_TCR_TG1_4KB, 0x2 << 30
/* Disable translation table walk for TTBR1_EL1, generating a translation fault instead. */
.set .L_TCR_EPD1, 0x1 << 23
/* Translation table walks for TTBR0_EL1 are inner sharable. */
.set .L_TCR_SH_INNER, 0x3 << 12
/*
 * Translation table walks for TTBR0_EL1 are outer write-back read-allocate write-allocate
 * cacheable.
 */
.set .L_TCR_RGN_OWB, 0x1 << 10
/*
 * Translation table walks for TTBR0_EL1 are inner write-back read-allocate write-allocate
 * cacheable.
 */
.set .L_TCR_RGN_IWB, 0x1 << 8
/* Size offset for TTBR0_EL1 is 2**39 bytes (512 GiB), i.e. T0SZ = 25. */
.set .L_TCR_T0SZ_512, 64 - 39
/* TCR_EL1 value; entry additionally merges in the IPS field at runtime. */
.set .Ltcrval, .L_TCR_TG0_4KB | .L_TCR_TG1_4KB | .L_TCR_EPD1 | .L_TCR_RGN_OWB
.set .Ltcrval, .Ltcrval | .L_TCR_RGN_IWB | .L_TCR_SH_INNER | .L_TCR_T0SZ_512

/* Stage 1 instruction access cacheability is unaffected. */
.set .L_SCTLR_ELx_I, 0x1 << 12
/* SP alignment fault if SP is not aligned to a 16 byte boundary. */
.set .L_SCTLR_ELx_SA, 0x1 << 3
/* Stage 1 data access cacheability is unaffected. */
.set .L_SCTLR_ELx_C, 0x1 << 2
/* EL0 and EL1 stage 1 MMU enabled. */
.set .L_SCTLR_ELx_M, 0x1 << 0
/* Privileged Access Never is unchanged on taking an exception to EL1. */
.set .L_SCTLR_EL1_SPAN, 0x1 << 23
/* SETEND instruction disabled at EL0 in aarch32 mode. */
.set .L_SCTLR_EL1_SED, 0x1 << 8
/* Various IT instructions are disabled at EL0 in aarch32 mode. */
.set .L_SCTLR_EL1_ITD, 0x1 << 7
/* Bits 11, 20, 22, 28 and 29 are RES1 in SCTLR_EL1 for ARMv8.0. */
.set .L_SCTLR_EL1_RES1, (0x1 << 11) | (0x1 << 20) | (0x1 << 22) | (0x1 << 28) | (0x1 << 29)
/* SCTLR_EL1 value written by entry to turn on the MMU and caches. */
.set .Lsctlrval, .L_SCTLR_ELx_M | .L_SCTLR_ELx_C | .L_SCTLR_ELx_SA | .L_SCTLR_EL1_ITD | .L_SCTLR_EL1_SED
.set .Lsctlrval, .Lsctlrval | .L_SCTLR_ELx_I | .L_SCTLR_EL1_SPAN | .L_SCTLR_EL1_RES1
/**
 * This is a generic entry point for an image. It carries out the operations required to prepare the
 * loaded image to be run. Specifically, it configures and enables the MMU and caches, zeroes the
 * bss section (using only x29 and x30 as scratch — no stack exists yet), prepares the stack,
 * enables floating point, and sets up the exception vector. It preserves x0-x3 for the Rust entry
 * point, as these may contain boot parameters.
 */
.section .init.entry, "ax"
.global entry
entry:
	/* Load and apply the memory management configuration, ready to enable MMU and caches. */
	adrp x30, idmap                 /* idmap must be 4 KiB aligned (page address only). */
	msr ttbr0_el1, x30

	mov_i x30, .Lmairval
	msr mair_el1, x30

	mov_i x30, .Ltcrval
	/* Copy the supported PA range (ID_AA64MMFR0_EL1.PARange) into TCR_EL1.IPS. */
	mrs x29, id_aa64mmfr0_el1
	bfi x30, x29, #32, #4
	msr tcr_el1, x30

	mov_i x30, .Lsctlrval

	/*
	 * Ensure everything before this point has completed, then invalidate any potentially stale
	 * local TLB entries before they start being used.
	 */
	isb
	tlbi vmalle1
	ic iallu
	dsb nsh
	isb

	/*
	 * Configure sctlr_el1 to enable MMU and cache and don't proceed until this has completed.
	 */
	msr sctlr_el1, x30
	isb

	/* Disable trapping floating point access in EL1 (CPACR_EL1.FPEN = 0b11). */
	mrs x30, cpacr_el1
	orr x30, x30, #(0x3 << 20)
	msr cpacr_el1, x30
	isb

	/*
	 * Zero out the bss section, 16 bytes per iteration.
	 * NOTE(review): assumes bss_begin and bss_end are 16-byte aligned, otherwise the final
	 * stp writes up to 15 bytes past bss_end — confirm against the linker script.
	 */
	adr_l x29, bss_begin
	adr_l x30, bss_end
0:	cmp x29, x30
	b.hs 1f
	stp xzr, xzr, [x29], #16
	b 0b

1:	/* Prepare the stack (sp must stay 16-byte aligned for memory accesses). */
	adr_l x30, boot_stack_end
	mov sp, x30

	/*
	 * Set up exception vector.
	 * NOTE(review): plain adr only reaches +/-1 MiB — fine while vector_table_el1 stays close
	 * to .init.entry; use adr_l if the layout ever changes.
	 */
	adr x30, vector_table_el1
	msr vbar_el1, x30

	/* Call into Rust code; x0-x3 still hold the boot parameters. */
	bl main

	/* Loop forever waiting for interrupts if main ever returns. */
2:	wfi
	b 2b