entry_point.S

/*
 * Copyright (c) 2006-2020, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Date         Author       Notes
 * 2020-01-15   bigmagic     the first version
 * 2020-08-10   SummerGift   support clang compiler
 * 2021-11-04   GuEe-GUI     set sp with SP_ELx
 * 2021-12-28   GuEe-GUI     add smp support
 */

#include "rtconfig.h"

.section ".text.entrypoint","ax"

#define SECONDARY_STACK_SIZE 4096

.globl _start
.globl secondary_cpu_start
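
/*
 * Entry conditions (assumed, per the usual AArch64 bare-metal boot
 * convention): the MMU and caches are off, and execution may begin at
 * EL1, EL2 or EL3. The code below demotes the core to EL1h before any
 * stack is installed.
 */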

_start:
    mrs x1, mpidr_el1
    and x1, x1, #0xff           /* Keep only Aff0 (core number within the cluster) */
    cbnz x1, cpu_idle           /* Park every core except core 0 */
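
/*
 * Note: only MPIDR_EL1.Aff0 is tested above, which assumes a
 * single-cluster system; on a multi-cluster part, core 0 of every
 * cluster would pass this check and fall through.
 */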

secondary_cpu_start:
#ifdef RT_USING_SMP
    /* Read this core's affinity value */
    mrs x1, mpidr_el1

    /* Look up this core's logical cpu id */
    ldr x0, =rt_cpu_mpidr_early /* The BSP must define the `rt_cpu_mpidr_early' table in SMP builds */
    mov x2, #0
cpu_id_confirm:
    add x2, x2, #1              /* Advance to the next candidate cpu id */
    ldr x3, [x0], #8
    cmp x3, #0
    beq cpu_idle                /* A zero entry ends the `rt_cpu_mpidr_early' table: unknown core, park it */
    cmp x3, x1
    bne cpu_id_confirm

    /* Cpu id found */
    sub x0, x2, #1
    msr tpidr_el1, x0           /* Save the cpu id for later per-cpu accesses */
    cbz x0, cpu_setup           /* Only cpu 0 takes the cpu_setup path */

    /* Set this cpu's stack top */
    sub x0, x0, #1
    mov x1, #SECONDARY_STACK_SIZE
    adr x2, .secondary_cpu_stack_top
    msub x1, x0, x1, x2         /* x1 = stack_top - (cpu_id - 1) * SECONDARY_STACK_SIZE */
    b cpu_check_el
#else
    msr tpidr_el1, xzr          /* Single-core build: the cpu id is always 0 */
    b cpu_setup
#endif /* RT_USING_SMP */
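
/*
 * Secondary stack layout: the `msub' above computes
 *   sp_top = .secondary_cpu_stack_top - (cpu_id - 1) * SECONDARY_STACK_SIZE
 * so cpu 1 takes the topmost SECONDARY_STACK_SIZE bytes, cpu 2 the next
 * block down, and so on. Cpu 0 does not use this area; it stacks below
 * `_start' (see cpu_setup).
 */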

cpu_idle:
    wfe
    b cpu_idle

cpu_setup:
    ldr x1, =_start             /* Cpu 0's stack grows down from `_start' */

cpu_check_el:
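    /*
     * Both paths merge here with x1 holding this core's stack top:
     * `_start' for cpu 0, or a slot in the secondary stack block for
     * the others. The stack is only installed once the core is running
     * at EL1 (see cpu_in_el1).
     */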
    mrs x0, CurrentEL           /* The current EL is held in bits [3:2]; other bits are reserved */
    and x0, x0, #12             /* Clear the reserved bits */

    /* Running at EL3? */
    cmp x0, #12                 /* EL3 reads as 0b1100 */
    bne cpu_not_in_el3

    /* Normally never executed; kept for completeness. (EL3) */
    mov x2, #(1 << 0)           /* NS: EL0 and EL1 are in Non-secure state */
    orr x2, x2, #(1 << 4)       /* RES1 */
    orr x2, x2, #(1 << 5)       /* RES1 */
    orr x2, x2, #(1 << 7)       /* SMD: SMC instructions are undefined at EL1 and above */
    orr x2, x2, #(1 << 8)       /* HCE: HVC instructions are enabled at EL1 and above */
    orr x2, x2, #(1 << 10)      /* RW: the next lower level is AArch64 */
    msr scr_el3, x2

    mov x2, #9                  /* M[3:0] = 0b1001: return to EL2h */
    orr x2, x2, #(1 << 6)       /* Mask FIQ */
    orr x2, x2, #(1 << 7)       /* Mask IRQ */
    orr x2, x2, #(1 << 8)       /* Mask SError */
    orr x2, x2, #(1 << 9)       /* Mask Debug exceptions */
    msr spsr_el3, x2
    adr x2, cpu_in_el2
    msr elr_el3, x2
    eret
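
/*
 * Dropping an exception level uses the standard `eret' trampoline:
 * program SPSR_ELx with the target mode (all interrupts masked), point
 * ELR_ELx at the next label, then `eret' "returns" into it.
 */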

cpu_not_in_el3:                 /* Running at EL2 or EL1 */
    cmp x0, #4                  /* EL1 reads as 0b0100 */
    beq cpu_in_el1

cpu_in_el2:
    /* Enable EL1 access to the physical counter and timer (CNTP) */
    mrs x0, cnthctl_el2         /* Counter-timer Hypervisor Control register */
    orr x0, x0, #3              /* EL1PCEN | EL1PCTEN */
    msr cnthctl_el2, x0
    msr cntvoff_el2, xzr        /* Zero the virtual counter offset */

    mov x0, #(1 << 31)          /* RW: EL1 executes in AArch64 state */
    orr x0, x0, #(1 << 1)       /* SWIO: set/way invalidation override, hardwired on Pi3 */
    msr hcr_el2, x0

    mov x2, #5                  /* M[3:0] = 0b0101: return to EL1h */
    orr x2, x2, #(1 << 6)       /* Mask FIQ */
    orr x2, x2, #(1 << 7)       /* Mask IRQ */
    orr x2, x2, #(1 << 8)       /* Mask SError */
    orr x2, x2, #(1 << 9)       /* Mask Debug exceptions */
    msr spsr_el2, x2
    adr x2, cpu_in_el1
    msr elr_el2, x2
    eret
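
/*
 * The EL2 setup above matters even though the kernel itself runs at
 * EL1: without CNTHCTL_EL2.EL1PCTEN/EL1PCEN, EL1 accesses to the
 * physical counter and timer would trap to EL2, and HCR_EL2.RW is what
 * lets EL1 execute in AArch64 state at all.
 */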

cpu_in_el1:
    msr spsel, #1               /* Use SP_EL1 as the stack pointer at EL1 */
    mov sp, x1                  /* Install this core's stack */

    /* Avoid traps from SIMD and floating-point instructions */
    mov x1, #0x00300000         /* CPACR_EL1.FPEN = 0b11: no SIMD/FP traps at EL0 or EL1 */
    msr cpacr_el1, x1

    mrs x1, sctlr_el1
    orr x1, x1, #(1 << 12)      /* Enable the instruction cache (I) */
    bic x1, x1, #(3 << 3)       /* Disable SP alignment checks (SA, SA0) */
    bic x1, x1, #(1 << 1)       /* Disable alignment checking (A) */
    msr sctlr_el1, x1
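
    /*
     * The MMU and data cache are still off here; turning them on is
     * left to the later kernel initialization. Note that while the MMU
     * is off, data accesses are treated as Device memory and must stay
     * aligned regardless of the A bit cleared above.
     */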

#ifdef RT_USING_SMP
    ldr x1, =_start
    cmp sp, x1                  /* Only cpu 0's stack top equals `_start' */
    bne secondary_cpu_c_start   /* Secondary cores enter the kernel from here */
#endif /* RT_USING_SMP */
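
    /*
     * Only cpu 0 runs past this point; in SMP builds the comparison
     * above diverts the secondary cores, which must not clear the
     * already-populated .bss again.
     */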

    /* Clear .bss: zero 8 bytes at a time, then finish byte by byte */
    ldr x0, =__bss_start
    ldr x1, =__bss_end
    sub x2, x1, x0              /* x2 = bytes remaining */
    mov x3, x1
    cmp x2, #7
    bls clean_bss_check         /* Fewer than 8 bytes in total: byte loop only */

clean_bss_loop_quad:
    str xzr, [x0], #8
    sub x2, x3, x0
    cmp x2, #7
    bhi clean_bss_loop_quad
    cmp x1, x0
    bls jump_to_entry

clean_bss_loop_byte:
    strb wzr, [x0], #1          /* Byte store, so the tail never writes past __bss_end */
clean_bss_check:
    cmp x1, x0
    bhi clean_bss_loop_byte

jump_to_entry:
    bl rtthread_startup         /* Enter the kernel; should never return */
    b cpu_idle                  /* Failsafe: halt this core if it does */
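
/*
 * `rtthread_startup' is RT-Thread's C-level startup routine and is not
 * expected to return; `bl' is used here (rather than `b') so that the
 * `cpu_idle' failsafe actually catches a return. Clobbering the link
 * register is harmless, since nothing has been saved in it yet.
 */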

#ifdef RT_USING_SMP
.align 12
.secondary_cpu_stack:
    .space (SECONDARY_STACK_SIZE * (RT_CPUS_NR - 1))
.secondary_cpu_stack_top:
#endif /* RT_USING_SMP */
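
/*
 * `.align 12' places the stack block on a 4 KiB boundary. It provides
 * one SECONDARY_STACK_SIZE stack per secondary core (RT_CPUS_NR - 1
 * slots in total); cpu 0 has no slot here because it stacks below
 * `_start'.
 */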