/* entry_point.S — RT-Thread AArch64 boot entry */
  1. /*
  2. * Copyright (c) 2006-2020, RT-Thread Development Team
  3. *
  4. * SPDX-License-Identifier: Apache-2.0
  5. *
  6. * Date Author Notes
  7. * 2020-01-15 bigmagic the first version
  8. * 2020-08-10 SummerGift support clang compiler
  9. * 2021-11-04 GuEe-GUI set sp with SP_ELx
  10. * 2021-12-28 GuEe-GUI add smp support
  11. */
#include "rtconfig.h"

/* Place the boot code in its own section so the linker script can pin it
 * at the image entry address. "ax" = allocatable + executable. */
.section ".text.entrypoint","ax"

/* Per-core boot stack size (bytes) for the secondary (non-boot) CPUs;
 * the storage itself is reserved at the bottom of this file. */
#define SECONDARY_STACK_SIZE 4096

.globl _start
.globl secondary_cpu_start
_start:
    /* Cold-boot entry: only the primary core (MPIDR_EL1.Aff0 == 0) may
     * proceed; all other cores park in cpu_idle until woken for SMP. */
    mrs     x1, mpidr_el1
    and     x1, x1, #0xff               /* x1 = Aff0 (core number within cluster) */
    cbnz    x1, cpu_idle                /* If cpu id > 0, stop slave cores */

/* SMP release entry point: secondary cores are started here by the BSP. */
secondary_cpu_start:
#ifdef RT_USING_SMP
    /* Read cpu mpidr_el1 */
    mrs     x1, mpidr_el1
    /* Translate this core's MPIDR into a logical cpu id by scanning the
     * BSP-provided table (one 64-bit MPIDR per entry, 0-terminated). */
    ldr     x0, =rt_cpu_mpidr_early     /* BSP must be defined `rt_cpu_mpidr_early' table in smp */
    mov     x2, #0
cpu_id_confirm:
    add     x2, x2, #1                  /* Next cpu id inc */
    ldr     x3, [x0], #8                /* fetch next table entry, post-inc cursor */
    cmp     x3, #0
    beq     cpu_idle                    /* Mean that `rt_cpu_mpidr_early' table is end */
    cmp     x3, x1
    bne     cpu_id_confirm
    /* Get cpu id success: logical id = matched index = x2 - 1 */
    sub     x0, x2, #1
    msr     tpidr_el1, x0               /* Save cpu id global (read back by the kernel) */
    cbz     x0, cpu_setup               /* Only go to cpu_setup when cpu id = 0 */
    /* Set current cpu's stack top: secondary core N (N >= 1) gets the
     * slot (N - 1) below .secondary_cpu_stack_top; x1 is consumed later
     * as the initial sp in cpu_in_el1. */
    sub     x0, x0, #1
    mov     x1, #SECONDARY_STACK_SIZE
    adr     x2, .secondary_cpu_stack_top
    msub    x1, x0, x1, x2              /* x1 = stack_top - (cpu_id - 1) * SECONDARY_STACK_SIZE */
    b       cpu_check_el
#else
    msr     tpidr_el1, xzr              /* single-core build: cpu id is always 0 */
    b       cpu_setup
#endif /* RT_USING_SMP */

/* Low-power parking loop for cores that must not run. */
cpu_idle:
    wfe
    b       cpu_idle
cpu_setup:
    /* Boot core: its stack grows down from _start (the image base); this
     * value doubles as the "I am the boot core" marker tested after the
     * drop to EL1. x1 must stay live across the EL transitions below. */
    ldr     x1, =_start
cpu_check_el:
    mrs     x0, CurrentEL               /* CurrentEL Register. bit 2, 3. Others reserved */
    and     x0, x0, #12                 /* Clear reserved bits */

    /* Running at EL3? */
    cmp     x0, #12                     /* EL3 value is 0b1100 */
    bne     cpu_not_in_el3

    /* Should never be executed, just for completeness. (EL3) */
    mov     x2, #(1 << 0)               /* EL0 and EL1 are in Non-Secure state */
    orr     x2, x2, #(1 << 4)           /* RES1 */
    orr     x2, x2, #(1 << 5)           /* RES1 */
    orr     x2, x2, #(1 << 7)           /* SMC instructions are undefined at EL1 and above */
    orr     x2, x2, #(1 << 8)           /* HVC instructions are enabled at EL1 and above */
    orr     x2, x2, #(1 << 10)          /* The next lower level is AArch64 */
    msr     scr_el3, x2

    /* Change execution level to EL2: fake an exception return whose
     * saved state is "EL2h, DAIF masked" and whose return address is
     * cpu_not_in_el3. */
    mov     x2, #0x3c9
    msr     spsr_el3, x2                /* 0b1111001001: D,A,I,F set; M = EL2h */
    adr     x2, cpu_not_in_el3
    msr     elr_el3, x2
    eret                                /* Exception Return: from EL3, continue from cpu_not_in_el3 */

cpu_not_in_el3:                         /* Running at EL2 or EL1 */
    cmp     x0, #4                      /* EL1 = 0100 */
    beq     cpu_in_el1                  /* already at EL1: skip the EL2 drop */

cpu_in_el2:
    /* Enable CNTP for EL1: let EL1 access the physical counter and timer
     * without trapping to EL2. */
    mrs     x0, cnthctl_el2             /* Counter-timer Hypervisor Control register */
    orr     x0, x0, #3                  /* EL1PCEN | EL1PCTEN */
    msr     cnthctl_el2, x0
    msr     cntvoff_el2, xzr            /* virtual counter offset = 0 */

    mov     x0, #(1 << 31)              /* HCR_EL2.RW: EL1 executes AArch64 */
    orr     x0, x0, #(1 << 1)           /* HCR_EL2.SWIO: SWIO hardwired on Pi3 */
    msr     hcr_el2, x0

    /* Change execution level to EL1 via the same eret trick. */
    mov     x2, #0x3c4
    msr     spsr_el2, x2                /* 0b1111000100: D,A,I,F set; M = EL1t */
    adr     x2, cpu_in_el1
    msr     elr_el2, x2
    eret
cpu_in_el1:
    /* Use SP_EL1 (the eret above arrived in EL1t/SP_EL0 mode). */
    msr     spsel, #1
    mov     sp, x1                      /* Set sp in el1 (x1 set in cpu_setup / SMP path) */

    /* Avoid trap from SIMD or float point instruction */
    mov     x1, #0x00300000             /* CPACR_EL1.FPEN = 0b11: don't trap SIMD/FP in EL0 and EL1 */
    msr     cpacr_el1, x1

    mrs     x1, sctlr_el1
    orr     x1, x1, #(1 << 12)          /* Enable Instruction cache */
    bic     x1, x1, #(3 << 3)           /* Disable SP Alignment check (SA, SA0) */
    bic     x1, x1, #(1 << 1)           /* Disable Alignment check (A) */
    msr     sctlr_el1, x1

#ifdef RT_USING_SMP
    /* Boot core was given sp == _start in cpu_setup; any other sp value
     * means this is a secondary core, which skips BSS init and enters
     * the kernel through its own C entry. */
    ldr     x1, =_start
    cmp     sp, x1
    bne     secondary_cpu_c_start
#endif /* RT_USING_SMP */
  108. ldr x0, =__bss_start
  109. ldr x1, =__bss_end
  110. sub x2, x1, x0
  111. mov x3, x1
  112. cmp x2, #7
  113. bls clean_bss_check
  114. clean_bss_loop_quad:
  115. str xzr, [x0], #8
  116. sub x2, x3, x0
  117. cmp x2, #7
  118. bhi clean_bss_loop_quad
  119. cmp x1, x0
  120. bls jump_to_entry
  121. clean_bss_loop_byte:
  122. str xzr, [x0], #1
  123. clean_bss_check:
  124. cmp x1, x0
  125. bhi clean_bss_loop_byte
  126. jump_to_entry:
  127. b rtthread_startup
  128. b cpu_idle /* For failsafe, halt this core too */
#ifdef RT_USING_SMP
/* Boot-time stacks for the secondary cores: one SECONDARY_STACK_SIZE slot
 * per non-boot core (the boot core stacks down from _start instead).
 * .align 12 = 4096-byte alignment. Each secondary core's initial sp is
 * computed downward from .secondary_cpu_stack_top in secondary_cpu_start. */
.align 12
.secondary_cpu_stack:
.space (SECONDARY_STACK_SIZE * (RT_CPUS_NR - 1))
.secondary_cpu_stack_top:
#endif