/* cpu_gcc.S */
/*
 * Copyright (c) 2006-2024, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Date           Author       Notes
 * 2018-10-06     ZhaoXiaowei  the first version (cpu_gcc.S)
 * 2021-05-18     Jesven       the first version (context_gcc.S)
 * 2024-01-06     Shell        Fix barrier on irq_disable/enable
 * 2024-01-18     Shell        fix implicit dependency of cpuid management
 * 2024-03-28     Shell        Move cpu codes from context_gcc.S
 */
/* Make sure headers see they are included from assembly, not C. */
#ifndef __ASSEMBLY__
#define __ASSEMBLY__
#endif

#include "rtconfig.h"
#include "asm-generic.h"
#include "asm-fpu.h"
#include "armv8.h"

#ifdef RT_USING_SMP
/* In SMP builds the generic IRQ entry points are the per-CPU (local) ops. */
#define rt_hw_interrupt_disable rt_hw_local_irq_disable
#define rt_hw_interrupt_enable rt_hw_local_irq_enable
#endif /* RT_USING_SMP */

.text
/**
 * #ifdef RT_USING_OFW
 * void rt_hw_cpu_id_set(long cpuid)
 * #else
 * void rt_hw_cpu_id_set(void)
 * #endif
 *
 * Record this CPU's id in a per-CPU thread-id system register so that
 * rt_hw_cpu_id() can later read it back with a single mrs.
 * With RT_USING_OFW the id arrives in x0 from the caller; otherwise it
 * is derived from MPIDR_EL1 here.
 */
.type rt_hw_cpu_id_set, @function
rt_hw_cpu_id_set:
#ifdef ARCH_USING_GENERIC_CPUID
.globl rt_hw_cpu_id_set
#else /* !ARCH_USING_GENERIC_CPUID */
.weak rt_hw_cpu_id_set
#endif /* ARCH_USING_GENERIC_CPUID */

#ifndef RT_USING_OFW
    mrs     x0, mpidr_el1       /* MPIDR_EL1: Multi-Processor Affinity Register */
#ifdef ARCH_ARM_CORTEX_A55
    /* NOTE(review): presumably the core number lives one affinity level up
     * on A55 (Aff1), so Aff0 is shifted away — confirm against the SoC. */
    lsr     x0, x0, #8
#endif /* ARCH_ARM_CORTEX_A55 */
    and     x0, x0, #15         /* keep the low affinity nibble as the cpu id */
#endif /* !RT_USING_OFW */

#ifdef ARCH_USING_HW_THREAD_SELF
    msr     tpidrro_el0, x0     /* id readable from EL0 (read-only there) */
#else /* !ARCH_USING_HW_THREAD_SELF */
    msr     tpidr_el1, x0       /* id kept in the EL1 software thread-id reg */
#endif /* ARCH_USING_HW_THREAD_SELF */
    ret
/*
 * int rt_hw_cpu_id(void)
 *
 * Return the id previously stored by rt_hw_cpu_id_set().
 * Single-core builds (RT_CPUS_NR == 1) always return 0.
 */
.type rt_hw_cpu_id, @function
rt_hw_cpu_id:
#ifdef ARCH_USING_GENERIC_CPUID
.globl rt_hw_cpu_id
#else /* !ARCH_USING_GENERIC_CPUID */
.weak rt_hw_cpu_id
#endif /* ARCH_USING_GENERIC_CPUID */

#if RT_CPUS_NR > 1
#ifdef ARCH_USING_GENERIC_CPUID
    mrs     x0, tpidrro_el0     /* same register rt_hw_cpu_id_set() wrote */
#else /* !ARCH_USING_GENERIC_CPUID */
    mrs     x0, tpidr_el1
#endif /* ARCH_USING_GENERIC_CPUID */
#else /* RT_CPUS_NR == 1 */
    mov     x0, xzr             /* only one CPU: id is always 0 */
#endif
    ret
/*
 * void rt_hw_set_process_id(size_t id)
 *
 * Publish the current process/context id in CONTEXTIDR_EL1
 * (used by debug/trace hardware to tag the running context).
 */
.global rt_hw_set_process_id
rt_hw_set_process_id:
    msr     CONTEXTIDR_EL1, x0
    ret
/*
 * Enable the EL1 physical generic timer:
 * CNTP_CTL_EL0.ENABLE = 1 (IMASK/ISTATUS left clear).
 */
.globl rt_hw_gtimer_enable
rt_hw_gtimer_enable:
    mov     x0, #1
    msr     CNTP_CTL_EL0, x0
    ret
/*
 * Set the generic timer down-counter: CNTP_TVAL_EL0 = x0
 * (fires when the counter reaches the programmed value).
 */
.globl rt_hw_set_gtimer_val
rt_hw_set_gtimer_val:
    msr     CNTP_TVAL_EL0, x0
    ret
/*
 * Read the generic timer down-counter: return CNTP_TVAL_EL0.
 */
.globl rt_hw_get_gtimer_val
rt_hw_get_gtimer_val:
    mrs     x0, CNTP_TVAL_EL0
    ret
/*
 * Return the free-running physical counter value (CNTPCT_EL0).
 */
.globl rt_hw_get_cntpct_val
rt_hw_get_cntpct_val:
    mrs     x0, CNTPCT_EL0
    ret
/*
 * Return the generic timer frequency in Hz (CNTFRQ_EL0).
 */
.globl rt_hw_get_gtimer_frq
rt_hw_get_gtimer_frq:
    mrs     x0, CNTFRQ_EL0
    ret
/*
 * rt_bool_t rt_hw_interrupt_is_disabled(void)
 *
 * Return non-zero when IRQ or FIQ is masked on this CPU
 * (DAIF.I or DAIF.F set — bits 7 and 6, mask 0xc0).
 */
.global rt_hw_interrupt_is_disabled
rt_hw_interrupt_is_disabled:
    mrs     x0, DAIF
    tst     x0, #0xc0           /* any of I/F masked? */
    cset    x0, NE
    ret
/*
 * rt_base_t rt_hw_interrupt_disable();
 *
 * Mask IRQ and FIQ on the current CPU and return the previous
 * DAIF.{I,F} bits (masked with 0xc0) as the "level" cookie that
 * rt_hw_interrupt_enable() later restores.
 */
.globl rt_hw_interrupt_disable
rt_hw_interrupt_disable:
    mrs     x0, DAIF
    and     x0, x0, #0xc0       /* keep only the I and F mask bits */
    cmp     x0, #0xc0
    /* branch if bits not both set(zero) */
    bne     1f
    ret                         /* already fully masked: nothing to do */
1:
    msr     DAIFSet, #3         /* set I and F -> mask IRQ and FIQ */
    dsb     nsh
    isb                         /* ensure the new mask takes effect before return */
    ret
  134. /*
  135. * void rt_hw_interrupt_enable(rt_base_t level);
  136. */
  137. .globl rt_hw_interrupt_enable
  138. rt_hw_interrupt_enable:
  139. and x0, x0, #0xc0
  140. cmp x0, #0xc0
  141. /* branch if one of the bits not set(zero) */
  142. bne 1f
  143. ret
  144. 1:
  145. isb
  146. dsb nsh
  147. and x0, x0, #0xc0
  148. mrs x1, DAIF
  149. bic x1, x1, #0xc0
  150. orr x0, x0, x1
  151. msr DAIF, x0
  152. ret
  153. .globl rt_hw_get_current_el
  154. rt_hw_get_current_el:
  155. mrs x0, CurrentEL
  156. cmp x0, 0xc
  157. b.eq 3f
  158. cmp x0, 0x8
  159. b.eq 2f
  160. cmp x0, 0x4
  161. b.eq 1f
  162. ldr x0, =0
  163. b 0f
  164. 3:
  165. ldr x0, =3
  166. b 0f
  167. 2:
  168. ldr x0, =2
  169. b 0f
  170. 1:
  171. ldr x0, =1
  172. b 0f
  173. 0:
  174. ret
/*
 * void rt_hw_set_current_vbar(rt_ubase_t addr)
 *
 * Install x0 as the vector base address for whichever exception
 * level the CPU is currently running at (EL1/EL2/EL3).
 * Does nothing at EL0 (no VBAR there).
 */
.globl rt_hw_set_current_vbar
rt_hw_set_current_vbar:
    mrs     x1, CurrentEL       /* EL in bits [3:2]: 0xc=EL3 0x8=EL2 0x4=EL1 */
    cmp     x1, 0xc
    b.eq    3f
    cmp     x1, 0x8
    b.eq    2f
    cmp     x1, 0x4
    b.eq    1f
    b       0f                  /* EL0 or unexpected: leave VBAR alone */
3:
    msr     VBAR_EL3, x0
    b       0f
2:
    msr     VBAR_EL2, x0
    b       0f
1:
    msr     VBAR_EL1, x0
    b       0f
0:
    ret
/*
 * void rt_hw_set_elx_env(void)
 *
 * Configure the routing/security controls of the exception level we
 * are currently running at; a no-op at EL1 and below.
 */
.globl rt_hw_set_elx_env
rt_hw_set_elx_env:
    mrs     x1, CurrentEL       /* EL in bits [3:2]: 0xc=EL3 0x8=EL2 0x4=EL1 */
    cmp     x1, 0xc
    b.eq    3f
    cmp     x1, 0x8
    b.eq    2f
    cmp     x1, 0x4
    b.eq    1f
    b       0f
3:
    mrs     x0, SCR_EL3
    orr     x0, x0, #0xf        /* SCR_EL3.NS|IRQ|FIQ|EA */
    msr     SCR_EL3, x0
    b       0f
2:
    mrs     x0, HCR_EL2
    orr     x0, x0, #0x38       /* HCR_EL2.{FMO,IMO,AMO}: route FIQ/IRQ/SError to EL2 */
    msr     HCR_EL2, x0
    b       0f
1:
    b       0f                  /* EL1: nothing to configure here */
0:
    ret
/*
 * void rt_cpu_vector_set_base(rt_ubase_t addr)
 *
 * Unconditionally install x0 as the EL1 exception vector base.
 */
.globl rt_cpu_vector_set_base
rt_cpu_vector_set_base:
    msr     VBAR_EL1, x0
    ret
/**
 * unsigned long rt_hw_ffz(unsigned long x)
 *
 * Returns 63 - clz(~x): the bit index of the MOST significant zero
 * bit of x (scans from the top, despite the "find first zero" name).
 * NOTE(review): for x == ~0UL (no zero bit) clz(0) is 64, so the
 * result wraps to (unsigned)-1 — callers presumably guarantee at
 * least one clear bit; confirm at the call sites.
 */
.globl rt_hw_ffz
rt_hw_ffz:
    mvn     x1, x0              /* x1 = ~x: zeros of x become ones */
    clz     x0, x1              /* count leading ones of the original x */
    mov     x1, #0x3f
    sub     x0, x1, x0          /* index = 63 - clz(~x) */
    ret
/*
 * unsigned long rt_hw_clz(unsigned long x)
 *
 * Count leading zero bits of the 64-bit argument (clz(0) == 64).
 */
.globl rt_hw_clz
rt_hw_clz:
    clz     x0, x0
    ret
/**
 * Spinlock (fallback implementation)
 *
 * Ticket lock packed in one 32-bit word; weak symbols so a BSP can
 * override them. Init: release-store zero so {owner,next} = {0,0}.
 */
rt_hw_spin_lock_init:
.weak rt_hw_spin_lock_init
    stlr    wzr, [x0]           /* lock word = 0 (store-release) */
    ret
/*
 * rt_bool_t rt_hw_spin_trylock(rt_hw_spinlock_t *lock)
 *
 * Ticket-lock trylock (compiler-generated shape): snapshot the 32-bit
 * lock word, check owner == next (lock free), and if so try to CAS in
 * a copy with `next` incremented. Returns 1 on success, 0 if busy or
 * the CAS lost a race.
 * NOTE(review): halfword offsets assume little-endian layout with
 * `owner` in the low half (sp+8) and `next` in the high half (sp+10)
 * of the spilled word — confirm against struct rt_hw_spinlock.
 */
rt_hw_spin_trylock:
.weak rt_hw_spin_trylock
    sub     sp, sp, #16
    ldar    w2, [x0]            /* w2 = atomic snapshot of {owner,next} */
    add     x1, sp, 8
    stlr    w2, [x1]            /* spill snapshot to a stack scratch slot */
    ldarh   w1, [x1]            /* w1 = owner halfword of the snapshot */
    and     w1, w1, 65535
    add     x3, sp, 10
    ldarh   w3, [x3]            /* w3 = next (ticket) halfword */
    cmp     w1, w3, uxth
    beq     1f                  /* owner == next -> lock appears free */
    mov     w0, 0               /* busy: fail without touching the lock */
    add     sp, sp, 16
    ret
1:
    add     x1, sp, 10
2:
    ldaxrh  w3, [x1]            /* bump `next` in the stack copy ... */
    add     w3, w3, 1
    stlxrh  w4, w3, [x1]
    cbnz    w4, 2b              /* ... retrying on exclusive failure */
    add     x1, sp, 8
    ldar    w1, [x1]            /* w1 = new lock word (next incremented) */
3:
    ldaxr   w3, [x0]            /* CAS: lock word still equals snapshot? */
    cmp     w3, w2
    bne     4f                  /* someone else changed it: fail */
    stxr    w4, w1, [x0]
    cbnz    w4, 3b              /* exclusive store failed: retry CAS */
4:
    cset    w0, eq              /* 1 iff the compare above matched */
    add     sp, sp, 16
    ret
/*
 * void rt_hw_spin_lock(rt_hw_spinlock_t *lock)
 *
 * Ticket-lock acquire: atomically take a ticket (fetch-and-increment
 * of `next` at offset 2), then spin with wfe until `owner` (offset 0)
 * reaches our ticket.
 * NOTE(review): offsets assume `owner` first, `next` second in
 * struct rt_hw_spinlock — confirm against the C definition.
 */
rt_hw_spin_lock:
.weak rt_hw_spin_lock
    add     x1, x0, 2           /* x1 = &lock->next */
1:
    ldxrh   w2, [x1]            /* w2 = my ticket = fetch-and-inc(next) */
    add     w3, w2, 1
    stxrh   w4, w3, [x1]
    cbnz    w4, 1b              /* exclusive store failed: retry */
    and     w2, w2, 65535
    ldarh   w1, [x0]            /* current owner */
    cmp     w2, w1, uxth
    beq     3f                  /* it is already our turn */
    sevl                        /* prime wfe so the first one falls through */
2:
    wfe                         /* sleep until an event/monitor wakeup */
    ldaxrh  w1, [x0]
    cmp     w2, w1
    bne     2b                  /* not our ticket yet: keep waiting */
3:
    ret
/*
 * void rt_hw_spin_unlock(rt_hw_spinlock_t *lock)
 *
 * Ticket-lock release: increment `owner` with a store-release so the
 * critical section is visible before the next waiter proceeds (the
 * global exclusive store also wakes wfe waiters).
 */
rt_hw_spin_unlock:
.weak rt_hw_spin_unlock
    ldxrh   w1, [x0]            /* w1 = owner */
    add     w1, w1, 1
    stlxrh  w2, w1, [x0]        /* release-store owner+1 */
    cbnz    w2, rt_hw_spin_unlock /* exclusive store failed: retry */
    ret