/* context_gcc.S — RT-Thread AArch64 libcpu support (GNU assembler) */
/*
 * Copyright (c) 2006-2021, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2021-05-18     Jesven       the first version
 */
#include "rtconfig.h"
#include "asm-fpu.h"
  12. /*
  13. *enable gtimer
  14. */
  15. .globl rt_hw_gtimer_enable
  16. rt_hw_gtimer_enable:
  17. MOV X0,#1
  18. MSR CNTP_CTL_EL0,X0
  19. RET
  20. /*
  21. *set gtimer CNTP_TVAL_EL0 value
  22. */
  23. .globl rt_hw_set_gtimer_val
  24. rt_hw_set_gtimer_val:
  25. MSR CNTP_TVAL_EL0,X0
  26. RET
  27. /*
  28. *get gtimer CNTP_TVAL_EL0 value
  29. */
  30. .globl rt_hw_get_gtimer_val
  31. rt_hw_get_gtimer_val:
  32. MRS X0,CNTP_TVAL_EL0
  33. RET
  34. .globl rt_hw_get_cntpct_val
  35. rt_hw_get_cntpct_val:
  36. MRS X0, CNTPCT_EL0
  37. RET
  38. /*
  39. *get gtimer frq value
  40. */
  41. .globl rt_hw_get_gtimer_frq
  42. rt_hw_get_gtimer_frq:
  43. MRS X0,CNTFRQ_EL0
  44. RET
  45. .macro SAVE_CONTEXT
  46. /* Save the entire context. */
  47. SAVE_FPU SP
  48. STP X0, X1, [SP, #-0x10]!
  49. STP X2, X3, [SP, #-0x10]!
  50. STP X4, X5, [SP, #-0x10]!
  51. STP X6, X7, [SP, #-0x10]!
  52. STP X8, X9, [SP, #-0x10]!
  53. STP X10, X11, [SP, #-0x10]!
  54. STP X12, X13, [SP, #-0x10]!
  55. STP X14, X15, [SP, #-0x10]!
  56. STP X16, X17, [SP, #-0x10]!
  57. STP X18, X19, [SP, #-0x10]!
  58. STP X20, X21, [SP, #-0x10]!
  59. STP X22, X23, [SP, #-0x10]!
  60. STP X24, X25, [SP, #-0x10]!
  61. STP X26, X27, [SP, #-0x10]!
  62. STP X28, X29, [SP, #-0x10]!
  63. MRS X28, FPCR
  64. MRS X29, FPSR
  65. STP X28, X29, [SP, #-0x10]!
  66. MRS X29, SP_EL0
  67. STP X29, X30, [SP, #-0x10]!
  68. MRS X3, SPSR_EL1
  69. MRS X2, ELR_EL1
  70. STP X2, X3, [SP, #-0x10]!
  71. MOV X0, SP /* Move SP into X0 for saving. */
  72. .endm
  73. .macro SAVE_CONTEXT_FROM_EL1
  74. /* Save the entire context. */
  75. SAVE_FPU SP
  76. STP X0, X1, [SP, #-0x10]!
  77. STP X2, X3, [SP, #-0x10]!
  78. STP X4, X5, [SP, #-0x10]!
  79. STP X6, X7, [SP, #-0x10]!
  80. STP X8, X9, [SP, #-0x10]!
  81. STP X10, X11, [SP, #-0x10]!
  82. STP X12, X13, [SP, #-0x10]!
  83. STP X14, X15, [SP, #-0x10]!
  84. STP X16, X17, [SP, #-0x10]!
  85. STP X18, X19, [SP, #-0x10]!
  86. STP X20, X21, [SP, #-0x10]!
  87. STP X22, X23, [SP, #-0x10]!
  88. STP X24, X25, [SP, #-0x10]!
  89. STP X26, X27, [SP, #-0x10]!
  90. STP X28, X29, [SP, #-0x10]!
  91. MRS X28, FPCR
  92. MRS X29, FPSR
  93. STP X28, X29, [SP, #-0x10]!
  94. MRS X29, SP_EL0
  95. STP X29, X30, [SP, #-0x10]!
  96. MOV X3, #((3 << 6) | 0x4 | 0x1) /* el1h, disable interrupt */
  97. MOV X2, X30
  98. STP X2, X3, [SP, #-0x10]!
  99. .endm
  100. .macro RESTORE_CONTEXT
  101. /* Set the SP to point to the stack of the task being restored. */
  102. MOV SP, X0
  103. BL lwp_check_exit
  104. BL rt_thread_self
  105. MOV X8, X0
  106. BL lwp_mmu_switch
  107. MOV X0, X8
  108. BL lwp_user_setting_restore
  109. LDP X2, X3, [SP], #0x10 /* SPSR and ELR. */
  110. TST X3, #0x1f
  111. MSR SPSR_EL1, X3
  112. MSR ELR_EL1, X2
  113. LDP X29, X30, [SP], #0x10
  114. MSR SP_EL0, X29
  115. LDP X28, X29, [SP], #0x10
  116. MSR FPCR, X28
  117. MSR FPSR, X29
  118. LDP X28, X29, [SP], #0x10
  119. LDP X26, X27, [SP], #0x10
  120. LDP X24, X25, [SP], #0x10
  121. LDP X22, X23, [SP], #0x10
  122. LDP X20, X21, [SP], #0x10
  123. LDP X18, X19, [SP], #0x10
  124. LDP X16, X17, [SP], #0x10
  125. LDP X14, X15, [SP], #0x10
  126. LDP X12, X13, [SP], #0x10
  127. LDP X10, X11, [SP], #0x10
  128. LDP X8, X9, [SP], #0x10
  129. LDP X6, X7, [SP], #0x10
  130. LDP X4, X5, [SP], #0x10
  131. LDP X2, X3, [SP], #0x10
  132. LDP X0, X1, [SP], #0x10
  133. RESTORE_FPU SP
  134. BEQ ret_to_user
  135. ERET
  136. .endm
  137. .macro RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
  138. /* the SP is already ok */
  139. BL lwp_check_exit
  140. LDP X2, X3, [SP], #0x10 /* SPSR and ELR. */
  141. TST X3, #0x1f
  142. MSR SPSR_EL1, X3
  143. MSR ELR_EL1, X2
  144. LDP X29, X30, [SP], #0x10
  145. MSR SP_EL0, X29
  146. LDP X28, X29, [SP], #0x10
  147. MSR FPCR, X28
  148. MSR FPSR, X29
  149. LDP X28, X29, [SP], #0x10
  150. LDP X26, X27, [SP], #0x10
  151. LDP X24, X25, [SP], #0x10
  152. LDP X22, X23, [SP], #0x10
  153. LDP X20, X21, [SP], #0x10
  154. LDP X18, X19, [SP], #0x10
  155. LDP X16, X17, [SP], #0x10
  156. LDP X14, X15, [SP], #0x10
  157. LDP X12, X13, [SP], #0x10
  158. LDP X10, X11, [SP], #0x10
  159. LDP X8, X9, [SP], #0x10
  160. LDP X6, X7, [SP], #0x10
  161. LDP X4, X5, [SP], #0x10
  162. LDP X2, X3, [SP], #0x10
  163. LDP X0, X1, [SP], #0x10
  164. RESTORE_FPU SP
  165. BEQ ret_to_user
  166. ERET
  167. .endm
  168. .text
  169. /*
  170. * rt_base_t rt_hw_interrupt_disable();
  171. */
  172. .globl rt_hw_interrupt_disable
  173. rt_hw_interrupt_disable:
  174. MRS X0, DAIF
  175. MSR DAIFSet, #3
  176. DSB SY
  177. RET
  178. /*
  179. * void rt_hw_interrupt_enable(rt_base_t level);
  180. */
  181. .globl rt_hw_interrupt_enable
  182. rt_hw_interrupt_enable:
  183. DSB SY
  184. AND X0, X0, #0xc0
  185. MRS X1, DAIF
  186. BIC X1, X1, #0xc0
  187. ORR X0, X0, X1
  188. MSR DAIF, X0
  189. RET
  190. /*
  191. * void rt_hw_context_switch_to(rt_ubase_t to);
  192. * X0 --> to sp
  193. */
  194. .globl rt_hw_context_switch_to
  195. rt_hw_context_switch_to:
  196. LDR X0, [X0]
  197. RESTORE_CONTEXT
  198. .text
  199. /*
  200. * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to);
  201. * X0 --> from sp
  202. * X1 --> to sp
  203. * X2 --> to thread
  204. */
  205. .globl rt_hw_context_switch
  206. rt_hw_context_switch:
  207. SAVE_CONTEXT_FROM_EL1
  208. MOV X2, SP
  209. STR X2, [X0] // store sp in preempted tasks TCB
  210. LDR X0, [X1] // get new task stack pointer
  211. RESTORE_CONTEXT
  212. /*
  213. * void rt_hw_context_switch_interrupt(rt_ubase_t from, rt_ubase_t to, rt_thread_t from_thread, rt_thread_t to_thread);
  214. */
  215. .globl rt_thread_switch_interrupt_flag
  216. .globl rt_interrupt_from_thread
  217. .globl rt_interrupt_to_thread
  218. .globl rt_hw_context_switch_interrupt
  219. rt_hw_context_switch_interrupt:
  220. ADR X6, rt_thread_switch_interrupt_flag
  221. LDR X7, [X6]
  222. CMP X7, #1
  223. B.EQ _reswitch
  224. ADR X4, rt_interrupt_from_thread // set rt_interrupt_from_thread
  225. STR X0, [X4]
  226. MOV X7, #1 // set rt_thread_switch_interrupt_flag to 1
  227. STR X7, [X6]
  228. STP X1, X30, [SP, #-0x10]!
  229. MOV X0, X2
  230. BL lwp_user_setting_save
  231. LDP X1, X30, [SP], #0x10
  232. _reswitch:
  233. ADR X6, rt_interrupt_to_thread // set rt_interrupt_to_thread
  234. STR X1, [X6]
  235. RET
  236. .text
  237. // -- Exception handlers ----------------------------------
  238. .align 8
  239. .globl vector_fiq
  240. vector_fiq:
  241. SAVE_CONTEXT
  242. STP X0, X1, [SP, #-0x10]!
  243. BL rt_hw_trap_fiq
  244. LDP X0, X1, [SP], #0x10
  245. RESTORE_CONTEXT
  246. .globl rt_interrupt_enter
  247. .globl rt_interrupt_leave
  248. .globl rt_thread_switch_interrupt_flag
  249. .globl rt_interrupt_from_thread
  250. .globl rt_interrupt_to_thread
  251. // -------------------------------------------------------------------
  252. .align 8
  253. .globl vector_irq
  254. vector_irq:
  255. SAVE_CONTEXT
  256. STP X0, X1, [SP, #-0x10]! /* X0 is thread sp */
  257. BL rt_interrupt_enter
  258. BL rt_hw_trap_irq
  259. BL rt_interrupt_leave
  260. LDP X0, X1, [SP], #0x10
  261. // if rt_thread_switch_interrupt_flag set, jump to
  262. // rt_hw_context_switch_interrupt_do and don't return
  263. ADR X1, rt_thread_switch_interrupt_flag
  264. LDR X2, [X1]
  265. CMP X2, #1
  266. B.NE vector_irq_exit
  267. MOV X2, #0 // clear flag
  268. STR X2, [X1]
  269. ADR X3, rt_interrupt_from_thread
  270. LDR X4, [X3]
  271. STR x0, [X4] // store sp in preempted tasks's TCB
  272. ADR x3, rt_interrupt_to_thread
  273. LDR X4, [X3]
  274. LDR x0, [X4] // get new task's stack pointer
  275. RESTORE_CONTEXT
  276. vector_irq_exit:
  277. MOV SP, X0
  278. RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
  279. // -------------------------------------------------
  280. .globl vector_exception
  281. vector_exception:
  282. SAVE_CONTEXT
  283. STP X0, X1, [SP, #-0x10]!
  284. BL rt_hw_trap_exception
  285. LDP X0, X1, [SP], #0x10
  286. MOV SP, X0
  287. RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
  288. .globl vector_serror
  289. vector_serror:
  290. SAVE_CONTEXT
  291. STP X0, X1, [SP, #-0x10]!
  292. BL rt_hw_trap_serror
  293. b .
  294. .global switch_mmu
  295. switch_mmu:
  296. MSR TTBR0_EL1, X0
  297. MRS X1, TCR_EL1
  298. CMP X0, XZR
  299. ORR X1, X1, #(1 << 7)
  300. BEQ 1f
  301. BIC X1, X1, #(1 << 7)
  302. 1:
  303. MSR TCR_EL1, X1
  304. DSB SY
  305. ISB
  306. TLBI VMALLE1
  307. DSB SY
  308. ISB
  309. IC IALLUIS
  310. DSB SY
  311. ISB
  312. RET
  313. .global mmu_table_get
  314. mmu_table_get:
  315. MRS X0, TTBR0_EL1
  316. RET