/* context_gcc.S — AArch64 context switch and exception entry (GNU as syntax) */
/*
 * Copyright (c) 2006-2021, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2021-05-18     Jesven       the first version
 */

#include "rtconfig.h"
#include "asm-generic.h"
#include "asm-fpu.h"
  13. .text
  14. .weak rt_hw_cpu_id_set
  15. .type rt_hw_cpu_id_set, @function
  16. rt_hw_cpu_id_set:
  17. mrs x0, mpidr_el1 /* MPIDR_EL1: Multi-Processor Affinity Register */
  18. and x0, x0, #15
  19. msr tpidr_el1, x0
  20. ret
  21. /*
  22. int rt_hw_cpu_id(void)
  23. */
  24. .weak rt_hw_cpu_id
  25. .type rt_hw_cpu_id, @function
  26. rt_hw_cpu_id:
  27. mrs x0, tpidr_el1 /* MPIDR_EL1: Multi-Processor Affinity Register */
  28. ret
  29. /*
  30. void rt_hw_set_process_id(size_t id)
  31. */
  32. .global rt_hw_set_process_id
  33. rt_hw_set_process_id:
  34. msr CONTEXTIDR_EL1, x0
  35. ret
  36. /*
  37. *enable gtimer
  38. */
  39. .globl rt_hw_gtimer_enable
  40. rt_hw_gtimer_enable:
  41. MOV X0,#1
  42. MSR CNTP_CTL_EL0,X0
  43. RET
  44. /*
  45. *set gtimer CNTP_TVAL_EL0 value
  46. */
  47. .globl rt_hw_set_gtimer_val
  48. rt_hw_set_gtimer_val:
  49. MSR CNTP_TVAL_EL0,X0
  50. RET
  51. /*
  52. *get gtimer CNTP_TVAL_EL0 value
  53. */
  54. .globl rt_hw_get_gtimer_val
  55. rt_hw_get_gtimer_val:
  56. MRS X0,CNTP_TVAL_EL0
  57. RET
  58. .globl rt_hw_get_cntpct_val
  59. rt_hw_get_cntpct_val:
  60. MRS X0, CNTPCT_EL0
  61. RET
  62. /*
  63. *get gtimer frq value
  64. */
  65. .globl rt_hw_get_gtimer_frq
  66. rt_hw_get_gtimer_frq:
  67. MRS X0,CNTFRQ_EL0
  68. RET
  69. START_POINT(_thread_start)
  70. blr x19
  71. mov x29, #0
  72. blr x20
  73. b . /* never here */
  74. START_POINT_END(_thread_start)
  75. .macro SAVE_CONTEXT
  76. /* Save the entire context. */
  77. SAVE_FPU SP
  78. STP X0, X1, [SP, #-0x10]!
  79. STP X2, X3, [SP, #-0x10]!
  80. STP X4, X5, [SP, #-0x10]!
  81. STP X6, X7, [SP, #-0x10]!
  82. STP X8, X9, [SP, #-0x10]!
  83. STP X10, X11, [SP, #-0x10]!
  84. STP X12, X13, [SP, #-0x10]!
  85. STP X14, X15, [SP, #-0x10]!
  86. STP X16, X17, [SP, #-0x10]!
  87. STP X18, X19, [SP, #-0x10]!
  88. STP X20, X21, [SP, #-0x10]!
  89. STP X22, X23, [SP, #-0x10]!
  90. STP X24, X25, [SP, #-0x10]!
  91. STP X26, X27, [SP, #-0x10]!
  92. STP X28, X29, [SP, #-0x10]!
  93. MRS X28, FPCR
  94. MRS X29, FPSR
  95. STP X28, X29, [SP, #-0x10]!
  96. MRS X29, SP_EL0
  97. STP X29, X30, [SP, #-0x10]!
  98. MRS X3, SPSR_EL1
  99. MRS X2, ELR_EL1
  100. STP X2, X3, [SP, #-0x10]!
  101. MOV X0, SP /* Move SP into X0 for saving. */
  102. .endm
  103. .macro SAVE_CONTEXT_FROM_EL1
  104. /* Save the entire context. */
  105. SAVE_FPU SP
  106. STP X0, X1, [SP, #-0x10]!
  107. STP X2, X3, [SP, #-0x10]!
  108. STP X4, X5, [SP, #-0x10]!
  109. STP X6, X7, [SP, #-0x10]!
  110. STP X8, X9, [SP, #-0x10]!
  111. STP X10, X11, [SP, #-0x10]!
  112. STP X12, X13, [SP, #-0x10]!
  113. STP X14, X15, [SP, #-0x10]!
  114. STP X16, X17, [SP, #-0x10]!
  115. STP X18, X19, [SP, #-0x10]!
  116. STP X20, X21, [SP, #-0x10]!
  117. STP X22, X23, [SP, #-0x10]!
  118. STP X24, X25, [SP, #-0x10]!
  119. STP X26, X27, [SP, #-0x10]!
  120. STP X28, X29, [SP, #-0x10]!
  121. MRS X28, FPCR
  122. MRS X29, FPSR
  123. STP X28, X29, [SP, #-0x10]!
  124. MRS X29, SP_EL0
  125. STP X29, X30, [SP, #-0x10]!
  126. MOV X19, #((3 << 6) | 0x4 | 0x1) /* el1h, disable interrupt */
  127. MOV X18, X30
  128. STP X18, X19, [SP, #-0x10]!
  129. .endm
  130. #ifdef RT_USING_SMP
  131. .macro RESTORE_CONTEXT
  132. /* Set the SP to point to the stack of the task being restored. */
  133. MOV SP, X0
  134. LDP X2, X3, [SP], #0x10 /* SPSR and ELR. */
  135. TST X3, #0x1f
  136. MSR SPSR_EL1, X3
  137. MSR ELR_EL1, X2
  138. LDP X29, X30, [SP], #0x10
  139. MSR SP_EL0, X29
  140. LDP X28, X29, [SP], #0x10
  141. MSR FPCR, X28
  142. MSR FPSR, X29
  143. LDP X28, X29, [SP], #0x10
  144. LDP X26, X27, [SP], #0x10
  145. LDP X24, X25, [SP], #0x10
  146. LDP X22, X23, [SP], #0x10
  147. LDP X20, X21, [SP], #0x10
  148. LDP X18, X19, [SP], #0x10
  149. LDP X16, X17, [SP], #0x10
  150. LDP X14, X15, [SP], #0x10
  151. LDP X12, X13, [SP], #0x10
  152. LDP X10, X11, [SP], #0x10
  153. LDP X8, X9, [SP], #0x10
  154. LDP X6, X7, [SP], #0x10
  155. LDP X4, X5, [SP], #0x10
  156. LDP X2, X3, [SP], #0x10
  157. LDP X0, X1, [SP], #0x10
  158. RESTORE_FPU SP
  159. #ifdef RT_USING_LWP
  160. BEQ arch_ret_to_user
  161. #endif
  162. ERET
  163. .endm
  164. #else
  165. .macro RESTORE_CONTEXT
  166. /* Set the SP to point to the stack of the task being restored. */
  167. MOV SP, X0
  168. #ifdef RT_USING_LWP
  169. BL rt_thread_self
  170. MOV X19, X0
  171. BL lwp_aspace_switch
  172. MOV X0, X19
  173. BL lwp_user_setting_restore
  174. #endif
  175. LDP X2, X3, [SP], #0x10 /* SPSR and ELR. */
  176. TST X3, #0x1f
  177. MSR SPSR_EL1, X3
  178. MSR ELR_EL1, X2
  179. LDP X29, X30, [SP], #0x10
  180. MSR SP_EL0, X29
  181. LDP X28, X29, [SP], #0x10
  182. MSR FPCR, X28
  183. MSR FPSR, X29
  184. LDP X28, X29, [SP], #0x10
  185. LDP X26, X27, [SP], #0x10
  186. LDP X24, X25, [SP], #0x10
  187. LDP X22, X23, [SP], #0x10
  188. LDP X20, X21, [SP], #0x10
  189. LDP X18, X19, [SP], #0x10
  190. LDP X16, X17, [SP], #0x10
  191. LDP X14, X15, [SP], #0x10
  192. LDP X12, X13, [SP], #0x10
  193. LDP X10, X11, [SP], #0x10
  194. LDP X8, X9, [SP], #0x10
  195. LDP X6, X7, [SP], #0x10
  196. LDP X4, X5, [SP], #0x10
  197. LDP X2, X3, [SP], #0x10
  198. LDP X0, X1, [SP], #0x10
  199. RESTORE_FPU SP
  200. #ifdef RT_USING_LWP
  201. BEQ arch_ret_to_user
  202. #endif
  203. ERET
  204. .endm
  205. #endif
  206. .macro RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
  207. /* the SP is already ok */
  208. LDP X2, X3, [SP], #0x10 /* SPSR and ELR. */
  209. TST X3, #0x1f
  210. MSR SPSR_EL1, X3
  211. MSR ELR_EL1, X2
  212. LDP X29, X30, [SP], #0x10
  213. MSR SP_EL0, X29
  214. LDP X28, X29, [SP], #0x10
  215. MSR FPCR, X28
  216. MSR FPSR, X29
  217. LDP X28, X29, [SP], #0x10
  218. LDP X26, X27, [SP], #0x10
  219. LDP X24, X25, [SP], #0x10
  220. LDP X22, X23, [SP], #0x10
  221. LDP X20, X21, [SP], #0x10
  222. LDP X18, X19, [SP], #0x10
  223. LDP X16, X17, [SP], #0x10
  224. LDP X14, X15, [SP], #0x10
  225. LDP X12, X13, [SP], #0x10
  226. LDP X10, X11, [SP], #0x10
  227. LDP X8, X9, [SP], #0x10
  228. LDP X6, X7, [SP], #0x10
  229. LDP X4, X5, [SP], #0x10
  230. LDP X2, X3, [SP], #0x10
  231. LDP X0, X1, [SP], #0x10
  232. RESTORE_FPU SP
  233. #ifdef RT_USING_LWP
  234. BEQ arch_ret_to_user
  235. #endif
  236. ERET
  237. .endm
  238. #ifdef RT_USING_SMP
  239. #define rt_hw_interrupt_disable rt_hw_local_irq_disable
  240. #define rt_hw_interrupt_enable rt_hw_local_irq_enable
  241. #endif
  242. .text
  243. /*
  244. * rt_base_t rt_hw_interrupt_disable();
  245. */
  246. .globl rt_hw_interrupt_disable
  247. rt_hw_interrupt_disable:
  248. MRS X0, DAIF
  249. MSR DAIFSet, #3
  250. DSB SY
  251. RET
  252. /*
  253. * void rt_hw_interrupt_enable(rt_base_t level);
  254. */
  255. .globl rt_hw_interrupt_enable
  256. rt_hw_interrupt_enable:
  257. DSB SY
  258. AND X0, X0, #0xc0
  259. MRS X1, DAIF
  260. BIC X1, X1, #0xc0
  261. ORR X0, X0, X1
  262. MSR DAIF, X0
  263. RET
  264. .text
  265. #ifdef RT_USING_SMP
  266. /*
  267. * void rt_hw_context_switch_to(rt_uint3 to, struct rt_thread *to_thread);
  268. * X0 --> to (thread stack)
  269. * X1 --> to_thread
  270. */
  271. .globl rt_hw_context_switch_to
  272. rt_hw_context_switch_to:
  273. LDR X0, [X0]
  274. MOV SP, X0
  275. MOV X0, X1
  276. BL rt_cpus_lock_status_restore
  277. #ifdef RT_USING_LWP
  278. BL rt_thread_self
  279. BL lwp_user_setting_restore
  280. #endif
  281. B rt_hw_context_switch_exit
  282. /*
  283. * void rt_hw_context_switch(rt_uint32 from, rt_uint32
  284. to, struct rt_thread *to_thread);
  285. * X0 --> from (from_thread stack)
  286. * X1 --> to (to_thread stack)
  287. * X2 --> to_thread
  288. */
  289. .globl rt_hw_context_switch
  290. rt_hw_context_switch:
  291. SAVE_CONTEXT_FROM_EL1
  292. MOV X3, SP
  293. STR X3, [X0] // store sp in preempted tasks TCB
  294. LDR X0, [X1] // get new task stack pointer
  295. MOV SP, X0
  296. MOV X0, X2
  297. BL rt_cpus_lock_status_restore
  298. #ifdef RT_USING_LWP
  299. BL rt_thread_self
  300. BL lwp_user_setting_restore
  301. #endif
  302. B rt_hw_context_switch_exit
  303. /*
  304. * void rt_hw_context_switch_interrupt(context, from sp, to sp, tp tcb)
  305. * X0 :interrupt context
  306. * X1 :addr of from_thread's sp
  307. * X2 :addr of to_thread's sp
  308. * X3 :to_thread's tcb
  309. */
  310. .globl rt_hw_context_switch_interrupt
  311. rt_hw_context_switch_interrupt:
  312. STP X0, X1, [SP, #-0x10]!
  313. STP X2, X3, [SP, #-0x10]!
  314. STP X29, X30, [SP, #-0x10]!
  315. #ifdef RT_USING_LWP
  316. BL rt_thread_self
  317. BL lwp_user_setting_save
  318. #endif
  319. LDP X29, X30, [SP], #0x10
  320. LDP X2, X3, [SP], #0x10
  321. LDP X0, X1, [SP], #0x10
  322. STR X0, [X1]
  323. LDR X0, [X2]
  324. MOV SP, X0
  325. MOV X0, X3
  326. MOV X19, X0
  327. BL rt_cpus_lock_status_restore
  328. MOV X0, X19
  329. #ifdef RT_USING_LWP
  330. BL lwp_user_setting_restore
  331. #endif
  332. B rt_hw_context_switch_exit
  333. .globl vector_fiq
  334. vector_fiq:
  335. B .
  336. .globl vector_irq
  337. vector_irq:
  338. CLREX
  339. SAVE_CONTEXT
  340. STP X0, X1, [SP, #-0x10]! /* X0 is thread sp */
  341. BL rt_interrupt_enter
  342. BL rt_hw_trap_irq
  343. BL rt_interrupt_leave
  344. LDP X0, X1, [SP], #0x10
  345. BL rt_scheduler_do_irq_switch
  346. B rt_hw_context_switch_exit
  347. .global rt_hw_context_switch_exit
  348. rt_hw_context_switch_exit:
  349. MOV X0, SP
  350. RESTORE_CONTEXT
  351. #else
  352. /*
  353. * void rt_hw_context_switch_to(rt_ubase_t to);
  354. * X0 --> to sp
  355. */
  356. .globl rt_hw_context_switch_to
  357. rt_hw_context_switch_to:
  358. LDR X0, [X0]
  359. RESTORE_CONTEXT
  360. /*
  361. * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to);
  362. * X0 --> from sp
  363. * X1 --> to sp
  364. * X2 --> to thread
  365. */
  366. .globl rt_hw_context_switch
  367. rt_hw_context_switch:
  368. SAVE_CONTEXT_FROM_EL1
  369. MOV X2, SP
  370. STR X2, [X0] // store sp in preempted tasks TCB
  371. LDR X0, [X1] // get new task stack pointer
  372. RESTORE_CONTEXT
  373. /*
  374. * void rt_hw_context_switch_interrupt(rt_ubase_t from, rt_ubase_t to, rt_thread_t from_thread, rt_thread_t to_thread);
  375. */
  376. .globl rt_thread_switch_interrupt_flag
  377. .globl rt_interrupt_from_thread
  378. .globl rt_interrupt_to_thread
  379. .globl rt_hw_context_switch_interrupt
  380. rt_hw_context_switch_interrupt:
  381. LDR X6, =rt_thread_switch_interrupt_flag
  382. LDR X7, [X6]
  383. CMP X7, #1
  384. B.EQ _reswitch
  385. LDR X4, =rt_interrupt_from_thread // set rt_interrupt_from_thread
  386. STR X0, [X4]
  387. MOV X7, #1 // set rt_thread_switch_interrupt_flag to 1
  388. STR X7, [X6]
  389. STP X1, X30, [SP, #-0x10]!
  390. #ifdef RT_USING_LWP
  391. MOV X0, X2
  392. BL lwp_user_setting_save
  393. #endif
  394. LDP X1, X30, [SP], #0x10
  395. _reswitch:
  396. LDR X6, =rt_interrupt_to_thread // set rt_interrupt_to_thread
  397. STR X1, [X6]
  398. RET
  399. .text
  400. // -- Exception handlers ----------------------------------
  401. .align 8
  402. .globl vector_fiq
  403. vector_fiq:
  404. SAVE_CONTEXT
  405. STP X0, X1, [SP, #-0x10]!
  406. BL rt_hw_trap_fiq
  407. LDP X0, X1, [SP], #0x10
  408. RESTORE_CONTEXT
  409. .globl rt_interrupt_enter
  410. .globl rt_interrupt_leave
  411. .globl rt_thread_switch_interrupt_flag
  412. .globl rt_interrupt_from_thread
  413. .globl rt_interrupt_to_thread
  414. // -------------------------------------------------------------------
  415. .align 8
  416. .globl vector_irq
  417. vector_irq:
  418. SAVE_CONTEXT
  419. STP X0, X1, [SP, #-0x10]! /* X0 is thread sp */
  420. BL rt_interrupt_enter
  421. BL rt_hw_trap_irq
  422. BL rt_interrupt_leave
  423. LDP X0, X1, [SP], #0x10
  424. // if rt_thread_switch_interrupt_flag set, jump to
  425. // rt_hw_context_switch_interrupt_do and don't return
  426. LDR X1, =rt_thread_switch_interrupt_flag
  427. LDR X2, [X1]
  428. CMP X2, #1
  429. B.NE vector_irq_exit
  430. MOV X2, #0 // clear flag
  431. STR X2, [X1]
  432. LDR X3, =rt_interrupt_from_thread
  433. LDR X4, [X3]
  434. STR x0, [X4] // store sp in preempted tasks's TCB
  435. LDR x3, =rt_interrupt_to_thread
  436. LDR X4, [X3]
  437. LDR x0, [X4] // get new task's stack pointer
  438. RESTORE_CONTEXT
  439. vector_irq_exit:
  440. MOV SP, X0
  441. RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
  442. #endif
  443. // -------------------------------------------------
  444. START_POINT(vector_exception)
  445. SAVE_CONTEXT
  446. STP X0, X1, [SP, #-0x10]!
  447. BL rt_hw_trap_exception
  448. LDP X0, X1, [SP], #0x10
  449. MOV SP, X0
  450. RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
  451. START_POINT_END(vector_exception)
  452. START_POINT(vector_serror)
  453. SAVE_CONTEXT
  454. STP X0, X1, [SP, #-0x10]!
  455. BL rt_hw_trap_serror
  456. b .
  457. START_POINT_END(vector_exception)