/* context_gcc.S — AArch64 (ARMv8-A) context switch and exception entry for RT-Thread */
  1. /*
  2. * Copyright (c) 2006-2021, RT-Thread Development Team
  3. *
  4. * SPDX-License-Identifier: Apache-2.0
  5. *
  6. * Change Logs:
  7. * Date Author Notes
  8. * 2021-05-18 Jesven the first version
  9. * 2023-06-24 WangXiaoyao Support backtrace for user thread
  10. */
  11. #include "rtconfig.h"
  12. #include "asm-generic.h"
  13. #include "asm-fpu.h"
.text

/*
 * void rt_hw_cpu_id_set(void)
 *
 * Derive this core's id from its affinity register and cache it in
 * TPIDR_EL1 so that rt_hw_cpu_id() is a single register read.
 * Clobbers: x0.
 */
.weak rt_hw_cpu_id_set
.type rt_hw_cpu_id_set, @function
rt_hw_cpu_id_set:
    mrs x0, mpidr_el1       /* MPIDR_EL1: Multi-Processor Affinity Register */
#ifdef ARCH_ARM_CORTEX_A55
    /* On A55 (DynamIQ) the core number lives in Aff1, so drop Aff0 first */
    lsr x0, x0, #8
#endif
    and x0, x0, #15         /* keep low affinity nibble: core id 0..15 */
    msr tpidr_el1, x0       /* cache the id in the EL1 software thread-id reg */
    ret
/*
 * int rt_hw_cpu_id(void)
 *
 * Return the core id previously cached in TPIDR_EL1 by rt_hw_cpu_id_set().
 */
.weak rt_hw_cpu_id
.type rt_hw_cpu_id, @function
rt_hw_cpu_id:
    mrs x0, tpidr_el1       /* TPIDR_EL1 holds the id, not MPIDR_EL1 itself */
    ret
/*
 * void rt_hw_set_process_id(size_t id)
 *
 * Publish the current process/context id in CONTEXTIDR_EL1
 * (visible to debuggers and trace units).
 */
.global rt_hw_set_process_id
rt_hw_set_process_id:
    msr CONTEXTIDR_EL1, x0
    ret
/*
 * enable gtimer
 *
 * Set CNTP_CTL_EL0.ENABLE (bit 0) with IMASK (bit 1) clear, so the
 * EL1 physical timer fires interrupts when the timer condition is met.
 */
.globl rt_hw_gtimer_enable
rt_hw_gtimer_enable:
    MOV X0, #1                  /* ENABLE = 1, IMASK = 0 */
    MSR CNTP_CTL_EL0, X0
    RET
/*
 * set gtimer CNTP_TVAL_EL0 value
 *
 * X0 = countdown value in timer ticks; the timer fires when the
 * downcounter reaches zero.
 */
.globl rt_hw_set_gtimer_val
rt_hw_set_gtimer_val:
    MSR CNTP_TVAL_EL0, X0
    RET
/*
 * get gtimer CNTP_TVAL_EL0 value
 *
 * Returns the current downcounter value of the EL1 physical timer in X0.
 */
.globl rt_hw_get_gtimer_val
rt_hw_get_gtimer_val:
    MRS X0, CNTP_TVAL_EL0
    RET
/* Return the free-running physical counter (CNTPCT_EL0) in X0. */
.globl rt_hw_get_cntpct_val
rt_hw_get_cntpct_val:
    MRS X0, CNTPCT_EL0
    RET
/*
 * get gtimer frq value
 *
 * Returns the counter frequency in Hz (CNTFRQ_EL0, set by firmware) in X0.
 */
.globl rt_hw_get_gtimer_frq
rt_hw_get_gtimer_frq:
    MRS X0, CNTFRQ_EL0
    RET
/*
 * First code executed by a newly created thread.
 * NOTE(review): x19/x20 are presumably seeded by the stack-init code with
 * the thread entry and the thread-exit cleanup routine — confirm against
 * rt_hw_stack_init().
 */
START_POINT(_thread_start)
    blr x19             /* call thread entry(parameter in x0 frame) */
    mov x29, #0         /* zero frame pointer: terminate backtraces here */
    blr x20             /* entry returned: invoke thread exit routine */
    b . /* never here */
START_POINT_END(_thread_start)
/*
 * Push a full trap frame onto the current stack.
 * Layout from SP upward after the macro:
 *   ELR, SPSR | SP_EL0, X30 | FPCR, FPSR | X28..X1, X0 | FPU regs
 * On exit X0 = SP (pointer to the saved frame), for storing into the TCB.
 * Clobbers: X2, X3, X28, X29 (their pre-trap values are already saved).
 */
.macro SAVE_CONTEXT
    /* Save the entire context. */
    SAVE_FPU SP
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X4, X5, [SP, #-0x10]!
    STP X6, X7, [SP, #-0x10]!
    STP X8, X9, [SP, #-0x10]!
    STP X10, X11, [SP, #-0x10]!
    STP X12, X13, [SP, #-0x10]!
    STP X14, X15, [SP, #-0x10]!
    STP X16, X17, [SP, #-0x10]!
    STP X18, X19, [SP, #-0x10]!
    STP X20, X21, [SP, #-0x10]!
    STP X22, X23, [SP, #-0x10]!
    STP X24, X25, [SP, #-0x10]!
    STP X26, X27, [SP, #-0x10]!
    STP X28, X29, [SP, #-0x10]!
    MRS X28, FPCR               /* FP control/status travel with the thread */
    MRS X29, FPSR
    STP X28, X29, [SP, #-0x10]!
    MRS X29, SP_EL0             /* user-mode stack pointer */
    STP X29, X30, [SP, #-0x10]!
    MRS X3, SPSR_EL1            /* saved PSTATE at the moment of the trap */
    MRS X2, ELR_EL1             /* resume PC */
    STP X2, X3, [SP, #-0x10]!
    MOV X0, SP /* Move SP into X0 for saving. */
.endm
/*
 * Build a trap-frame-shaped context from a *voluntary* (non-exception)
 * switch at EL1: same layout as SAVE_CONTEXT, but since there is no real
 * exception, the frame's ELR slot is filled with LR (X30, the resume
 * address) and the SPSR slot with a fabricated PSTATE value.
 * Clobbers: X18, X19, X28, X29.
 */
.macro SAVE_CONTEXT_FROM_EL1
    /* Save the entire context. */
    SAVE_FPU SP
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X4, X5, [SP, #-0x10]!
    STP X6, X7, [SP, #-0x10]!
    STP X8, X9, [SP, #-0x10]!
    STP X10, X11, [SP, #-0x10]!
    STP X12, X13, [SP, #-0x10]!
    STP X14, X15, [SP, #-0x10]!
    STP X16, X17, [SP, #-0x10]!
    STP X18, X19, [SP, #-0x10]!
    STP X20, X21, [SP, #-0x10]!
    STP X22, X23, [SP, #-0x10]!
    STP X24, X25, [SP, #-0x10]!
    STP X26, X27, [SP, #-0x10]!
    STP X28, X29, [SP, #-0x10]!
    MRS X28, FPCR
    MRS X29, FPSR
    STP X28, X29, [SP, #-0x10]!
    MRS X29, SP_EL0
    STP X29, X30, [SP, #-0x10]!
    /* fabricated SPSR: I+F masked (3<<6), mode = EL1h (0x4|0x1) */
    MOV X19, #((3 << 6) | 0x4 | 0x1) /* el1h, disable interrupt */
    MOV X18, X30                     /* resume PC = caller's return address */
    STP X18, X19, [SP, #-0x10]!
.endm
  134. #ifdef RT_USING_SMP
/*
 * SMP variant: unwind a trap frame whose address is in X0 and resume.
 * The TST below sets Z when SPSR.M[4:0] == 0 (return goes to EL0/user);
 * none of the following LDP/MSR/FPU-load instructions touch NZCV, so the
 * condition survives until the BEQ at the bottom, which diverts user
 * threads through arch_ret_to_user.
 */
.macro RESTORE_CONTEXT
    /* Set the SP to point to the stack of the task being restored. */
    MOV SP, X0
    LDP X2, X3, [SP], #0x10 /* SPSR and ELR. */
    TST X3, #0x1f           /* Z=1 iff returning to EL0 (user mode) */
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29         /* restore user stack pointer */
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_LWP
    BEQ arch_ret_to_user    /* user thread: exit via the lwp return path */
#endif
    ERET
.endm
  168. #else
/*
 * UP variant: additionally performs the lwp address-space switch before
 * unwinding the frame. As in the SMP variant, the TST result (Z=1 for a
 * return to EL0) is preserved by all following loads/MSRs and consumed by
 * the BEQ at the end.
 */
.macro RESTORE_CONTEXT
    /* Set the SP to point to the stack of the task being restored. */
    MOV SP, X0
#ifdef RT_USING_LWP
    BL rt_thread_self           /* X0 = current thread */
    MOV X19, X0                 /* keep thread across the next call */
    BL lwp_aspace_switch        /* switch MMU tables to this thread's lwp */
    MOV X0, X19
    BL lwp_user_setting_restore /* restore per-thread user settings */
#endif
    LDP X2, X3, [SP], #0x10 /* SPSR and ELR. */
    TST X3, #0x1f           /* Z=1 iff returning to EL0 */
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_LWP
    BEQ arch_ret_to_user
#endif
    ERET
.endm
  209. #endif
/*
 * Unwind the trap frame already at SP and resume, without touching the
 * MMU/aspace — used when returning to the same thread that trapped.
 * Same flag-liveness trick as RESTORE_CONTEXT (Z=1 => return to EL0).
 */
.macro RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
    /* the SP is already ok */
    LDP X2, X3, [SP], #0x10 /* SPSR and ELR. */
    TST X3, #0x1f           /* Z=1 iff returning to EL0 */
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_LWP
    BEQ arch_ret_to_user
#endif
    ERET
.endm
/*
 * If the trap came from EL0 (user mode), record the user thread context
 * via lwp_uthread_ctx_save.
 * Precondition: the caller has pushed the pair {X0 = frame pointer, X1}
 * at [SP]; it is reloaded after the call because BL clobbers X0/X1.
 * Clobbers: X1 (and X0/X1 are reloaded on the user path).
 */
.macro SAVE_USER_CTX
    MRS X1, SPSR_EL1
    AND X1, X1, 0xf         /* M[3:0] == 0 means the trap came from EL0 */
    CMP X1, XZR
    BNE 1f                  /* from kernel: nothing to save */
    BL lwp_uthread_ctx_save /* X0 = exception frame */
    LDP X0, X1, [SP]        /* reload the pair the caller pushed */
1:
.endm
#ifdef RT_USING_SMP
/* In SMP builds the global IRQ mask API maps onto per-CPU local masking */
#define rt_hw_interrupt_disable rt_hw_local_irq_disable
#define rt_hw_interrupt_enable rt_hw_local_irq_enable
#endif
.text
/*
 * rt_base_t rt_hw_interrupt_disable();
 *
 * Mask IRQ+FIQ on this CPU and return the previous DAIF value in X0
 * so the caller can later pass it to rt_hw_interrupt_enable().
 */
.globl rt_hw_interrupt_disable
rt_hw_interrupt_disable:
    MRS X0, DAIF        /* return the pre-mask PSTATE.DAIF */
    MSR DAIFSet, #3     /* set I and F: mask IRQ and FIQ */
    DSB SY
    RET
/*
 * void rt_hw_interrupt_enable(rt_base_t level);
 *
 * Restore only the I and F bits (0xc0) from the saved `level`, keeping
 * the CPU's current A and D mask bits untouched.
 */
.globl rt_hw_interrupt_enable
rt_hw_interrupt_enable:
    DSB SY
    AND X0, X0, #0xc0   /* keep only saved I/F bits */
    MRS X1, DAIF
    BIC X1, X1, #0xc0   /* clear current I/F, preserve A/D */
    ORR X0, X0, X1      /* merge saved I/F with current A/D */
    MSR DAIF, X0
    RET
  277. .text
  278. #ifdef RT_USING_SMP
/*
 * void rt_hw_context_switch_to(rt_uint3 to, struct rt_thread *to_thread);
 * X0 --> to (thread stack)
 * X1 --> to_thread
 *
 * First switch onto a thread (SMP): load its saved SP, fix up the cpus
 * lock state, then fall into the common restore path. Never returns.
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
    LDR X0, [X0]                    /* X0 = to_thread's saved stack pointer */
    MOV SP, X0
    MOV X0, X1                      /* arg: to_thread */
    BL rt_cpus_lock_status_restore
#ifdef RT_USING_LWP
    BL rt_thread_self
    BL lwp_user_setting_restore
#endif
    B rt_hw_context_switch_exit
/*
 * void rt_hw_context_switch(rt_uint32 from, rt_uint32
 * to, struct rt_thread *to_thread);
 * X0 --> from (from_thread stack)
 * X1 --> to (to_thread stack)
 * X2 --> to_thread
 *
 * Voluntary switch at EL1 (SMP): snapshot the current thread with
 * SAVE_CONTEXT_FROM_EL1, park its SP in the TCB, then adopt the target
 * thread's stack and exit through the common restore path.
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
    SAVE_CONTEXT_FROM_EL1
    MOV X3, SP
    STR X3, [X0] // store sp in preempted tasks TCB
    LDR X0, [X1] // get new task stack pointer
    MOV SP, X0
    MOV X0, X2                      /* arg: to_thread */
    BL rt_cpus_lock_status_restore
#ifdef RT_USING_LWP
    BL rt_thread_self
    BL lwp_user_setting_restore
#endif
    B rt_hw_context_switch_exit
/*
 * void rt_hw_context_switch_interrupt(context, from sp, to sp, tp tcb)
 * X0 :interrupt context
 * X1 :addr of from_thread's sp
 * X2 :addr of to_thread's sp
 * X3 :to_thread's tcb
 *
 * Switch initiated from IRQ level (SMP). The interrupt entry already
 * saved the full frame; X0 points at it. Args are spilled around the
 * lwp_user_setting_save call because BL clobbers the argument regs.
 */
.globl rt_hw_context_switch_interrupt
rt_hw_context_switch_interrupt:
    STP X0, X1, [SP, #-0x10]!       /* preserve args across the C call */
    STP X2, X3, [SP, #-0x10]!
    STP X29, X30, [SP, #-0x10]!
#ifdef RT_USING_LWP
    BL rt_thread_self
    BL lwp_user_setting_save        /* save user settings of current thread */
#endif
    LDP X29, X30, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    STR X0, [X1]                    /* from_thread->sp = interrupt context */
    LDR X0, [X2]                    /* X0 = to_thread's saved sp */
    MOV SP, X0
    MOV X0, X3
    MOV X19, X0                     /* keep to_thread across the call */
    BL rt_cpus_lock_status_restore
    MOV X0, X19
#ifdef RT_USING_LWP
    BL lwp_user_setting_restore
#endif
    B rt_hw_context_switch_exit
/* FIQ is not used in the SMP build: spin forever if one ever arrives. */
.globl vector_fiq
vector_fiq:
    B .
/*
 * IRQ entry (SMP). SAVE_CONTEXT leaves X0 = trap frame pointer; the
 * STP keeps that pointer (plus X1 for 16-byte SP alignment) live across
 * the C calls. rt_scheduler_do_irq_switch may switch threads and not
 * return; otherwise we fall through to the common exit.
 */
.globl vector_irq
vector_irq:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]! /* X0 is thread sp */
    BL rt_interrupt_enter
    LDP X0, X1, [SP]          /* reload frame pointer (BL clobbered X0) */
#ifdef RT_USING_LWP
    SAVE_USER_CTX             /* record user context if trap came from EL0 */
#endif
    BL rt_hw_trap_irq
#ifdef RT_USING_LWP
    BL lwp_uthread_ctx_restore
#endif
    BL rt_interrupt_leave
    LDP X0, X1, [SP], #0x10   /* pop the spill; X0 = frame pointer again */
    BL rt_scheduler_do_irq_switch
    B rt_hw_context_switch_exit
/*
 * Common tail of every SMP switch path: clear exclusive monitor state
 * (so no stale LDXR/STXR reservation leaks across threads) and restore
 * the frame at the current SP. Never returns.
 */
.global rt_hw_context_switch_exit
rt_hw_context_switch_exit:
    CLREX
    MOV X0, SP
    RESTORE_CONTEXT
  371. #else /* RT_USING_SMP */
/*
 * void rt_hw_context_switch_to(rt_ubase_t to);
 * X0 --> to sp
 *
 * First switch onto a thread (UP): fetch its saved SP and restore.
 * Never returns.
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
    CLREX               /* drop any stale exclusive reservation */
    LDR X0, [X0]        /* X0 = to_thread's saved stack pointer */
    RESTORE_CONTEXT
/*
 * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to);
 * X0 --> from sp
 * X1 --> to sp
 * X2 --> to thread
 *
 * Voluntary switch at EL1 (UP): snapshot current thread, store its SP
 * into the from-TCB slot, then restore the target thread's frame.
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
    CLREX
    SAVE_CONTEXT_FROM_EL1
    MOV X2, SP
    STR X2, [X0] // store sp in preempted tasks TCB
    LDR X0, [X1] // get new task stack pointer
    RESTORE_CONTEXT
/*
 * void rt_hw_context_switch_interrupt(rt_ubase_t from, rt_ubase_t to, rt_thread_t from_thread, rt_thread_t to_thread);
 *
 * UP deferred switch: the actual switch happens later in vector_irq.
 * Here we only record from/to and raise the switch flag. If a switch is
 * already pending we only update the destination, so back-to-back
 * requests within one interrupt coalesce into a single switch.
 */
.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread
.globl rt_hw_context_switch_interrupt
rt_hw_context_switch_interrupt:
    CLREX
    LDR X6, =rt_thread_switch_interrupt_flag
    LDR X7, [X6]
    CMP X7, #1
    B.EQ _reswitch                  /* already pending: just retarget */
    LDR X4, =rt_interrupt_from_thread // set rt_interrupt_from_thread
    STR X0, [X4]
    MOV X7, #1 // set rt_thread_switch_interrupt_flag to 1
    STR X7, [X6]
    STP X1, X30, [SP, #-0x10]!      /* preserve `to` and LR across the call */
#ifdef RT_USING_LWP
    MOV X0, X2                      /* arg: from_thread */
    BL lwp_user_setting_save
#endif
    LDP X1, X30, [SP], #0x10
_reswitch:
    LDR X6, =rt_interrupt_to_thread // set rt_interrupt_to_thread
    STR X1, [X6]
    RET
.text
// -- Exception handlers ----------------------------------

/*
 * FIQ entry (UP): save the full frame, dispatch to the C handler, then
 * restore and return. X0/X1 are spilled so the frame pointer in X0
 * survives the BL.
 */
.align 8
.globl vector_fiq
vector_fiq:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!
    BL rt_hw_trap_fiq
    LDP X0, X1, [SP], #0x10
    RESTORE_CONTEXT
.globl rt_interrupt_enter
.globl rt_interrupt_leave
.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread

// -------------------------------------------------------------------

/*
 * IRQ entry (UP). After the C handler runs, check whether
 * rt_hw_context_switch_interrupt() requested a deferred thread switch;
 * if so, store the interrupted thread's frame SP into its TCB and
 * restore the target thread instead of the interrupted one.
 */
.align 8
.globl vector_irq
vector_irq:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]! /* X0 is thread sp */
    BL rt_interrupt_enter
    BL rt_hw_trap_irq
    BL rt_interrupt_leave
    LDP X0, X1, [SP], #0x10
    // if rt_thread_switch_interrupt_flag set, jump to
    // rt_hw_context_switch_interrupt_do and don't return
    LDR X1, =rt_thread_switch_interrupt_flag
    LDR X2, [X1]
    CMP X2, #1
    B.NE vector_irq_exit        /* common case: no switch requested */
    MOV X2, #0 // clear flag
    STR X2, [X1]
    LDR X3, =rt_interrupt_from_thread
    LDR X4, [X3]
    STR x0, [X4] // store sp in preempted tasks's TCB
    LDR x3, =rt_interrupt_to_thread
    LDR X4, [X3]
    LDR x0, [X4] // get new task's stack pointer
    RESTORE_CONTEXT
vector_irq_exit:
    /* return to the interrupted thread: frame is still on this stack */
    MOV SP, X0
    RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
  465. #endif /* RT_USING_SMP */
  466. // -------------------------------------------------
  467. START_POINT(vector_exception)
  468. SAVE_CONTEXT
  469. STP X0, X1, [SP, #-0x10]!
  470. #ifdef RT_USING_LWP
  471. SAVE_USER_CTX
  472. #endif
  473. BL rt_hw_trap_exception
  474. #ifdef RT_USING_LWP
  475. BL lwp_uthread_ctx_restore
  476. #endif
  477. LDP X0, X1, [SP], #0x10
  478. MOV SP, X0
  479. RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
  480. START_POINT_END(vector_exception)
  481. START_POINT(vector_serror)
  482. SAVE_CONTEXT
  483. #ifdef RT_USING_LWP
  484. SAVE_USER_CTX
  485. #endif
  486. STP X0, X1, [SP, #-0x10]!
  487. BL rt_hw_trap_serror
  488. b .
  489. START_POINT_END(vector_exception)