/*
 * context_gcc.S — AArch64 context switch, interrupt and exception
 * entry/exit support for RT-Thread.
 */
  1. /*
  2. * Copyright (c) 2006-2024, RT-Thread Development Team
  3. *
  4. * SPDX-License-Identifier: Apache-2.0
  5. *
  6. * Change Logs:
  7. * Date Author Notes
  8. * 2021-05-18 Jesven the first version
  9. * 2023-06-24 WangXiaoyao Support backtrace for user thread
  10. * 2024-01-06 Shell Fix barrier on irq_disable/enable
  11. */
  12. #ifndef __ASSEMBLY__
  13. #define __ASSEMBLY__
  14. #endif
  15. #include "rtconfig.h"
  16. #include "asm-generic.h"
  17. #include "asm-fpu.h"
  18. #include "armv8.h"
  19. .text
  20. .weak rt_hw_cpu_id_set
  21. .type rt_hw_cpu_id_set, @function
  22. rt_hw_cpu_id_set:
  23. mrs x0, mpidr_el1 /* MPIDR_EL1: Multi-Processor Affinity Register */
  24. #ifdef ARCH_ARM_CORTEX_A55
  25. lsr x0, x0, #8
  26. #endif
  27. and x0, x0, #15
  28. msr tpidr_el1, x0
  29. ret
  30. /*
  31. int rt_hw_cpu_id(void)
  32. */
  33. .weak rt_hw_cpu_id
  34. .type rt_hw_cpu_id, @function
  35. rt_hw_cpu_id:
  36. mrs x0, tpidr_el1
  37. ret
  38. /*
  39. void rt_hw_set_process_id(size_t id)
  40. */
  41. .global rt_hw_set_process_id
  42. rt_hw_set_process_id:
  43. msr CONTEXTIDR_EL1, x0
  44. ret
  45. /*
  46. *enable gtimer
  47. */
  48. .globl rt_hw_gtimer_enable
  49. rt_hw_gtimer_enable:
  50. MOV X0,#1
  51. MSR CNTP_CTL_EL0,X0
  52. RET
  53. /*
  54. *set gtimer CNTP_TVAL_EL0 value
  55. */
  56. .globl rt_hw_set_gtimer_val
  57. rt_hw_set_gtimer_val:
  58. MSR CNTP_TVAL_EL0,X0
  59. RET
  60. /*
  61. *get gtimer CNTP_TVAL_EL0 value
  62. */
  63. .globl rt_hw_get_gtimer_val
  64. rt_hw_get_gtimer_val:
  65. MRS X0,CNTP_TVAL_EL0
  66. RET
  67. .globl rt_hw_get_cntpct_val
  68. rt_hw_get_cntpct_val:
  69. MRS X0, CNTPCT_EL0
  70. RET
  71. /*
  72. *get gtimer frq value
  73. */
  74. .globl rt_hw_get_gtimer_frq
  75. rt_hw_get_gtimer_frq:
  76. MRS X0,CNTFRQ_EL0
  77. RET
/*
 * Thread entry trampoline: the first code a freshly created thread runs.
 * Assumes the initial context was seeded so that x19 = thread entry and
 * x20 = thread exit handler -- TODO confirm against the stack-init code,
 * which lives outside this file.
 */
START_POINT(_thread_start)
    blr x19            /* call the thread entry function */
    mov x29, #0        /* clear frame pointer so backtraces stop here */
    blr x20            /* entry returned: invoke the exit handler */
    b . /* never here */
START_POINT_END(_thread_start)
/*
 * SAVE_CONTEXT -- push the complete CPU state of the interrupted code
 * onto the current EL1 stack.
 * Push order: FPU regs, X0-X29 in pairs, FPCR/FPSR, SP_EL0 + X30(LR),
 * then ELR_EL1/SPSR_EL1 last, so the exception-return state sits at the
 * lowest address of the frame.
 * On exit X0 = SP = pointer to the frame (used as the thread's saved sp).
 * X2/X3/X28/X29 are reused as scratch only after their originals are saved.
 */
.macro SAVE_CONTEXT
    /* Save the entire context. */
    SAVE_FPU SP
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X4, X5, [SP, #-0x10]!
    STP X6, X7, [SP, #-0x10]!
    STP X8, X9, [SP, #-0x10]!
    STP X10, X11, [SP, #-0x10]!
    STP X12, X13, [SP, #-0x10]!
    STP X14, X15, [SP, #-0x10]!
    STP X16, X17, [SP, #-0x10]!
    STP X18, X19, [SP, #-0x10]!
    STP X20, X21, [SP, #-0x10]!
    STP X22, X23, [SP, #-0x10]!
    STP X24, X25, [SP, #-0x10]!
    STP X26, X27, [SP, #-0x10]!
    STP X28, X29, [SP, #-0x10]!
    MRS X28, FPCR
    MRS X29, FPSR
    STP X28, X29, [SP, #-0x10]!     /* FP control/status */
    MRS X29, SP_EL0                 /* user-mode stack pointer */
    STP X29, X30, [SP, #-0x10]!
    MRS X3, SPSR_EL1
    MRS X2, ELR_EL1
    STP X2, X3, [SP, #-0x10]!       /* exception return state at frame base */
    MOV X0, SP /* Move SP into X0 for saving. */
.endm
/*
 * SAVE_CONTEXT_FROM_EL1 -- build a SAVE_CONTEXT-compatible frame on the
 * voluntary (non-exception) switch path, where SPSR_EL1/ELR_EL1 hold no
 * meaningful state.  Instead the frame's "ELR" is set to LR and its
 * "SPSR" to EL1h with IRQ/FIQ masked, so a later RESTORE_CONTEXT's ERET
 * resumes execution at this macro's caller.
 * Unlike SAVE_CONTEXT, X0 is NOT set to SP on exit; callers read SP
 * themselves.  Clobbers X18/X19 (after their originals are saved).
 */
.macro SAVE_CONTEXT_FROM_EL1
    /* Save the entire context. */
    SAVE_FPU SP
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X4, X5, [SP, #-0x10]!
    STP X6, X7, [SP, #-0x10]!
    STP X8, X9, [SP, #-0x10]!
    STP X10, X11, [SP, #-0x10]!
    STP X12, X13, [SP, #-0x10]!
    STP X14, X15, [SP, #-0x10]!
    STP X16, X17, [SP, #-0x10]!
    STP X18, X19, [SP, #-0x10]!
    STP X20, X21, [SP, #-0x10]!
    STP X22, X23, [SP, #-0x10]!
    STP X24, X25, [SP, #-0x10]!
    STP X26, X27, [SP, #-0x10]!
    STP X28, X29, [SP, #-0x10]!
    MRS X28, FPCR
    MRS X29, FPSR
    STP X28, X29, [SP, #-0x10]!
    MRS X29, SP_EL0
    STP X29, X30, [SP, #-0x10]!
    /* fabricated SPSR: bits 6-7 mask FIQ/IRQ, M[3:0]=0101 = EL1h */
    MOV X19, #((3 << 6) | 0x4 | 0x1) /* el1h, disable interrupt */
    MOV X18, X30                     /* fabricated ELR = LR: resume at caller */
    STP X18, X19, [SP, #-0x10]!
.endm
#ifdef RT_USING_SMP
/*
 * RESTORE_CONTEXT (SMP) -- resume the thread whose frame pointer (saved
 * sp) is in X0.  Pops the frame built by SAVE_CONTEXT[_FROM_EL1].
 * The TST below sets Z iff SPSR_EL1.M[4:0] == 0 (EL0t, i.e. returning
 * to a user thread); that flag is consumed only by the BEQ after every
 * register is restored -- none of the intervening LDP/MSR/RESTORE_FPU
 * instructions write NZCV, so the condition survives.  On SMP the
 * address-space switch for user threads is handled elsewhere.
 */
.macro RESTORE_CONTEXT
    /* Set the SP to point to the stack of the task being restored. */
    MOV SP, X0
    LDP X2, X3, [SP], #0x10 /* SPSR and ELR. */
    TST X3, #0x1f           /* Z set iff resuming EL0t (user thread) */
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29         /* restore user stack pointer */
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_LWP
    BEQ arch_ret_to_user    /* user thread: take the lwp return path */
#endif
    ERET
.endm
#else
/*
 * RESTORE_CONTEXT (UP) -- as above, but additionally switches the MMU
 * address space for the thread being resumed: lwp_aspace_switch is
 * called with X0 = rt_thread_self(), and X19 (callee-saved) carries the
 * thread pointer across that call before being reloaded from the frame.
 */
.macro RESTORE_CONTEXT
    /* Set the SP to point to the stack of the task being restored. */
    MOV SP, X0
#ifdef RT_USING_LWP
    BL rt_thread_self           /* X0 = current thread */
    MOV X19, X0                 /* keep thread across the next call */
    BL lwp_aspace_switch        /* switch to the thread's address space */
    MOV X0, X19
    BL lwp_user_setting_restore /* restore per-thread user settings */
#endif
    LDP X2, X3, [SP], #0x10 /* SPSR and ELR. */
    TST X3, #0x1f           /* Z set iff resuming EL0t (user thread) */
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_LWP
    BEQ arch_ret_to_user    /* user thread: take the lwp return path */
#endif
    ERET
.endm
#endif
/*
 * RESTORE_CONTEXT_WITHOUT_MMU_SWITCH -- pop a SAVE_CONTEXT frame that
 * SP already points at (same-thread exception return: no sp move, no
 * address-space switch).  The TST flag is consumed by the final BEQ;
 * none of the intervening instructions write NZCV.
 */
.macro RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
    /* the SP is already ok */
    LDP X2, X3, [SP], #0x10 /* SPSR and ELR. */
    TST X3, #0x1f           /* Z set iff returning to EL0t (user mode) */
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_LWP
    BEQ arch_ret_to_user    /* user mode: take the lwp return path */
#endif
    ERET
.endm
/*
 * SAVE_USER_CTX -- if the trap came from EL0 (user mode), let the lwp
 * layer record the user thread context (backtrace support).
 * PRECONDITION: the caller has pushed X0/X1 at [SP]; they are reloaded
 * after the helper call because it may clobber them.
 * Clobbers X1 and NZCV (plus whatever lwp_uthread_ctx_save clobbers --
 * its argument convention is defined outside this file).
 */
.macro SAVE_USER_CTX
    MRS X1, SPSR_EL1
    AND X1, X1, 0xf         /* SPSR_EL1.M[3:0]: 0 => came from EL0t */
    CMP X1, XZR
    BNE 1f                  /* from kernel: nothing to do */
    BL lwp_uthread_ctx_save
    LDP X0, X1, [SP]        /* reload the pushed arguments */
1:
.endm
/*
 * RESTORE_USER_CTX ctx -- counterpart of SAVE_USER_CTX.  \ctx points at
 * a saved exception frame; if the frame's saved SPSR says the trap came
 * from EL0 (M[4:0] == 0), notify the lwp layer that the user context is
 * being left.  Clobbers X1 and NZCV.
 */
.macro RESTORE_USER_CTX, ctx
    LDR X1, [\ctx, #CONTEXT_OFFSET_SPSR_EL1]
    AND X1, X1, 0x1f        /* SPSR.M[4:0]: 0 => trap was from EL0t */
    CMP X1, XZR
    BNE 1f                  /* was in kernel: nothing to do */
    BL lwp_uthread_ctx_restore
1:
.endm
  264. #ifdef RT_USING_SMP
  265. #define rt_hw_interrupt_disable rt_hw_local_irq_disable
  266. #define rt_hw_interrupt_enable rt_hw_local_irq_enable
  267. #endif
  268. .text
  269. .global rt_hw_interrupt_is_disabled
  270. rt_hw_interrupt_is_disabled:
  271. MRS X0, DAIF
  272. TST X0, #0xc0
  273. CSET X0, NE
  274. RET
  275. /*
  276. * rt_base_t rt_hw_interrupt_disable();
  277. */
  278. .globl rt_hw_interrupt_disable
  279. rt_hw_interrupt_disable:
  280. MRS X0, DAIF
  281. MSR DAIFSet, #3
  282. DSB NSH
  283. ISB
  284. RET
  285. /*
  286. * void rt_hw_interrupt_enable(rt_base_t level);
  287. */
  288. .globl rt_hw_interrupt_enable
  289. rt_hw_interrupt_enable:
  290. ISB
  291. DSB NSH
  292. AND X0, X0, #0xc0
  293. MRS X1, DAIF
  294. BIC X1, X1, #0xc0
  295. ORR X0, X0, X1
  296. MSR DAIF, X0
  297. RET
.text
#ifdef RT_USING_SMP
/*
 * void rt_hw_context_switch_to(rt_ubase_t to, struct rt_thread *to_thread);
 * Resume the first thread on this CPU -- there is no "from" context to save.
 * X0 --> to (address of to_thread's saved sp)
 * X1 --> to_thread
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
    LDR X0, [X0]                    /* fetch the thread's saved stack pointer */
    MOV SP, X0
    MOV X0, X1
    BL rt_cpus_lock_status_restore  /* update per-cpu scheduler lock state */
#ifdef RT_USING_LWP
    BL rt_thread_self
    BL lwp_user_setting_restore     /* restore per-thread user settings */
#endif
    B rt_hw_context_switch_exit     /* pop the frame and eret */
/*
 * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to,
 *                           struct rt_thread *to_thread);
 * Voluntary switch (called from EL1 thread context).
 * X0 --> from (address of from_thread's saved-sp slot)
 * X1 --> to (address of to_thread's saved-sp slot)
 * X2 --> to_thread
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
    SAVE_CONTEXT_FROM_EL1           /* frame that erets back to our caller */
    MOV X3, SP
    STR X3, [X0] // store sp in preempted tasks TCB
    LDR X0, [X1] // get new task stack pointer
    MOV SP, X0
    MOV X0, X2
    BL rt_cpus_lock_status_restore  /* update per-cpu scheduler lock state */
#ifdef RT_USING_LWP
    BL rt_thread_self
    BL lwp_user_setting_restore     /* restore per-thread user settings */
#endif
    B rt_hw_context_switch_exit     /* pop the new thread's frame and eret */
/*
 * void rt_hw_context_switch_interrupt(context, from sp, to sp, to tcb)
 * Switch threads on the IRQ return path: the preempted thread's context
 * was already saved by the vector, so only bookkeeping remains.
 * X0 :interrupt context (the preempted thread's saved frame pointer)
 * X1 :addr of from_thread's sp slot
 * X2 :addr of to_thread's sp slot
 * X3 :to_thread's tcb
 */
.globl rt_hw_context_switch_interrupt
rt_hw_context_switch_interrupt:
    /* preserve args (and LR) across the lwp save helper */
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X29, X30, [SP, #-0x10]!
#ifdef RT_USING_LWP
    BL rt_thread_self
    BL lwp_user_setting_save        /* save per-thread user settings */
#endif
    LDP X29, X30, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    STR X0, [X1]                    /* from_thread->sp = saved frame */
    LDR X0, [X2]                    /* fetch to_thread's saved sp */
    MOV SP, X0
    MOV X0, X3
    MOV X19, X0                     /* keep to_thread across next call */
    BL rt_cpus_lock_status_restore
    MOV X0, X19
#ifdef RT_USING_LWP
    BL lwp_user_setting_restore
#endif
    B rt_hw_context_switch_exit     /* pop to_thread's frame and eret */
/* FIQ is not handled on the SMP build: park the CPU. */
.globl vector_fiq
vector_fiq:
    B .
/*
 * IRQ entry (SMP).  Save the full context, run the C dispatcher, then
 * hand off to the scheduler which may switch threads on the way out
 * (rt_scheduler_do_irq_switch need not return here).
 */
START_POINT(vector_irq)
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]! /* X0 is thread sp */
    BL rt_interrupt_enter
    LDP X0, X1, [SP]            /* reload frame pointer clobbered by the call */
#ifdef RT_USING_LWP
    SAVE_USER_CTX               /* record user context if trap came from EL0 */
#endif
    BL rt_hw_trap_irq           /* dispatch to the interrupt controller */
#ifdef RT_USING_LWP
    LDP X0, X1, [SP]
    RESTORE_USER_CTX X0
#endif
    BL rt_interrupt_leave
    LDP X0, X1, [SP], #0x10     /* X0 = frame pointer for the scheduler */
    BL rt_scheduler_do_irq_switch
    B rt_hw_context_switch_exit
START_POINT_END(vector_irq)
/*
 * Common switch-out tail: SP already points at the frame of the thread
 * to resume.  CLREX drops any exclusive-monitor state inherited from
 * the preempted thread so its interrupted LDXR/STXR sequence will fail
 * and retry cleanly.
 */
.global rt_hw_context_switch_exit
rt_hw_context_switch_exit:
    CLREX
    MOV X0, SP                  /* RESTORE_CONTEXT takes the frame in X0 */
    RESTORE_CONTEXT
  393. #else /* RT_USING_SMP */
/*
 * void rt_hw_context_switch_to(rt_ubase_t to);
 * Resume the first thread (UP build) -- nothing to save.
 * X0 --> to (address of the thread's saved-sp slot)
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
    CLREX                       /* clear any stale exclusive-monitor state */
    LDR X0, [X0]                /* X0 = thread's saved frame pointer */
    RESTORE_CONTEXT
/*
 * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to);
 * Voluntary switch (UP build).
 * X0 --> from (address of from_thread's saved-sp slot)
 * X1 --> to (address of to_thread's saved-sp slot)
 * X2 --> to thread (unused here: clobbered below; RESTORE_CONTEXT
 *        re-derives the thread via rt_thread_self)
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
    CLREX                       /* drop exclusive-monitor state */
    SAVE_CONTEXT_FROM_EL1       /* frame that erets back to our caller */
    MOV X2, SP
    STR X2, [X0] // store sp in preempted tasks TCB
    LDR X0, [X1] // get new task stack pointer
    RESTORE_CONTEXT
/*
 * void rt_hw_context_switch_interrupt(rt_ubase_t from, rt_ubase_t to,
 *     rt_thread_t from_thread, rt_thread_t to_thread);
 * Request a deferred switch: the actual context save/restore happens on
 * the IRQ exit path in vector_irq.  If a switch is already pending,
 * only the destination is updated so the original "from" thread keeps
 * its slot.
 */
.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread
.globl rt_hw_context_switch_interrupt
rt_hw_context_switch_interrupt:
    CLREX
    LDR X6, =rt_thread_switch_interrupt_flag
    LDR X7, [X6]
    CMP X7, #1
    B.EQ _reswitch              /* already pending: just retarget "to" */
    LDR X4, =rt_interrupt_from_thread // set rt_interrupt_from_thread
    STR X0, [X4]
    MOV X7, #1 // set rt_thread_switch_interrupt_flag to 1
    STR X7, [X6]
    STP X1, X30, [SP, #-0x10]!  /* keep "to sp" and LR across the call */
#ifdef RT_USING_LWP
    MOV X0, X2                  /* X0 = from_thread */
    BL lwp_user_setting_save    /* save per-thread user settings */
#endif
    LDP X1, X30, [SP], #0x10
_reswitch:
    LDR X6, =rt_interrupt_to_thread // set rt_interrupt_to_thread
    STR X1, [X6]
    RET
.text
// -- Exception handlers ----------------------------------
/*
 * FIQ entry (UP build): save the context, run the C handler, restore.
 * SAVE_CONTEXT leaves X0 = frame pointer, which RESTORE_CONTEXT consumes.
 */
.align 8
.globl vector_fiq
vector_fiq:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!   /* preserve frame pointer across the call */
    BL rt_hw_trap_fiq
    LDP X0, X1, [SP], #0x10
    RESTORE_CONTEXT
.globl rt_interrupt_enter
.globl rt_interrupt_leave
.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread
// -------------------------------------------------------------------
/*
 * IRQ entry (UP build).  After the C handler runs, honor a deferred
 * switch requested by rt_hw_context_switch_interrupt: swap the saved
 * frame pointers through the from/to thread slots and restore the new
 * thread; otherwise return to the interrupted thread in place.
 */
.align 8
.globl vector_irq
vector_irq:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]! /* X0 is thread sp */
    BL rt_interrupt_enter
    BL rt_hw_trap_irq           /* dispatch to the interrupt controller */
    BL rt_interrupt_leave
    LDP X0, X1, [SP], #0x10     /* X0 = interrupted thread's frame pointer */
    // if rt_thread_switch_interrupt_flag set, jump to
    // rt_hw_context_switch_interrupt_do and don't return
    LDR X1, =rt_thread_switch_interrupt_flag
    LDR X2, [X1]
    CMP X2, #1
    B.NE vector_irq_exit        /* no switch pending: plain return */
    MOV X2, #0 // clear flag
    STR X2, [X1]
    LDR X3, =rt_interrupt_from_thread
    LDR X4, [X3]
    STR x0, [X4] // store sp in preempted tasks's TCB
    LDR x3, =rt_interrupt_to_thread
    LDR X4, [X3]
    LDR x0, [X4] // get new task's stack pointer
    RESTORE_CONTEXT
vector_irq_exit:
    MOV SP, X0                  /* same thread: frame is where we left it */
    RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
  487. #endif /* RT_USING_SMP */
  488. // -------------------------------------------------
/*
 * Synchronous exception entry.  Save the context, dispatch to the C
 * trap handler (X0 = frame pointer), then return to the same thread.
 */
START_POINT(vector_exception)
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!   /* preserve frame pointer across calls */
#ifdef RT_USING_LWP
    SAVE_USER_CTX               /* record user context if trap came from EL0 */
#endif
    BL rt_hw_trap_exception     /* X0 = saved frame */
#ifdef RT_USING_LWP
    LDP X0, X1, [SP]
    RESTORE_USER_CTX X0
#endif
    LDP X0, X1, [SP], #0x10
    MOV SP, X0                  /* drop the scratch push; SP -> frame */
    RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
START_POINT_END(vector_exception)
  504. START_POINT(vector_serror)
  505. SAVE_CONTEXT
  506. #ifdef RT_USING_LWP
  507. SAVE_USER_CTX
  508. #endif
  509. STP X0, X1, [SP, #-0x10]!
  510. BL rt_hw_trap_serror
  511. b .
  512. START_POINT_END(vector_serror)