/*
 * Copyright (c) 2006-2024, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2021-05-18     Jesven       the first version
 * 2023-06-24     WangXiaoyao  Support backtrace for user thread
 * 2024-01-06     Shell        Fix barrier on irq_disable/enable
 */

#ifndef __ASSEMBLY__
#define __ASSEMBLY__
#endif

#include "rtconfig.h"
#include "asm-generic.h"
#include "asm-fpu.h"
#include "armv8.h"

#ifndef RT_USING_SMP
.bss
.align 3
rt_interrupt_from_thread:
    .zero 8
rt_interrupt_to_thread:
    .zero 8
rt_thread_switch_interrupt_flag:
    .zero 8
#endif

.text
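
/*
 * void rt_hw_cpu_id_set(void);
 * Cache this core's ID, derived from the MPIDR_EL1 affinity fields,
 * in TPIDR_EL1 so that rt_hw_cpu_id() below can read it back cheaply.
 */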
.weak rt_hw_cpu_id_set
.type rt_hw_cpu_id_set, @function
rt_hw_cpu_id_set:
    mrs x0, mpidr_el1       /* MPIDR_EL1: Multi-Processor Affinity Register */
#ifdef ARCH_ARM_CORTEX_A55
    lsr x0, x0, #8          /* on Cortex-A55 the core number lives in Aff1 */
#endif
    and x0, x0, #15
    msr tpidr_el1, x0
    ret

/*
 * int rt_hw_cpu_id(void);
 */
.weak rt_hw_cpu_id
.type rt_hw_cpu_id, @function
rt_hw_cpu_id:
    mrs x0, tpidr_el1
    ret

/*
 * void rt_hw_set_process_id(size_t id);
 */
.global rt_hw_set_process_id
rt_hw_set_process_id:
    msr CONTEXTIDR_EL1, x0
    ret

/*
 * Enable the generic timer (CNTP).
 */
.globl rt_hw_gtimer_enable
rt_hw_gtimer_enable:
    MOV X0, #1                  /* CNTP_CTL_EL0.ENABLE = 1 */
    MSR CNTP_CTL_EL0, X0
    RET

/*
 * Set the gtimer CNTP_TVAL_EL0 value.
 */
.globl rt_hw_set_gtimer_val
rt_hw_set_gtimer_val:
    MSR CNTP_TVAL_EL0, X0
    RET

/*
 * Get the gtimer CNTP_TVAL_EL0 value.
 */
.globl rt_hw_get_gtimer_val
rt_hw_get_gtimer_val:
    MRS X0, CNTP_TVAL_EL0
    RET

/*
 * Get the physical counter (CNTPCT_EL0) value.
 */
.globl rt_hw_get_cntpct_val
rt_hw_get_cntpct_val:
    MRS X0, CNTPCT_EL0
    RET

/*
 * Get the gtimer frequency (CNTFRQ_EL0).
 */
.globl rt_hw_get_gtimer_frq
rt_hw_get_gtimer_frq:
    MRS X0, CNTFRQ_EL0
    RET
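
/*
 * Thread startup trampoline. The initial stack frame is expected to
 * load x19 with the thread entry and x20 with the thread-exit routine
 * (an assumption inferred from how this trampoline consumes them):
 * the entry runs first, then the exit routine, which must not return.
 */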
START_POINT(_thread_start)
    blr x19
    mov x29, #0
    blr x20
    b . /* never here */
START_POINT_END(_thread_start)
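
/*
 * SAVE_CONTEXT builds a full exception frame on the current stack:
 * FPU state first, then x0-x29 in descending pairs, FPCR/FPSR,
 * SP_EL0/x30, and finally ELR_EL1/SPSR_EL1 at the lowest address.
 * The frame pointer is left in x0 for the caller to stash.
 */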
.macro SAVE_CONTEXT
    /* Save the entire context. */
    SAVE_FPU SP
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X4, X5, [SP, #-0x10]!
    STP X6, X7, [SP, #-0x10]!
    STP X8, X9, [SP, #-0x10]!
    STP X10, X11, [SP, #-0x10]!
    STP X12, X13, [SP, #-0x10]!
    STP X14, X15, [SP, #-0x10]!
    STP X16, X17, [SP, #-0x10]!
    STP X18, X19, [SP, #-0x10]!
    STP X20, X21, [SP, #-0x10]!
    STP X22, X23, [SP, #-0x10]!
    STP X24, X25, [SP, #-0x10]!
    STP X26, X27, [SP, #-0x10]!
    STP X28, X29, [SP, #-0x10]!
    MRS X28, FPCR
    MRS X29, FPSR
    STP X28, X29, [SP, #-0x10]!
    MRS X29, SP_EL0
    STP X29, X30, [SP, #-0x10]!
    MRS X3, SPSR_EL1
    MRS X2, ELR_EL1
    STP X2, X3, [SP, #-0x10]!
    MOV X0, SP /* Move SP into X0 for saving. */
.endm
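
/*
 * Same frame layout as SAVE_CONTEXT, but used for a voluntary switch
 * from EL1: there is no real exception, so the frame stores x30 (the
 * return address) in place of ELR_EL1 and a synthesized SPSR (EL1h,
 * IRQ/FIQ masked) so that the eventual ERET resumes at the caller.
 */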
.macro SAVE_CONTEXT_FROM_EL1
    /* Save the entire context. */
    SAVE_FPU SP
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X4, X5, [SP, #-0x10]!
    STP X6, X7, [SP, #-0x10]!
    STP X8, X9, [SP, #-0x10]!
    STP X10, X11, [SP, #-0x10]!
    STP X12, X13, [SP, #-0x10]!
    STP X14, X15, [SP, #-0x10]!
    STP X16, X17, [SP, #-0x10]!
    STP X18, X19, [SP, #-0x10]!
    STP X20, X21, [SP, #-0x10]!
    STP X22, X23, [SP, #-0x10]!
    STP X24, X25, [SP, #-0x10]!
    STP X26, X27, [SP, #-0x10]!
    STP X28, X29, [SP, #-0x10]!
    MRS X28, FPCR
    MRS X29, FPSR
    STP X28, X29, [SP, #-0x10]!
    MRS X29, SP_EL0
    STP X29, X30, [SP, #-0x10]!
    MOV X19, #((3 << 6) | 0x4 | 0x1) /* EL1h, IRQ/FIQ masked */
    MOV X18, X30
    STP X18, X19, [SP, #-0x10]!
.endm
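
/*
 * Note for the RESTORE_CONTEXT* macros below: the early TST X3, #0x1f
 * checks the saved SPSR mode field. None of the following LDP/MSR
 * instructions touch the condition flags, so the BEQ after RESTORE_FPU
 * still sees that result and branches to arch_ret_to_user exactly when
 * the frame belongs to an EL0 (user) context.
 */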
#ifdef RT_USING_SMP

.macro RESTORE_CONTEXT
    /* Set the SP to point to the stack of the task being restored. */
    MOV SP, X0
    LDP X2, X3, [SP], #0x10     /* X2 = ELR_EL1, X3 = SPSR_EL1 */
    TST X3, #0x1f               /* EL0 frame? flags survive until the BEQ below */
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_LWP
    BEQ arch_ret_to_user
#endif
    ERET
.endm

#else
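
/*
 * Non-SMP restore: switch the address space first, while the register
 * file is still free to clobber, then unstack the saved context.
 */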
.macro RESTORE_CONTEXT
    /* Set the SP to point to the stack of the task being restored. */
    MOV SP, X0
#ifdef RT_USING_LWP
    BL rt_thread_self
    MOV X19, X0
    BL lwp_aspace_switch
    MOV X0, X19
    BL lwp_user_setting_restore
#endif
    LDP X2, X3, [SP], #0x10     /* X2 = ELR_EL1, X3 = SPSR_EL1 */
    TST X3, #0x1f               /* EL0 frame? flags survive until the BEQ below */
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_LWP
    BEQ arch_ret_to_user
#endif
    ERET
.endm
#endif

.macro RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
    /* The SP is already correct. */
    LDP X2, X3, [SP], #0x10     /* X2 = ELR_EL1, X3 = SPSR_EL1 */
    TST X3, #0x1f               /* EL0 frame? flags survive until the BEQ below */
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_LWP
    BEQ arch_ret_to_user
#endif
    ERET
.endm
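
/*
 * Run the LWP user-context hooks only when the trapped context was at
 * EL0: the SPSR_EL1 mode field is zero for EL0t, non-zero for EL1.
 */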
.macro SAVE_USER_CTX
    MRS X1, SPSR_EL1
    AND X1, X1, 0xf
    CMP X1, XZR
    BNE 1f
    BL lwp_uthread_ctx_save
    LDP X0, X1, [SP]
1:
.endm

.macro RESTORE_USER_CTX, ctx
    LDR X1, [\ctx, #CONTEXT_OFFSET_SPSR_EL1]
    AND X1, X1, 0x1f
    CMP X1, XZR
    BNE 1f
    BL lwp_uthread_ctx_restore
1:
.endm

#ifdef RT_USING_SMP
#define rt_hw_interrupt_disable rt_hw_local_irq_disable
#define rt_hw_interrupt_enable  rt_hw_local_irq_enable
#endif

.text
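
/*
 * Returns non-zero when IRQs or FIQs are masked (DAIF I/F bits).
 */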
.global rt_hw_interrupt_is_disabled
rt_hw_interrupt_is_disabled:
    MRS X0, DAIF
    TST X0, #0xc0
    CSET X0, NE
    RET

/*
 * rt_base_t rt_hw_interrupt_disable(void);
 */
.globl rt_hw_interrupt_disable
rt_hw_interrupt_disable:
    MRS X0, DAIF
    AND X0, X0, #0xc0
    CMP X0, #0xc0
    /* Mask only if IRQ and FIQ are not both disabled already. */
    BNE 1f
    RET
1:
    MSR DAIFSet, #3
    DSB NSH
    ISB
    RET

/*
 * void rt_hw_interrupt_enable(rt_base_t level);
 */
.globl rt_hw_interrupt_enable
rt_hw_interrupt_enable:
    AND X0, X0, #0xc0
    CMP X0, #0xc0
    /* Nothing to restore if both bits were masked at disable time. */
    BNE 1f
    RET
1:
    ISB
    DSB NSH
    AND X0, X0, #0xc0
    MRS X1, DAIF
    BIC X1, X1, #0xc0
    ORR X0, X0, X1
    MSR DAIF, X0
    RET
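
/*
 * Typical usage from C, following the standard RT-Thread pattern:
 *
 *     rt_base_t level = rt_hw_interrupt_disable();
 *     ... critical section ...
 *     rt_hw_interrupt_enable(level);
 *
 * The returned level is the previous DAIF I/F state, so nested
 * critical sections restore exactly the masking they found.
 */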

.text

#ifdef RT_USING_SMP

/*
 * void rt_hw_context_switch_to(rt_ubase_t to, struct rt_thread *to_thread);
 * X0 --> to (thread stack)
 * X1 --> to_thread
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
    LDR X0, [X0]
    MOV SP, X0
    MOV X0, X1
    BL rt_cpus_lock_status_restore
#ifdef RT_USING_LWP
    BL rt_thread_self
    BL lwp_user_setting_restore
#endif
    B rt_hw_context_switch_exit

/*
 * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to, struct rt_thread *to_thread);
 * X0 --> from (from_thread stack)
 * X1 --> to (to_thread stack)
 * X2 --> to_thread
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
    SAVE_CONTEXT_FROM_EL1
    MOV X3, SP
    STR X3, [X0]                /* store SP in the preempted task's TCB */
    LDR X0, [X1]                /* get the new task's stack pointer */
    MOV SP, X0
    MOV X0, X2
    BL rt_cpus_lock_status_restore
#ifdef RT_USING_LWP
    BL rt_thread_self
    BL lwp_user_setting_restore
#endif
    B rt_hw_context_switch_exit

/*
 * void rt_hw_context_switch_interrupt(context, from sp, to sp, to tcb);
 * X0 :interrupt context
 * X1 :addr of from_thread's sp
 * X2 :addr of to_thread's sp
 * X3 :to_thread's tcb
 */
.globl rt_hw_context_switch_interrupt
rt_hw_context_switch_interrupt:
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X29, X30, [SP, #-0x10]!
#ifdef RT_USING_LWP
    BL rt_thread_self
    BL lwp_user_setting_save
#endif
    LDP X29, X30, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    STR X0, [X1]                /* record the interrupt context as from_thread's SP */
    LDR X0, [X2]                /* load to_thread's stack pointer */
    MOV SP, X0
    MOV X0, X3
    MOV X19, X0                 /* keep to_thread across the call */
    BL rt_cpus_lock_status_restore
    MOV X0, X19
#ifdef RT_USING_LWP
    BL lwp_user_setting_restore
#endif
    B rt_hw_context_switch_exit

.globl vector_fiq
vector_fiq:
    B .

START_POINT(vector_irq)
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!   /* X0 is thread sp */
    BL rt_interrupt_enter
    LDP X0, X1, [SP]
#ifdef RT_USING_LWP
    SAVE_USER_CTX
#endif
    BL rt_hw_trap_irq
#ifdef RT_USING_LWP
    LDP X0, X1, [SP]
    RESTORE_USER_CTX X0
#endif
    BL rt_interrupt_leave
    LDP X0, X1, [SP], #0x10
    BL rt_scheduler_do_irq_switch
    B rt_hw_context_switch_exit
START_POINT_END(vector_irq)
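
/*
 * Common exit path: SP points at a frame built by one of the
 * SAVE_CONTEXT macros. CLREX drops any stale exclusive-monitor state
 * before the frame is unstacked and ERET resumes the thread.
 */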
.global rt_hw_context_switch_exit
rt_hw_context_switch_exit:
    CLREX
    MOV X0, SP
    RESTORE_CONTEXT

#else /* !RT_USING_SMP */

/*
 * void rt_hw_context_switch_to(rt_ubase_t to);
 * X0 --> to sp
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
    CLREX
    LDR X0, [X0]
    RESTORE_CONTEXT

/*
 * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to);
 * X0 --> from sp
 * X1 --> to sp
 * X2 --> to thread
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
    CLREX
    SAVE_CONTEXT_FROM_EL1
    MOV X2, SP
    STR X2, [X0]                /* store SP in the preempted task's TCB */
    LDR X0, [X1]                /* get the new task's stack pointer */
    RESTORE_CONTEXT

/*
 * void rt_hw_context_switch_interrupt(rt_ubase_t from, rt_ubase_t to,
 *                                     rt_thread_t from_thread, rt_thread_t to_thread);
 */
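/*
 * The switch is deferred: this routine only records the from/to stack
 * pointer slots and raises rt_thread_switch_interrupt_flag; vector_irq
 * performs the actual swap on its way out of the interrupt.
 */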
.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread
.globl rt_hw_context_switch_interrupt
rt_hw_context_switch_interrupt:
    CLREX
    LDR X6, =rt_thread_switch_interrupt_flag
    LDR X7, [X6]
    CMP X7, #1
    B.EQ _reswitch
    LDR X4, =rt_interrupt_from_thread   // set rt_interrupt_from_thread
    STR X0, [X4]
    MOV X7, #1                          // set rt_thread_switch_interrupt_flag to 1
    STR X7, [X6]
    STP X1, X30, [SP, #-0x10]!
#ifdef RT_USING_LWP
    MOV X0, X2
    BL lwp_user_setting_save
#endif
    LDP X1, X30, [SP], #0x10
_reswitch:
    LDR X6, =rt_interrupt_to_thread     // set rt_interrupt_to_thread
    STR X1, [X6]
    RET

.text

// -- Exception handlers ----------------------------------

.align 8
.globl vector_fiq
vector_fiq:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!
    BL rt_hw_trap_fiq
    LDP X0, X1, [SP], #0x10
    RESTORE_CONTEXT

.globl rt_interrupt_enter
.globl rt_interrupt_leave
.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread

// -------------------------------------------------------------------

.align 8
.globl vector_irq
vector_irq:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!   /* X0 is thread sp */
    BL rt_interrupt_enter
    BL rt_hw_trap_irq
    BL rt_interrupt_leave
    LDP X0, X1, [SP], #0x10
    // If rt_thread_switch_interrupt_flag is set, take the deferred
    // context-switch path below and don't return to the interrupted thread.
    LDR X1, =rt_thread_switch_interrupt_flag
    LDR X2, [X1]
    CMP X2, #1
    B.NE vector_irq_exit
    MOV X2, #0                          // clear the flag
    STR X2, [X1]
    LDR X3, =rt_interrupt_from_thread
    LDR X4, [X3]
    STR X0, [X4]                        // store SP in the preempted task's TCB
    LDR X3, =rt_interrupt_to_thread
    LDR X4, [X3]
    LDR X0, [X4]                        // get the new task's stack pointer
    RESTORE_CONTEXT
vector_irq_exit:
    MOV SP, X0
    RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
#endif /* RT_USING_SMP */

// -------------------------------------------------

START_POINT(vector_exception)
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!
#ifdef RT_USING_LWP
    SAVE_USER_CTX
#endif
    BL rt_hw_trap_exception
#ifdef RT_USING_LWP
    LDP X0, X1, [SP]
    RESTORE_USER_CTX X0
#endif
    LDP X0, X1, [SP], #0x10
    MOV SP, X0
    RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
START_POINT_END(vector_exception)

START_POINT(vector_serror)
    SAVE_CONTEXT
#ifdef RT_USING_LWP
    SAVE_USER_CTX
#endif
    STP X0, X1, [SP, #-0x10]!
    BL rt_hw_trap_serror
    B .                         /* SError is treated as fatal; park the core */
START_POINT_END(vector_serror)