/*
 * Copyright (c) 2006-2024, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2021-05-18     Jesven       the first version
 * 2023-06-24     WangXiaoyao  Support backtrace for user thread
 * 2024-01-06     Shell        Fix barrier on irq_disable/enable
 * 2024-01-18     Shell        fix implicit dependency of cpuid management
 */
#ifndef __ASSEMBLY__
#define __ASSEMBLY__
#endif

#include "rtconfig.h"
#include "asm-generic.h"
#include "asm-fpu.h"
#include "armv8.h"

#ifndef RT_USING_SMP
.section .data
rt_interrupt_from_thread:        .zero 8
rt_interrupt_to_thread:          .zero 8
rt_thread_switch_interrupt_flag: .zero 8
#endif
.text

/**
 * #ifdef RT_USING_OFW
 * void rt_hw_cpu_id_set(long cpuid)
 * #else
 * void rt_hw_cpu_id_set(void)
 * #endif
 */
.type rt_hw_cpu_id_set, @function
rt_hw_cpu_id_set:
#ifdef ARCH_USING_GENERIC_CPUID
    .globl rt_hw_cpu_id_set
#else /* !ARCH_USING_GENERIC_CPUID */
    .weak rt_hw_cpu_id_set
#endif /* ARCH_USING_GENERIC_CPUID */

#ifndef RT_USING_OFW
    mrs x0, mpidr_el1       /* MPIDR_EL1: Multi-Processor Affinity Register */
#ifdef ARCH_ARM_CORTEX_A55
    lsr x0, x0, #8
#endif /* ARCH_ARM_CORTEX_A55 */
    and x0, x0, #15
#endif /* !RT_USING_OFW */

#ifdef ARCH_USING_HW_THREAD_SELF
    msr tpidrro_el0, x0
#else /* !ARCH_USING_HW_THREAD_SELF */
    msr tpidr_el1, x0
#endif /* ARCH_USING_HW_THREAD_SELF */
    ret
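
/*
 * Note: without OFW the CPU id is derived from MPIDR_EL1. Aff0 normally
 * carries the core number, hence the `and #15`; on Cortex-A55 (DynamIQ)
 * Aff0 holds the thread id and the core number lives in Aff1, hence the
 * extra `lsr #8` first. The id is then stashed in TPIDRRO_EL0 or
 * TPIDR_EL1 so rt_hw_cpu_id() can read it back without decoding MPIDR
 * again.
 */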
/*
 * int rt_hw_cpu_id(void)
 */
.weak rt_hw_cpu_id
.type rt_hw_cpu_id, @function
rt_hw_cpu_id:
#if RT_CPUS_NR > 1
    mrs x0, tpidr_el1
#else
    mov x0, xzr
#endif
    ret
/*
 * void rt_hw_set_process_id(size_t id)
 */
.global rt_hw_set_process_id
rt_hw_set_process_id:
    msr CONTEXTIDR_EL1, x0
    ret
/*
 * enable gtimer
 */
.globl rt_hw_gtimer_enable
rt_hw_gtimer_enable:
    MOV X0, #1
    MSR CNTP_CTL_EL0, X0
    RET

/*
 * set gtimer CNTP_TVAL_EL0 value
 */
.globl rt_hw_set_gtimer_val
rt_hw_set_gtimer_val:
    MSR CNTP_TVAL_EL0, X0
    RET

/*
 * get gtimer CNTP_TVAL_EL0 value
 */
.globl rt_hw_get_gtimer_val
rt_hw_get_gtimer_val:
    MRS X0, CNTP_TVAL_EL0
    RET

/*
 * get the physical counter CNTPCT_EL0 value
 */
.globl rt_hw_get_cntpct_val
rt_hw_get_cntpct_val:
    MRS X0, CNTPCT_EL0
    RET

/*
 * get gtimer frequency (CNTFRQ_EL0)
 */
.globl rt_hw_get_gtimer_frq
rt_hw_get_gtimer_frq:
    MRS X0, CNTFRQ_EL0
    RET
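
/*
 * Typical tick programming from C, as a sketch (the 1 kHz tick rate is an
 * illustrative assumption, not something this file defines):
 *
 *   rt_uint64_t frq = rt_hw_get_gtimer_frq();
 *   rt_hw_set_gtimer_val(frq / 1000);   // next fire in ~1 ms
 *   rt_hw_gtimer_enable();              // CNTP_CTL_EL0.ENABLE = 1
 */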
START_POINT(_thread_start)
    blr x19
    mov x29, #0
    blr x20
    b .                     /* never here */
START_POINT_END(_thread_start)
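
/*
 * Sketch of the convention assumed here: the initial stack frame built for
 * a new thread by the C-side stack init code loads the thread entry into
 * x19 and an exit handler into x20, so `blr x19` runs the thread body and
 * `blr x20` runs the cleanup path if the body ever returns. Clearing x29
 * in between terminates frame-pointer backtraces at the thread boundary.
 */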
.macro SAVE_CONTEXT
    /* Save the entire context. */
    SAVE_FPU SP
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X4, X5, [SP, #-0x10]!
    STP X6, X7, [SP, #-0x10]!
    STP X8, X9, [SP, #-0x10]!
    STP X10, X11, [SP, #-0x10]!
    STP X12, X13, [SP, #-0x10]!
    STP X14, X15, [SP, #-0x10]!
    STP X16, X17, [SP, #-0x10]!
    STP X18, X19, [SP, #-0x10]!
    STP X20, X21, [SP, #-0x10]!
    STP X22, X23, [SP, #-0x10]!
    STP X24, X25, [SP, #-0x10]!
    STP X26, X27, [SP, #-0x10]!
    STP X28, X29, [SP, #-0x10]!

    MRS X28, FPCR
    MRS X29, FPSR
    STP X28, X29, [SP, #-0x10]!

    MRS X29, SP_EL0
    STP X29, X30, [SP, #-0x10]!

    MRS X3, SPSR_EL1
    MRS X2, ELR_EL1
    STP X2, X3, [SP, #-0x10]!

    MOV X0, SP              /* Move SP into X0 for saving. */
.endm
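
/*
 * Resulting frame, from the final SP upward (one STP pair per row):
 *
 *   [SP, #0x000]  ELR_EL1, SPSR_EL1
 *   [SP, #0x010]  SP_EL0,  X30
 *   [SP, #0x020]  FPCR,    FPSR
 *   [SP, #0x030]  X28,     X29
 *   [SP, #0x040]  X26..X27, then descending pairs down to X2..X3
 *   [SP, #0x110]  X0,      X1
 *   [SP, #0x120]  FPU area pushed by SAVE_FPU
 *
 * The RESTORE_CONTEXT* macros below pop in exactly the reverse order.
 */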
.macro SAVE_CONTEXT_FROM_EL1
    /* Save the entire context. */
    SAVE_FPU SP
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X4, X5, [SP, #-0x10]!
    STP X6, X7, [SP, #-0x10]!
    STP X8, X9, [SP, #-0x10]!
    STP X10, X11, [SP, #-0x10]!
    STP X12, X13, [SP, #-0x10]!
    STP X14, X15, [SP, #-0x10]!
    STP X16, X17, [SP, #-0x10]!
    STP X18, X19, [SP, #-0x10]!
    STP X20, X21, [SP, #-0x10]!
    STP X22, X23, [SP, #-0x10]!
    STP X24, X25, [SP, #-0x10]!
    STP X26, X27, [SP, #-0x10]!
    STP X28, X29, [SP, #-0x10]!

    MRS X28, FPCR
    MRS X29, FPSR
    STP X28, X29, [SP, #-0x10]!

    MRS X29, SP_EL0
    STP X29, X30, [SP, #-0x10]!

    MOV X19, #((3 << 6) | 0x4 | 0x1)    /* el1h, disable interrupt */
    MOV X18, X30
    STP X18, X19, [SP, #-0x10]!
.endm
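
/*
 * Same layout as SAVE_CONTEXT, but used when switching out of EL1 thread
 * context rather than out of an exception: there is no trapped ELR/SPSR
 * to save, so the frame is seeded with X30 (the caller's return address)
 * in the ELR slot and a synthetic SPSR (EL1h, IRQ/FIQ masked) in the SPSR
 * slot. The eventual ERET in RESTORE_CONTEXT then "returns" to the point
 * just after the context-switch call.
 */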
#ifdef RT_USING_SMP
.macro RESTORE_CONTEXT
    /* Set the SP to point to the stack of the task being restored. */
    MOV SP, X0

    LDP X2, X3, [SP], #0x10     /* ELR and SPSR. */
    TST X3, #0x1f
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2

    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_SMART
    BEQ arch_ret_to_user
#endif
    ERET
.endm
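
/*
 * The `TST X3, #0x1f` checks the mode field of the saved SPSR: Z is set
 * only when M[4:0] == 0 (EL0t), i.e. the frame belongs to a user thread.
 * None of the MSR/LDP instructions that follow touch the NZCV flags, so
 * the `BEQ arch_ret_to_user` at the end still sees that result and routes
 * user threads through the smart-mode return path instead of a bare ERET.
 */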
#else
.macro RESTORE_CONTEXT
    /* Set the SP to point to the stack of the task being restored. */
    MOV SP, X0
#ifdef RT_USING_SMART
    BL  rt_thread_self
    MOV X19, X0
    BL  lwp_aspace_switch
    MOV X0, X19
    BL  lwp_user_setting_restore
#endif
    LDP X2, X3, [SP], #0x10     /* ELR and SPSR. */
    TST X3, #0x1f
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2

    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_SMART
    BEQ arch_ret_to_user
#endif
    ERET
.endm
#endif
.macro RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
    /* the SP is already ok */
    LDP X2, X3, [SP], #0x10     /* ELR and SPSR. */
    TST X3, #0x1f
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2

    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_SMART
    BEQ arch_ret_to_user
#endif
    ERET
.endm
.macro SAVE_USER_CTX
    MRS X1, SPSR_EL1
    AND X1, X1, 0xf
    CMP X1, XZR
    BNE 1f
    BL  lwp_uthread_ctx_save
    LDP X0, X1, [SP]
1:
.endm

.macro RESTORE_USER_CTX, ctx
    LDR X1, [\ctx, #CONTEXT_OFFSET_SPSR_EL1]
    AND X1, X1, 0x1f
    CMP X1, XZR
    BNE 1f
    BL  lwp_uthread_ctx_restore
1:
.endm
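
/*
 * Both macros gate on the saved SPSR mode field: only frames trapped from
 * EL0 (mode bits all zero) get the lwp_uthread_ctx_save/restore hooks, so
 * kernel-mode frames skip the user-context bookkeeping entirely. The
 * `LDP X0, X1, [SP]` after the save call reloads the scratch pair that
 * the mode test and the call clobbered.
 */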
#ifdef RT_USING_SMP
#define rt_hw_interrupt_disable rt_hw_local_irq_disable
#define rt_hw_interrupt_enable  rt_hw_local_irq_enable
#endif

.text

.global rt_hw_interrupt_is_disabled
rt_hw_interrupt_is_disabled:
    MRS X0, DAIF
    TST X0, #0xc0
    CSET X0, NE
    RET
/*
 * rt_base_t rt_hw_interrupt_disable();
 */
.globl rt_hw_interrupt_disable
rt_hw_interrupt_disable:
    MRS X0, DAIF
    AND X0, X0, #0xc0
    CMP X0, #0xc0
    /* branch if I and F are not both masked already */
    BNE 1f
    RET
1:
    MSR DAIFSet, #3
    DSB NSH
    ISB
    RET

/*
 * void rt_hw_interrupt_enable(rt_base_t level);
 */
.globl rt_hw_interrupt_enable
rt_hw_interrupt_enable:
    AND X0, X0, #0xc0
    CMP X0, #0xc0
    /* branch if at least one of I/F was clear in the saved level */
    BNE 1f
    RET
1:
    ISB
    DSB NSH
    AND X0, X0, #0xc0
    MRS X1, DAIF
    BIC X1, X1, #0xc0
    ORR X0, X0, X1
    MSR DAIF, X0
    RET
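
/*
 * Typical critical-section usage from C (sketch):
 *
 *   rt_base_t level = rt_hw_interrupt_disable();
 *   ... touch data shared with ISRs ...
 *   rt_hw_interrupt_enable(level);
 *
 * Both routines take the fast path (plain RET, no barriers) when the
 * requested I/F mask state is already in effect, so nested critical
 * sections pay for the DSB/ISB only once.
 */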
.text

#ifdef RT_USING_SMP
/*
 * void rt_hw_context_switch_to(rt_ubase_t to, struct rt_thread *to_thread);
 * X0 --> to (thread stack)
 * X1 --> to_thread
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
    LDR X0, [X0]
    MOV SP, X0
    MOV X0, X1
    BL  rt_cpus_lock_status_restore
#ifdef RT_USING_SMART
    BL  rt_thread_self
    BL  lwp_user_setting_restore
#endif
    B   rt_hw_context_switch_exit
/*
 * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to, struct rt_thread *to_thread);
 * X0 --> from (from_thread stack)
 * X1 --> to (to_thread stack)
 * X2 --> to_thread
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
    SAVE_CONTEXT_FROM_EL1
    MOV X3, SP
    STR X3, [X0]            // store sp in preempted task's TCB
    LDR X0, [X1]            // get new task's stack pointer
    MOV SP, X0

    /* back up thread self */
    MOV X19, X2
    MOV X0, X19
    BL  rt_cpus_lock_status_restore
#ifdef RT_USING_SMART
    MOV X0, X19
    BL  lwp_user_setting_restore
#endif
    B   rt_hw_context_switch_exit
/*
 * void rt_hw_context_switch_interrupt(context, from sp, to sp, to tcb)
 * X0 :interrupt context
 * X1 :addr of from_thread's sp
 * X2 :addr of to_thread's sp
 * X3 :to_thread's tcb
 */
.globl rt_hw_context_switch_interrupt
rt_hw_context_switch_interrupt:
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X29, X30, [SP, #-0x10]!
#ifdef RT_USING_SMART
    BL  rt_thread_self
    BL  lwp_user_setting_save
#endif
    LDP X29, X30, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    STR X0, [X1]            // save interrupt context into from_thread's sp slot
    LDR X0, [X2]            // get to_thread's stack pointer
    MOV SP, X0
    MOV X0, X3
    MOV X19, X0
    BL  rt_cpus_lock_status_restore
    MOV X0, X19
#ifdef RT_USING_SMART
    BL  lwp_user_setting_restore
#endif
    B   rt_hw_context_switch_exit
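
/*
 * X0-X3 are argument registers and therefore caller-saved: the BLs into
 * rt_thread_self/lwp_user_setting_save may clobber them, which is why the
 * prologue spills all four (plus X29/X30) and reloads them before the
 * stack handover. X19 is callee-saved, so it safely carries the to-thread
 * pointer across rt_cpus_lock_status_restore.
 */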
.globl vector_fiq
vector_fiq:
    B .

START_POINT(vector_irq)
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!   /* X0 is thread sp */
    BL  rt_interrupt_enter
    LDP X0, X1, [SP]
#ifdef RT_USING_SMART
    SAVE_USER_CTX
#endif
    BL  rt_hw_trap_irq
#ifdef RT_USING_SMART
    LDP X0, X1, [SP]
    RESTORE_USER_CTX X0
#endif
    BL  rt_interrupt_leave
    LDP X0, X1, [SP], #0x10
    BL  rt_scheduler_do_irq_switch
    B   rt_hw_context_switch_exit
START_POINT_END(vector_irq)

.global rt_hw_context_switch_exit
rt_hw_context_switch_exit:
    CLREX
    MOV X0, SP
    RESTORE_CONTEXT
#else /* !RT_USING_SMP */

/*
 * void rt_hw_context_switch_to(rt_ubase_t to);
 * X0 --> to sp
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
    CLREX
    LDR X0, [X0]
    RESTORE_CONTEXT

/*
 * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to);
 * X0 --> from sp
 * X1 --> to sp
 * X2 --> to thread
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
    CLREX
    SAVE_CONTEXT_FROM_EL1
    MOV X2, SP
    STR X2, [X0]            // store sp in preempted task's TCB
    LDR X0, [X1]            // get new task's stack pointer
    RESTORE_CONTEXT
/*
 * void rt_hw_context_switch_interrupt(rt_ubase_t from, rt_ubase_t to, rt_thread_t from_thread, rt_thread_t to_thread);
 */
.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread
.globl rt_hw_context_switch_interrupt
rt_hw_context_switch_interrupt:
    CLREX
    LDR X6, =rt_thread_switch_interrupt_flag
    LDR X7, [X6]
    CMP X7, #1
    B.EQ _reswitch
    LDR X4, =rt_interrupt_from_thread   // set rt_interrupt_from_thread
    STR X0, [X4]
    MOV X7, #1                          // set rt_thread_switch_interrupt_flag to 1
    STR X7, [X6]
    STP X1, X30, [SP, #-0x10]!
#ifdef RT_USING_SMART
    MOV X0, X2
    BL  lwp_user_setting_save
#endif
    LDP X1, X30, [SP], #0x10
_reswitch:
    LDR X6, =rt_interrupt_to_thread     // set rt_interrupt_to_thread
    STR X1, [X6]
    RET
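
/*
 * Deferred-switch protocol for the UP build, as implemented above and in
 * vector_irq below: this routine only records the from/to sp slots in
 * rt_interrupt_from_thread/rt_interrupt_to_thread and raises
 * rt_thread_switch_interrupt_flag; the actual stack swap happens once, at
 * the tail of vector_irq, when the flag is seen set. Repeated requests
 * before the IRQ exits just update the destination (_reswitch), so only
 * the last to-thread wins.
 */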
.text

// -- Exception handlers ----------------------------------

.align 8
.globl vector_fiq
vector_fiq:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!
    BL  rt_hw_trap_fiq
    LDP X0, X1, [SP], #0x10
    RESTORE_CONTEXT

.globl rt_interrupt_enter
.globl rt_interrupt_leave
.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread

// -------------------------------------------------------------------

.align 8
.globl vector_irq
vector_irq:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!   /* X0 is thread sp */

    BL  rt_interrupt_enter
    BL  rt_hw_trap_irq
    BL  rt_interrupt_leave

    LDP X0, X1, [SP], #0x10

    // if rt_thread_switch_interrupt_flag is set, perform the deferred
    // context switch here and never return to the preempted context
    LDR X1, =rt_thread_switch_interrupt_flag
    LDR X2, [X1]
    CMP X2, #1
    B.NE vector_irq_exit

    MOV X2, #0                  // clear flag
    STR X2, [X1]

    LDR X3, =rt_interrupt_from_thread
    LDR X4, [X3]
    STR X0, [X4]                // store sp in preempted task's TCB

    LDR X3, =rt_interrupt_to_thread
    LDR X4, [X3]
    LDR X0, [X4]                // get new task's stack pointer

    RESTORE_CONTEXT

vector_irq_exit:
    MOV SP, X0
    RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
#endif /* RT_USING_SMP */
// -------------------------------------------------

START_POINT(vector_exception)
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!
#ifdef RT_USING_SMART
    SAVE_USER_CTX
#endif

    BL  rt_hw_trap_exception
#ifdef RT_USING_SMART
    LDP X0, X1, [SP]
    RESTORE_USER_CTX X0
#endif

    LDP X0, X1, [SP], #0x10
    MOV SP, X0
    RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
START_POINT_END(vector_exception)

START_POINT(vector_serror)
    SAVE_CONTEXT
#ifdef RT_USING_SMART
    SAVE_USER_CTX
#endif
    STP X0, X1, [SP, #-0x10]!
    BL  rt_hw_trap_serror
    b .
START_POINT_END(vector_serror)