/* context_gcc.S — AArch64 context switch and exception entry (GNU as) */
/*
 * Copyright (c) 2006-2024, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2021-05-18     Jesven       the first version
 * 2023-06-24     WangXiaoyao  Support backtrace for user thread
 * 2024-01-06     Shell        Fix barrier on irq_disable/enable
 * 2024-01-18     Shell        fix implicit dependency of cpuid management
 */

#ifndef __ASSEMBLY__
#define __ASSEMBLY__
#endif

#include "rtconfig.h"
#include "asm-generic.h"
#include "asm-fpu.h"
#include "armv8.h"

#ifndef RT_USING_SMP
/* UP build only: globals shared with the IRQ-driven context switch path. */
.section .data
rt_interrupt_from_thread:        .zero 8    /* address of outgoing thread's sp slot */
rt_interrupt_to_thread:          .zero 8    /* address of incoming thread's sp slot */
rt_thread_switch_interrupt_flag: .zero 8    /* non-zero: a switch was requested from IRQ */
#endif
.text

/**
 * #ifdef RT_USING_OFW
 * void rt_hw_cpu_id_set(long cpuid)   -- cpuid supplied by OFW in x0
 * #else
 * void rt_hw_cpu_id_set(void)         -- cpuid derived from MPIDR_EL1
 * #endif
 *
 * Publish the current core's logical id in a per-cpu system register so
 * rt_hw_cpu_id() can read it back cheaply.
 */
.type rt_hw_cpu_id_set, @function
rt_hw_cpu_id_set:
#ifdef ARCH_USING_GENERIC_CPUID
.globl rt_hw_cpu_id_set
#else /* !ARCH_USING_GENERIC_CPUID */
.weak rt_hw_cpu_id_set               /* allow a BSP to override this default */
#endif /* ARCH_USING_GENERIC_CPUID */

#ifndef RT_USING_OFW
    mrs x0, mpidr_el1                /* MPIDR_EL1: Multi-Processor Affinity Register */
#ifdef ARCH_ARM_CORTEX_A55
    lsr x0, x0, #8                   /* A55: core number is held one affinity level up */
#endif /* ARCH_ARM_CORTEX_A55 */
    and x0, x0, #15                  /* keep the low affinity bits as the logical id */
#endif /* !RT_USING_OFW */

#ifdef ARCH_USING_HW_THREAD_SELF
    msr tpidrro_el0, x0              /* EL0 read-only thread reg holds the cpu id */
#else /* !ARCH_USING_HW_THREAD_SELF */
    msr tpidr_el1, x0                /* EL1 thread reg holds the cpu id */
#endif /* ARCH_USING_HW_THREAD_SELF */
    ret
/*
 * int rt_hw_cpu_id(void)
 *
 * Return the logical cpu id previously published by rt_hw_cpu_id_set().
 * Single-core builds always return 0.
 */
.type rt_hw_cpu_id, @function
rt_hw_cpu_id:
#ifdef ARCH_USING_GENERIC_CPUID
.globl rt_hw_cpu_id
#else /* !ARCH_USING_GENERIC_CPUID */
.weak rt_hw_cpu_id                   /* allow a BSP to override this default */
#endif /* ARCH_USING_GENERIC_CPUID */

#if RT_CPUS_NR > 1
#ifdef ARCH_USING_GENERIC_CPUID
    mrs x0, tpidrro_el0              /* mirror of rt_hw_cpu_id_set's choice */
#else /* !ARCH_USING_GENERIC_CPUID */
    mrs x0, tpidr_el1
#endif /* ARCH_USING_GENERIC_CPUID */
#else /* RT_CPUS_NR == 1 */
    mov x0, xzr                      /* single core: id is always 0 */
#endif
    ret
/*
 * void rt_hw_set_process_id(size_t id)
 *
 * Write the process id into CONTEXTIDR_EL1 (context id register, visible
 * to trace/debug hardware).
 */
.global rt_hw_set_process_id
rt_hw_set_process_id:
    msr CONTEXTIDR_EL1, x0
    ret
/*
 * enable gtimer
 *
 * Set CNTP_CTL_EL0.ENABLE (bit 0) to start the EL1 physical timer.
 */
.globl rt_hw_gtimer_enable
rt_hw_gtimer_enable:
    MOV X0, #1                       /* ENABLE = 1, IMASK = 0 */
    MSR CNTP_CTL_EL0, X0
    RET
/*
 * set gtimer CNTP_TVAL_EL0 value
 *
 * X0 = countdown value; the timer fires when it reaches zero.
 */
.globl rt_hw_set_gtimer_val
rt_hw_set_gtimer_val:
    MSR CNTP_TVAL_EL0, X0
    RET
/*
 * get gtimer CNTP_TVAL_EL0 value
 *
 * Returns the remaining countdown of the EL1 physical timer in X0.
 */
.globl rt_hw_get_gtimer_val
rt_hw_get_gtimer_val:
    MRS X0, CNTP_TVAL_EL0
    RET
/* Return the free-running physical counter (CNTPCT_EL0) in X0. */
.globl rt_hw_get_cntpct_val
rt_hw_get_cntpct_val:
    MRS X0, CNTPCT_EL0
    RET
/*
 * get gtimer frq value
 *
 * Returns the generic timer frequency (CNTFRQ_EL0, Hz) in X0.
 */
.globl rt_hw_get_gtimer_frq
rt_hw_get_gtimer_frq:
    MRS X0, CNTFRQ_EL0
    RET
/*
 * First code executed by a freshly created thread.
 * NOTE(review): x19/x20 are presumably seeded by the thread stack
 * initialization elsewhere (x19 = entry, x20 = exit hook) — confirm
 * against rt_hw_stack_init.
 */
START_POINT(_thread_start)
    blr x19                          /* call the thread entry */
    mov x29, #0                      /* clear FP: terminates backtraces here */
    blr x20                          /* entry returned: call the exit hook */
    b . /* never here */
START_POINT_END(_thread_start)
/*
 * Push a full exception frame onto the current SP, lowest-numbered
 * registers at the highest address. Frame top-down after the macro:
 *   ELR/SPSR, SP_EL0/X30, FPCR/FPSR, X28..X0, FPU regs.
 * RESTORE_CONTEXT pops in the exact mirror order — keep them in sync.
 * On exit X0 = frame base (the new SP) for the caller to stash.
 */
.macro SAVE_CONTEXT
    /* Save the entire context. */
    SAVE_FPU SP
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X4, X5, [SP, #-0x10]!
    STP X6, X7, [SP, #-0x10]!
    STP X8, X9, [SP, #-0x10]!
    STP X10, X11, [SP, #-0x10]!
    STP X12, X13, [SP, #-0x10]!
    STP X14, X15, [SP, #-0x10]!
    STP X16, X17, [SP, #-0x10]!
    STP X18, X19, [SP, #-0x10]!
    STP X20, X21, [SP, #-0x10]!
    STP X22, X23, [SP, #-0x10]!
    STP X24, X25, [SP, #-0x10]!
    STP X26, X27, [SP, #-0x10]!
    STP X28, X29, [SP, #-0x10]!
    MRS X28, FPCR                    /* X28/X29 already saved: reuse as scratch */
    MRS X29, FPSR
    STP X28, X29, [SP, #-0x10]!
    MRS X29, SP_EL0                  /* user stack pointer */
    STP X29, X30, [SP, #-0x10]!
    MRS X3, SPSR_EL1                 /* interrupted PSTATE */
    MRS X2, ELR_EL1                  /* interrupted PC */
    STP X2, X3, [SP, #-0x10]!
    MOV X0, SP /* Move SP into X0 for saving. */
.endm
/*
 * Build the same frame layout as SAVE_CONTEXT, but for a voluntary
 * switch from EL1 (no real exception happened): the "ELR" slot gets the
 * caller's return address (X30) and the "SPSR" slot a synthesized EL1h
 * PSTATE with IRQ/FIQ masked, so RESTORE_CONTEXT's ERET resumes at the
 * call site.
 */
.macro SAVE_CONTEXT_FROM_EL1
    /* Save the entire context. */
    SAVE_FPU SP
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X4, X5, [SP, #-0x10]!
    STP X6, X7, [SP, #-0x10]!
    STP X8, X9, [SP, #-0x10]!
    STP X10, X11, [SP, #-0x10]!
    STP X12, X13, [SP, #-0x10]!
    STP X14, X15, [SP, #-0x10]!
    STP X16, X17, [SP, #-0x10]!
    STP X18, X19, [SP, #-0x10]!
    STP X20, X21, [SP, #-0x10]!
    STP X22, X23, [SP, #-0x10]!
    STP X24, X25, [SP, #-0x10]!
    STP X26, X27, [SP, #-0x10]!
    STP X28, X29, [SP, #-0x10]!
    MRS X28, FPCR
    MRS X29, FPSR
    STP X28, X29, [SP, #-0x10]!
    MRS X29, SP_EL0
    STP X29, X30, [SP, #-0x10]!
    MOV X19, #((3 << 6) | 0x4 | 0x1) /* el1h, disable interrupt */
    MOV X18, X30                     /* resume PC = return address */
    STP X18, X19, [SP, #-0x10]!      /* fake ELR/SPSR pair */
.endm
#ifdef RT_USING_SMP
/*
 * Pop a frame built by SAVE_CONTEXT/SAVE_CONTEXT_FROM_EL1 and resume.
 * In: X0 = frame base (becomes SP).
 * The TST on SPSR.M[4:0] sets Z when the saved mode is EL0 (user); the
 * flags survive every MSR/LDP below, so the final BEQ routes user
 * threads through arch_ret_to_user instead of a plain ERET.
 */
.macro RESTORE_CONTEXT
    /* Set the SP to point to the stack of the task being restored. */
    MOV SP, X0
    LDP X2, X3, [SP], #0x10 /* SPSR and ELR. */
    TST X3, #0x1f                    /* Z = saved mode is EL0 */
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29                  /* restore user stack pointer */
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_SMART
    BEQ arch_ret_to_user             /* user thread: leave via lwp return path */
#endif
    ERET
.endm

#else /* !RT_USING_SMP */

/*
 * UP variant: additionally switches the address space before unwinding.
 * In: X0 = frame base. X19 (callee-saved) carries the current thread
 * across the helper calls; the stack is already the target thread's.
 */
.macro RESTORE_CONTEXT
    /* Set the SP to point to the stack of the task being restored. */
    MOV SP, X0
#ifdef RT_USING_SMART
    BL rt_thread_self
    MOV X19, X0                      /* keep thread ptr across the next BLs */
    BL lwp_aspace_switch             /* switch MMU context for this thread */
    MOV X0, X19
    BL lwp_user_setting_restore
#endif
    LDP X2, X3, [SP], #0x10 /* SPSR and ELR. */
    TST X3, #0x1f                    /* Z = saved mode is EL0 */
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_SMART
    BEQ arch_ret_to_user
#endif
    ERET
.endm
#endif
/*
 * Unwind an exception frame already addressed by SP, without any
 * address-space switch — used when returning to the same thread that
 * was interrupted. Same layout/flag trick as RESTORE_CONTEXT.
 */
.macro RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
    /* the SP is already ok */
    LDP X2, X3, [SP], #0x10 /* SPSR and ELR. */
    TST X3, #0x1f                    /* Z = saved mode is EL0 */
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_SMART
    BEQ arch_ret_to_user
#endif
    ERET
.endm
/*
 * If the exception came from EL0 (SPSR mode bits == 0), notify the lwp
 * layer so it can record the user context. Clobbers X0/X1; X0/X1 are
 * reloaded from [SP] afterwards, so the caller must have them spilled
 * at [SP] before invoking this macro.
 */
.macro SAVE_USER_CTX
    MRS X1, SPSR_EL1
    AND X1, X1, 0xf                  /* mode field: 0 means EL0t */
    CMP X1, XZR
    BNE 1f                           /* kernel context: nothing to do */
    BL lwp_uthread_ctx_save
    LDP X0, X1, [SP]                 /* reload args clobbered by the call */
1:
.endm
/*
 * Counterpart of SAVE_USER_CTX: if the saved SPSR in the frame at \ctx
 * shows an EL0 context, let the lwp layer drop its user-context record.
 * \ctx = base of the saved exception frame (SPSR at a known offset).
 */
.macro RESTORE_USER_CTX, ctx
    LDR X1, [\ctx, #CONTEXT_OFFSET_SPSR_EL1]
    AND X1, X1, 0x1f                 /* full mode field this time */
    CMP X1, XZR
    BNE 1f                           /* kernel context: nothing to do */
    BL lwp_uthread_ctx_restore
1:
.endm
#ifdef RT_USING_SMP
/* In SMP builds the global irq API maps onto the per-cpu local one. */
#define rt_hw_interrupt_disable rt_hw_local_irq_disable
#define rt_hw_interrupt_enable rt_hw_local_irq_enable
#endif

.text

/*
 * rt_bool_t rt_hw_interrupt_is_disabled(void)
 * Returns 1 in X0 if IRQ or FIQ is currently masked, else 0.
 */
.global rt_hw_interrupt_is_disabled
rt_hw_interrupt_is_disabled:
    MRS X0, DAIF
    TST X0, #0xc0                    /* I (bit 7) and F (bit 6) masks */
    CSET X0, NE                      /* 1 if any of them is set */
    RET
/*
 * rt_base_t rt_hw_interrupt_disable();
 *
 * Mask IRQ+FIQ and return the previous DAIF I/F bits in X0 (the "level"
 * token later passed to rt_hw_interrupt_enable). If both were already
 * masked, return without touching DAIF or issuing barriers.
 */
.globl rt_hw_interrupt_disable
rt_hw_interrupt_disable:
    MRS X0, DAIF
    AND X0, X0, #0xc0                /* keep only the I/F bits as the level */
    CMP X0, #0xc0
    /* branch if bits not both set(zero) */
    BNE 1f
    RET                              /* already fully masked: fast path */
1:
    MSR DAIFSet, #3                  /* set I and F */
    DSB NSH                          /* order against subsequent accesses */
    ISB
    RET
  327. /*
  328. * void rt_hw_interrupt_enable(rt_base_t level);
  329. */
  330. .globl rt_hw_interrupt_enable
  331. rt_hw_interrupt_enable:
  332. AND X0, X0, #0xc0
  333. CMP X0, #0xc0
  334. /* branch if one of the bits not set(zero) */
  335. BNE 1f
  336. RET
  337. 1:
  338. ISB
  339. DSB NSH
  340. AND X0, X0, #0xc0
  341. MRS X1, DAIF
  342. BIC X1, X1, #0xc0
  343. ORR X0, X0, X1
  344. MSR DAIF, X0
  345. RET
.text

#ifdef RT_USING_SMP
/*
 * void rt_hw_context_switch_to(rt_uint3 to, struct rt_thread *to_thread);
 * X0 --> to (thread stack)
 * X1 --> to_thread
 *
 * First switch on a cpu: no context to save, just adopt the target
 * thread's saved stack and unwind through rt_hw_context_switch_exit.
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
    LDR X0, [X0]                     /* X0 = to_thread's saved sp */
    MOV SP, X0
    MOV X0, X1
    BL rt_cpus_lock_status_restore   /* hand the cpus lock state to to_thread */
#ifdef RT_USING_SMART
    BL rt_thread_self
    BL lwp_user_setting_restore
#endif
    B rt_hw_context_switch_exit
/*
 * void rt_hw_context_switch(rt_uint32 from, rt_uint32 to, struct rt_thread *to_thread);
 * X0 --> from (from_thread stack)
 * X1 --> to (to_thread stack)
 * X2 --> to_thread
 *
 * Voluntary (thread-mode) switch: snapshot the current thread with
 * SAVE_CONTEXT_FROM_EL1, park its sp in its TCB, then adopt to_thread's
 * stack and exit through rt_hw_context_switch_exit.
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
    SAVE_CONTEXT_FROM_EL1
    mov X3, SP
    str X3, [X0]                     // store sp in preempted tasks TCB
    ldr X0, [X1]                     // get new task stack pointer
    mov SP, X0
    /* backup thread self */
    mov x19, x2                      /* x19 is callee-saved: survives the BLs */
    mov x0, x19
    bl rt_cpus_lock_status_restore
#ifdef RT_USING_SMART
    mov x0, x19
    bl lwp_user_setting_restore
#endif
    b rt_hw_context_switch_exit
/*
 * void rt_hw_context_switch_interrupt(context, from sp, to sp, tp tcb)
 * X0 :interrupt context
 * X1 :addr of from_thread's sp
 * X2 :addr of to_thread's sp
 * X3 :to_thread's tcb
 *
 * Called from IRQ context with the preempted thread's frame already
 * built (X0). Saves the frame pointer into from_thread, adopts
 * to_thread's stack and finishes via rt_hw_context_switch_exit.
 */
.globl rt_hw_context_switch_interrupt
rt_hw_context_switch_interrupt:
    STP X0, X1, [SP, #-0x10]!        /* args must survive the BLs below */
    STP X2, X3, [SP, #-0x10]!
    STP X29, X30, [SP, #-0x10]!
#ifdef RT_USING_SMART
    BL rt_thread_self
    BL lwp_user_setting_save
#endif
    LDP X29, X30, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    STR X0, [X1]                     /* from_thread->sp = interrupt context */
    LDR X0, [X2]                     /* X0 = to_thread's saved sp */
    MOV SP, X0
    MOV X0, X3
    MOV X19, X0                      /* keep to_thread across the BL */
    BL rt_cpus_lock_status_restore
    MOV X0, X19
#ifdef RT_USING_SMART
    BL lwp_user_setting_restore
#endif
    B rt_hw_context_switch_exit
/* FIQ is not handled in the SMP build: park forever. */
.globl vector_fiq
vector_fiq:
    B .
/*
 * SMP IRQ entry: save full context, dispatch the interrupt, then let the
 * scheduler decide whether to switch; rt_scheduler_do_irq_switch does
 * not return when it performs a switch.
 */
START_POINT(vector_irq)
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]! /* X0 is thread sp */
    BL rt_interrupt_enter
    LDP X0, X1, [SP]                 /* reload frame ptr clobbered by the BL */
#ifdef RT_USING_SMART
    SAVE_USER_CTX
#endif
    BL rt_hw_trap_irq                /* dispatch to the registered handler */
#ifdef RT_USING_SMART
    LDP X0, X1, [SP]
    RESTORE_USER_CTX X0
#endif
    BL rt_interrupt_leave
    LDP X0, X1, [SP], #0x10
    BL rt_scheduler_do_irq_switch    /* may switch away and never return */
    B rt_hw_context_switch_exit
START_POINT_END(vector_irq)
/*
 * Common tail of every SMP switch path: SP already points at the target
 * thread's saved frame; clear the exclusive monitor and unwind.
 */
.global rt_hw_context_switch_exit
rt_hw_context_switch_exit:
    CLREX                            /* drop stale load-exclusive reservation */
    MOV X0, SP
    RESTORE_CONTEXT

#else /* !RT_USING_SMP */
/*
 * void rt_hw_context_switch_to(rt_ubase_t to);
 * X0 --> to sp
 *
 * UP first switch: nothing to save, just unwind the target's frame.
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
    CLREX                            /* drop stale load-exclusive reservation */
    LDR X0, [X0]                     /* X0 = to_thread's saved sp */
    RESTORE_CONTEXT
/*
 * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to);
 * X0 --> from sp
 * X1 --> to sp
 * X2 --> to thread
 *
 * UP voluntary switch: snapshot current context, park sp in the
 * outgoing TCB, unwind the incoming thread's frame.
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
    CLREX
    SAVE_CONTEXT_FROM_EL1
    MOV X2, SP
    STR X2, [X0]                     // store sp in preempted tasks TCB
    LDR X0, [X1]                     // get new task stack pointer
    RESTORE_CONTEXT
/*
 * void rt_hw_context_switch_interrupt(rt_ubase_t from, rt_ubase_t to, rt_thread_t from_thread, rt_thread_t to_thread);
 *
 * UP deferred switch: record from/to sp slots and raise
 * rt_thread_switch_interrupt_flag; vector_irq performs the actual
 * switch on its way out. If a switch is already pending, only the
 * destination is updated.
 */
.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread
.globl rt_hw_context_switch_interrupt
rt_hw_context_switch_interrupt:
    CLREX
    LDR X6, =rt_thread_switch_interrupt_flag
    LDR X7, [X6]
    CMP X7, #1
    B.EQ _reswitch                   /* pending already: keep original "from" */
    LDR X4, =rt_interrupt_from_thread // set rt_interrupt_from_thread
    STR X0, [X4]
    MOV X7, #1                       // set rt_thread_switch_interrupt_flag to 1
    STR X7, [X6]
    STP X1, X30, [SP, #-0x10]!       /* preserve to-sp and LR across the BL */
#ifdef RT_USING_SMART
    MOV X0, X2
    BL lwp_user_setting_save
#endif
    LDP X1, X30, [SP], #0x10
_reswitch:
    LDR X6, =rt_interrupt_to_thread  // set rt_interrupt_to_thread
    STR X1, [X6]
    RET
.text

// -- Exception handlers ----------------------------------

/* UP FIQ entry: save context, dispatch, restore the same thread. */
.align 8
.globl vector_fiq
vector_fiq:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!        /* keep frame ptr across the BL */
    BL rt_hw_trap_fiq
    LDP X0, X1, [SP], #0x10
    RESTORE_CONTEXT

/* Symbols resolved in C / in .data above. */
.globl rt_interrupt_enter
.globl rt_interrupt_leave
.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread

// -------------------------------------------------------------------
/*
 * UP IRQ entry: save context, dispatch, then honor a deferred switch
 * requested by rt_hw_context_switch_interrupt (flag set) — otherwise
 * return to the interrupted thread without an MMU switch.
 */
.align 8
.globl vector_irq
vector_irq:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]! /* X0 is thread sp */
    BL rt_interrupt_enter
    BL rt_hw_trap_irq
    BL rt_interrupt_leave
    LDP X0, X1, [SP], #0x10
    // if rt_thread_switch_interrupt_flag set, jump to
    // rt_hw_context_switch_interrupt_do and don't return
    LDR X1, =rt_thread_switch_interrupt_flag
    LDR X2, [X1]
    CMP X2, #1
    B.NE vector_irq_exit             /* no switch pending: plain return */
    MOV X2, #0                       // clear flag
    STR X2, [X1]
    LDR X3, =rt_interrupt_from_thread
    LDR X4, [X3]
    STR x0, [X4]                     // store sp in preempted tasks's TCB
    LDR x3, =rt_interrupt_to_thread
    LDR X4, [X3]
    LDR x0, [X4]                     // get new task's stack pointer
    RESTORE_CONTEXT
vector_irq_exit:
    MOV SP, X0
    RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
#endif /* RT_USING_SMP */

// -------------------------------------------------
/*
 * Synchronous exception entry: save context, hand off to the C trap
 * handler, then return to the *same* thread (no MMU switch).
 */
START_POINT(vector_exception)
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!        /* keep frame ptr across the BLs */
#ifdef RT_USING_SMART
    SAVE_USER_CTX
#endif
    BL rt_hw_trap_exception
#ifdef RT_USING_SMART
    LDP X0, X1, [SP]
    RESTORE_USER_CTX X0
#endif
    LDP X0, X1, [SP], #0x10
    MOV SP, X0
    RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
START_POINT_END(vector_exception)
/*
 * SError entry: treated as fatal — report via the C handler and park.
 */
START_POINT(vector_serror)
    SAVE_CONTEXT
#ifdef RT_USING_SMART
    SAVE_USER_CTX
#endif
    STP X0, X1, [SP, #-0x10]!
    BL rt_hw_trap_serror
    b .                              /* never returns */
START_POINT_END(vector_serror)