/* context_gcc.S */
/*
 * Copyright (c) 2006-2021, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author        Notes
 * 2021-05-18     Jesven        the first version
 * 2023-06-24     WangXiaoyao   Support backtrace for user thread
 */

#ifndef __ASSEMBLY__
#define __ASSEMBLY__
#endif

#include "rtconfig.h"
#include "asm-generic.h"
#include "asm-fpu.h"
#include "armv8.h"
.text

/*
 * void rt_hw_cpu_id_set(void)
 * Derive this core's id from MPIDR_EL1 and cache it in TPIDR_EL1 so that
 * rt_hw_cpu_id() can read it back with a single MRS.
 */
.weak rt_hw_cpu_id_set
.type rt_hw_cpu_id_set, @function
rt_hw_cpu_id_set:
    mrs x0, mpidr_el1           /* MPIDR_EL1: Multi-Processor Affinity Register */
#ifdef ARCH_ARM_CORTEX_A55
    lsr x0, x0, #8              /* on Cortex-A55 the core number is in Aff1 */
#endif
    and x0, x0, #15             /* keep the low affinity bits -> id 0..15 */
    msr tpidr_el1, x0           /* cache the id in TPIDR_EL1 */
    ret
/*
 * int rt_hw_cpu_id(void)
 * Return the CPU id previously cached by rt_hw_cpu_id_set().
 */
.weak rt_hw_cpu_id
.type rt_hw_cpu_id, @function
rt_hw_cpu_id:
    mrs x0, tpidr_el1           /* TPIDR_EL1 holds the id stored by rt_hw_cpu_id_set */
    ret
/*
 * void rt_hw_set_process_id(size_t id)
 * Publish the current process id in CONTEXTIDR_EL1 (context id register,
 * used by trace/debug components).
 */
.global rt_hw_set_process_id
rt_hw_set_process_id:
    msr CONTEXTIDR_EL1, x0
    ret
/*
 * void rt_hw_gtimer_enable(void)
 * Enable the EL0 physical generic timer: CNTP_CTL_EL0.ENABLE = 1
 * (IMASK/ISTATUS bits cleared as a side effect of writing 1).
 */
.globl rt_hw_gtimer_enable
rt_hw_gtimer_enable:
    MOV X0, #1
    MSR CNTP_CTL_EL0, X0
    RET
/*
 * void rt_hw_set_gtimer_val(rt_uint64_t value)
 * Program the timer countdown value (CNTP_TVAL_EL0); the timer fires
 * after 'value' counter ticks.
 */
.globl rt_hw_set_gtimer_val
rt_hw_set_gtimer_val:
    MSR CNTP_TVAL_EL0, X0
    RET
/*
 * rt_uint64_t rt_hw_get_gtimer_val(void)
 * Read the remaining countdown of the physical timer (CNTP_TVAL_EL0).
 */
.globl rt_hw_get_gtimer_val
rt_hw_get_gtimer_val:
    MRS X0, CNTP_TVAL_EL0
    RET
/*
 * rt_uint64_t rt_hw_get_cntpct_val(void)
 * Read the free-running physical counter (CNTPCT_EL0).
 */
.globl rt_hw_get_cntpct_val
rt_hw_get_cntpct_val:
    MRS X0, CNTPCT_EL0
    RET
/*
 * rt_uint64_t rt_hw_get_gtimer_frq(void)
 * Read the generic timer frequency in Hz (CNTFRQ_EL0).
 */
.globl rt_hw_get_gtimer_frq
rt_hw_get_gtimer_frq:
    MRS X0, CNTFRQ_EL0
    RET
/*
 * Thread entry trampoline.
 * Presumably set up by the thread stack-init code with X19 = thread entry
 * and X20 = thread exit/cleanup routine — TODO confirm against the
 * corresponding rt_hw_stack_init().
 */
START_POINT(_thread_start)
    blr x19                     /* call the thread entry function */
    mov x29, #0                 /* zero frame pointer: terminate backtraces here */
    blr x20                     /* thread exit handler; must not return */
    b . /* never here */
START_POINT_END(_thread_start)
/*
 * SAVE_CONTEXT — push the full CPU context of the interrupted code onto
 * the current (EL1) stack: FPU/SIMD state, X0-X29, FPCR/FPSR, SP_EL0 and
 * X30, and finally ELR_EL1/SPSR_EL1 (resume address + program state).
 * On exit X0 holds the new SP so the caller can store it into the TCB.
 * The push order must mirror RESTORE_CONTEXT's pop order exactly.
 */
.macro SAVE_CONTEXT
    /* Save the entire context. */
    SAVE_FPU SP
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X4, X5, [SP, #-0x10]!
    STP X6, X7, [SP, #-0x10]!
    STP X8, X9, [SP, #-0x10]!
    STP X10, X11, [SP, #-0x10]!
    STP X12, X13, [SP, #-0x10]!
    STP X14, X15, [SP, #-0x10]!
    STP X16, X17, [SP, #-0x10]!
    STP X18, X19, [SP, #-0x10]!
    STP X20, X21, [SP, #-0x10]!
    STP X22, X23, [SP, #-0x10]!
    STP X24, X25, [SP, #-0x10]!
    STP X26, X27, [SP, #-0x10]!
    STP X28, X29, [SP, #-0x10]!
    MRS X28, FPCR               /* X28/X29 already saved; reuse as scratch */
    MRS X29, FPSR
    STP X28, X29, [SP, #-0x10]!
    MRS X29, SP_EL0             /* user-mode (EL0) stack pointer */
    STP X29, X30, [SP, #-0x10]!
    MRS X3, SPSR_EL1            /* saved program state of the interrupted context */
    MRS X2, ELR_EL1             /* resume address */
    STP X2, X3, [SP, #-0x10]!
    MOV X0, SP /* Move SP into X0 for saving. */
.endm
/*
 * SAVE_CONTEXT_FROM_EL1 — like SAVE_CONTEXT, but used from a voluntary
 * (thread-mode, EL1) switch rather than an exception: there is no valid
 * ELR/SPSR to save, so it synthesizes them — ELR := X30 (our caller's
 * return address) and SPSR := EL1h with IRQ/FIQ masked — so that a later
 * RESTORE_CONTEXT "returns" into the caller of the switch function.
 */
.macro SAVE_CONTEXT_FROM_EL1
    /* Save the entire context. */
    SAVE_FPU SP
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X4, X5, [SP, #-0x10]!
    STP X6, X7, [SP, #-0x10]!
    STP X8, X9, [SP, #-0x10]!
    STP X10, X11, [SP, #-0x10]!
    STP X12, X13, [SP, #-0x10]!
    STP X14, X15, [SP, #-0x10]!
    STP X16, X17, [SP, #-0x10]!
    STP X18, X19, [SP, #-0x10]!
    STP X20, X21, [SP, #-0x10]!
    STP X22, X23, [SP, #-0x10]!
    STP X24, X25, [SP, #-0x10]!
    STP X26, X27, [SP, #-0x10]!
    STP X28, X29, [SP, #-0x10]!
    MRS X28, FPCR
    MRS X29, FPSR
    STP X28, X29, [SP, #-0x10]!
    MRS X29, SP_EL0
    STP X29, X30, [SP, #-0x10]!
    MOV X19, #((3 << 6) | 0x4 | 0x1) /* el1h, disable interrupt: SPSR I|F set, M[3:0]=0101 */
    MOV X18, X30                /* fake ELR: resume at our caller's return address */
    STP X18, X19, [SP, #-0x10]!
.endm
#ifdef RT_USING_SMP
/*
 * RESTORE_CONTEXT (SMP) — pop the context pushed by SAVE_CONTEXT* and
 * resume it via ERET. Expects X0 = saved stack pointer of the thread
 * being restored. If the saved SPSR mode field is EL0t (i.e. a user
 * thread), control leaves through arch_ret_to_user instead of ERET.
 */
.macro RESTORE_CONTEXT
    /* Set the SP to point to the stack of the task being restored. */
    MOV SP, X0
    LDP X2, X3, [SP], #0x10 /* SPSR and ELR. */
    TST X3, #0x1f               /* Z set <=> mode bits all zero (EL0t) */
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29             /* restore the user stack pointer */
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_LWP
    BEQ arch_ret_to_user        /* flags from the TST above survive the loads */
#endif
    ERET
.endm
#else
/*
 * RESTORE_CONTEXT (single-core) — additionally switches the address
 * space and user settings of the thread being resumed before popping
 * its register context. Expects X0 = saved stack pointer.
 */
.macro RESTORE_CONTEXT
    /* Set the SP to point to the stack of the task being restored. */
    MOV SP, X0
#ifdef RT_USING_LWP
    BL rt_thread_self           /* X0 = current thread */
    MOV X19, X0                 /* keep it across the calls; X19 is reloaded below */
    BL lwp_aspace_switch        /* switch MMU tables to the thread's aspace */
    MOV X0, X19
    BL lwp_user_setting_restore
#endif
    LDP X2, X3, [SP], #0x10 /* SPSR and ELR. */
    TST X3, #0x1f               /* Z set <=> mode bits all zero (EL0t) */
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29             /* restore the user stack pointer */
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_LWP
    BEQ arch_ret_to_user        /* flags from the TST above survive the loads */
#endif
    ERET
.endm
#endif
/*
 * RESTORE_CONTEXT_WITHOUT_MMU_SWITCH — pop the saved context from the
 * current SP and resume it; used when returning to the same thread that
 * was interrupted, so no address-space switch is needed.
 */
.macro RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
    /* the SP is already ok */
    LDP X2, X3, [SP], #0x10 /* SPSR and ELR. */
    TST X3, #0x1f               /* Z set <=> mode bits all zero (EL0t) */
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29             /* restore the user stack pointer */
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_LWP
    BEQ arch_ret_to_user        /* flags from the TST above survive the loads */
#endif
    ERET
.endm
/*
 * SAVE_USER_CTX — if the interrupted context came from EL0 (user mode),
 * let the LWP layer record a user-thread context (backtrace support).
 * Clobbers X1 and, across the call, X0; both are reloaded from [SP], so
 * the caller must have pushed X0/X1 there beforehand.
 */
.macro SAVE_USER_CTX
    MRS X1, SPSR_EL1
    AND X1, X1, 0xf             /* NOTE(review): masks M[3:0] only, while RESTORE_USER_CTX uses 0x1f — confirm intentional */
    CMP X1, XZR                 /* zero => came from EL0 (user) */
    BNE 1f                      /* kernel context: nothing to do */
    BL lwp_uthread_ctx_save
    LDP X0, X1, [SP]            /* reload args clobbered by the call */
1:
.endm
/*
 * RESTORE_USER_CTX — counterpart of SAVE_USER_CTX: if the context saved
 * at \ctx came from EL0, let the LWP layer release the recorded
 * user-thread context. Clobbers X0/X1 (caller-saved across the BL).
 */
.macro RESTORE_USER_CTX, ctx
    LDR X1, [\ctx, #CONTEXT_OFFSET_SPSR_EL1]
    AND X1, X1, 0x1f            /* M[4:0] of the saved SPSR */
    CMP X1, XZR                 /* zero => context was EL0 (user) */
    BNE 1f
    BL lwp_uthread_ctx_restore
1:
.endm
#ifdef RT_USING_SMP
/* In SMP builds the irq lock API maps onto the per-core variants. */
#define rt_hw_interrupt_disable rt_hw_local_irq_disable
#define rt_hw_interrupt_enable rt_hw_local_irq_enable
#endif

.text

/*
 * rt_bool_t rt_hw_interrupt_is_disabled(void)
 * Return 1 when IRQ or FIQ is masked in DAIF, else 0.
 */
.global rt_hw_interrupt_is_disabled
rt_hw_interrupt_is_disabled:
    MRS X0, DAIF
    TST X0, #0xc0               /* bit7 = I (IRQ mask), bit6 = F (FIQ mask) */
    CSET X0, NE                 /* X0 = 1 if either mask set */
    RET
/*
 * rt_base_t rt_hw_interrupt_disable();
 * Mask IRQ and FIQ on this core and return the previous DAIF value,
 * to be passed back to rt_hw_interrupt_enable().
 */
.globl rt_hw_interrupt_disable
rt_hw_interrupt_disable:
    MRS X0, DAIF                /* capture the old mask state (return value) */
    MSR DAIFSet, #3             /* set I and F: mask IRQ + FIQ */
    DSB SY
    RET
/*
 * void rt_hw_interrupt_enable(rt_base_t level);
 * Restore the IRQ/FIQ mask bits captured by rt_hw_interrupt_disable(),
 * leaving the other DAIF bits (D, A) unchanged.
 */
.globl rt_hw_interrupt_enable
rt_hw_interrupt_enable:
    DSB SY
    AND X0, X0, #0xc0           /* keep only the saved I/F bits from 'level' */
    MRS X1, DAIF
    BIC X1, X1, #0xc0           /* clear the current I/F bits */
    ORR X0, X0, X1              /* merge saved I/F with unchanged D/A */
    MSR DAIF, X0
    RET
  295. .text
  296. #ifdef RT_USING_SMP
/*
 * void rt_hw_context_switch_to(rt_ubase_t to, struct rt_thread *to_thread);
 * X0 --> to (address of to_thread's saved stack pointer)
 * X1 --> to_thread
 * First switch on this core: no 'from' context to save.
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
    LDR X0, [X0]                /* load to_thread's saved sp */
    MOV SP, X0
    MOV X0, X1
    BL rt_cpus_lock_status_restore  /* release/adjust the cpus lock for to_thread */
#ifdef RT_USING_LWP
    BL rt_thread_self
    BL lwp_user_setting_restore
#endif
    B rt_hw_context_switch_exit
/*
 * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to,
 *                           struct rt_thread *to_thread);
 * X0 --> from (address of from_thread's saved sp slot)
 * X1 --> to   (address of to_thread's saved sp slot)
 * X2 --> to_thread
 * Voluntary (thread-mode) switch: save our context, resume to_thread.
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
    SAVE_CONTEXT_FROM_EL1       /* synthesizes ELR/SPSR so restore returns to our caller */
    MOV X3, SP
    STR X3, [X0] // store sp in preempted tasks TCB
    LDR X0, [X1] // get new task stack pointer
    MOV SP, X0
    MOV X0, X2
    BL rt_cpus_lock_status_restore
#ifdef RT_USING_LWP
    BL rt_thread_self
    BL lwp_user_setting_restore
#endif
    B rt_hw_context_switch_exit
/*
 * void rt_hw_context_switch_interrupt(context, from sp, to sp, to tcb)
 * X0 : interrupt context (sp holding the already-saved full context)
 * X1 : address of from_thread's sp slot
 * X2 : address of to_thread's sp slot
 * X3 : to_thread's tcb
 * Switch requested from interrupt context (SMP path).
 */
.globl rt_hw_context_switch_interrupt
rt_hw_context_switch_interrupt:
    STP X0, X1, [SP, #-0x10]!   /* preserve args + LR across the C calls below */
    STP X2, X3, [SP, #-0x10]!
    STP X29, X30, [SP, #-0x10]!
#ifdef RT_USING_LWP
    BL rt_thread_self
    BL lwp_user_setting_save
#endif
    LDP X29, X30, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    STR X0, [X1]                /* from_thread->sp = interrupt context */
    LDR X0, [X2]                /* load to_thread's saved sp */
    MOV SP, X0
    MOV X0, X3
    MOV X19, X0                 /* keep to_thread across the call (X19 is reloaded on restore) */
    BL rt_cpus_lock_status_restore
    MOV X0, X19
#ifdef RT_USING_LWP
    BL lwp_user_setting_restore
#endif
    B rt_hw_context_switch_exit
/* FIQ is not handled in the SMP build: park the core. */
.globl vector_fiq
vector_fiq:
    B .
/*
 * IRQ entry (SMP): save context, dispatch to the C handler, then let the
 * scheduler decide whether to switch threads on the way out.
 */
START_POINT(vector_irq)
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]! /* X0 is thread sp */
    BL rt_interrupt_enter
    LDP X0, X1, [SP]            /* reload X0/X1 (clobbered by the call above) */
#ifdef RT_USING_LWP
    SAVE_USER_CTX               /* expects X0/X1 pushed at [SP] */
#endif
    BL rt_hw_trap_irq
#ifdef RT_USING_LWP
    LDP X0, X1, [SP]
    RESTORE_USER_CTX X0
#endif
    BL rt_interrupt_leave
    LDP X0, X1, [SP], #0x10
    BL rt_scheduler_do_irq_switch   /* may switch to another thread's stack */
    B rt_hw_context_switch_exit
START_POINT_END(vector_irq)
/*
 * Common SMP exit path: discard any exclusive-monitor state owned by the
 * previous thread, then unwind the full context sitting on SP.
 */
.global rt_hw_context_switch_exit
rt_hw_context_switch_exit:
    CLREX
    MOV X0, SP                  /* RESTORE_CONTEXT expects the context sp in X0 */
    RESTORE_CONTEXT
  390. #else /* RT_USING_SMP */
/*
 * void rt_hw_context_switch_to(rt_ubase_t to);
 * X0 --> to (address of the thread's saved sp slot)
 * Single-core first switch: nothing to save, just restore.
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
    CLREX                       /* drop stale exclusive-monitor state */
    LDR X0, [X0]                /* load the thread's saved sp */
    RESTORE_CONTEXT
/*
 * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to);
 * X0 --> from (address of from_thread's saved sp slot)
 * X1 --> to   (address of to_thread's saved sp slot)
 * Single-core voluntary switch.
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
    CLREX                       /* drop stale exclusive-monitor state */
    SAVE_CONTEXT_FROM_EL1       /* synthesizes ELR/SPSR so restore returns to our caller */
    MOV X2, SP
    STR X2, [X0] // store sp in preempted tasks TCB
    LDR X0, [X1] // get new task stack pointer
    RESTORE_CONTEXT
/*
 * void rt_hw_context_switch_interrupt(rt_ubase_t from, rt_ubase_t to,
 *     rt_thread_t from_thread, rt_thread_t to_thread);
 * Single-core: do not switch immediately. Record from/to and raise
 * rt_thread_switch_interrupt_flag; vector_irq performs the actual switch
 * on interrupt exit. If a switch is already pending, only the target is
 * updated (the original 'from' stays the first preempted thread).
 */
.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread
.globl rt_hw_context_switch_interrupt
rt_hw_context_switch_interrupt:
    CLREX
    LDR X6, =rt_thread_switch_interrupt_flag
    LDR X7, [X6]
    CMP X7, #1
    B.EQ _reswitch              /* already pending: just retarget */
    LDR X4, =rt_interrupt_from_thread // set rt_interrupt_from_thread
    STR X0, [X4]
    MOV X7, #1 // set rt_thread_switch_interrupt_flag to 1
    STR X7, [X6]
    STP X1, X30, [SP, #-0x10]!  /* preserve X1 and LR across the C call */
#ifdef RT_USING_LWP
    MOV X0, X2                  /* save from_thread's user settings */
    BL lwp_user_setting_save
#endif
    LDP X1, X30, [SP], #0x10
_reswitch:
    LDR X6, =rt_interrupt_to_thread // set rt_interrupt_to_thread
    STR X1, [X6]
    RET
.text

// -- Exception handlers ----------------------------------

/* FIQ entry (single-core): save context, dispatch to C handler, restore. */
.align 8
.globl vector_fiq
vector_fiq:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!   /* X0 = saved-context sp */
    BL rt_hw_trap_fiq
    LDP X0, X1, [SP], #0x10
    RESTORE_CONTEXT
.globl rt_interrupt_enter
.globl rt_interrupt_leave
.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread

// -------------------------------------------------------------------

/*
 * IRQ entry (single-core): save context, run the C handler, then honor a
 * pending context switch recorded by rt_hw_context_switch_interrupt().
 */
.align 8
.globl vector_irq
vector_irq:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]! /* X0 is thread sp */
    BL rt_interrupt_enter
    BL rt_hw_trap_irq
    BL rt_interrupt_leave
    LDP X0, X1, [SP], #0x10
    // if rt_thread_switch_interrupt_flag set, jump to
    // rt_hw_context_switch_interrupt_do and don't return
    LDR X1, =rt_thread_switch_interrupt_flag
    LDR X2, [X1]
    CMP X2, #1
    B.NE vector_irq_exit        /* no switch pending: return to same thread */
    MOV X2, #0 // clear flag
    STR X2, [X1]
    LDR X3, =rt_interrupt_from_thread
    LDR X4, [X3]
    STR x0, [X4] // store sp in preempted tasks's TCB
    LDR x3, =rt_interrupt_to_thread
    LDR X4, [X3]
    LDR x0, [X4] // get new task's stack pointer
    RESTORE_CONTEXT
vector_irq_exit:
    MOV SP, X0                  /* back to the saved context on our own stack */
    RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
  484. #endif /* RT_USING_SMP */
// -------------------------------------------------

/*
 * Synchronous exception entry: save the full context, hand its address to
 * the C trap handler (which may modify it), then restore and return.
 */
START_POINT(vector_exception)
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!   /* X0 = saved-context sp, kept for the macros below */
#ifdef RT_USING_LWP
    SAVE_USER_CTX               /* expects X0/X1 at [SP] */
#endif
    BL rt_hw_trap_exception
#ifdef RT_USING_LWP
    LDP X0, X1, [SP]
    RESTORE_USER_CTX X0
#endif
    LDP X0, X1, [SP], #0x10
    MOV SP, X0                  /* handler may run on a modified context sp */
    RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
START_POINT_END(vector_exception)
  501. START_POINT(vector_serror)
  502. SAVE_CONTEXT
  503. #ifdef RT_USING_LWP
  504. SAVE_USER_CTX
  505. #endif
  506. STP X0, X1, [SP, #-0x10]!
  507. BL rt_hw_trap_serror
  508. b .
  509. START_POINT_END(vector_exception)