context_gcc.S
/*
 * Copyright (c) 2006-2021, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2021-05-18     Jesven       the first version
 */

#include "rtconfig.h"
#include "asm-generic.h"
#include "asm-fpu.h"

.text

.weak rt_hw_cpu_id_set
.type rt_hw_cpu_id_set, @function
rt_hw_cpu_id_set:
    mrs x0, mpidr_el1       /* MPIDR_EL1: Multi-Processor Affinity Register */
#ifdef ARCH_ARM_CORTEX_A55
    lsr x0, x0, #8
#endif
    and x0, x0, #15
    msr tpidr_el1, x0
    ret
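
/*
 * Note: rt_hw_cpu_id_set derives the core number from the MPIDR_EL1
 * affinity fields (Aff1 instead of Aff0 on Cortex-A55/DynamIQ, hence
 * the #8 shift) and caches it in TPIDR_EL1, so rt_hw_cpu_id below is a
 * single register read. A rough C-side sketch, for illustration only:
 *
 *     rt_hw_cpu_id_set();               // call once per core, early in boot
 *     int core = rt_hw_cpu_id();        // cheap thereafter: one MRS
 */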

/*
int rt_hw_cpu_id(void)
*/
.weak rt_hw_cpu_id
.type rt_hw_cpu_id, @function
rt_hw_cpu_id:
    mrs x0, tpidr_el1       /* read back the cpu id cached by rt_hw_cpu_id_set */
    ret

/*
void rt_hw_set_process_id(size_t id)
*/
.global rt_hw_set_process_id
rt_hw_set_process_id:
    msr CONTEXTIDR_EL1, x0
    ret

/*
 * enable gtimer
 */
.globl rt_hw_gtimer_enable
rt_hw_gtimer_enable:
    MOV X0, #1
    MSR CNTP_CTL_EL0, X0    /* ENABLE=1, IMASK=0: timer on, interrupt unmasked */
    RET

/*
 * set gtimer CNTP_TVAL_EL0 value
 */
.globl rt_hw_set_gtimer_val
rt_hw_set_gtimer_val:
    MSR CNTP_TVAL_EL0, X0
    RET

/*
 * get gtimer CNTP_TVAL_EL0 value
 */
.globl rt_hw_get_gtimer_val
rt_hw_get_gtimer_val:
    MRS X0, CNTP_TVAL_EL0
    RET

/*
 * get the physical counter value CNTPCT_EL0
 */
.globl rt_hw_get_cntpct_val
rt_hw_get_cntpct_val:
    MRS X0, CNTPCT_EL0
    RET

/*
 * get gtimer frequency (CNTFRQ_EL0)
 */
.globl rt_hw_get_gtimer_frq
rt_hw_get_gtimer_frq:
    MRS X0, CNTFRQ_EL0
    RET
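
/*
 * Usage sketch (C side): programming one tick of the EL1 physical
 * timer with the helpers above. RT_TICK_PER_SECOND is assumed to come
 * from rtconfig.h; this is an illustration, not code from this port:
 *
 *     rt_hw_set_gtimer_val(rt_hw_get_gtimer_frq() / RT_TICK_PER_SECOND);
 *     rt_hw_gtimer_enable();
 *     // on each timer interrupt, reload CNTP_TVAL_EL0 the same way
 */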

START_POINT(_thread_start)
    blr x19
    mov x29, #0
    blr x20
    b . /* never here */
START_POINT_END(_thread_start)
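
/*
 * Note on _thread_start: the thread's initial frame is assumed to hold
 * the entry function in x19 and the exit handler in x20 (as prepared
 * by the port's stack-init code), so the sequence above is effectively
 * entry(); exit(); with x29 cleared so frame-pointer walks terminate.
 * This description is inferred from the code, not from the original
 * comments.
 */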

.macro SAVE_CONTEXT
    /* Save the entire context. */
    SAVE_FPU SP
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X4, X5, [SP, #-0x10]!
    STP X6, X7, [SP, #-0x10]!
    STP X8, X9, [SP, #-0x10]!
    STP X10, X11, [SP, #-0x10]!
    STP X12, X13, [SP, #-0x10]!
    STP X14, X15, [SP, #-0x10]!
    STP X16, X17, [SP, #-0x10]!
    STP X18, X19, [SP, #-0x10]!
    STP X20, X21, [SP, #-0x10]!
    STP X22, X23, [SP, #-0x10]!
    STP X24, X25, [SP, #-0x10]!
    STP X26, X27, [SP, #-0x10]!
    STP X28, X29, [SP, #-0x10]!
    MRS X28, FPCR
    MRS X29, FPSR
    STP X28, X29, [SP, #-0x10]!
    MRS X29, SP_EL0
    STP X29, X30, [SP, #-0x10]!
    MRS X3, SPSR_EL1
    MRS X2, ELR_EL1
    STP X2, X3, [SP, #-0x10]!
    MOV X0, SP /* Move SP into X0 for saving. */
.endm
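
/*
 * Resulting stack frame, from the final SP (low address) upward, as
 * laid down by SAVE_CONTEXT above:
 *
 *     [SP + 0x00]  ELR_EL1, SPSR_EL1
 *     [SP + 0x10]  SP_EL0,  X30
 *     [SP + 0x20]  FPCR,    FPSR
 *     [SP + 0x30]  X28/X29, then X26/X27, ... up to X0/X1
 *     [above that] FPU context pushed by SAVE_FPU
 *
 * The RESTORE_CONTEXT* macros below unwind it in exactly this order.
 */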

.macro SAVE_CONTEXT_FROM_EL1
    /* Save the entire context. */
    SAVE_FPU SP
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X4, X5, [SP, #-0x10]!
    STP X6, X7, [SP, #-0x10]!
    STP X8, X9, [SP, #-0x10]!
    STP X10, X11, [SP, #-0x10]!
    STP X12, X13, [SP, #-0x10]!
    STP X14, X15, [SP, #-0x10]!
    STP X16, X17, [SP, #-0x10]!
    STP X18, X19, [SP, #-0x10]!
    STP X20, X21, [SP, #-0x10]!
    STP X22, X23, [SP, #-0x10]!
    STP X24, X25, [SP, #-0x10]!
    STP X26, X27, [SP, #-0x10]!
    STP X28, X29, [SP, #-0x10]!
    MRS X28, FPCR
    MRS X29, FPSR
    STP X28, X29, [SP, #-0x10]!
    MRS X29, SP_EL0
    STP X29, X30, [SP, #-0x10]!
    MOV X19, #((3 << 6) | 0x4 | 0x1)    /* EL1h, IRQ/FIQ masked */
    MOV X18, X30                        /* resume at the caller's return address */
    STP X18, X19, [SP, #-0x10]!
.endm
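
/*
 * SAVE_CONTEXT_FROM_EL1 differs from SAVE_CONTEXT in that it runs on
 * the synchronous (cooperative) switch path rather than from a trap:
 * there is no trapped ELR/SPSR to save, so it fabricates them. The
 * saved PC is the link register (resume just after the call into
 * rt_hw_context_switch) and the saved PSTATE is EL1h with IRQ and FIQ
 * masked; ERET in RESTORE_CONTEXT then "returns" into that state.
 */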

#ifdef RT_USING_SMP
.macro RESTORE_CONTEXT
    /* Set the SP to point to the stack of the task being restored. */
    MOV SP, X0
    LDP X2, X3, [SP], #0x10 /* ELR and SPSR. */
    TST X3, #0x1f           /* Z=1 when the saved mode is EL0t (user) */
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_LWP
    BEQ arch_ret_to_user    /* take the user-return path for EL0 threads */
#endif
    ERET
.endm
#else
.macro RESTORE_CONTEXT
    /* Set the SP to point to the stack of the task being restored. */
    MOV SP, X0
#ifdef RT_USING_LWP
    BL rt_thread_self
    MOV X19, X0
    BL lwp_aspace_switch
    MOV X0, X19
    BL lwp_user_setting_restore
#endif
    LDP X2, X3, [SP], #0x10 /* ELR and SPSR. */
    TST X3, #0x1f           /* Z=1 when the saved mode is EL0t (user) */
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_LWP
    BEQ arch_ret_to_user
#endif
    ERET
.endm
#endif

.macro RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
    /* the SP is already ok */
    LDP X2, X3, [SP], #0x10 /* ELR and SPSR. */
    TST X3, #0x1f           /* Z=1 when the saved mode is EL0t (user) */
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
#ifdef RT_USING_LWP
    BEQ arch_ret_to_user
#endif
    ERET
.endm

#ifdef RT_USING_SMP
#define rt_hw_interrupt_disable rt_hw_local_irq_disable
#define rt_hw_interrupt_enable  rt_hw_local_irq_enable
#endif

.text

/*
 * rt_base_t rt_hw_interrupt_disable();
 */
.globl rt_hw_interrupt_disable
rt_hw_interrupt_disable:
    MRS X0, DAIF            /* return the old mask state */
    MSR DAIFSet, #3         /* mask IRQ and FIQ */
    DSB SY
    RET

/*
 * void rt_hw_interrupt_enable(rt_base_t level);
 */
.globl rt_hw_interrupt_enable
rt_hw_interrupt_enable:
    DSB SY
    AND X0, X0, #0xc0       /* keep only the saved I and F bits */
    MRS X1, DAIF
    BIC X1, X1, #0xc0
    ORR X0, X0, X1
    MSR DAIF, X0            /* restore I/F to their saved state */
    RET
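
/*
 * Usage sketch (C side): this pair implements the usual save/restore
 * critical-section idiom; illustration only:
 *
 *     rt_base_t level = rt_hw_interrupt_disable();
 *     ... critical section ...
 *     rt_hw_interrupt_enable(level);   // restores, not unconditionally enables
 */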

.text

#ifdef RT_USING_SMP
/*
 * void rt_hw_context_switch_to(rt_ubase_t to, struct rt_thread *to_thread);
 * X0 --> to (thread stack)
 * X1 --> to_thread
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
    LDR X0, [X0]
    MOV SP, X0
    MOV X0, X1
    BL rt_cpus_lock_status_restore
#ifdef RT_USING_LWP
    BL rt_thread_self
    BL lwp_user_setting_restore
#endif
    B rt_hw_context_switch_exit

/*
 * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to, struct rt_thread *to_thread);
 * X0 --> from (from_thread stack)
 * X1 --> to (to_thread stack)
 * X2 --> to_thread
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
    SAVE_CONTEXT_FROM_EL1
    MOV X3, SP
    STR X3, [X0]            // store sp in preempted task's TCB
    LDR X0, [X1]            // get new task's stack pointer
    MOV SP, X0
    MOV X0, X2
    BL rt_cpus_lock_status_restore
#ifdef RT_USING_LWP
    BL rt_thread_self
    BL lwp_user_setting_restore
#endif
    B rt_hw_context_switch_exit

/*
 * void rt_hw_context_switch_interrupt(context, from sp, to sp, to tcb)
 * X0 :interrupt context
 * X1 :addr of from_thread's sp
 * X2 :addr of to_thread's sp
 * X3 :to_thread's tcb
 */
.globl rt_hw_context_switch_interrupt
rt_hw_context_switch_interrupt:
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X29, X30, [SP, #-0x10]!
#ifdef RT_USING_LWP
    BL rt_thread_self
    BL lwp_user_setting_save
#endif
    LDP X29, X30, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    STR X0, [X1]            // store the interrupt context in from_thread's sp
    LDR X0, [X2]            // get to_thread's stack pointer
    MOV SP, X0
    MOV X0, X3
    MOV X19, X0
    BL rt_cpus_lock_status_restore
    MOV X0, X19
#ifdef RT_USING_LWP
    BL lwp_user_setting_restore
#endif
    B rt_hw_context_switch_exit

.globl vector_fiq
vector_fiq:
    B .

.globl vector_irq
vector_irq:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!   /* X0 is thread sp */
    BL rt_interrupt_enter
    BL rt_hw_trap_irq
    BL rt_interrupt_leave
    LDP X0, X1, [SP], #0x10
    BL rt_scheduler_do_irq_switch
    B rt_hw_context_switch_exit

.global rt_hw_context_switch_exit
rt_hw_context_switch_exit:
    CLREX
    MOV X0, SP
    RESTORE_CONTEXT
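
/*
 * All three SMP switch paths above funnel into
 * rt_hw_context_switch_exit: by that point SP already addresses the
 * saved frame of the thread to resume, so CLREX drops any stale
 * exclusive-monitor state and RESTORE_CONTEXT pops the frame and ERETs
 * into the thread.
 */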

#else /* RT_USING_SMP */

/*
 * void rt_hw_context_switch_to(rt_ubase_t to);
 * X0 --> to sp
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
    CLREX
    LDR X0, [X0]
    RESTORE_CONTEXT

/*
 * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to);
 * X0 --> from sp
 * X1 --> to sp
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
    CLREX
    SAVE_CONTEXT_FROM_EL1
    MOV X2, SP
    STR X2, [X0]            // store sp in preempted task's TCB
    LDR X0, [X1]            // get new task's stack pointer
    RESTORE_CONTEXT

/*
 * void rt_hw_context_switch_interrupt(rt_ubase_t from, rt_ubase_t to, rt_thread_t from_thread, rt_thread_t to_thread);
 */
.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread
.globl rt_hw_context_switch_interrupt
rt_hw_context_switch_interrupt:
    CLREX
    LDR X6, =rt_thread_switch_interrupt_flag
    LDR X7, [X6]
    CMP X7, #1
    B.EQ _reswitch          // a switch is already pending; just update the target
    LDR X4, =rt_interrupt_from_thread   // set rt_interrupt_from_thread
    STR X0, [X4]
    MOV X7, #1                          // set rt_thread_switch_interrupt_flag to 1
    STR X7, [X6]
    STP X1, X30, [SP, #-0x10]!
#ifdef RT_USING_LWP
    MOV X0, X2
    BL lwp_user_setting_save
#endif
    LDP X1, X30, [SP], #0x10
_reswitch:
    LDR X6, =rt_interrupt_to_thread     // set rt_interrupt_to_thread
    STR X1, [X6]
    RET
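
/*
 * Deferred-switch protocol (non-SMP): rt_hw_context_switch_interrupt
 * does not switch immediately. It records the from/to stack-pointer
 * slots and raises rt_thread_switch_interrupt_flag; vector_irq below
 * checks the flag on the way out of the interrupt and performs the
 * actual save/restore there, so the switch happens exactly once, at
 * interrupt exit.
 */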

.text

// -- Exception handlers ----------------------------------

.align 8
.globl vector_fiq
vector_fiq:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!
    BL rt_hw_trap_fiq
    LDP X0, X1, [SP], #0x10
    RESTORE_CONTEXT

.globl rt_interrupt_enter
.globl rt_interrupt_leave
.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread

// -------------------------------------------------------------------

.align 8
.globl vector_irq
vector_irq:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!   /* X0 is thread sp */
    BL rt_interrupt_enter
    BL rt_hw_trap_irq
    BL rt_interrupt_leave
    LDP X0, X1, [SP], #0x10

    // if rt_thread_switch_interrupt_flag is set, perform the context
    // switch right here and do not return to the preempted thread
    LDR X1, =rt_thread_switch_interrupt_flag
    LDR X2, [X1]
    CMP X2, #1
    B.NE vector_irq_exit
    MOV X2, #0                  // clear the flag
    STR X2, [X1]
    LDR X3, =rt_interrupt_from_thread
    LDR X4, [X3]
    STR X0, [X4]                // store sp in preempted task's TCB
    LDR X3, =rt_interrupt_to_thread
    LDR X4, [X3]
    LDR X0, [X4]                // get new task's stack pointer
    RESTORE_CONTEXT

vector_irq_exit:
    MOV SP, X0
    RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
#endif /* RT_USING_SMP */

// -------------------------------------------------

START_POINT(vector_exception)
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!
    BL rt_hw_trap_exception
    LDP X0, X1, [SP], #0x10
    MOV SP, X0
    RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
START_POINT_END(vector_exception)

START_POINT(vector_serror)
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!
    BL rt_hw_trap_serror
    b .                         /* SError is fatal; never return */
START_POINT_END(vector_serror)