context_gcc.S

/*
 * Copyright (c) 2006-2021, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2021-05-18     Jesven       the first version
 */

#include "rtconfig.h"
#include "asm-fpu.h"

.text

.weak rt_hw_cpu_id_set
.type rt_hw_cpu_id_set, @function
rt_hw_cpu_id_set:
    mrs x0, mpidr_el1           /* MPIDR_EL1: Multi-Processor Affinity Register */
    and x0, x0, #15
    msr tpidr_el1, x0
    ret

/*
int rt_hw_cpu_id(void)
*/
.global rt_hw_cpu_id
.type rt_hw_cpu_id, @function
rt_hw_cpu_id:
    mrs x0, tpidr_el1           /* CPU id cached in TPIDR_EL1 by rt_hw_cpu_id_set */
    ret
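
/*
 * Note: rt_hw_cpu_id_set extracts Aff0 (bits [3:0]) of MPIDR_EL1 and caches it
 * in TPIDR_EL1, so rt_hw_cpu_id needs only a single system-register read.
 * This assumes the core number fits in Aff0, i.e. a flat topology with at
 * most 16 cores and no cluster-level affinity.
 */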

/*
void set_process_id(size_t id)
*/
.global set_process_id
set_process_id:
    msr CONTEXTIDR_EL1, x0
    ret
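
/*
 * CONTEXTIDR_EL1 carries the current process identifier for debug and trace
 * tools; address translation itself is switched via TTBR0_EL1 (see switch_mmu
 * at the end of this file).
 */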

/*
 * enable gtimer
 */
.globl rt_hw_gtimer_enable
rt_hw_gtimer_enable:
    MOV X0, #1
    MSR CNTP_CTL_EL0, X0
    RET

/*
 * set gtimer CNTP_TVAL_EL0 value
 */
.globl rt_hw_set_gtimer_val
rt_hw_set_gtimer_val:
    MSR CNTP_TVAL_EL0, X0
    RET

/*
 * get gtimer CNTP_TVAL_EL0 value
 */
.globl rt_hw_get_gtimer_val
rt_hw_get_gtimer_val:
    MRS X0, CNTP_TVAL_EL0
    RET

.globl rt_hw_get_cntpct_val
rt_hw_get_cntpct_val:
    MRS X0, CNTPCT_EL0
    RET

/*
 * get gtimer frq value
 */
.globl rt_hw_get_gtimer_frq
rt_hw_get_gtimer_frq:
    MRS X0, CNTFRQ_EL0
    RET
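
/*
 * These helpers drive the EL1 physical timer of the ARM generic timer:
 * CNTP_CTL_EL0 bit 0 enables the timer, CNTP_TVAL_EL0 is a down-counter that
 * raises the timer interrupt when it reaches zero, CNTPCT_EL0 is the
 * free-running count, and CNTFRQ_EL0 reports the counter frequency in Hz.
 */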

.macro SAVE_CONTEXT
    /* Save the entire context. */
    SAVE_FPU SP
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X4, X5, [SP, #-0x10]!
    STP X6, X7, [SP, #-0x10]!
    STP X8, X9, [SP, #-0x10]!
    STP X10, X11, [SP, #-0x10]!
    STP X12, X13, [SP, #-0x10]!
    STP X14, X15, [SP, #-0x10]!
    STP X16, X17, [SP, #-0x10]!
    STP X18, X19, [SP, #-0x10]!
    STP X20, X21, [SP, #-0x10]!
    STP X22, X23, [SP, #-0x10]!
    STP X24, X25, [SP, #-0x10]!
    STP X26, X27, [SP, #-0x10]!
    STP X28, X29, [SP, #-0x10]!
    MRS X28, FPCR
    MRS X29, FPSR
    STP X28, X29, [SP, #-0x10]!
    MRS X29, SP_EL0
    STP X29, X30, [SP, #-0x10]!
    MRS X3, SPSR_EL1
    MRS X2, ELR_EL1
    STP X2, X3, [SP, #-0x10]!
    MOV X0, SP                  /* Move SP into X0 for saving. */
.endm
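
/*
 * After SAVE_CONTEXT the frame on the stack is, from the final SP upwards:
 * ELR_EL1/SPSR_EL1, SP_EL0/X30, FPCR/FPSR, the general registers X28/X29 down
 * to X0/X1 in pairs, and finally the FPU registers pushed by SAVE_FPU. X0 is
 * left pointing at the frame so callers can record it as the thread's saved
 * stack pointer.
 */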

.macro SAVE_CONTEXT_FROM_EL1
    /* Save the entire context. */
    SAVE_FPU SP
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X4, X5, [SP, #-0x10]!
    STP X6, X7, [SP, #-0x10]!
    STP X8, X9, [SP, #-0x10]!
    STP X10, X11, [SP, #-0x10]!
    STP X12, X13, [SP, #-0x10]!
    STP X14, X15, [SP, #-0x10]!
    STP X16, X17, [SP, #-0x10]!
    STP X18, X19, [SP, #-0x10]!
    STP X20, X21, [SP, #-0x10]!
    STP X22, X23, [SP, #-0x10]!
    STP X24, X25, [SP, #-0x10]!
    STP X26, X27, [SP, #-0x10]!
    STP X28, X29, [SP, #-0x10]!
    MRS X28, FPCR
    MRS X29, FPSR
    STP X28, X29, [SP, #-0x10]!
    MRS X29, SP_EL0
    STP X29, X30, [SP, #-0x10]!
    MOV X19, #((3 << 6) | 0x4 | 0x1)    /* el1h, disable interrupt */
    MOV X18, X30
    STP X18, X19, [SP, #-0x10]!
.endm
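
/*
 * SAVE_CONTEXT_FROM_EL1 is used when the switch is requested by a direct call
 * at EL1 rather than by an exception, so there is no meaningful ELR_EL1/SPSR_EL1
 * to save. It stores the link register X30 in the ELR slot as the resume
 * address and a synthesized SPSR of (3 << 6) | 0x5, i.e. EL1h with IRQ and FIQ
 * masked, so the ERET in RESTORE_CONTEXT resumes right after the call site.
 */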

#ifdef RT_USING_SMP
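
/*
 * RESTORE_CONTEXT unwinds the frame built by SAVE_CONTEXT or
 * SAVE_CONTEXT_FROM_EL1. TST X3, #0x1f checks the saved SPSR_EL1.M[4:0]: a
 * zero result means the frame belongs to EL0 (user mode), so the macro
 * branches to ret_to_user; otherwise it ERETs straight back to EL1. The
 * non-SMP variant additionally switches the user address space via
 * lwp_mmu_switch before restoring the registers.
 */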
.macro RESTORE_CONTEXT
    /* Set the SP to point to the stack of the task being restored. */
    MOV SP, X0
    LDP X2, X3, [SP], #0x10     /* ELR and SPSR. */
    TST X3, #0x1f
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
    BEQ ret_to_user
    ERET
.endm

#else

.macro RESTORE_CONTEXT
    /* Set the SP to point to the stack of the task being restored. */
    MOV SP, X0
    BL rt_thread_self
    MOV X19, X0
    BL lwp_mmu_switch
    MOV X0, X19
    BL lwp_user_setting_restore
    LDP X2, X3, [SP], #0x10     /* ELR and SPSR. */
    TST X3, #0x1f
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
    BEQ ret_to_user
    ERET
.endm

#endif

.macro RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
    /* the SP is already ok */
    LDP X2, X3, [SP], #0x10     /* ELR and SPSR. */
    TST X3, #0x1f
    MSR SPSR_EL1, X3
    MSR ELR_EL1, X2
    LDP X29, X30, [SP], #0x10
    MSR SP_EL0, X29
    LDP X28, X29, [SP], #0x10
    MSR FPCR, X28
    MSR FPSR, X29
    LDP X28, X29, [SP], #0x10
    LDP X26, X27, [SP], #0x10
    LDP X24, X25, [SP], #0x10
    LDP X22, X23, [SP], #0x10
    LDP X20, X21, [SP], #0x10
    LDP X18, X19, [SP], #0x10
    LDP X16, X17, [SP], #0x10
    LDP X14, X15, [SP], #0x10
    LDP X12, X13, [SP], #0x10
    LDP X10, X11, [SP], #0x10
    LDP X8, X9, [SP], #0x10
    LDP X6, X7, [SP], #0x10
    LDP X4, X5, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    RESTORE_FPU SP
    BEQ ret_to_user
    ERET
.endm

#ifdef RT_USING_SMP
#define rt_hw_interrupt_disable rt_hw_local_irq_disable
#define rt_hw_interrupt_enable  rt_hw_local_irq_enable
#endif

.text

/*
 * rt_base_t rt_hw_interrupt_disable();
 */
.globl rt_hw_interrupt_disable
rt_hw_interrupt_disable:
    MRS X0, DAIF
    MSR DAIFSet, #3
    DSB SY
    RET

/*
 * void rt_hw_interrupt_enable(rt_base_t level);
 */
.globl rt_hw_interrupt_enable
rt_hw_interrupt_enable:
    DSB SY
    AND X0, X0, #0xc0
    MRS X1, DAIF
    BIC X1, X1, #0xc0
    ORR X0, X0, X1
    MSR DAIF, X0
    RET
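
/*
 * rt_hw_interrupt_disable returns the previous PSTATE.DAIF value and then sets
 * the I and F fields (MSR DAIFSet, #3 masks IRQ and FIQ). rt_hw_interrupt_enable
 * keeps only bits 7:6 (I and F) of the saved level and merges them back into
 * DAIF, so the current D and A fields are left untouched when the level is
 * restored.
 */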

.text

#ifdef RT_USING_SMP

/*
 * void rt_hw_context_switch_to(rt_ubase_t to, struct rt_thread *to_thread);
 * X0 --> to (thread stack)
 * X1 --> to_thread
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
    LDR X0, [X0]
    MOV SP, X0
    MOV X0, X1
    BL rt_cpus_lock_status_restore
    BL rt_thread_self
    BL lwp_user_setting_restore
    B rt_hw_context_switch_exit

/*
 * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to, struct rt_thread *to_thread);
 * X0 --> from (from_thread stack)
 * X1 --> to (to_thread stack)
 * X2 --> to_thread
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
    SAVE_CONTEXT_FROM_EL1
    MOV X3, SP
    STR X3, [X0]                // store sp in preempted task's TCB
    LDR X0, [X1]                // get new task stack pointer
    MOV SP, X0
    MOV X0, X2
    BL rt_cpus_lock_status_restore
    BL rt_thread_self
    BL lwp_user_setting_restore
    B rt_hw_context_switch_exit

/*
 * void rt_hw_context_switch_interrupt(context, from sp, to sp, to tcb)
 * X0 :interrupt context
 * X1 :addr of from_thread's sp
 * X2 :addr of to_thread's sp
 * X3 :to_thread's tcb
 */
.globl rt_hw_context_switch_interrupt
rt_hw_context_switch_interrupt:
    STP X0, X1, [SP, #-0x10]!
    STP X2, X3, [SP, #-0x10]!
    STP X29, X30, [SP, #-0x10]!
    BL rt_thread_self
    BL lwp_user_setting_save
    LDP X29, X30, [SP], #0x10
    LDP X2, X3, [SP], #0x10
    LDP X0, X1, [SP], #0x10
    STR X0, [X1]
    LDR X0, [X2]
    MOV SP, X0
    MOV X0, X3
    MOV X19, X0
    BL rt_cpus_lock_status_restore
    MOV X0, X19
    BL lwp_user_setting_restore
    B rt_hw_context_switch_exit

.globl vector_fiq
vector_fiq:
    B .

.globl vector_irq
vector_irq:
    CLREX
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!   /* X0 is thread sp */

    BL rt_interrupt_enter
    BL rt_hw_trap_irq
    BL rt_interrupt_leave

    LDP X0, X1, [SP], #0x10
    BL rt_scheduler_do_irq_switch
    B rt_hw_context_switch_exit

.global rt_hw_context_switch_exit
rt_hw_context_switch_exit:
    MOV X0, SP
    RESTORE_CONTEXT

#else

/*
 * void rt_hw_context_switch_to(rt_ubase_t to);
 * X0 --> to sp
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
    LDR X0, [X0]
    RESTORE_CONTEXT

/*
 * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to);
 * X0 --> from sp
 * X1 --> to sp
 * X2 --> to thread
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
    SAVE_CONTEXT_FROM_EL1
    MOV X2, SP
    STR X2, [X0]                // store sp in preempted task's TCB
    LDR X0, [X1]                // get new task stack pointer
    RESTORE_CONTEXT

/*
 * void rt_hw_context_switch_interrupt(rt_ubase_t from, rt_ubase_t to, rt_thread_t from_thread, rt_thread_t to_thread);
 */
.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread
.globl rt_hw_context_switch_interrupt
rt_hw_context_switch_interrupt:
    LDR X6, =rt_thread_switch_interrupt_flag
    LDR X7, [X6]
    CMP X7, #1
    B.EQ _reswitch
    LDR X4, =rt_interrupt_from_thread   // set rt_interrupt_from_thread
    STR X0, [X4]
    MOV X7, #1                          // set rt_thread_switch_interrupt_flag to 1
    STR X7, [X6]
    STP X1, X30, [SP, #-0x10]!
    MOV X0, X2
    BL lwp_user_setting_save
    LDP X1, X30, [SP], #0x10
_reswitch:
    LDR X6, =rt_interrupt_to_thread     // set rt_interrupt_to_thread
    STR X1, [X6]
    RET
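
/*
 * In the non-SMP build the actual switch is deferred: this routine only
 * records rt_interrupt_from_thread / rt_interrupt_to_thread and raises
 * rt_thread_switch_interrupt_flag. vector_irq below checks the flag after the
 * handler returns and, if it is set, restores the "to" thread's context
 * instead of returning to the interrupted one.
 */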

.text

// -- Exception handlers ----------------------------------

.align 8
.globl vector_fiq
vector_fiq:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!
    BL rt_hw_trap_fiq
    LDP X0, X1, [SP], #0x10
    RESTORE_CONTEXT

.globl rt_interrupt_enter
.globl rt_interrupt_leave
.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread

// -------------------------------------------------------------------

.align 8
.globl vector_irq
vector_irq:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!   /* X0 is thread sp */

    BL rt_interrupt_enter
    BL rt_hw_trap_irq
    BL rt_interrupt_leave

    LDP X0, X1, [SP], #0x10

    // if rt_thread_switch_interrupt_flag is set, restore the "to" thread's
    // context and do not return to the interrupted one
    LDR X1, =rt_thread_switch_interrupt_flag
    LDR X2, [X1]
    CMP X2, #1
    B.NE vector_irq_exit

    MOV X2, #0                  // clear flag
    STR X2, [X1]

    LDR X3, =rt_interrupt_from_thread
    LDR X4, [X3]
    STR X0, [X4]                // store sp in preempted task's TCB

    LDR X3, =rt_interrupt_to_thread
    LDR X4, [X3]
    LDR X0, [X4]                // get new task's stack pointer

    RESTORE_CONTEXT

vector_irq_exit:
    MOV SP, X0
    RESTORE_CONTEXT_WITHOUT_MMU_SWITCH
#endif

// -------------------------------------------------

.globl vector_exception
vector_exception:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!
    BL rt_hw_trap_exception
    LDP X0, X1, [SP], #0x10
    MOV SP, X0
    RESTORE_CONTEXT_WITHOUT_MMU_SWITCH

.globl vector_serror
vector_serror:
    SAVE_CONTEXT
    STP X0, X1, [SP, #-0x10]!
    BL rt_hw_trap_serror
    B .
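
/*
 * switch_mmu installs a new user page table. TCR_EL1 bit 7 is EPD0: when the
 * incoming TTBR0_EL1 value is zero the bit is left set, disabling translation
 * walks through TTBR0 (no user mapping); otherwise it is cleared to enable
 * them. The TLBI VMALLE1 and IC IALLUIS that follow invalidate stale TLB and
 * instruction-cache entries for the new address space.
 */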
.global switch_mmu
switch_mmu:
    MSR TTBR0_EL1, X0
    MRS X1, TCR_EL1
    CMP X0, XZR
    ORR X1, X1, #(1 << 7)
    BEQ 1f
    BIC X1, X1, #(1 << 7)
1:
    MSR TCR_EL1, X1

    DSB SY
    ISB
    TLBI VMALLE1
    DSB SY
    ISB
    IC IALLUIS
    DSB SY
    ISB
    RET

.global mmu_table_get
mmu_table_get:
    MRS X0, TTBR0_EL1
    RET