/*
 * Copyright (c) 2006-2023, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2021-05-18     Jesven       first version
 * 2023-07-16     Shell        Move part of the code from asm to C in signal handling
 */

#ifndef __ASSEMBLY__
#define __ASSEMBLY__
#endif

#include "rtconfig.h"
#include "asm-generic.h"
#include "asm-fpu.h"
#include "armv8.h"

/*********************
 *      SPSR BIT     *
 *********************/

#define SPSR_Mode(v)            ((v) << 0)
#define SPSR_A64                (0 << 4)
#define SPSR_RESERVED_5         (0 << 5)
#define SPSR_FIQ_MASKED(v)      ((v) << 6)
#define SPSR_IRQ_MASKED(v)      ((v) << 7)
#define SPSR_SERROR_MASKED(v)   ((v) << 8)
#define SPSR_D_MASKED(v)        ((v) << 9)
#define SPSR_RESERVED_10_19     (0 << 10)
#define SPSR_IL(v)              ((v) << 20)
#define SPSR_SS(v)              ((v) << 21)
#define SPSR_RESERVED_22_27     (0 << 22)
#define SPSR_V(v)               ((v) << 28)
#define SPSR_C(v)               ((v) << 29)
#define SPSR_Z(v)               ((v) << 30)
#define SPSR_N(v)               ((v) << 31)
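
/*
 * For reference: the PSTATE value used to enter user mode below is composed
 * from these macros as
 *
 *   SPSR_Mode(0) | SPSR_A64    // EL0t, AArch64 state, nothing masked => 0
 *
 * (an illustrative note; the remaining mask macros can build other PSTATE
 * values the same way)
 */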

/**************************************************/
.text

/*
 * void arch_start_umode(args, text, ustack, kstack);
 */
.global arch_start_umode
.type arch_start_umode, %function
arch_start_umode:
    mov     sp, x3
    mov     x4, #(SPSR_Mode(0) | SPSR_A64)
    msr     daifset, #3
    dsb     sy
    mrs     x30, sp_el0
    /* user stack top */
    msr     sp_el0, x2
    mov     x3, x2
    msr     spsr_el1, x4
    msr     elr_el1, x1
    eret
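
/*
 * Illustrative usage sketch (assumed C-side caller; the prototype follows
 * the comment above and is not taken from a verified header):
 *
 *   extern void arch_start_umode(void *args, const void *text,
 *                                void *ustack, void *kstack);
 *
 *   // does not return: erets to `text` at EL0 with sp_el0 = ustack
 *   arch_start_umode(args, entry, ustack_top, kstack_top);
 */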

/*
 * void arch_crt_start_umode(args, text, ustack, kstack);
 */
.global arch_crt_start_umode
.type arch_crt_start_umode, %function
arch_crt_start_umode:
    /* reserve 16 bytes below the user stack top and copy in the
     * lwp_thread_return trampoline (three instructions) */
    sub     x4, x2, #0x10
    adr     x2, lwp_thread_return
    ldr     x5, [x2]
    str     x5, [x4]
    ldr     x5, [x2, #4]
    str     x5, [x4, #4]
    ldr     x5, [x2, #8]
    str     x5, [x4, #8]
    /* clean the copied code to the point of unification and invalidate
     * the instruction cache so EL0 can execute it */
    mov     x5, x4
    dc      cvau, x5
    add     x5, x5, #8
    dc      cvau, x5
    dsb     sy
    ic      ialluis
    dsb     sy
    msr     sp_el0, x4
    mov     sp, x3
    mov     x4, #(SPSR_Mode(0) | SPSR_A64)
    msr     daifset, #3
    dsb     sy
    mrs     x30, sp_el0
    msr     spsr_el1, x4
    msr     elr_el1, x1
    eret
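
/*
 * Roughly equivalent C for the trampoline copy and cache maintenance above
 * (a sketch, assuming a 12-byte/3-instruction trampoline; the asm remains
 * authoritative):
 *
 *   memcpy(ustack_top - 16, lwp_thread_return, 12);
 *   __builtin___clear_cache((char *)(ustack_top - 16),
 *                           (char *)(ustack_top - 16) + 12);
 */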

.global arch_get_user_sp
arch_get_user_sp:
    mrs     x0, sp_el0
    ret

.global arch_fork_exit
.global arch_clone_exit
arch_fork_exit:
arch_clone_exit:
    b       arch_syscall_exit

/*
 * void lwp_exec_user(void *args, void *kernel_stack, void *user_entry)
 */
.global lwp_exec_user
lwp_exec_user:
    mov     sp, x1
    mov     x4, #(SPSR_Mode(0) | SPSR_A64)
    ldr     x3, =0x0000ffff80000000
    msr     daifset, #3
    msr     spsr_el1, x4
    msr     elr_el1, x2
    eret

/*
 * void SVC_Handler(regs);
 * since this routine resets the SP, we take it as a start point
 */
START_POINT(SVC_Handler)
    /* x0 is initial sp */
    mov     sp, x0
    msr     daifclr, #3 /* enable interrupt */
    bl      rt_thread_self
    bl      lwp_user_setting_save
    ldp     x8, x9, [sp, #(CONTEXT_OFFSET_X8)]
    and     x0, x8, #0xf000
    /* x8 = 0xe000: issued by the lwp_sigreturn trampoline */
    cmp     x0, #0xe000
    beq     arch_signal_quit
    /* x8 = 0xf000: issued by the lwp_debugreturn trampoline */
    cmp     x0, #0xf000
    beq     ret_from_user
    /* the low byte of x8 selects the syscall */
    uxtb    w0, w8
    bl      lwp_get_sys_api
    cmp     x0, xzr
    mov     x30, x0
    beq     arch_syscall_exit
    ldp     x0, x1, [sp, #(CONTEXT_OFFSET_X0)]
    ldp     x2, x3, [sp, #(CONTEXT_OFFSET_X2)]
    ldp     x4, x5, [sp, #(CONTEXT_OFFSET_X4)]
    ldp     x6, x7, [sp, #(CONTEXT_OFFSET_X6)]
    blr     x30
    /* jump explicitly, to make this code position independent */
    b       arch_syscall_exit
START_POINT_END(SVC_Handler)
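
/*
 * Syscall ABI as dispatched above, summarized (inferred from this handler):
 * the low byte of x8 selects the service, arguments are taken from x0-x7,
 * and x8 values 0xe000/0xf000 are reserved for the sigreturn/debugreturn
 * trampolines. An illustrative user-side invocation:
 *
 *   register long x8 __asm__("x8") = nr;    // service number, low byte used
 *   register long x0 __asm__("x0") = arg0;  // first argument / return value
 *   __asm__ volatile("svc #0" : "+r"(x0) : "r"(x8) : "memory");
 */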

.global arch_syscall_exit
arch_syscall_exit:
    msr     daifset, #3
    ldp     x2, x3, [sp], #0x10 /* x2 = ELR, x3 = SPSR */
    msr     spsr_el1, x3
    msr     elr_el1, x2
    ldp     x29, x30, [sp], #0x10
    msr     sp_el0, x29
    ldp     x28, x29, [sp], #0x10
    msr     fpcr, x28
    msr     fpsr, x29
    ldp     x28, x29, [sp], #0x10
    ldp     x26, x27, [sp], #0x10
    ldp     x24, x25, [sp], #0x10
    ldp     x22, x23, [sp], #0x10
    ldp     x20, x21, [sp], #0x10
    ldp     x18, x19, [sp], #0x10
    ldp     x16, x17, [sp], #0x10
    ldp     x14, x15, [sp], #0x10
    ldp     x12, x13, [sp], #0x10
    ldp     x10, x11, [sp], #0x10
    ldp     x8, x9, [sp], #0x10
    /* skip the x0-x7 slots: x0 already holds the syscall return value */
    add     sp, sp, #0x40
    RESTORE_FPU sp
    /* fall through to arch_ret_to_user */

/* the sp is reset to the outermost level; irq and fiq are disabled */
START_POINT(arch_ret_to_user)
    msr     daifset, #3
    /* save exception frame */
    SAVE_FPU sp
    stp     x0, x1, [sp, #-0x10]!
    stp     x2, x3, [sp, #-0x10]!
    stp     x4, x5, [sp, #-0x10]!
    stp     x6, x7, [sp, #-0x10]!
    stp     x8, x9, [sp, #-0x10]!
    stp     x10, x11, [sp, #-0x10]!
    stp     x12, x13, [sp, #-0x10]!
    stp     x14, x15, [sp, #-0x10]!
    stp     x16, x17, [sp, #-0x10]!
    stp     x18, x19, [sp, #-0x10]!
    stp     x20, x21, [sp, #-0x10]!
    stp     x22, x23, [sp, #-0x10]!
    stp     x24, x25, [sp, #-0x10]!
    stp     x26, x27, [sp, #-0x10]!
    stp     x28, x29, [sp, #-0x10]!
    mrs     x0, fpcr
    mrs     x1, fpsr
    stp     x0, x1, [sp, #-0x10]!
    stp     x29, x30, [sp, #-0x10]!

    /* pre-action */
    bl      lwp_check_debug

    bl      lwp_check_exit_request
    cbz     w0, 1f
    /* exit on event */
    msr     daifclr, #3
    mov     x0, xzr
    b       sys_exit

1:
    /* check if dbg ops exist */
    ldr     x0, =rt_dbg_ops
    ldr     x0, [x0]
    cbz     x0, 3f
    bl      dbg_thread_in_debug
    /* set or clear the single-step (SS) bit of spsr_el1 accordingly */
    mov     x1, #(1 << 21)
    mrs     x2, spsr_el1
    cbz     w0, 2f
    orr     x2, x2, x1
    msr     spsr_el1, x2
    b       3f
2:
    bic     x2, x2, x1
    msr     spsr_el1, x2
3:
    /**
     * push 2 words to simulate an interrupt exception frame
     * Note: in kernel state, the context switch does not save this context
     */
    mrs     x0, spsr_el1
    mrs     x1, elr_el1
    stp     x1, x0, [sp, #-0x10]!
    mov     x0, sp
    msr     daifclr, #3
    bl      lwp_thread_signal_catch
    msr     daifset, #3
    ldp     x1, x0, [sp], #0x10
    msr     spsr_el1, x0
    msr     elr_el1, x1

    /* restore exception frame */
    ldp     x29, x30, [sp], #0x10
    ldp     x0, x1, [sp], #0x10
    msr     fpcr, x0
    msr     fpsr, x1
    ldp     x28, x29, [sp], #0x10
    ldp     x26, x27, [sp], #0x10
    ldp     x24, x25, [sp], #0x10
    ldp     x22, x23, [sp], #0x10
    ldp     x20, x21, [sp], #0x10
    ldp     x18, x19, [sp], #0x10
    ldp     x16, x17, [sp], #0x10
    ldp     x14, x15, [sp], #0x10
    ldp     x12, x13, [sp], #0x10
    ldp     x10, x11, [sp], #0x10
    ldp     x8, x9, [sp], #0x10
    ldp     x6, x7, [sp], #0x10
    ldp     x4, x5, [sp], #0x10
    ldp     x2, x3, [sp], #0x10
    ldp     x0, x1, [sp], #0x10
    RESTORE_FPU sp

    /* check debug: if a debugger is attached, notify it before returning */
    stp     x0, x1, [sp, #-0x10]!
    ldr     x0, =rt_dbg_ops
    ldr     x0, [x0]
    cmp     x0, xzr
    ldp     x0, x1, [sp], #0x10
    beq     1f
    /* save the full context again around dbg_attach_req */
    SAVE_FPU sp
    stp     x0, x1, [sp, #-0x10]!
    stp     x2, x3, [sp, #-0x10]!
    stp     x4, x5, [sp, #-0x10]!
    stp     x6, x7, [sp, #-0x10]!
    stp     x8, x9, [sp, #-0x10]!
    stp     x10, x11, [sp, #-0x10]!
    stp     x12, x13, [sp, #-0x10]!
    stp     x14, x15, [sp, #-0x10]!
    stp     x16, x17, [sp, #-0x10]!
    stp     x18, x19, [sp, #-0x10]!
    stp     x20, x21, [sp, #-0x10]!
    stp     x22, x23, [sp, #-0x10]!
    stp     x24, x25, [sp, #-0x10]!
    stp     x26, x27, [sp, #-0x10]!
    stp     x28, x29, [sp, #-0x10]!
    mrs     x0, fpcr
    mrs     x1, fpsr
    stp     x0, x1, [sp, #-0x10]!
    stp     x29, x30, [sp, #-0x10]!
    mrs     x0, elr_el1
    bl      dbg_attach_req
    /* restore */
    ldp     x29, x30, [sp], #0x10
    ldp     x0, x1, [sp], #0x10
    msr     fpcr, x0
    msr     fpsr, x1
    ldp     x28, x29, [sp], #0x10
    ldp     x26, x27, [sp], #0x10
    ldp     x24, x25, [sp], #0x10
    ldp     x22, x23, [sp], #0x10
    ldp     x20, x21, [sp], #0x10
    ldp     x18, x19, [sp], #0x10
    ldp     x16, x17, [sp], #0x10
    ldp     x14, x15, [sp], #0x10
    ldp     x12, x13, [sp], #0x10
    ldp     x10, x11, [sp], #0x10
    ldp     x8, x9, [sp], #0x10
    ldp     x6, x7, [sp], #0x10
    ldp     x4, x5, [sp], #0x10
    ldp     x2, x3, [sp], #0x10
    ldp     x0, x1, [sp], #0x10
    RESTORE_FPU sp
1:
    eret
START_POINT_END(arch_ret_to_user)
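
/*
 * Sketch of the exception frame built by the save sequence above, from the
 * final sp upwards (inferred from the stores; the FPU area size depends on
 * SAVE_FPU):
 *
 *   [x29,x30] [fpcr,fpsr] [x28,x29] [x26,x27] ... [x0,x1] [FPU state]
 *
 * with an extra [elr,spsr] pair pushed on top around lwp_thread_signal_catch.
 * ret_from_user below assumes the whole context occupies 0x220 bytes.
 */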

.global lwp_check_debug
lwp_check_debug:
    ldr     x0, =rt_dbg_ops
    ldr     x0, [x0]
    cbnz    x0, 1f
    ret
1:
    stp     x29, x30, [sp, #-0x10]!
    bl      dbg_check_suspend
    cbz     w0, lwp_check_debug_quit

    /* copy the two-instruction lwp_debugreturn trampoline onto the user stack */
    mrs     x2, sp_el0
    sub     x2, x2, #0x10
    mov     x3, x2
    msr     sp_el0, x2
    ldr     x0, =lwp_debugreturn
    ldr     w1, [x0]
    str     w1, [x2]
    ldr     w1, [x0, #4]
    str     w1, [x2, #4]
    dc      cvau, x2
    add     x2, x2, #4
    dc      cvau, x2
    dsb     sy
    isb     sy
    ic      ialluis
    isb     sy

    /* save the original return state, then eret to the trampoline */
    mrs     x0, elr_el1
    mrs     x1, spsr_el1
    stp     x0, x1, [sp, #-0x10]!
    msr     elr_el1, x3 /* lwp_debugreturn */
    mov     x1, #(SPSR_Mode(0) | SPSR_A64)
    orr     x1, x1, #(1 << 21) /* SS bit */
    msr     spsr_el1, x1
    eret
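
/*
 * The trampoline runs at EL0 with the SS (single-step) bit set; it executes
 * `mov x8, #0xf000; svc #0`, which re-enters SVC_Handler and is routed to
 * ret_from_user below to drop the trampoline and restore the saved return
 * state.
 */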

ret_from_user:
    /* sp_el0 += 16 to drop the copied lwp_debugreturn trampoline */
    mrs     x0, sp_el0
    add     x0, x0, #0x10
    msr     sp_el0, x0
    /* now at EL1; sp is pos(empty) - sizeof(context) */
    mov     x0, sp
    add     x0, x0, #0x220
    mov     sp, x0
    ldp     x0, x1, [sp], #0x10 /* x1 is the original spsr_el1 */
    msr     elr_el1, x0         /* x0 is the original elr_el1 */
    msr     spsr_el1, x1
lwp_check_debug_quit:
    ldp     x29, x30, [sp], #0x10
    ret

arch_signal_quit:
    /* drop current exception frame */
    add     sp, sp, #CONTEXT_SIZE
    mrs     x0, sp_el0
    bl      arch_signal_ucontext_restore
    sub     x0, x0, #CONTEXT_SIZE
    msr     sp_el0, x0

    /**
     * Note: we are about to reload spsr_el1, and a reschedule would
     * corrupt it, so we disable irq for this short window
     */
    msr     daifset, #3

    /* restore previous exception frame */
    msr     spsel, #0
    ldp     x2, x3, [sp], #0x10
    msr     elr_el1, x2
    msr     spsr_el1, x3
    ldp     x29, x30, [sp], #0x10
    ldp     x28, x29, [sp], #0x10
    msr     fpcr, x28
    msr     fpsr, x29
    ldp     x28, x29, [sp], #0x10
    ldp     x26, x27, [sp], #0x10
    ldp     x24, x25, [sp], #0x10
    ldp     x22, x23, [sp], #0x10
    ldp     x20, x21, [sp], #0x10
    ldp     x18, x19, [sp], #0x10
    ldp     x16, x17, [sp], #0x10
    ldp     x14, x15, [sp], #0x10
    ldp     x12, x13, [sp], #0x10
    ldp     x10, x11, [sp], #0x10
    ldp     x8, x9, [sp], #0x10
    ldp     x6, x7, [sp], #0x10
    ldp     x4, x5, [sp], #0x10
    ldp     x2, x3, [sp], #0x10
    ldp     x0, x1, [sp], #0x10
    RESTORE_FPU sp
    msr     spsel, #1
    b       arch_ret_to_user

/**
 * rt_noreturn
 * void arch_thread_signal_enter(
 *     int signo,                    -> x0
 *     siginfo_t *psiginfo,          -> x1
 *     void *exp_frame,              -> x2
 *     void *entry_uaddr,            -> x3
 *     lwp_sigset_t *save_sig_mask,  -> x4
 * )
 */
.global arch_thread_signal_enter
arch_thread_signal_enter:
    mov     x19, x0
    mov     x20, x2 /* exp_frame */
    mov     x21, x3

    /**
     * move exception frame to user stack
     */
    mrs     x0, sp_el0
    mov     x3, x4
    /* arch_signal_ucontext_save(user_sp, psiginfo, exp_frame, save_sig_mask); */
    bl      arch_signal_ucontext_save
    dc      cvau, x0
    dsb     sy
    ic      ialluis
    dsb     sy

    /**
     * Brief: Prepare the environment for the signal handler
     */

    /**
     * reset the cpsr and drop the exp frame on the kernel stack,
     * then reset the kernel sp
     *
     * Note: we are about to reload spsr_el1, and a reschedule would
     * corrupt it, so we disable irq for this short window
     */
    msr     daifset, #3
    ldr     x1, [x20, #CONTEXT_OFFSET_SPSR_EL1]
    msr     spsr_el1, x1
    add     sp, x20, #CONTEXT_SIZE

    /** reset user sp */
    msr     sp_el0, x0
    /** set the return address to the sigreturn trampoline */
    mov     x30, x0

    /** set the entry address of the signal handler */
    msr     elr_el1, x21

    /* siginfo is above the return address */
    add     x2, x30, #16
    add     x1, x2, #CONTEXT_SIZE
    mov     x0, x19

    /**
     * handler(signo, psi, ucontext);
     */
    eret
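
/*
 * Inferred user-stack layout at handler entry (offsets follow the arithmetic
 * above; the contents are written by arch_signal_ucontext_save):
 *
 *   sp_el0 + 0x00                : lwp_sigreturn trampoline (x30, return address)
 *   sp_el0 + 0x10                : ucontext, CONTEXT_SIZE bytes (x2)
 *   sp_el0 + 0x10 + CONTEXT_SIZE : siginfo (x1)
 */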

lwp_debugreturn:
    mov     x8, #0xf000 /* debugreturn magic, see SVC_Handler */
    svc     #0

.global lwp_sigreturn
lwp_sigreturn:
    mov     x8, #0xe000 /* sigreturn magic, see SVC_Handler */
    svc     #0

lwp_thread_return:
    mov     x0, xzr     /* exit status 0 */
    mov     x8, #0x01
    svc     #0

.global arch_get_tidr
arch_get_tidr:
    mrs     x0, tpidr_el0
    ret

.global arch_set_thread_area
arch_set_thread_area:
.global arch_set_tidr
arch_set_tidr:
    msr     tpidr_el0, x0
    ret
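
/*
 * Note: arch_set_thread_area and arch_set_tidr share one body because both
 * simply program tpidr_el0, the EL0 software thread-ID/TLS register. An
 * equivalent C sketch using inline asm (illustrative only; helper name is
 * hypothetical):
 *
 *   static inline void *arch_get_tls(void)
 *   {
 *       void *p;
 *       __asm__ volatile("mrs %0, tpidr_el0" : "=r"(p));
 *       return p;
 *   }
 */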