/* lwp_gcc.S */
/*
 * Copyright (c) 2006-2023, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2021-05-18     Jesven       first version
 * 2023-07-16     Shell        Move part of the codes to C from asm in signal handling
 */
  11. #ifndef __ASSEMBLY__
  12. #define __ASSEMBLY__
  13. #endif
  14. #include "rtconfig.h"
  15. #include "asm-generic.h"
  16. #include "asm-fpu.h"
  17. #include "armv8.h"
  18. /*********************
  19. * SPSR BIT *
  20. *********************/
  21. #define SPSR_Mode(v) ((v) << 0)
  22. #define SPSR_A64 (0 << 4)
  23. #define SPSR_RESEVRED_5 (0 << 5)
  24. #define SPSR_FIQ_MASKED(v) ((v) << 6)
  25. #define SPSR_IRQ_MASKED(v) ((v) << 7)
  26. #define SPSR_SERROR_MASKED(v) ((v) << 8)
  27. #define SPSR_D_MASKED(v) ((v) << 9)
  28. #define SPSR_RESEVRED_10_19 (0 << 10)
  29. #define SPSR_IL(v) ((v) << 20)
  30. #define SPSR_SS(v) ((v) << 21)
  31. #define SPSR_RESEVRED_22_27 (0 << 22)
  32. #define SPSR_V(v) ((v) << 28)
  33. #define SPSR_C(v) ((v) << 29)
  34. #define SPSR_Z(v) ((v) << 30)
  35. #define SPSR_N(v) ((v) << 31)
  36. /**************************************************/
  37. .text
  38. /*
  39. * void arch_start_umode(args, text, ustack, kstack);
  40. */
  41. .global arch_start_umode
  42. .type arch_start_umode, % function
  43. arch_start_umode:
  44. mov sp, x3
  45. mov x4, #(SPSR_Mode(0) | SPSR_A64)
  46. mov x3, x2 ;/* user stack top */
  47. msr daifset, #3
  48. dsb sy
  49. mrs x30, sp_el0
  50. msr spsr_el1, x4
  51. msr elr_el1, x1
  52. eret
  53. /*
  54. * void arch_crt_start_umode(args, text, ustack, kstack);
  55. */
  56. .global arch_crt_start_umode
  57. .type arch_crt_start_umode, % function
  58. arch_crt_start_umode:
  59. sub x4, x2, #0x10
  60. adr x2, lwp_thread_return
  61. ldr x5, [x2]
  62. str x5, [x4]
  63. ldr x5, [x2, #4]
  64. str x5, [x4, #4]
  65. ldr x5, [x2, #8]
  66. str x5, [x4, #8]
  67. mov x5, x4
  68. dc cvau, x5
  69. add x5, x5, #8
  70. dc cvau, x5
  71. dsb sy
  72. ic ialluis
  73. dsb sy
  74. msr sp_el0, x4
  75. mov sp, x3
  76. mov x4, #(SPSR_Mode(0) | SPSR_A64)
  77. msr daifset, #3
  78. dsb sy
  79. mrs x30, sp_el0
  80. msr spsr_el1, x4
  81. msr elr_el1, x1
  82. eret
  83. /*
  84. void arch_set_thread_context(void *exit_addr, void *new_thread_stack, void *user_stack, void **thread_sp);
  85. */
  86. .global arch_set_thread_context
  87. arch_set_thread_context:
  88. sub x1, x1, #CONTEXT_SIZE
  89. str x2, [x1, #CONTEXT_OFFSET_SP_EL0]
  90. sub x1, x1, #CONTEXT_SIZE
  91. str xzr, [x1, #CONTEXT_OFFSET_X0] /* new thread return 0 */
  92. mov x4, #((3 << 6) | 0x4 | 0x1) /* el1h, disable interrupt */
  93. str x4, [x1, #CONTEXT_OFFSET_SPSR_EL1]
  94. str x0, [x1, #CONTEXT_OFFSET_ELR_EL1]
  95. str x1, [x3]
  96. ret
  97. .global arch_get_user_sp
  98. arch_get_user_sp:
  99. mrs x0, sp_el0
  100. ret
  101. .global arch_fork_exit
  102. .global arch_clone_exit
  103. arch_fork_exit:
  104. arch_clone_exit:
  105. b arch_syscall_exit
  106. /*
  107. void lwp_exec_user(void *args, void *kernel_stack, void *user_entry)
  108. */
  109. .global lwp_exec_user
  110. lwp_exec_user:
  111. mov sp, x1
  112. mov x4, #(SPSR_Mode(0) | SPSR_A64)
  113. ldr x3, =0x0000ffff80000000
  114. msr daifset, #3
  115. msr spsr_el1, x4
  116. msr elr_el1, x2
  117. eret
  118. /*
  119. * void SVC_Handler(regs);
  120. * since this routine reset the SP, we take it as a start point
  121. */
  122. START_POINT(SVC_Handler)
  123. /* x0 is initial sp */
  124. mov sp, x0
  125. msr daifclr, #3 /* enable interrupt */
  126. bl rt_thread_self
  127. bl lwp_user_setting_save
  128. ldp x8, x9, [sp, #(CONTEXT_OFFSET_X8)]
  129. and x0, x8, #0xf000
  130. cmp x0, #0xe000
  131. beq arch_signal_quit
  132. cmp x0, #0xf000
  133. beq ret_from_user
  134. uxtb x0, w8
  135. bl lwp_get_sys_api
  136. cmp x0, xzr
  137. mov x30, x0
  138. beq arch_syscall_exit
  139. ldp x0, x1, [sp, #(CONTEXT_OFFSET_X0)]
  140. ldp x2, x3, [sp, #(CONTEXT_OFFSET_X2)]
  141. ldp x4, x5, [sp, #(CONTEXT_OFFSET_X4)]
  142. ldp x6, x7, [sp, #(CONTEXT_OFFSET_X6)]
  143. blr x30
  144. /* jump explictly, make this code position independant */
  145. b arch_syscall_exit
  146. START_POINT_END(SVC_Handler)
  147. .global arch_syscall_exit
  148. arch_syscall_exit:
  149. msr daifset, #3
  150. ldp x2, x3, [sp], #0x10 /* SPSR and ELR. */
  151. msr spsr_el1, x3
  152. msr elr_el1, x2
  153. ldp x29, x30, [sp], #0x10
  154. msr sp_el0, x29
  155. ldp x28, x29, [sp], #0x10
  156. msr fpcr, x28
  157. msr fpsr, x29
  158. ldp x28, x29, [sp], #0x10
  159. ldp x26, x27, [sp], #0x10
  160. ldp x24, x25, [sp], #0x10
  161. ldp x22, x23, [sp], #0x10
  162. ldp x20, x21, [sp], #0x10
  163. ldp x18, x19, [sp], #0x10
  164. ldp x16, x17, [sp], #0x10
  165. ldp x14, x15, [sp], #0x10
  166. ldp x12, x13, [sp], #0x10
  167. ldp x10, x11, [sp], #0x10
  168. ldp x8, x9, [sp], #0x10
  169. add sp, sp, #0x40
  170. RESTORE_FPU sp
  171. /* the sp is reset to the outer most level */
  172. START_POINT(arch_ret_to_user)
  173. /* save exception frame */
  174. SAVE_FPU sp
  175. stp x0, x1, [sp, #-0x10]!
  176. stp x2, x3, [sp, #-0x10]!
  177. stp x4, x5, [sp, #-0x10]!
  178. stp x6, x7, [sp, #-0x10]!
  179. stp x8, x9, [sp, #-0x10]!
  180. stp x10, x11, [sp, #-0x10]!
  181. stp x12, x13, [sp, #-0x10]!
  182. stp x14, x15, [sp, #-0x10]!
  183. stp x16, x17, [sp, #-0x10]!
  184. stp x18, x19, [sp, #-0x10]!
  185. stp x20, x21, [sp, #-0x10]!
  186. stp x22, x23, [sp, #-0x10]!
  187. stp x24, x25, [sp, #-0x10]!
  188. stp x26, x27, [sp, #-0x10]!
  189. stp x28, x29, [sp, #-0x10]!
  190. mrs x0, fpcr
  191. mrs x1, fpsr
  192. stp x0, x1, [sp, #-0x10]!
  193. stp x29, x30, [sp, #-0x10]!
  194. /* pre-action */
  195. bl lwp_check_debug
  196. bl lwp_check_exit_request
  197. cbz w0, 1f
  198. /* exit on event */
  199. mov x0, xzr
  200. b sys_exit
  201. 1:
  202. /* check if dbg ops exist */
  203. ldr x0, =rt_dbg_ops
  204. ldr x0, [x0]
  205. cbz x0, 3f
  206. bl dbg_thread_in_debug
  207. mov x1, #(1 << 21)
  208. mrs x2, spsr_el1
  209. cbz w0, 2f
  210. orr x2, x2, x1
  211. msr spsr_el1, x2
  212. b 3f
  213. 2:
  214. bic x2, x2, x1
  215. msr spsr_el1, x2
  216. 3:
  217. /**
  218. * push 2 dummy words to simulate a exception frame of interrupt
  219. */
  220. add sp, sp, #-0x10
  221. mov x0, sp
  222. bl lwp_thread_signal_catch
  223. add sp, sp, #0x10
  224. /* check debug */
  225. /* restore exception frame */
  226. ldp x29, x30, [sp], #0x10
  227. ldp x0, x1, [sp], #0x10
  228. msr fpcr, x0
  229. msr fpsr, x1
  230. ldp x28, x29, [sp], #0x10
  231. ldp x26, x27, [sp], #0x10
  232. ldp x24, x25, [sp], #0x10
  233. ldp x22, x23, [sp], #0x10
  234. ldp x20, x21, [sp], #0x10
  235. ldp x18, x19, [sp], #0x10
  236. ldp x16, x17, [sp], #0x10
  237. ldp x14, x15, [sp], #0x10
  238. ldp x12, x13, [sp], #0x10
  239. ldp x10, x11, [sp], #0x10
  240. ldp x8, x9, [sp], #0x10
  241. ldp x6, x7, [sp], #0x10
  242. ldp x4, x5, [sp], #0x10
  243. ldp x2, x3, [sp], #0x10
  244. ldp x0, x1, [sp], #0x10
  245. RESTORE_FPU sp
  246. stp x0, x1, [sp, #-0x10]!
  247. ldr x0, =rt_dbg_ops
  248. ldr x0, [x0]
  249. cmp x0, xzr
  250. ldp x0, x1, [sp], #0x10
  251. beq 1f
  252. /* save */
  253. SAVE_FPU sp
  254. stp x0, x1, [sp, #-0x10]!
  255. stp x2, x3, [sp, #-0x10]!
  256. stp x4, x5, [sp, #-0x10]!
  257. stp x6, x7, [sp, #-0x10]!
  258. stp x8, x9, [sp, #-0x10]!
  259. stp x10, x11, [sp, #-0x10]!
  260. stp x12, x13, [sp, #-0x10]!
  261. stp x14, x15, [sp, #-0x10]!
  262. stp x16, x17, [sp, #-0x10]!
  263. stp x18, x19, [sp, #-0x10]!
  264. stp x20, x21, [sp, #-0x10]!
  265. stp x22, x23, [sp, #-0x10]!
  266. stp x24, x25, [sp, #-0x10]!
  267. stp x26, x27, [sp, #-0x10]!
  268. stp x28, x29, [sp, #-0x10]!
  269. mrs x0, fpcr
  270. mrs x1, fpsr
  271. stp x0, x1, [sp, #-0x10]!
  272. stp x29, x30, [sp, #-0x10]!
  273. mrs x0, elr_el1
  274. bl dbg_attach_req
  275. /* restore */
  276. ldp x29, x30, [sp], #0x10
  277. ldp x0, x1, [sp], #0x10
  278. msr fpcr, x0
  279. msr fpsr, x1
  280. ldp x28, x29, [sp], #0x10
  281. ldp x26, x27, [sp], #0x10
  282. ldp x24, x25, [sp], #0x10
  283. ldp x22, x23, [sp], #0x10
  284. ldp x20, x21, [sp], #0x10
  285. ldp x18, x19, [sp], #0x10
  286. ldp x16, x17, [sp], #0x10
  287. ldp x14, x15, [sp], #0x10
  288. ldp x12, x13, [sp], #0x10
  289. ldp x10, x11, [sp], #0x10
  290. ldp x8, x9, [sp], #0x10
  291. ldp x6, x7, [sp], #0x10
  292. ldp x4, x5, [sp], #0x10
  293. ldp x2, x3, [sp], #0x10
  294. ldp x0, x1, [sp], #0x10
  295. RESTORE_FPU sp
  296. 1:
  297. eret
  298. START_POINT_END(arch_ret_to_user)
  299. .global lwp_check_debug
  300. lwp_check_debug:
  301. ldr x0, =rt_dbg_ops
  302. ldr x0, [x0]
  303. cbnz x0, 1f
  304. ret
  305. 1:
  306. stp x29, x30, [sp, #-0x10]!
  307. bl dbg_check_suspend
  308. cbz w0, lwp_check_debug_quit
  309. mrs x2, sp_el0
  310. sub x2, x2, #0x10
  311. mov x3, x2
  312. msr sp_el0, x2
  313. ldr x0, =lwp_debugreturn
  314. ldr w1, [x0]
  315. str w1, [x2]
  316. ldr w1, [x0, #4]
  317. str w1, [x2, #4]
  318. dc cvau, x2
  319. add x2, x2, #4
  320. dc cvau, x2
  321. dsb sy
  322. isb sy
  323. ic ialluis
  324. isb sy
  325. mrs x0, elr_el1
  326. mrs x1, spsr_el1
  327. stp x0, x1, [sp, #-0x10]!
  328. msr elr_el1, x3 /* lwp_debugreturn */
  329. mov x1, #(SPSR_Mode(0) | SPSR_A64)
  330. orr x1, x1, #(1 << 21)
  331. msr spsr_el1, x1
  332. eret
  333. ret_from_user:
  334. /* sp_el0 += 16 for drop ins lwp_debugreturn */
  335. mrs x0, sp_el0
  336. add x0, x0, #0x10
  337. msr sp_el0, x0
  338. /* now is el1, sp is pos(empty) - sizeof(context) */
  339. mov x0, sp
  340. add x0, x0, #0x220
  341. mov sp, x0
  342. ldp x0, x1, [sp], #0x10 /* x1 is origin spsr_el1 */
  343. msr elr_el1, x0 /* x0 is origin elr_el1 */
  344. msr spsr_el1, x1
  345. lwp_check_debug_quit:
  346. ldp x29, x30, [sp], #0x10
  347. ret
  348. arch_signal_quit:
  349. msr daifset, #3
  350. /* drop current exception frame */
  351. add sp, sp, #CONTEXT_SIZE
  352. mrs x0, sp_el0
  353. bl arch_signal_ucontext_restore
  354. add x0, x0, #-CONTEXT_SIZE
  355. msr sp_el0, x0
  356. /* restore previous exception frame */
  357. msr spsel, #0
  358. ldp x2, x3, [sp], #0x10
  359. msr elr_el1, x2
  360. msr spsr_el1, x3
  361. ldp x29, x30, [sp], #0x10
  362. // msr sp_el0, x29
  363. ldp x28, x29, [sp], #0x10
  364. msr fpcr, x28
  365. msr fpsr, x29
  366. ldp x28, x29, [sp], #0x10
  367. ldp x26, x27, [sp], #0x10
  368. ldp x24, x25, [sp], #0x10
  369. ldp x22, x23, [sp], #0x10
  370. ldp x20, x21, [sp], #0x10
  371. ldp x18, x19, [sp], #0x10
  372. ldp x16, x17, [sp], #0x10
  373. ldp x14, x15, [sp], #0x10
  374. ldp x12, x13, [sp], #0x10
  375. ldp x10, x11, [sp], #0x10
  376. ldp x8, x9, [sp], #0x10
  377. ldp x6, x7, [sp], #0x10
  378. ldp x4, x5, [sp], #0x10
  379. ldp x2, x3, [sp], #0x10
  380. ldp x0, x1, [sp], #0x10
  381. RESTORE_FPU sp
  382. msr spsel, #1
  383. b arch_ret_to_user
  384. /**
  385. * rt_noreturn
  386. * void arch_thread_signal_enter(
  387. * int signo, -> x0
  388. * siginfo_t *psiginfo, -> x1
  389. * void *exp_frame, -> x2
  390. * void *entry_uaddr, -> x3
  391. * lwp_sigset_t *save_sig_mask, -> x4
  392. * )
  393. */
  394. .global arch_thread_signal_enter
  395. arch_thread_signal_enter:
  396. mov x19, x0
  397. mov x20, x2 /* exp_frame */
  398. mov x21, x3
  399. /**
  400. * move exception frame to user stack
  401. */
  402. mrs x0, sp_el0
  403. mrs x3, elr_el1
  404. mov x5, x4
  405. /** FIXME: spsr must restore from exception frame */
  406. mrs x4, spsr_el1
  407. /* arch_signal_ucontext_save(user_sp, psiginfo, exp_frame, elr, spsr, save_sig_mask); */
  408. bl arch_signal_ucontext_save
  409. dc cvau, x0
  410. dsb sy
  411. ic ialluis
  412. dsb sy
  413. /**
  414. * @brief Prepare the environment for signal handler
  415. */
  416. /** drop exp frame on kernel stack, reset kernel sp */
  417. add sp, x20, #CONTEXT_SIZE
  418. /** reset user sp */
  419. msr sp_el0, x0
  420. /** set the return address to the sigreturn */
  421. mov x30, x0
  422. /** set the entry address of signal handler */
  423. msr elr_el1, x21
  424. /* siginfo is above the return address */
  425. add x2, x30, 16
  426. add x1, x2, #CONTEXT_SIZE
  427. mov x0, x19
  428. /**
  429. * handler(signo, psi, ucontext);
  430. */
  431. eret
  432. lwp_debugreturn:
  433. mov x8, 0xf000
  434. svc #0
  435. .global lwp_sigreturn
  436. lwp_sigreturn:
  437. mov x8, #0xe000
  438. svc #0
  439. lwp_thread_return:
  440. mov x0, xzr
  441. mov x8, #0x01
  442. svc #0
  443. .globl arch_get_tidr
  444. arch_get_tidr:
  445. mrs x0, tpidr_el0
  446. ret
  447. .global arch_set_thread_area
  448. arch_set_thread_area:
  449. .globl arch_set_tidr
  450. arch_set_tidr:
  451. msr tpidr_el0, x0
  452. ret