/* lwp_gcc.S */
/*
 * Copyright (c) 2006-2023, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2021-05-18     Jesven       first version
 * 2023-07-16     Shell        Move part of the codes to C from asm in signal handling
 * 2023-08-03     Shell        Support of syscall restart (SA_RESTART)
 */
#ifndef __ASSEMBLY__
#define __ASSEMBLY__
#endif

#include <rtconfig.h>
#include <asm-generic.h>
#include <asm-fpu.h>
#include <armv8.h>
#include <lwp_arch.h>

/*********************
 *     SPSR BIT      *
 *********************/
/*
 * Field-by-field constructors for an SPSR_EL1 image used when
 * eret-ing into user mode.  SPSR_Mode(0) selects EL0t; SPSR_A64
 * keeps M[4] clear (AArch64 execution state).  The *_RESEVRED_*
 * macros (typo kept, they may be referenced elsewhere) pin the
 * architecturally reserved bits to zero.
 */
#define SPSR_Mode(v) ((v) << 0)             /* M[3:0]: EL / SP selection */
#define SPSR_A64 (0 << 4)                   /* M[4] = 0: AArch64 state */
#define SPSR_RESEVRED_5 (0 << 5)
#define SPSR_FIQ_MASKED(v) ((v) << 6)       /* F: FIQ mask bit */
#define SPSR_IRQ_MASKED(v) ((v) << 7)       /* I: IRQ mask bit */
#define SPSR_SERROR_MASKED(v) ((v) << 8)    /* A: SError mask bit */
#define SPSR_D_MASKED(v) ((v) << 9)         /* D: debug exception mask bit */
#define SPSR_RESEVRED_10_19 (0 << 10)
#define SPSR_IL(v) ((v) << 20)              /* IL: illegal execution state */
#define SPSR_SS(v) ((v) << 21)              /* SS: software single-step */
#define SPSR_RESEVRED_22_27 (0 << 22)
#define SPSR_V(v) ((v) << 28)               /* NZCV condition flags */
#define SPSR_C(v) ((v) << 29)
#define SPSR_Z(v) ((v) << 30)
#define SPSR_N(v) ((v) << 31)
/**************************************************/

.text
/*
 * void arch_start_umode(args, text, ustack, kstack);
 *
 * First entry into user mode (EL0) for a thread.
 *   x0 = args   (left untouched; delivered to the user entry point)
 *   x1 = text   (user entry address  -> ELR_EL1)
 *   x2 = ustack (user stack top      -> SP_EL0)
 *   x3 = kstack (kernel stack top    -> SP_EL1)
 * Does not return: ends with eret into EL0.
 */
.global arch_start_umode
.type arch_start_umode, % function
arch_start_umode:
    mov sp, x3                           /* switch to the thread's kernel stack */
    mov x4, #(SPSR_Mode(0) | SPSR_A64)   /* target PSTATE: EL0t, AArch64 */
    msr daifset, #3                      /* mask IRQ/FIQ while swapping state */
    dsb sy
    mrs x30, sp_el0                      /* NOTE(review): previous SP_EL0 kept in lr — purpose not visible here, confirm */
    /* user stack top */
    msr sp_el0, x2
    mov x3, x2                           /* NOTE(review): x3 copy appears unused below — confirm */
    msr spsr_el1, x4
    msr elr_el1, x1                      /* eret target = user text */
    eret                                 /* drop to EL0 */
/*
 * void arch_crt_start_umode(args, text, ustack, kstack);
 *
 * Like arch_start_umode(), but first copies the small
 * lwp_thread_return stub onto the user stack (16 bytes reserved)
 * and sets both SP_EL0 and lr to it, so a plain `ret` from the user
 * entry point falls into the stub and issues the thread-exit svc.
 *   x0 = args, x1 = text, x2 = ustack top, x3 = kstack top
 */
.global arch_crt_start_umode
.type arch_crt_start_umode, % function
arch_crt_start_umode:
    sub x4, x2, #0x10               /* reserve 16 bytes on the user stack for the stub */
    adr x2, lwp_thread_return
    ldr x5, [x2]                    /* copy stub code with overlapping 8-byte loads */
    str x5, [x4]
    ldr x5, [x2, #4]
    str x5, [x4, #4]
    ldr x5, [x2, #8]
    str x5, [x4, #8]
    mov x5, x4
    dc cvau, x5                     /* clean D-cache to PoU so the I-side can fetch the copy */
    add x5, x5, #8
    dc cvau, x5
    dsb sy
    ic ialluis                      /* invalidate all I-caches, inner shareable */
    dsb sy
    msr sp_el0, x4                  /* user sp = address of the copied stub */
    mov sp, x3                      /* kernel stack */
    mov x4, #(SPSR_Mode(0) | SPSR_A64)
    msr daifset, #3
    dsb sy
    mrs x30, sp_el0                 /* lr = stub: returning from entry exits the thread */
    msr spsr_el1, x4
    msr elr_el1, x1
    eret
/* unsigned long arch_get_user_sp(void); — return the current user stack pointer (SP_EL0) */
.global arch_get_user_sp
arch_get_user_sp:
    mrs x0, sp_el0
    ret
/*
 * Common tail for the child side of fork()/clone(): the new thread
 * resumes here and leaves the kernel through the regular
 * syscall-exit path (frame already prepared on its kernel stack).
 */
.global arch_fork_exit
.global arch_clone_exit
arch_fork_exit:
arch_clone_exit:
    b arch_syscall_exit
/*
 * void lwp_exec_user(void *args, void *kernel_stack, void *user_entry)
 *   x0 = args, x1 = kernel stack top (-> SP_EL1), x2 = user entry (-> ELR_EL1)
 * Jumps to EL0 at user_entry; does not return.
 */
.global lwp_exec_user
lwp_exec_user:
    mov sp, x1
    mov x4, #(SPSR_Mode(0) | SPSR_A64)  /* EL0t, AArch64 */
    ldr x3, =0x0000ffff80000000         /* NOTE(review): loaded but never used below — confirm intent */
    msr daifset, #3
    msr spsr_el1, x4
    msr elr_el1, x2
    eret
/*
 * void SVC_Handler(regs);
 * since this routine reset the SP, we take it as a start point
 *
 * Entered from the EL0 svc vector with x0 = saved exception frame,
 * which becomes the kernel SP.  Dispatch on x8:
 *   x8 & 0xf000 == 0xe000 -> signal return  (arch_signal_quit)
 *   x8 & 0xf000 == 0xf000 -> debug return   (ret_from_user)
 *   otherwise             -> low byte of x8 indexes the syscall table
 */
START_POINT(SVC_Handler)
    mov fp, xzr
    mov lr, xzr                      /* terminate any stack unwinding here */
    /* x0 is initial sp */
    mov sp, x0
    msr daifclr, #3 /* enable interrupt */
    GET_THREAD_SELF x0
    bl lwp_user_setting_save
    ldp x8, x9, [sp, #(CONTEXT_OFFSET_X8)]   /* reload x8/x9 from the saved frame */
    and x0, x8, #0xf000
    cmp x0, #0xe000
    beq arch_signal_quit
    cmp x0, #0xf000
    beq ret_from_user
    uxtb x0, w8                      /* syscall number = low 8 bits of x8 */
    bl lwp_get_sys_api
    cmp x0, xzr
    mov x30, x0                      /* x30 = api entry (mov preserves the flags) */
    beq arch_syscall_exit            /* unknown syscall: leave without calling */
    ldp x0, x1, [sp, #(CONTEXT_OFFSET_X0)]   /* reload up to 8 syscall arguments */
    ldp x2, x3, [sp, #(CONTEXT_OFFSET_X2)]
    ldp x4, x5, [sp, #(CONTEXT_OFFSET_X4)]
    ldp x6, x7, [sp, #(CONTEXT_OFFSET_X6)]
    blr x30                          /* call the syscall implementation */
    /* jump explictly, make this code position independant */
    b arch_syscall_exit
START_POINT_END(SVC_Handler)
/*
 * Leave the kernel after a syscall.  x0 = syscall return value,
 * sp = exception frame.  C code gets a chance to arrange signal
 * delivery / syscall restart, then the whole frame is popped and
 * execution falls through into arch_ret_to_user below.
 */
.global arch_syscall_exit
arch_syscall_exit:
    /**
     * @brief back up former x0 which is required to restart syscall, then setup
     * syscall return value in stack frame
     */
    mov x1, sp
    bl arch_syscall_prepare_signal
    msr daifset, #3                 /* no interrupts while unwinding the frame */
    /* pop the exception frame; order mirrors the save sequence in the svc vector */
    ldp x2, x3, [sp], #0x10 /* SPSR and ELR. */
    msr spsr_el1, x3
    msr elr_el1, x2
    ldp x29, x30, [sp], #0x10
    msr sp_el0, x29                 /* NOTE(review): this frame slot carries the user sp — confirm against vector save */
    ldp x28, x29, [sp], #0x10
    msr fpcr, x28
    msr fpsr, x29
    ldp x28, x29, [sp], #0x10
    ldp x26, x27, [sp], #0x10
    ldp x24, x25, [sp], #0x10
    ldp x22, x23, [sp], #0x10
    ldp x20, x21, [sp], #0x10
    ldp x18, x19, [sp], #0x10
    ldp x16, x17, [sp], #0x10
    ldp x14, x15, [sp], #0x10
    ldp x12, x13, [sp], #0x10
    ldp x10, x11, [sp], #0x10
    ldp x8, x9, [sp], #0x10
    ldp x6, x7, [sp], #0x10
    ldp x4, x5, [sp], #0x10
    ldp x2, x3, [sp], #0x10
    ldp x0, x1, [sp], #0x10
    RESTORE_FPU sp
    /* falls through into arch_ret_to_user */
/* the sp is reset to the outer most level, irq and fiq are disabled */
/*
 * Final common path back to EL0.  With all user registers live,
 * save a fresh exception frame, run pre-return work (debug hooks,
 * exit requests, pending signal delivery), then restore the frame
 * and eret.
 */
START_POINT(arch_ret_to_user)
    msr daifset, #3
    /* save exception frame */
    SAVE_FPU sp
    stp x0, x1, [sp, #-0x10]!
    stp x2, x3, [sp, #-0x10]!
    stp x4, x5, [sp, #-0x10]!
    stp x6, x7, [sp, #-0x10]!
    stp x8, x9, [sp, #-0x10]!
    stp x10, x11, [sp, #-0x10]!
    stp x12, x13, [sp, #-0x10]!
    stp x14, x15, [sp, #-0x10]!
    stp x16, x17, [sp, #-0x10]!
    stp x18, x19, [sp, #-0x10]!
    stp x20, x21, [sp, #-0x10]!
    stp x22, x23, [sp, #-0x10]!
    stp x24, x25, [sp, #-0x10]!
    stp x26, x27, [sp, #-0x10]!
    stp x28, x29, [sp, #-0x10]!
    mrs x0, fpcr
    mrs x1, fpsr
    stp x0, x1, [sp, #-0x10]!
    stp x29, x30, [sp, #-0x10]!
    /* pre-action */
    bl lwp_check_debug
    bl lwp_check_exit_request
    cbz w0, 1f
    /* exit on event */
    msr daifclr, #3
    mov x0, xzr
    b sys_exit                      /* terminate the thread; no return */
1:
    /* check if dbg ops exist */
    ldr x0, =rt_dbg_ops
    ldr x0, [x0]
    cbz x0, 3f
    bl dbg_thread_in_debug
    mov x1, #(1 << 21)              /* SPSR_EL1.SS: software single-step bit */
    mrs x2, spsr_el1
    cbz w0, 2f
    orr x2, x2, x1                  /* thread is being debugged: enable single-step */
    msr spsr_el1, x2
    b 3f
2:
    bic x2, x2, x1                  /* not in debug: make sure single-step is off */
    msr spsr_el1, x2
3:
    /**
     * push 2 dummy words to simulate a exception frame of interrupt
     * Note: in kernel state, the context switch dont saved the context
     */
    mrs x0, spsr_el1
    mrs x1, elr_el1
    stp x1, x0, [sp, #-0x10]!
    mov x0, sp                      /* x0 = exception frame for the C signal code */
    msr daifclr, #3
    bl lwp_thread_signal_catch
    msr daifset, #3
    ldp x1, x0, [sp], #0x10         /* pop the dummy elr/spsr pair back */
    msr spsr_el1, x0
    msr elr_el1, x1
    /* check debug */
    /* restore exception frame */
    ldp x29, x30, [sp], #0x10
    ldp x0, x1, [sp], #0x10
    msr fpcr, x0
    msr fpsr, x1
    ldp x28, x29, [sp], #0x10
    ldp x26, x27, [sp], #0x10
    ldp x24, x25, [sp], #0x10
    ldp x22, x23, [sp], #0x10
    ldp x20, x21, [sp], #0x10
    ldp x18, x19, [sp], #0x10
    ldp x16, x17, [sp], #0x10
    ldp x14, x15, [sp], #0x10
    ldp x12, x13, [sp], #0x10
    ldp x10, x11, [sp], #0x10
    ldp x8, x9, [sp], #0x10
    ldp x6, x7, [sp], #0x10
    ldp x4, x5, [sp], #0x10
    ldp x2, x3, [sp], #0x10
    ldp x0, x1, [sp], #0x10
    RESTORE_FPU sp
    /* if a debugger is attached, report with a full frame saved around the call */
    stp x0, x1, [sp, #-0x10]!       /* scratch save: x0/x1 needed to test rt_dbg_ops */
    ldr x0, =rt_dbg_ops
    ldr x0, [x0]
    cmp x0, xzr
    ldp x0, x1, [sp], #0x10         /* restore scratch; flags survive the ldp */
    beq 1f
    /* save */
    SAVE_FPU sp
    stp x0, x1, [sp, #-0x10]!
    stp x2, x3, [sp, #-0x10]!
    stp x4, x5, [sp, #-0x10]!
    stp x6, x7, [sp, #-0x10]!
    stp x8, x9, [sp, #-0x10]!
    stp x10, x11, [sp, #-0x10]!
    stp x12, x13, [sp, #-0x10]!
    stp x14, x15, [sp, #-0x10]!
    stp x16, x17, [sp, #-0x10]!
    stp x18, x19, [sp, #-0x10]!
    stp x20, x21, [sp, #-0x10]!
    stp x22, x23, [sp, #-0x10]!
    stp x24, x25, [sp, #-0x10]!
    stp x26, x27, [sp, #-0x10]!
    stp x28, x29, [sp, #-0x10]!
    mrs x0, fpcr
    mrs x1, fpsr
    stp x0, x1, [sp, #-0x10]!
    stp x29, x30, [sp, #-0x10]!
    mrs x0, elr_el1                 /* report the address we are returning to */
    bl dbg_attach_req
    /* restore */
    ldp x29, x30, [sp], #0x10
    ldp x0, x1, [sp], #0x10
    msr fpcr, x0
    msr fpsr, x1
    ldp x28, x29, [sp], #0x10
    ldp x26, x27, [sp], #0x10
    ldp x24, x25, [sp], #0x10
    ldp x22, x23, [sp], #0x10
    ldp x20, x21, [sp], #0x10
    ldp x18, x19, [sp], #0x10
    ldp x16, x17, [sp], #0x10
    ldp x14, x15, [sp], #0x10
    ldp x12, x13, [sp], #0x10
    ldp x10, x11, [sp], #0x10
    ldp x8, x9, [sp], #0x10
    ldp x6, x7, [sp], #0x10
    ldp x4, x5, [sp], #0x10
    ldp x2, x3, [sp], #0x10
    ldp x0, x1, [sp], #0x10
    RESTORE_FPU sp
1:
    eret                            /* back to EL0 */
START_POINT_END(arch_ret_to_user)
/*
 * Called from arch_ret_to_user with the frame saved on the kernel
 * stack.  If a debug suspend is pending, copy the lwp_debugreturn
 * stub (2 instructions, 8 bytes) onto the user stack and eret into
 * it single-stepped; the stub traps straight back via svc
 * (x8 = 0xf000), resuming at ret_from_user below, which undoes the
 * stack adjustments and returns to the saved caller.
 */
.global lwp_check_debug
lwp_check_debug:
    ldr x0, =rt_dbg_ops
    ldr x0, [x0]
    cbnz x0, 1f
    ret                             /* no debugger registered: fast exit */
1:
    stp x29, x30, [sp, #-0x10]!
    bl dbg_check_suspend
    cbz w0, lwp_check_debug_quit
    mrs x2, sp_el0
    sub x2, x2, #0x10               /* reserve 16 bytes on the user stack for the stub */
    mov x3, x2
    msr sp_el0, x2
    ldr x0, =lwp_debugreturn
    ldr w1, [x0]                    /* copy the two stub instructions */
    str w1, [x2]
    ldr w1, [x0, #4]
    str w1, [x2, #4]
    dc cvau, x2                     /* make the copied code visible to instruction fetch */
    add x2, x2, #4
    dc cvau, x2
    dsb sy
    isb sy
    ic ialluis
    isb sy
    mrs x0, elr_el1
    mrs x1, spsr_el1
    stp x0, x1, [sp, #-0x10]!       /* keep original return state; popped in ret_from_user */
    msr elr_el1, x3 /* lwp_debugreturn */
    mov x1, #(SPSR_Mode(0) | SPSR_A64)
    orr x1, x1, #(1 << 21)          /* SS: single-step the stub for the debugger */
    msr spsr_el1, x1
    eret
ret_from_user:
    /* sp_el0 += 16 for drop ins lwp_debugreturn */
    mrs x0, sp_el0
    add x0, x0, #0x10
    msr sp_el0, x0
    /* now is el1, sp is pos(empty) - sizeof(context) */
    mov x0, sp
    add x0, x0, #0x220              /* NOTE(review): 0x220 presumably equals CONTEXT_SIZE — confirm */
    mov sp, x0
    ldp x0, x1, [sp], #0x10 /* x1 is origin spsr_el1 */
    msr elr_el1, x0 /* x0 is origin elr_el1 */
    msr spsr_el1, x1
lwp_check_debug_quit:
    ldp x29, x30, [sp], #0x10
    ret
/*
 * Syscall restart support (SA_RESTART).
 *   x0 = user sp to restore (points at the saved exception frame)
 *   x1 = kernel sp to rewind to
 * Pops the pre-syscall exception frame via SP_EL0, then re-enters
 * the exception vector so the interrupted syscall is replayed.
 */
.global arch_syscall_restart
arch_syscall_restart:
    msr daifset, 3
    mov sp, x1
    /* drop exception frame in user stack */
    msr sp_el0, x0
    /* restore previous exception frame */
    msr spsel, #0                   /* use SP_EL0 to walk the saved frame */
    ldp x2, x3, [sp], #0x10
    msr elr_el1, x2
    msr spsr_el1, x3
    ldp x29, x30, [sp], #0x10
    ldp x28, x29, [sp], #0x10
    msr fpcr, x28
    msr fpsr, x29
    ldp x28, x29, [sp], #0x10
    ldp x26, x27, [sp], #0x10
    ldp x24, x25, [sp], #0x10
    ldp x22, x23, [sp], #0x10
    ldp x20, x21, [sp], #0x10
    ldp x18, x19, [sp], #0x10
    ldp x16, x17, [sp], #0x10
    ldp x14, x15, [sp], #0x10
    ldp x12, x13, [sp], #0x10
    ldp x10, x11, [sp], #0x10
    ldp x8, x9, [sp], #0x10
    ldp x6, x7, [sp], #0x10
    ldp x4, x5, [sp], #0x10
    ldp x2, x3, [sp], #0x10
    ldp x0, x1, [sp], #0x10
    RESTORE_FPU sp
    msr spsel, #1                   /* back to SP_EL1 */
    b vector_exception              /* replay the trap */
/*
 * Return path of a user signal handler (svc with x8 == 0xe000,
 * issued by lwp_sigreturn).  Restores the pre-signal context that
 * arch_thread_signal_enter saved into the user-space ucontext, then
 * leaves through arch_ret_to_user.
 */
arch_signal_quit:
    /* drop current exception frame */
    add sp, sp, #CONTEXT_SIZE
    mov x1, sp
    mrs x0, sp_el0
    bl arch_signal_ucontext_restore
    add x0, x0, #-CONTEXT_SIZE      /* x0 = saved frame location on the user stack */
    msr sp_el0, x0
    /**
     * Note: Since we will reset spsr, but the reschedule will
     * corrupt the spsr, we diable irq for a short period here
     */
    msr daifset, #3
    /* restore previous exception frame */
    msr spsel, #0                   /* pop the saved frame via SP_EL0 */
    ldp x2, x3, [sp], #0x10
    msr elr_el1, x2
    msr spsr_el1, x3
    ldp x29, x30, [sp], #0x10
    ldp x28, x29, [sp], #0x10
    msr fpcr, x28
    msr fpsr, x29
    ldp x28, x29, [sp], #0x10
    ldp x26, x27, [sp], #0x10
    ldp x24, x25, [sp], #0x10
    ldp x22, x23, [sp], #0x10
    ldp x20, x21, [sp], #0x10
    ldp x18, x19, [sp], #0x10
    ldp x16, x17, [sp], #0x10
    ldp x14, x15, [sp], #0x10
    ldp x12, x13, [sp], #0x10
    ldp x10, x11, [sp], #0x10
    ldp x8, x9, [sp], #0x10
    ldp x6, x7, [sp], #0x10
    ldp x4, x5, [sp], #0x10
    ldp x2, x3, [sp], #0x10
    ldp x0, x1, [sp], #0x10
    RESTORE_FPU sp
    msr spsel, #1                   /* back to SP_EL1 */
    b arch_ret_to_user
/**
 * rt_noreturn
 * void arch_thread_signal_enter(
 *     int signo,                    -> x0
 *     siginfo_t *psiginfo,          -> x1
 *     void *exp_frame,              -> x2
 *     void *entry_uaddr,            -> x3
 *     lwp_sigset_t *save_sig_mask,  -> x4
 * )
 *
 * Build the user-side signal delivery environment: the ucontext
 * (with the interrupted frame and sigreturn trampoline) is placed
 * on the user stack by C helpers, then we eret into the handler
 * with lr pointing at the trampoline so its return triggers
 * lwp_sigreturn -> arch_signal_quit.
 */
.global arch_thread_signal_enter
arch_thread_signal_enter:
    mov x19, x0                     /* keep signo across the helper calls */
    mov x20, x2 /* exp_frame */
    mov x21, x3                     /* entry_uaddr (may be NULL) */
    /**
     * move exception frame to user stack
     */
    mrs x0, sp_el0
    mov x3, x4
    /* arch_signal_ucontext_save(user_sp, psiginfo, exp_frame, save_sig_mask); */
    bl arch_signal_ucontext_save
    mov x22, x0                     /* x22 = new user sp returned by the helper */
    /* get and saved pointer to uframe */
    bl arch_signal_ucontext_get_frame
    mov x2, x0                      /* x2 = ucontext pointer (3rd handler argument) */
    mov x0, x22
    dc cvau, x0                     /* code was written to the user stack: sync caches */
    dsb sy
    ic ialluis
    dsb sy
    /**
     * Brief: Prepare the environment for signal handler
     */
    /**
     * reset the cpsr
     * and drop exp frame on kernel stack, reset kernel sp
     *
     * Note: Since we will reset spsr, but the reschedule will
     * corrupt the spsr, we diable irq for a short period here
     */
    msr daifset, #3
    ldr x1, [x20, #CONTEXT_OFFSET_SPSR_EL1]
    msr spsr_el1, x1
    add sp, x20, #CONTEXT_SIZE      /* discard the in-kernel exception frame */
    /** reset user sp */
    msr sp_el0, x0
    /** set the return address to the sigreturn */
    mov x30, x0                     /* lr = sigreturn trampoline on the user stack */
    cbnz x21, 1f
    mov x21, x30                    /* no handler entry given: jump straight to sigreturn */
1:
    /** set the entry address of signal handler */
    msr elr_el1, x21
    /* siginfo is above the return address */
    add x1, x30, UCTX_ABI_OFFSET_TO_SI
    /* uframe is saved in x2 */
    mov x0, x19                     /* 1st handler argument: signo */
    /**
     * handler(signo, psi, ucontext);
     *
     */
    eret
/*
 * Tiny user-mode stubs.  They are copied onto the user stack and
 * executed at EL0; each immediately traps back into the kernel via
 * svc, dispatched on x8 by SVC_Handler.
 */
lwp_debugreturn:                    /* x8 = 0xf000 -> ret_from_user */
    mov x8, 0xf000
    svc #0
.global lwp_sigreturn
lwp_sigreturn:                      /* x8 = 0xe000 -> arch_signal_quit */
    mov x8, #0xe000
    svc #0
lwp_thread_return:                  /* thread entry returned: exit with status 0 */
    mov x0, xzr
    mov x8, #0x01                   /* NOTE(review): presumably syscall #1 = thread exit — confirm */
    svc #0
/* unsigned long arch_get_tidr(void); — read the EL0 thread pointer (TLS base) */
.globl arch_get_tidr
arch_get_tidr:
    mrs x0, tpidr_el0
    ret

/* arch_set_thread_area(p) / arch_set_tidr(p): write the EL0 thread pointer */
.global arch_set_thread_area
arch_set_thread_area:
.globl arch_set_tidr
arch_set_tidr:
    msr tpidr_el0, x0
    ret