/* lwp_gcc.S — AArch64 light-weight-process (user mode) support:
 * user-mode entry, syscall exit, signal delivery/return, debug trampolines. */
/*
 * Copyright (c) 2006-2023, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2021-05-18     Jesven       first version
 * 2023-07-16     Shell        Move part of the codes to C from asm in signal handling
 * 2023-08-03     Shell        Support of syscall restart (SA_RESTART)
 */
#ifndef __ASSEMBLY__
#define __ASSEMBLY__
#endif

#include "rtconfig.h"
#include "asm-generic.h"
#include "asm-fpu.h"
#include "armv8.h"
#include "lwp_arch.h"

/*********************
 *     SPSR BIT      *
 *********************/
/* Field builders for SPSR_EL1 (saved program status register).
 * SPSR_Mode(0) | SPSR_A64 describes an EL0t / AArch64 target state. */
#define SPSR_Mode(v)            ((v) << 0)  /* M[3:0]: exception level + SP select */
#define SPSR_A64                (0 << 4)    /* M[4] = 0: AArch64 execution state */
#define SPSR_RESEVRED_5         (0 << 5)
#define SPSR_FIQ_MASKED(v)      ((v) << 6)  /* F: FIQ mask */
#define SPSR_IRQ_MASKED(v)      ((v) << 7)  /* I: IRQ mask */
#define SPSR_SERROR_MASKED(v)   ((v) << 8)  /* A: SError mask */
#define SPSR_D_MASKED(v)        ((v) << 9)  /* D: debug exception mask */
#define SPSR_RESEVRED_10_19     (0 << 10)
#define SPSR_IL(v)              ((v) << 20) /* IL: illegal execution state */
#define SPSR_SS(v)              ((v) << 21) /* SS: software single-step */
#define SPSR_RESEVRED_22_27     (0 << 22)
#define SPSR_V(v)               ((v) << 28) /* condition flags */
#define SPSR_C(v)               ((v) << 29)
#define SPSR_Z(v)               ((v) << 30)
#define SPSR_N(v)               ((v) << 31)

/**************************************************/
.text
/*
 * void arch_start_umode(args, text, ustack, kstack);
 *
 * First drop of the current thread into user mode (EL0).
 *   x0 = args   (passed through untouched to the user entry)
 *   x1 = text   (user-space entry point)
 *   x2 = ustack (top of the user stack)
 *   x3 = kstack (top of this thread's kernel stack)
 * Does not return.
 */
.global arch_start_umode
.type arch_start_umode, % function
arch_start_umode:
    mov sp, x3                          /* install the kernel stack */
    mov x4, #(SPSR_Mode(0) | SPSR_A64)  /* target state: EL0t, AArch64 */
    msr daifset, #3                     /* mask IRQ/FIQ across the transition */
    dsb sy
    mrs x30, sp_el0
    /* user stack top */
    msr sp_el0, x2
    mov x3, x2
    msr spsr_el1, x4
    msr elr_el1, x1                     /* eret target = user text */
    eret                                /* enter EL0 */
/*
 * void arch_crt_start_umode(args, text, ustack, kstack);
 *
 * Like arch_start_umode(), but first copies the 3-instruction
 * lwp_thread_return trampoline onto the user stack and makes it the initial
 * link register, so a plain `ret` from the user entry performs an exit
 * syscall.
 *   x0 = args, x1 = text, x2 = ustack, x3 = kstack.  Does not return.
 */
.global arch_crt_start_umode
.type arch_crt_start_umode, % function
arch_crt_start_umode:
    sub x4, x2, #0x10           /* reserve 16 bytes at the user stack top */
    adr x2, lwp_thread_return
    ldr x5, [x2]                /* copy the 12-byte trampoline, written as */
    str x5, [x4]                /* three overlapping 8-byte stores */
    ldr x5, [x2, #4]
    str x5, [x4, #4]
    ldr x5, [x2, #8]
    str x5, [x4, #8]
    mov x5, x4
    dc cvau, x5                 /* clean D-cache for the copied code ... */
    add x5, x5, #8
    dc cvau, x5
    dsb sy
    ic ialluis                  /* ... and invalidate I-cache so it can be fetched */
    dsb sy
    msr sp_el0, x4              /* user sp = trampoline address */
    mov sp, x3                  /* install the kernel stack */
    mov x4, #(SPSR_Mode(0) | SPSR_A64)
    msr daifset, #3
    dsb sy
    mrs x30, sp_el0             /* lr = trampoline: user `ret` exits the thread */
    msr spsr_el1, x4
    msr elr_el1, x1
    eret
  87. .global arch_get_user_sp
  88. arch_get_user_sp:
  89. mrs x0, sp_el0
  90. ret
/*
 * Landing point for a newly forked/cloned child: both aliases simply take
 * the common syscall-exit path, which rebuilds the user context from the
 * exception frame and erets to EL0.
 */
.global arch_fork_exit
.global arch_clone_exit
arch_fork_exit:
arch_clone_exit:
    b arch_syscall_exit
/*
 * void lwp_exec_user(void *args, void *kernel_stack, void *user_entry)
 *
 * Enter user mode at user_entry with a fresh kernel stack (exec path).
 *   x0 = args, x1 = kernel stack top, x2 = user entry.  Does not return.
 */
.global lwp_exec_user
lwp_exec_user:
    mov sp, x1                          /* install the kernel stack */
    mov x4, #(SPSR_Mode(0) | SPSR_A64)  /* target state: EL0t, AArch64 */
    ldr x3, =0x0000ffff80000000         /* NOTE(review): x3 is never used below;
                                           looks like a leftover — confirm */
    msr daifset, #3
    msr spsr_el1, x4
    msr elr_el1, x2
    eret
/*
 * void SVC_Handler(regs);
 * since this routine reset the SP, we take it as a start point
 *
 * Entered from an EL0 `svc`; x0 points at the exception frame built by the
 * vector code.  Dispatch on the syscall number in x8:
 *   0xe000 -> arch_signal_quit (return from a signal handler, lwp_sigreturn)
 *   0xf000 -> ret_from_user    (return from the debug trampoline)
 *   else   -> look up the service with lwp_get_sys_api(x8 & 0xff) and call it
 */
START_POINT(SVC_Handler)
    /* x0 is initial sp */
    mov sp, x0
    msr daifclr, #3 /* enable interrupt */
    bl rt_thread_self
    bl lwp_user_setting_save
    ldp x8, x9, [sp, #(CONTEXT_OFFSET_X8)]
    and x0, x8, #0xf000             /* classify by the high bits of x8 */
    cmp x0, #0xe000
    beq arch_signal_quit
    cmp x0, #0xf000
    beq ret_from_user
    uxtb x0, w8                     /* low byte of x8 = syscall index */
    bl lwp_get_sys_api
    cmp x0, xzr
    mov x30, x0                     /* stash api pointer (mov leaves flags intact) */
    beq arch_syscall_exit           /* no handler: skip straight to exit */
    ldp x0, x1, [sp, #(CONTEXT_OFFSET_X0)]  /* reload the user's arguments */
    ldp x2, x3, [sp, #(CONTEXT_OFFSET_X2)]
    ldp x4, x5, [sp, #(CONTEXT_OFFSET_X4)]
    ldp x6, x7, [sp, #(CONTEXT_OFFSET_X6)]
    blr x30                         /* invoke the system call */
    /* jump explicitly, make this code position independent */
    b arch_syscall_exit
START_POINT_END(SVC_Handler)
.global arch_syscall_exit
arch_syscall_exit:
    /**
     * @brief back up former x0 which is required to restart syscall, then setup
     * syscall return value in stack frame
     */
    mov x1, sp
    bl arch_syscall_prepare_signal
    msr daifset, #3
    /* pop the exception frame from the kernel stack, register by register */
    ldp x2, x3, [sp], #0x10 /* SPSR and ELR. */
    msr spsr_el1, x3
    msr elr_el1, x2
    ldp x29, x30, [sp], #0x10
    msr sp_el0, x29             /* this frame slot holds the saved user sp */
    ldp x28, x29, [sp], #0x10
    msr fpcr, x28
    msr fpsr, x29
    ldp x28, x29, [sp], #0x10
    ldp x26, x27, [sp], #0x10
    ldp x24, x25, [sp], #0x10
    ldp x22, x23, [sp], #0x10
    ldp x20, x21, [sp], #0x10
    ldp x18, x19, [sp], #0x10
    ldp x16, x17, [sp], #0x10
    ldp x14, x15, [sp], #0x10
    ldp x12, x13, [sp], #0x10
    ldp x10, x11, [sp], #0x10
    ldp x8, x9, [sp], #0x10
    ldp x6, x7, [sp], #0x10
    ldp x4, x5, [sp], #0x10
    ldp x2, x3, [sp], #0x10
    ldp x0, x1, [sp], #0x10
    RESTORE_FPU sp
    /* intentionally falls through into arch_ret_to_user for the final
     * signal/debug checks before the eret */
/* the sp is reset to the outer most level, irq and fiq are disabled */
/*
 * Final common path back to EL0.  Re-saves a full exception frame so the
 * C helpers (debug check, exit check, signal catch) can inspect/modify it,
 * then restores everything and erets.
 */
START_POINT(arch_ret_to_user)
    msr daifset, #3
    /* save exception frame */
    SAVE_FPU sp
    stp x0, x1, [sp, #-0x10]!
    stp x2, x3, [sp, #-0x10]!
    stp x4, x5, [sp, #-0x10]!
    stp x6, x7, [sp, #-0x10]!
    stp x8, x9, [sp, #-0x10]!
    stp x10, x11, [sp, #-0x10]!
    stp x12, x13, [sp, #-0x10]!
    stp x14, x15, [sp, #-0x10]!
    stp x16, x17, [sp, #-0x10]!
    stp x18, x19, [sp, #-0x10]!
    stp x20, x21, [sp, #-0x10]!
    stp x22, x23, [sp, #-0x10]!
    stp x24, x25, [sp, #-0x10]!
    stp x26, x27, [sp, #-0x10]!
    stp x28, x29, [sp, #-0x10]!
    mrs x0, fpcr
    mrs x1, fpsr
    stp x0, x1, [sp, #-0x10]!
    stp x29, x30, [sp, #-0x10]!
    /* pre-action */
    bl lwp_check_debug
    bl lwp_check_exit_request
    cbz w0, 1f
    /* exit on event */
    msr daifclr, #3
    mov x0, xzr
    b sys_exit
1:
    /* check if dbg ops exist */
    ldr x0, =rt_dbg_ops
    ldr x0, [x0]
    cbz x0, 3f
    /* debugger attached: set or clear SPSR.SS (single-step, bit 21)
     * according to whether the thread is being debugged */
    bl dbg_thread_in_debug
    mov x1, #(1 << 21)
    mrs x2, spsr_el1
    cbz w0, 2f
    orr x2, x2, x1              /* in debug: enable single-step */
    msr spsr_el1, x2
    b 3f
2:
    bic x2, x2, x1              /* not in debug: clear single-step */
    msr spsr_el1, x2
3:
    /**
     * push 2 dummy words to simulate a exception frame of interrupt
     * Note: in kernel state, the context switch doesn't save the context
     */
    mrs x0, spsr_el1
    mrs x1, elr_el1
    stp x1, x0, [sp, #-0x10]!
    mov x0, sp                  /* x0 = frame pointer for the C handler */
    msr daifclr, #3
    bl lwp_thread_signal_catch
    msr daifset, #3
    ldp x1, x0, [sp], #0x10     /* pop possibly-updated ELR/SPSR back */
    msr spsr_el1, x0
    msr elr_el1, x1
    /* check debug */
    /* restore exception frame */
    ldp x29, x30, [sp], #0x10
    ldp x0, x1, [sp], #0x10
    msr fpcr, x0
    msr fpsr, x1
    ldp x28, x29, [sp], #0x10
    ldp x26, x27, [sp], #0x10
    ldp x24, x25, [sp], #0x10
    ldp x22, x23, [sp], #0x10
    ldp x20, x21, [sp], #0x10
    ldp x18, x19, [sp], #0x10
    ldp x16, x17, [sp], #0x10
    ldp x14, x15, [sp], #0x10
    ldp x12, x13, [sp], #0x10
    ldp x10, x11, [sp], #0x10
    ldp x8, x9, [sp], #0x10
    ldp x6, x7, [sp], #0x10
    ldp x4, x5, [sp], #0x10
    ldp x2, x3, [sp], #0x10
    ldp x0, x1, [sp], #0x10
    RESTORE_FPU sp
    /* if a debugger is attached, notify it before returning to EL0;
     * x0/x1 are spilled around the rt_dbg_ops load to keep them live */
    stp x0, x1, [sp, #-0x10]!
    ldr x0, =rt_dbg_ops
    ldr x0, [x0]
    cmp x0, xzr
    ldp x0, x1, [sp], #0x10     /* restore x0/x1 (flags survive ldp) */
    beq 1f
    /* save */
    SAVE_FPU sp
    stp x0, x1, [sp, #-0x10]!
    stp x2, x3, [sp, #-0x10]!
    stp x4, x5, [sp, #-0x10]!
    stp x6, x7, [sp, #-0x10]!
    stp x8, x9, [sp, #-0x10]!
    stp x10, x11, [sp, #-0x10]!
    stp x12, x13, [sp, #-0x10]!
    stp x14, x15, [sp, #-0x10]!
    stp x16, x17, [sp, #-0x10]!
    stp x18, x19, [sp, #-0x10]!
    stp x20, x21, [sp, #-0x10]!
    stp x22, x23, [sp, #-0x10]!
    stp x24, x25, [sp, #-0x10]!
    stp x26, x27, [sp, #-0x10]!
    stp x28, x29, [sp, #-0x10]!
    mrs x0, fpcr
    mrs x1, fpsr
    stp x0, x1, [sp, #-0x10]!
    stp x29, x30, [sp, #-0x10]!
    mrs x0, elr_el1             /* report the EL0 resume address */
    bl dbg_attach_req
    /* restore */
    ldp x29, x30, [sp], #0x10
    ldp x0, x1, [sp], #0x10
    msr fpcr, x0
    msr fpsr, x1
    ldp x28, x29, [sp], #0x10
    ldp x26, x27, [sp], #0x10
    ldp x24, x25, [sp], #0x10
    ldp x22, x23, [sp], #0x10
    ldp x20, x21, [sp], #0x10
    ldp x18, x19, [sp], #0x10
    ldp x16, x17, [sp], #0x10
    ldp x14, x15, [sp], #0x10
    ldp x12, x13, [sp], #0x10
    ldp x10, x11, [sp], #0x10
    ldp x8, x9, [sp], #0x10
    ldp x6, x7, [sp], #0x10
    ldp x4, x5, [sp], #0x10
    ldp x2, x3, [sp], #0x10
    ldp x0, x1, [sp], #0x10
    RESTORE_FPU sp
1:
    eret
START_POINT_END(arch_ret_to_user)
/*
 * lwp_check_debug: if a debugger is attached and requests a suspend, inject
 * the 2-instruction lwp_debugreturn trampoline onto the user stack and eret
 * into it with single-step enabled; the resulting 0xf000 svc comes back in
 * via ret_from_user below.  Otherwise a plain return.
 */
.global lwp_check_debug
lwp_check_debug:
    ldr x0, =rt_dbg_ops
    ldr x0, [x0]
    cbnz x0, 1f
    ret                         /* no debugger support: nothing to do */
1:
    stp x29, x30, [sp, #-0x10]!
    bl dbg_check_suspend
    cbz w0, lwp_check_debug_quit
    /* suspend requested: copy lwp_debugreturn (8 bytes) to the user stack */
    mrs x2, sp_el0
    sub x2, x2, #0x10
    mov x3, x2
    msr sp_el0, x2
    ldr x0, =lwp_debugreturn
    ldr w1, [x0]
    str w1, [x2]
    ldr w1, [x0, #4]
    str w1, [x2, #4]
    dc cvau, x2                 /* make the injected code visible to I-fetch */
    add x2, x2, #4
    dc cvau, x2
    dsb sy
    isb sy
    ic ialluis
    isb sy
    mrs x0, elr_el1             /* stash the original return state on the */
    mrs x1, spsr_el1            /* kernel stack; ret_from_user pops it */
    stp x0, x1, [sp, #-0x10]!
    msr elr_el1, x3 /* lwp_debugreturn */
    mov x1, #(SPSR_Mode(0) | SPSR_A64)
    orr x1, x1, #(1 << 21)      /* SPSR.SS: single-step the trampoline */
    msr spsr_el1, x1
    eret
ret_from_user:
    /* sp_el0 += 16 for drop ins lwp_debugreturn */
    mrs x0, sp_el0
    add x0, x0, #0x10
    msr sp_el0, x0
    /* now is el1, sp is pos(empty) - sizeof(context) */
    mov x0, sp
    add x0, x0, #0x220          /* drop the SVC exception frame (0x220 = its size) */
    mov sp, x0
    ldp x0, x1, [sp], #0x10 /* x1 is origin spsr_el1 */
    msr elr_el1, x0 /* x0 is origin elr_el1 */
    msr spsr_el1, x1
lwp_check_debug_quit:
    ldp x29, x30, [sp], #0x10
    ret
  356. .global arch_syscall_restart
  357. arch_syscall_restart:
  358. msr daifset, 3
  359. mov sp, x1
  360. /* drop exception frame in user stack */
  361. msr sp_el0, x0
  362. /* restore previous exception frame */
  363. msr spsel, #0
  364. ldp x2, x3, [sp], #0x10
  365. msr elr_el1, x2
  366. msr spsr_el1, x3
  367. ldp x29, x30, [sp], #0x10
  368. ldp x28, x29, [sp], #0x10
  369. msr fpcr, x28
  370. msr fpsr, x29
  371. ldp x28, x29, [sp], #0x10
  372. ldp x26, x27, [sp], #0x10
  373. ldp x24, x25, [sp], #0x10
  374. ldp x22, x23, [sp], #0x10
  375. ldp x20, x21, [sp], #0x10
  376. ldp x18, x19, [sp], #0x10
  377. ldp x16, x17, [sp], #0x10
  378. ldp x14, x15, [sp], #0x10
  379. ldp x12, x13, [sp], #0x10
  380. ldp x10, x11, [sp], #0x10
  381. ldp x8, x9, [sp], #0x10
  382. ldp x6, x7, [sp], #0x10
  383. ldp x4, x5, [sp], #0x10
  384. ldp x2, x3, [sp], #0x10
  385. ldp x0, x1, [sp], #0x10
  386. RESTORE_FPU sp
  387. msr spsel, #1
  388. b vector_exception
/*
 * arch_signal_quit: target of the 0xe000 (lwp_sigreturn) syscall.
 * Restores the user context saved by arch_signal_ucontext_save() and
 * resumes the interrupted user code via arch_ret_to_user.
 */
arch_signal_quit:
    /* drop current exception frame */
    add sp, sp, #CONTEXT_SIZE
    mov x1, sp
    mrs x0, sp_el0
    bl arch_signal_ucontext_restore     /* returns address above the saved frame */
    add x0, x0, #-CONTEXT_SIZE
    msr sp_el0, x0                      /* sp_el0 -> the saved exception frame */
    /**
     * Note: Since we will reset spsr, but the reschedule will
     * corrupt the spsr, we disable irq for a short period here
     */
    msr daifset, #3
    /* restore previous exception frame */
    msr spsel, #0                       /* unwind through SP_EL0 */
    ldp x2, x3, [sp], #0x10
    msr elr_el1, x2
    msr spsr_el1, x3
    ldp x29, x30, [sp], #0x10
    ldp x28, x29, [sp], #0x10
    msr fpcr, x28
    msr fpsr, x29
    ldp x28, x29, [sp], #0x10
    ldp x26, x27, [sp], #0x10
    ldp x24, x25, [sp], #0x10
    ldp x22, x23, [sp], #0x10
    ldp x20, x21, [sp], #0x10
    ldp x18, x19, [sp], #0x10
    ldp x16, x17, [sp], #0x10
    ldp x14, x15, [sp], #0x10
    ldp x12, x13, [sp], #0x10
    ldp x10, x11, [sp], #0x10
    ldp x8, x9, [sp], #0x10
    ldp x6, x7, [sp], #0x10
    ldp x4, x5, [sp], #0x10
    ldp x2, x3, [sp], #0x10
    ldp x0, x1, [sp], #0x10
    RESTORE_FPU sp
    msr spsel, #1                       /* back to SP_EL1 */
    b arch_ret_to_user
/**
 * rt_noreturn
 * void arch_thread_signal_enter(
 *     int signo,                      -> x0
 *     siginfo_t *psiginfo,            -> x1
 *     void *exp_frame,                -> x2
 *     void *entry_uaddr,              -> x3
 *     lwp_sigset_t *save_sig_mask,    -> x4
 * )
 *
 * Deliver a signal: save the interrupted user context (ucontext) onto the
 * user stack, then eret into the user-space handler with
 *   x0 = signo, x1 = &siginfo, x2 = uframe, x30 = sigreturn trampoline.
 */
.global arch_thread_signal_enter
arch_thread_signal_enter:
    mov x19, x0                 /* keep signo across the C calls */
    mov x20, x2 /* exp_frame */
    mov x21, x3                 /* keep handler entry */
    /**
     * move exception frame to user stack
     */
    mrs x0, sp_el0
    mov x3, x4
    /* arch_signal_ucontext_save(user_sp, psiginfo, exp_frame, save_sig_mask); */
    bl arch_signal_ucontext_save
    mov x22, x0                 /* x22 = new user sp (above the saved context) */
    /* get and saved pointer to uframe */
    bl arch_signal_ucontext_get_frame
    mov x2, x0                  /* x2 = uframe, 3rd arg of the handler */
    mov x0, x22
    dc cvau, x0                 /* make code placed at the user sp fetchable
                                   (presumably the sigreturn trampoline —
                                   written by arch_signal_ucontext_save) */
    dsb sy
    ic ialluis
    dsb sy
    /**
     * Brief: Prepare the environment for signal handler
     */
    /**
     * reset the cpsr
     * and drop exp frame on kernel stack, reset kernel sp
     *
     * Note: Since we will reset spsr, but the reschedule will
     * corrupt the spsr, we disable irq for a short period here
     */
    msr daifset, #3
    ldr x1, [x20, #CONTEXT_OFFSET_SPSR_EL1]
    msr spsr_el1, x1
    add sp, x20, #CONTEXT_SIZE  /* pop the exception frame off the kernel stack */
    /** reset user sp */
    msr sp_el0, x0
    /** set the return address to the sigreturn */
    mov x30, x0
    cbnz x21, 1f
    /* no handler registered: jump straight to the sigreturn trampoline */
    mov x21, x30
1:
    /** set the entry address of signal handler */
    msr elr_el1, x21
    /* siginfo is above the return address */
    add x1, x30, UCTX_ABI_OFFSET_TO_SI
    /* uframe is saved in x2 */
    mov x0, x19
    /**
     * handler(signo, psi, ucontext);
     *
     */
    eret
  492. lwp_debugreturn:
  493. mov x8, 0xf000
  494. svc #0
  495. .global lwp_sigreturn
  496. lwp_sigreturn:
  497. mov x8, #0xe000
  498. svc #0
  499. lwp_thread_return:
  500. mov x0, xzr
  501. mov x8, #0x01
  502. svc #0
/*
 * Thread-pointer (TLS) accessors: tpidr_el0 holds the user thread id /
 * thread area base.  arch_set_thread_area intentionally falls through to
 * arch_set_tidr — the two names alias the same implementation.
 */
.globl arch_get_tidr
arch_get_tidr:
    mrs x0, tpidr_el0
    ret

.global arch_set_thread_area
arch_set_thread_area:
.globl arch_set_tidr
arch_set_tidr:
    msr tpidr_el0, x0
    ret