lwp_gcc.S

/*
 * Copyright (c) 2006-2020, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2018-12-10     Jesven       first version
 * 2023-07-16     Shell        Move part of the codes to C from asm in signal handling
 */

#include "rtconfig.h"
#include "asm-generic.h"

#define Mode_USR    0x10
#define Mode_FIQ    0x11
#define Mode_IRQ    0x12
#define Mode_SVC    0x13
#define Mode_MON    0x16
#define Mode_ABT    0x17
#define Mode_UDF    0x1B
#define Mode_SYS    0x1F

#define A_Bit       0x100
#define I_Bit       0x80    @; when I bit is set, IRQ is disabled
#define F_Bit       0x40    @; when F bit is set, FIQ is disabled
#define T_Bit       0x20
.cpu cortex-a9
.syntax unified
.text

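/*
 * Switch a thread to user mode for the first time.
 *   r0 = args, r1 = user text entry, r2 = user stack top, r3 = kernel stack top.
 * SPSR is set to Mode_USR while IRQs are masked, the SVC-mode sp is switched
 * to the kernel stack, the user stack top is handed over in r3, and
 * `movs pc, r1` performs the exception return into the user entry point.
 */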
/*
 * void arch_start_umode(args, text, ustack, kstack);
 */
.global arch_start_umode
.type arch_start_umode, % function
arch_start_umode:
    mrs     r9, cpsr
    bic     r9, #0x1f
    orr     r9, #Mode_USR
    cpsid   i
    msr     spsr, r9
    mov     sp, r3

    mov     r3, r2      ;/* user stack top */
    /* set data address. */
    movs    pc, r1

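/*
 * Same entry-to-user sequence as arch_start_umode, but the three instructions
 * of the lwp_thread_return trampoline are first copied just below the user
 * stack top (SYS-mode sp) and the user lr is pointed at them, so a return from
 * the user entry function re-enters the kernel via svc. The DCCMVAU/ICIALLU
 * maintenance makes the copied code visible to the instruction cache.
 */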
/*
 * void arch_crt_start_umode(args, text, ustack, kstack);
 */
.global arch_crt_start_umode
.type arch_crt_start_umode, % function
arch_crt_start_umode:
    cps     #Mode_SYS
    sub     sp, r2, #16
    ldr     r2, =lwp_thread_return
    ldr     r4, [r2]
    str     r4, [sp]
    ldr     r4, [r2, #4]
    str     r4, [sp, #4]
    ldr     r4, [r2, #8]
    str     r4, [sp, #8]

    mov     r4, sp
    mcr     p15, 0, r4, c7, c11, 1  ;//dc cmvau
    add     r4, #4
    mcr     p15, 0, r4, c7, c11, 1  ;//dc cmvau
    add     r4, #4
    mcr     p15, 0, r4, c7, c11, 1  ;//dc cmvau
    dsb
    isb
    mcr     p15, 0, r4, c7, c5, 0   ;//iciallu
    dsb
    isb

    mov     lr, sp
    cps     #Mode_SVC

    mrs     r9, cpsr
    bic     r9, #0x1f
    orr     r9, #Mode_USR
    cpsid   i
    msr     spsr, r9
    mov     sp, r3

    /* set data address. */
    movs    pc, r1

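/*
 * Build the initial context frame of a new (forked) thread on the kernel stack
 * passed in r1: the exit address from r0 goes at the top, the child's return
 * value (r0) is forced to 0, its user lr is cleared, its user sp comes from
 * r2, IRQs start masked in the saved spsr, and the finished frame pointer is
 * stored back through the thread_sp pointer in r3.
 */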
/*
void arch_set_thread_context(void *exit_addr, void *new_thread_stack, void *user_stack, void **thread_sp);
*/
.global arch_set_thread_context
arch_set_thread_context:
    sub     r1, #(10 * 4 + 4 * 4)   /* {r4 - r12, lr} , {r4, r5, spsr, u_pc} */
    stmfd   r1!, {r0}
    mov     r12, #0
    stmfd   r1!, {r12}
    stmfd   r1!, {r1 - r12}
    stmfd   r1!, {r12}              /* new thread return value */
    mrs     r12, cpsr
    orr     r12, #(1 << 7)          /* disable irq */
    stmfd   r1!, {r12}              /* spsr */
    mov     r12, #0
    stmfd   r1!, {r12}              /* now user lr is 0 */
    stmfd   r1!, {r2}               /* user sp */
#ifdef RT_USING_FPU
    stmfd   r1!, {r12}              /* not use fpu */
#endif
    str     r1, [r3]
    mov     pc, lr

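/* Return the user-mode stack pointer by briefly switching to SYS mode, which
 * shares the banked sp with user mode. */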
.global arch_get_user_sp
arch_get_user_sp:
    cps     #Mode_SYS
    mov     r0, sp
    cps     #Mode_SVC
    mov     pc, lr

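/*
 * fork/vfork/clone syscall entries: preserve the callee-saved registers and lr
 * around the C implementations (_sys_fork, _sys_clone) and leave through the
 * common arch_syscall_exit path.
 */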
.global sys_fork
.global sys_vfork
.global arch_fork_exit
sys_fork:
sys_vfork:
    push    {r4 - r12, lr}
    bl      _sys_fork
arch_fork_exit:
    pop     {r4 - r12, lr}
    b       arch_syscall_exit

.global sys_clone
.global arch_clone_exit
sys_clone:
    push    {r4 - r12, lr}
    bl      _sys_clone
arch_clone_exit:
    pop     {r4 - r12, lr}
    b       arch_syscall_exit

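/*
 * Jump into a freshly loaded user image: install the kernel stack from r1,
 * point lr at the user entry from r2, mark the saved program status as
 * Mode_USR, and take the normal return-to-user path.
 */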
/*
void lwp_exec_user(void *args, void *kernel_stack, void *user_entry)
*/
.global lwp_exec_user
lwp_exec_user:
    cpsid   i
    mov     sp, r1
    mov     lr, r2
    mov     r2, #Mode_USR
    msr     spsr_cxsf, r2
    ldr     r3, =0x80000000
    b       arch_ret_to_user

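/*
 * SVC/syscall entry. The user return address, spsr and r4/r5 are saved first,
 * then IRQs are re-enabled. r7 selects the request: 0xe000 is the sigreturn
 * trampoline, 0xf000 is the debug-return trampoline; otherwise r7 & 0xff is
 * the syscall number that lwp_get_sys_api() resolves to a handler, which is
 * called with the original r0-r3/r12 restored.
 */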
/*
 * void SVC_Handler(void);
 */
.global vector_swi
.type vector_swi, % function
START_POINT(vector_swi)
    push    {lr}
    mrs     lr, spsr
    push    {r4, r5, lr}

    cpsie   i

    push    {r0 - r3, r12}
    bl      rt_thread_self
    bl      lwp_user_setting_save

    and     r0, r7, #0xf000
    cmp     r0, #0xe000
    beq     arch_signal_quit

    cmp     r0, #0xf000
    beq     ret_from_user

    and     r0, r7, #0xff
    bl      lwp_get_sys_api
    cmp     r0, #0              /* r0 = api */
    mov     lr, r0

    pop     {r0 - r3, r12}
    beq     arch_syscall_exit
    blx     lr
START_POINT_END(vector_swi)

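/*
 * Common exit path back to user space. arch_syscall_exit restores the spsr and
 * user return address saved by vector_swi and falls through to
 * arch_ret_to_user, which handles debugger suspension, pending exit requests
 * (routed to sys_exit) and pending signals before the final `movs pc, lr`
 * exception return; if a debugger is registered, dbg_attach_req is called with
 * the user return address.
 */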
.global arch_syscall_exit
arch_syscall_exit:
    cpsid   i
    pop     {r4, r5, lr}
    msr     spsr_cxsf, lr
    pop     {lr}

.global arch_ret_to_user
arch_ret_to_user:
    push    {r0-r12, lr}

    bl      lwp_check_debug
    bl      lwp_check_exit_request
    cmp     r0, #0
    beq     1f
    mov     r0, #0
    b       sys_exit

1:
    mov     r0, sp
    /* r0 -> exp frame */
    bl      lwp_thread_signal_catch

    pop     {r0-r12, lr}

    push    {r0}
    ldr     r0, =rt_dbg_ops
    ldr     r0, [r0]
    cmp     r0, #0
    pop     {r0}
    beq     2f
    push    {r0-r3, r12, lr}
    mov     r0, lr
    bl      dbg_attach_req
    pop     {r0-r3, r12, lr}
2:
    movs    pc, lr

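/*
 * Debugger suspend check (RT_USING_SMART). When rt_dbg_ops is set and
 * dbg_check_suspend() requests a stop, the two-word lwp_debugreturn trampoline
 * is copied onto the user stack, cleaned/invalidated in the caches, and
 * executed in user mode; it re-enters the kernel with r7 = 0xf000, which lands
 * in ret_from_user below.
 */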
#ifdef RT_USING_SMART
.global lwp_check_debug
lwp_check_debug:
    ldr     r0, =rt_dbg_ops
    ldr     r0, [r0]
    cmp     r0, #0
    bne     1f
    bx      lr
1:
    push    {lr}
    bl      dbg_check_suspend
    cmp     r0, #0
    beq     lwp_check_debug_quit

    cps     #Mode_SYS
    sub     sp, #8
    ldr     r0, =lwp_debugreturn
    ldr     r1, [r0]
    str     r1, [sp]
    ldr     r1, [r0, #4]
    str     r1, [sp, #4]

    mov     r1, sp
    mcr     p15, 0, r1, c7, c11, 1  ;//dc cmvau
    add     r1, #4
    mcr     p15, 0, r1, c7, c11, 1  ;//dc cmvau
    dsb
    isb
    mcr     p15, 0, r0, c7, c5, 0   ;//iciallu
    dsb
    isb

    mov     r0, sp                  /* lwp_debugreturn */
    cps     #Mode_SVC

    mrs     r1, spsr
    push    {r1}
    mov     r1, #Mode_USR
    msr     spsr_cxsf, r1
    movs    pc, r0

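/*
 * Return path of the lwp_debugreturn trampoline (r7 = 0xf000): drop the
 * trampoline words from the user (SYS) stack and the syscall frame pushed by
 * vector_swi, restore the spsr saved in lwp_check_debug, and return to
 * lwp_check_debug's caller.
 */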
ret_from_user:
    cps     #Mode_SYS
    add     sp, #8
    cps     #Mode_SVC
    /*
    pop {r0 - r3, r12}
    pop {r4 - r6, lr}
    */
    add     sp, #(4*9)
    pop     {r4}
    msr     spsr_cxsf, r4
lwp_check_debug_quit:
    pop     {pc}

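/*
 * Sigreturn path (r7 = 0xe000): discard the trampoline's syscall frame, pass
 * the user stack pointer to arch_signal_ucontext_restore(), then reload the
 * saved cpsr and user registers from the returned ucontext frame and go back
 * to user space through arch_ret_to_user.
 */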
arch_signal_quit:
    cpsid   i
    /* drop context of signal handler */
    pop     {r0 - r3, r12}
    pop     {r4, r5, lr}
    pop     {lr}

    /* restore context */
    cps     #Mode_SYS
    mov     r0, sp
    cps     #Mode_SVC
    bl      arch_signal_ucontext_restore

    /* lr <- *(&frame.ip) */
    ldr     lr, [r0]
    cps     #Mode_SYS
    mov     sp, r0

    /* drop ip in the frame and restore cpsr */
    pop     {r0}
    pop     {r0}
    msr     spsr_cxsf, r0

    pop     {r0-r12, lr}
    cps     #Mode_SVC

    b       arch_ret_to_user

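/*
 * Deliver a signal to user space. arch_signal_ucontext_save() builds the
 * signal frame on the user stack; the user sp and lr are reset to that frame,
 * and `movs pc, lr` enters the user handler as handler(signo, siginfo,
 * ucontext), falling back to the sigreturn trampoline at the frame base when
 * no handler address was supplied.
 */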
/**
 * rt_noreturn
 * void arch_thread_signal_enter(
 *      int signo,                      -> r0
 *      siginfo_t *psiginfo,            -> r1
 *      void *exp_frame,                -> r2
 *      void *entry_uaddr,              -> r3
 *      lwp_sigset_t *save_sig_mask,    -> ??
 * )
 */
.global arch_thread_signal_enter
arch_thread_signal_enter:
    mov     r4, r0
    mov     r5, r3

    cps     #Mode_SYS
    mov     r0, lr
    mov     r3, sp
    cps     #Mode_SVC

    bl      arch_signal_ucontext_save

    /* reset user sp */
    cps     #Mode_SYS
    mov     sp, r0
    mov     lr, r0
    cps     #Mode_SVC

    /* r1,r2 <- new_user_sp */
    mov     r1, r0
    mov     r2, r0

    /* r0 <- signo */
    mov     r0, r4

    mov     r1, r0
    mcr     p15, 0, r1, c7, c11, 1  ;//dc cmvau
    add     r1, #4
    mcr     p15, 0, r1, c7, c11, 1  ;//dc cmvau
    dsb
    isb
    mcr     p15, 0, r0, c7, c5, 0   ;//iciallu
    dsb
    isb

    /* r4 <- &sigreturn */
    mov     r4, r2

    /* lr <- user_handler() */
    mov     lr, r5
    cmp     lr, #0
    moveq   lr, r4

    /* r1 <- siginfo */
    mov     r1, r2
    add     r1, #8
    /* handler(signo, siginfo, ucontext) */
    movs    pc, lr

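/*
 * User-mode trampolines whose code is copied onto user stacks above. Each one
 * re-enters the kernel through svc with a marker in r7: 0xf000 resumes from a
 * debugger stop, 0xe000 returns from a signal handler, and lwp_thread_return
 * (r7 = 0x01, r0 = 0) runs when a thread entry function returns.
 */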
lwp_debugreturn:
    mov     r7, #0xf000
    svc     #0

.global lwp_sigreturn
lwp_sigreturn:
    mov     r7, #0xe000
    svc     #0

lwp_thread_return:
    mov     r0, #0
    mov     r7, #0x01
    svc     #0
#endif

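/*
 * VFP helpers: check_vfp returns the FPEXC.EN bit (always 0 without
 * RT_USING_FPU); get_vfp stores d0-d31 and FPSCR into the buffer passed in r0.
 */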
.global check_vfp
check_vfp:
#ifdef RT_USING_FPU
    vmrs    r0, fpexc
    ubfx    r0, r0, #30, #1
#else
    mov     r0, #0
#endif
    mov     pc, lr

.global get_vfp
get_vfp:
#ifdef RT_USING_FPU
    vstmia  r0!, {d0-d15}
    vstmia  r0!, {d16-d31}
    vmrs    r1, fpscr
    str     r1, [r0]
#endif
    mov     pc, lr

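/*
 * Thread ID register accessors: CP15 c13, c0, 3 is TPIDRURO, the user
 * read-only thread ID register used here to hold the TLS pointer.
 */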
.globl arch_get_tidr
arch_get_tidr:
    mrc     p15, 0, r0, c13, c0, 3
    bx      lr

.global arch_set_thread_area
arch_set_thread_area:
.globl arch_set_tidr
arch_set_tidr:
    mcr     p15, 0, r0, c13, c0, 3
    bx      lr

/* kuser support */
.macro kuser_pad, sym, size
    .if (. - \sym) & 3
    .rept 4 - (. - \sym) & 3
    .byte 0
    .endr
    .endif
    .rept (\size - (. - \sym)) / 4
    .word 0xe7fddef1
    .endr
.endm
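/*
 * Linux-compatible kuser helpers. Each routine lives at the fixed address
 * noted in its comment (cmpxchg64 at 0xffff0f60, memory barrier at 0xffff0fa0,
 * cmpxchg at 0xffff0fc0, get_tls at 0xffff0fe0) so user code written for the
 * Linux ABI can call it; kuser_pad fills each slot up to its fixed size with
 * an undefined-instruction word.
 */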
    .align  5
.globl __kuser_helper_start
__kuser_helper_start:

__kuser_cmpxchg64:                          @ 0xffff0f60
    stmfd   sp!, {r4, r5, r6, lr}
    ldmia   r0, {r4, r5}                    @ load old val
    ldmia   r1, {r6, lr}                    @ load new val
1:  ldmia   r2, {r0, r1}                    @ load current val
    eors    r3, r0, r4                      @ compare with oldval (1)
    eorseq  r3, r1, r5                      @ compare with oldval (2)
2:  stmiaeq r2, {r6, lr}                    @ store newval if eq
    rsbs    r0, r3, #0                      @ set return val and C flag
    ldmfd   sp!, {r4, r5, r6, pc}

    kuser_pad __kuser_cmpxchg64, 64

__kuser_memory_barrier:                     @ 0xffff0fa0
    dmb
    mov     pc, lr

    kuser_pad __kuser_memory_barrier, 32

__kuser_cmpxchg:                            @ 0xffff0fc0
1:  ldr     r3, [r2]                        @ load current val
    subs    r3, r3, r0                      @ compare with oldval
2:  streq   r1, [r2]                        @ store newval if eq
    rsbs    r0, r3, #0                      @ set return val and C flag
    mov     pc, lr

    kuser_pad __kuser_cmpxchg, 32

__kuser_get_tls:                            @ 0xffff0fe0
    mrc     p15, 0, r0, c13, c0, 3          @ 0xffff0fe8 hardware TLS code
    mov     pc, lr
    ldr     r0, [pc, #(16 - 8)]             @ read TLS, set in kuser_get_tls_init

    kuser_pad __kuser_get_tls, 16

    .rep    3
    .word   0                               @ 0xffff0ff0 software TLS value, then
    .endr                                   @ pad up to __kuser_helper_version

__kuser_helper_version:                     @ 0xffff0ffc
    .word   ((__kuser_helper_end - __kuser_helper_start) >> 5)

.globl __kuser_helper_end
__kuser_helper_end: