/*
 * Copyright (c) 2006-2020, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2018-12-10     Jesven       first version
 * 2023-07-16     Shell        Move part of the codes to C from asm in signal handling
 */

#include "rtconfig.h"
#include "asm-generic.h"

#define Mode_USR        0x10
#define Mode_FIQ        0x11
#define Mode_IRQ        0x12
#define Mode_SVC        0x13
#define Mode_MON        0x16
#define Mode_ABT        0x17
#define Mode_UDF        0x1B
#define Mode_SYS        0x1F

#define A_Bit           0x100
#define I_Bit           0x80    @; when I bit is set, IRQ is disabled
#define F_Bit           0x40    @; when F bit is set, FIQ is disabled
#define T_Bit           0x20

.cpu cortex-a9
.syntax unified
.text

/*
 * void arch_start_umode(args, text, ustack, kstack);
 */
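/*
 * Per AAPCS the arguments arrive as r0 = args, r1 = text (user entry),
 * r2 = ustack (user stack top), r3 = kstack (kernel stack top).  The
 * routine masks IRQ, programs SPSR for user mode, loads the SVC and SYS
 * (user) stack pointers, then `movs pc, r1` enters user space at `text`.
 */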
.global arch_start_umode
.type arch_start_umode, % function
arch_start_umode:
    mrs     r9, cpsr
    bic     r9, #0x1f
    orr     r9, #Mode_USR
    cpsid   i
    msr     spsr, r9
    mov     sp, r3

    /* set user stack top */
    cps     #Mode_SYS
    mov     sp, r2
    cps     #Mode_SVC
    mov     r3, r2

    /* set data address. */
    movs    pc, r1

/*
 * void arch_crt_start_umode(args, text, ustack, kstack);
 */
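/*
 * Same entry protocol as arch_start_umode, but for C runtime start-up:
 * the three words of lwp_thread_return are copied just below the user
 * stack top, the touched lines are cleaned to the point of unification
 * and the I-cache is invalidated so the copy is fetchable, and the user
 * LR is pointed at that trampoline (see lwp_thread_return below) so a
 * return from the user entry falls back into the kernel.
 */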
.global arch_crt_start_umode
.type arch_crt_start_umode, % function
arch_crt_start_umode:
    cps     #Mode_SYS
    sub     sp, r2, #16
    ldr     r2, =lwp_thread_return
    ldr     r4, [r2]
    str     r4, [sp]
    ldr     r4, [r2, #4]
    str     r4, [sp, #4]
    ldr     r4, [r2, #8]
    str     r4, [sp, #8]

    mov     r4, sp
    mcr     p15, 0, r4, c7, c11, 1  ;//dc cmvau
    add     r4, #4
    mcr     p15, 0, r4, c7, c11, 1  ;//dc cmvau
    add     r4, #4
    mcr     p15, 0, r4, c7, c11, 1  ;//dc cmvau
    dsb
    isb
    mcr     p15, 0, r4, c7, c5, 0   ;//iciallu
    dsb
    isb

    mov     lr, sp
    cps     #Mode_SVC

    mrs     r9, cpsr
    bic     r9, #0x1f
    orr     r9, #Mode_USR
    cpsid   i
    msr     spsr, r9
    mov     sp, r3

    /* set data address. */
    movs    pc, r1

/*
void arch_set_thread_context(void *exit_addr, void *new_thread_stack, void *user_stack, void **thread_sp);
*/
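/*
 * Builds an initial switch frame on the new thread's kernel stack: the
 * entry pc (exit_addr), a spsr copy with IRQ masked, a zero return value
 * for the new thread, user lr = 0, the user stack pointer and, with
 * RT_USING_FPU, a "no FPU context" flag.  The frame address is written
 * through thread_sp so the scheduler can switch the thread in later.
 */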
.global arch_set_thread_context
arch_set_thread_context:
    sub     r1, #(10 * 4 + 4 * 4)   /* {r4 - r12, lr} , {r4, r5, spsr, u_pc} */
    stmfd   r1!, {r0}
    mov     r12, #0
    stmfd   r1!, {r12}
    stmfd   r1!, {r1 - r12}
    stmfd   r1!, {r12}              /* new thread return value */
    mrs     r12, cpsr
    orr     r12, #(1 << 7)          /* disable irq */
    stmfd   r1!, {r12}              /* spsr */
    mov     r12, #0
    stmfd   r1!, {r12}              /* now user lr is 0 */
    stmfd   r1!, {r2}               /* user sp */
#ifdef RT_USING_FPU
    stmfd   r1!, {r12}              /* not use fpu */
#endif
    str     r1, [r3]
    mov     pc, lr

.global arch_get_user_sp
arch_get_user_sp:
    cps     #Mode_SYS
    mov     r0, sp
    cps     #Mode_SVC
    mov     pc, lr

.global sys_fork
.global sys_vfork
.global arch_fork_exit
sys_fork:
sys_vfork:
    push    {r4 - r12, lr}
    bl      _sys_fork
arch_fork_exit:
    pop     {r4 - r12, lr}
    b       arch_syscall_exit

.global sys_clone
.global arch_clone_exit
sys_clone:
    push    {r4 - r12, lr}
    bl      _sys_clone
arch_clone_exit:
    pop     {r4 - r12, lr}
    b       arch_syscall_exit

/*
void lwp_exec_user(void *args, void *kernel_stack, void *user_entry)
*/
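/*
 * Switches to the supplied kernel stack, programs SPSR for user mode and
 * places the user entry in lr, then drops into arch_ret_to_user, whose
 * final `movs pc, lr` performs the actual return to user space.
 */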
.global lwp_exec_user
lwp_exec_user:
    cpsid   i
    mov     sp, r1
    mov     lr, r2
    mov     r2, #Mode_USR
    msr     spsr_cxsf, r2
    ldr     r3, =0x80000000
    b       arch_ret_to_user

/*
 * void SVC_Handler(void);
 */
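/*
 * System call entry.  The syscall number is passed in r7.  Two magic
 * values are reserved for the trampolines defined further down:
 * 0xe000 (lwp_sigreturn) resumes the interrupted context after a user
 * signal handler and 0xf000 (lwp_debugreturn) unwinds the debugger
 * suspend frame; anything else is resolved with lwp_get_sys_api() and
 * dispatched via `blx lr`, returning through arch_syscall_exit.
 */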
.global vector_swi
.type vector_swi, % function
START_POINT(vector_swi)
    push    {lr}
    mrs     lr, spsr
    push    {r4, r5, lr}

    cpsie   i

    push    {r0 - r3, r12}
    bl      rt_thread_self
    bl      lwp_user_setting_save

    and     r0, r7, #0xf000
    cmp     r0, #0xe000
    beq     arch_signal_quit

    cmp     r0, #0xf000
    beq     ret_from_user

    and     r0, r7, #0xff
    bl      lwp_get_sys_api
    cmp     r0, #0                  /* r0 = api */
    mov     lr, r0

    pop     {r0 - r3, r12}
    beq     arch_syscall_exit
    blx     lr
START_POINT_END(vector_swi)

.global arch_syscall_exit
arch_syscall_exit:
    cpsid   i
    pop     {r4, r5, lr}
    msr     spsr_cxsf, lr
    pop     {lr}
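/*
 * Common return-to-user path.  On the way out it lets the debugger hook
 * run (lwp_check_debug), honours a pending exit request via sys_exit,
 * delivers queued signals with lwp_thread_signal_catch(exception frame),
 * optionally notifies an attached debugger (dbg_attach_req) and finally
 * returns to user space with `movs pc, lr`, which also restores CPSR
 * from SPSR.
 */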
.global arch_ret_to_user
arch_ret_to_user:
    push    {r0-r12, lr}
    bl      lwp_check_debug
    bl      lwp_check_exit_request
    cmp     r0, #0
    beq     1f
    mov     r0, #0
    b       sys_exit

1:
    mov     r0, sp
    /* r0 -> exp frame */
    bl      lwp_thread_signal_catch

    pop     {r0-r12, lr}

    push    {r0}
    ldr     r0, =rt_dbg_ops
    ldr     r0, [r0]
    cmp     r0, #0
    pop     {r0}
    beq     2f
    push    {r0-r3, r12, lr}
    mov     r0, lr
    bl      dbg_attach_req
    pop     {r0-r3, r12, lr}
2:
    movs    pc, lr

#ifdef RT_USING_SMART
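/*
 * If a debugger is registered (rt_dbg_ops != NULL) and dbg_check_suspend()
 * reports a pending suspend, the two-word lwp_debugreturn trampoline is
 * pushed onto the user stack, cleaned/invalidated so it can be fetched,
 * and executed in user mode; its `svc` with r7 = 0xf000 re-enters the
 * kernel at ret_from_user, which pops the temporary frame again.
 */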
.global lwp_check_debug
lwp_check_debug:
    ldr     r0, =rt_dbg_ops
    ldr     r0, [r0]
    cmp     r0, #0
    bne     1f
    bx      lr
1:
    push    {lr}
    bl      dbg_check_suspend
    cmp     r0, #0
    beq     lwp_check_debug_quit

    cps     #Mode_SYS
    sub     sp, #8
    ldr     r0, =lwp_debugreturn
    ldr     r1, [r0]
    str     r1, [sp]
    ldr     r1, [r0, #4]
    str     r1, [sp, #4]

    mov     r1, sp
    mcr     p15, 0, r1, c7, c11, 1  ;//dc cmvau
    add     r1, #4
    mcr     p15, 0, r1, c7, c11, 1  ;//dc cmvau
    dsb
    isb
    mcr     p15, 0, r0, c7, c5, 0   ;//iciallu
    dsb
    isb

    mov     r0, sp                  /* lwp_debugreturn */
    cps     #Mode_SVC

    mrs     r1, spsr
    push    {r1}
    mov     r1, #Mode_USR
    msr     spsr_cxsf, r1
    movs    pc, r0

ret_from_user:
    cps     #Mode_SYS
    add     sp, #8
    cps     #Mode_SVC
    /*
    pop {r0 - r3, r12}
    pop {r4 - r6, lr}
    */
    add     sp, #(4*9)
    pop     {r4}
    msr     spsr_cxsf, r4
lwp_check_debug_quit:
    pop     {pc}
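/*
 * Reached from lwp_sigreturn (svc with r7 = 0xe000) when a user signal
 * handler returns: the handler's kernel frame is dropped,
 * arch_signal_ucontext_restore() recovers the saved user context from
 * the user stack, CPSR and the general registers are reloaded from that
 * frame and the thread leaves through arch_ret_to_user.
 */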
arch_signal_quit:
    cpsid   i
    /* drop context of signal handler */
    pop     {r0 - r3, r12}
    pop     {r4, r5, lr}
    pop     {lr}

    /* restore context */
    cps     #Mode_SYS
    mov     r0, sp
    cps     #Mode_SVC
    bl      arch_signal_ucontext_restore

    /* lr <- *(&frame.ip) */
    ldr     lr, [r0]
    cps     #Mode_SYS
    mov     sp, r0

    /* drop ip in the frame and restore cpsr */
    pop     {r0}
    pop     {r0}
    msr     spsr_cxsf, r0
    pop     {r0-r12, lr}

    cps     #Mode_SVC
    b       arch_ret_to_user

/**
 * rt_noreturn
 * void arch_thread_signal_enter(
 *      int signo,                      -> r0
 *      siginfo_t *psiginfo,            -> r1
 *      void *exp_frame,                -> r2
 *      void *entry_uaddr,              -> r3
 *      lwp_sigset_t *save_sig_mask,    -> ??
 * )
 */
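/*
 * Roughly: the interrupted user lr/sp are handed to
 * arch_signal_ucontext_save(), the user stack pointer is rewound to the
 * returned frame, and the thread enters the user handler as
 * handler(signo, siginfo, ucontext); when entry_uaddr is 0 the thread
 * jumps straight to the sigreturn trampoline instead.
 */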
.global arch_thread_signal_enter
arch_thread_signal_enter:
    mov     r4, r0
    mov     r5, r3

    cps     #Mode_SYS
    mov     r0, lr
    mov     r3, sp
    cps     #Mode_SVC

    bl      arch_signal_ucontext_save

    /* reset user sp */
    cps     #Mode_SYS
    mov     sp, r0
    mov     lr, r0
    cps     #Mode_SVC

    /* r1,r2 <- new_user_sp */
    mov     r1, r0
    mov     r2, r0

    /* r0 <- signo */
    mov     r0, r4

    mov     r1, r0
    mcr     p15, 0, r1, c7, c11, 1  ;//dc cmvau
    add     r1, #4
    mcr     p15, 0, r1, c7, c11, 1  ;//dc cmvau
    dsb
    isb
    mcr     p15, 0, r0, c7, c5, 0   ;//iciallu
    dsb
    isb

    /* r4 <- &sigreturn */
    mov     r4, r2

    /* lr <- user_handler() */
    mov     lr, r5
    cmp     lr, #0
    moveq   lr, r4

    /* r1 <- siginfo */
    mov     r1, r2
    add     r1, #8

    /* handler(signo, siginfo, ucontext) */
    movs    pc, lr
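/*
 * User-mode trampolines.  lwp_debugreturn (copied onto the user stack by
 * lwp_check_debug) and lwp_sigreturn trap back into the kernel through
 * `svc #0` with the magic r7 values 0xf000 and 0xe000 that vector_swi
 * checks for.  lwp_thread_return (planted by arch_crt_start_umode)
 * issues syscall 1 with status 0 when a user thread returns from its
 * entry function.
 */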
lwp_debugreturn:
    mov     r7, #0xf000
    svc     #0

.global lwp_sigreturn
lwp_sigreturn:
    mov     r7, #0xe000
    svc     #0

lwp_thread_return:
    mov     r0, #0
    mov     r7, #0x01
    svc     #0
#endif
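/*
 * check_vfp returns the EN bit (bit 30) of FPEXC, i.e. whether the
 * VFP/NEON unit is currently enabled; without RT_USING_FPU it always
 * returns 0.  get_vfp stores d0-d31 plus FPSCR to the buffer in r0.
 */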
.global check_vfp
check_vfp:
#ifdef RT_USING_FPU
    vmrs    r0, fpexc
    ubfx    r0, r0, #30, #1
#else
    mov     r0, #0
#endif
    mov     pc, lr

.global get_vfp
get_vfp:
#ifdef RT_USING_FPU
    vstmia  r0!, {d0-d15}
    vstmia  r0!, {d16-d31}
    vmrs    r1, fpscr
    str     r1, [r0]
#endif
    mov     pc, lr
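/*
 * TPIDRURO (CP15 c13, c0, 3) is the user read-only thread ID register,
 * used here to publish the TLS pointer: arch_get_tidr reads it,
 * arch_set_tidr / arch_set_thread_area write it.
 */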
.globl arch_get_tidr
arch_get_tidr:
    mrc     p15, 0, r0, c13, c0, 3
    bx      lr

.global arch_set_thread_area
arch_set_thread_area:
.globl arch_set_tidr
arch_set_tidr:
    mcr     p15, 0, r0, c13, c0, 3
    bx      lr

/* kuser support */
.macro kuser_pad, sym, size
    .if     (. - \sym) & 3
    .rept   4 - (. - \sym) & 3
    .byte   0
    .endr
    .endif
    .rept   (\size - (. - \sym)) / 4
    .word   0xe7fddef1
    .endr
.endm
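/*
 * The helpers below mirror the Linux ARM kuser helpers and are expected
 * to be mapped at fixed addresses near the top of the vector page
 * (cmpxchg64 at 0xffff0f60, memory barrier at 0xffff0fa0, cmpxchg at
 * 0xffff0fc0, get_tls at 0xffff0fe0, version word at 0xffff0ffc).  The
 * kuser_pad macro fills each slot up to its fixed size with the
 * 0xe7fddef1 undefined-instruction pattern.
 */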
.align 5
.globl __kuser_helper_start
__kuser_helper_start:

__kuser_cmpxchg64:                      @ 0xffff0f60
    stmfd   sp!, {r4, r5, r6, lr}
    ldmia   r0, {r4, r5}                @ load old val
    ldmia   r1, {r6, lr}                @ load new val
1:  ldmia   r2, {r0, r1}                @ load current val
    eors    r3, r0, r4                  @ compare with oldval (1)
    eorseq  r3, r1, r5                  @ compare with oldval (2)
2:  stmiaeq r2, {r6, lr}                @ store newval if eq
    rsbs    r0, r3, #0                  @ set return val and C flag
    ldmfd   sp!, {r4, r5, r6, pc}

    kuser_pad __kuser_cmpxchg64, 64

__kuser_memory_barrier:                 @ 0xffff0fa0
    dmb
    mov     pc, lr

    kuser_pad __kuser_memory_barrier, 32

__kuser_cmpxchg:                        @ 0xffff0fc0
1:  ldr     r3, [r2]                    @ load current val
    subs    r3, r3, r0                  @ compare with oldval
2:  streq   r1, [r2]                    @ store newval if eq
    rsbs    r0, r3, #0                  @ set return val and C flag
    mov     pc, lr

    kuser_pad __kuser_cmpxchg, 32

__kuser_get_tls:                        @ 0xffff0fe0
    mrc     p15, 0, r0, c13, c0, 3      @ 0xffff0fe8 hardware TLS code
    mov     pc, lr
    ldr     r0, [pc, #(16 - 8)]         @ read TLS, set in kuser_get_tls_init

    kuser_pad __kuser_get_tls, 16

    .rep    3
    .word   0                           @ 0xffff0ff0 software TLS value, then
    .endr                               @ pad up to __kuser_helper_version

__kuser_helper_version:                 @ 0xffff0ffc
    .word   ((__kuser_helper_end - __kuser_helper_start) >> 5)

.globl __kuser_helper_end
__kuser_helper_end: