/* lwp_gcc.S */
  1. /*
  2. * Copyright (c) 2006-2020, RT-Thread Development Team
  3. *
  4. * SPDX-License-Identifier: Apache-2.0
  5. *
  6. * Change Logs:
  7. * Date Author Notes
  8. * 2018-12-10 Jesven first version
  9. * 2023-07-16 Shell Move part of the codes to C from asm in signal handling
  10. */
#include "rtconfig.h"
#include "asm-generic.h"

/* ARM processor-mode encodings for CPSR[4:0] (ARMv7-A). */
#define Mode_USR 0x10
#define Mode_FIQ 0x11
#define Mode_IRQ 0x12
#define Mode_SVC 0x13
#define Mode_MON 0x16
#define Mode_ABT 0x17
#define Mode_UDF 0x1B
#define Mode_SYS 0x1F

/* CPSR mask/flag bits */
#define A_Bit 0x100
#define I_Bit 0x80 @; when I bit is set, IRQ is disabled
#define F_Bit 0x40 @; when F bit is set, FIQ is disabled
#define T_Bit 0x20

.cpu cortex-a9
.syntax unified
.text
  28. /*
  29. * void arch_start_umode(args, text, ustack, kstack);
  30. */
  31. .global arch_start_umode
  32. .type arch_start_umode, % function
  33. arch_start_umode:
  34. mrs r9, cpsr
  35. bic r9, #0x1f
  36. orr r9, #Mode_USR
  37. cpsid i
  38. msr spsr, r9
  39. mov sp, r3
  40. /* set user stack top */
  41. cps #Mode_SYS
  42. mov sp, r2
  43. cps #Mode_SVC
  44. mov r3, r2
  45. /* set data address. */
  46. movs pc, r1
  47. /*
  48. * void arch_crt_start_umode(args, text, ustack, kstack);
  49. */
  50. .global arch_crt_start_umode
  51. .type arch_crt_start_umode, % function
  52. arch_crt_start_umode:
  53. cps #Mode_SYS
  54. sub sp, r2, #16
  55. ldr r2, =lwp_thread_return
  56. ldr r4, [r2]
  57. str r4, [sp]
  58. ldr r4, [r2, #4]
  59. str r4, [sp, #4]
  60. ldr r4, [r2, #8]
  61. str r4, [sp, #8]
  62. mov r4, sp
  63. mcr p15, 0, r4, c7, c11, 1 ;//dc cmvau
  64. add r4, #4
  65. mcr p15, 0, r4, c7, c11, 1 ;//dc cmvau
  66. add r4, #4
  67. mcr p15, 0, r4, c7, c11, 1 ;//dc cmvau
  68. dsb
  69. isb
  70. mcr p15, 0, r4, c7, c5, 0 ;//iciallu
  71. dsb
  72. isb
  73. mov lr, sp
  74. cps #Mode_SVC
  75. mrs r9, cpsr
  76. bic r9, #0x1f
  77. orr r9, #Mode_USR
  78. cpsid i
  79. msr spsr, r9
  80. mov sp, r3
  81. /* set data address. */
  82. movs pc, r1
  83. /*
  84. void arch_set_thread_context(void *exit_addr, void *new_thread_stack, void *user_stack, void **thread_sp);
  85. */
  86. .global arch_set_thread_context
  87. arch_set_thread_context:
  88. sub r1, #(10 * 4 + 4 * 4) /* {r4 - r12, lr} , {r4, r5, spsr, u_pc} */
  89. stmfd r1!, {r0}
  90. mov r12, #0
  91. stmfd r1!, {r12}
  92. stmfd r1!, {r1 - r12}
  93. stmfd r1!, {r12} /* new thread return value */
  94. mrs r12, cpsr
  95. orr r12, #(1 << 7) /* disable irq */
  96. stmfd r1!, {r12} /* spsr */
  97. mov r12, #0
  98. stmfd r1!, {r12} /* now user lr is 0 */
  99. stmfd r1!, {r2} /* user sp */
  100. #ifdef RT_USING_FPU
  101. stmfd r1!, {r12} /* not use fpu */
  102. #endif
  103. str r1, [r3]
  104. mov pc, lr
/* void *arch_get_user_sp(void);
 * Return (in r0) the current user stack pointer by briefly switching to
 * SYS mode, which shares the USR-banked sp. Caller runs in SVC mode. */
.global arch_get_user_sp
arch_get_user_sp:
    cps #Mode_SYS
    mov r0, sp                      /* r0 = USR/SYS banked sp */
    cps #Mode_SVC
    mov pc, lr
/* fork/vfork entry: preserve callee context, call the C implementation,
 * then leave through the common syscall-exit path. arch_fork_exit is the
 * point a newly forked child is made to resume at. */
.global sys_fork
.global sys_vfork
.global arch_fork_exit
sys_fork:
sys_vfork:
    push {r4 - r12, lr}             /* keep callee-saved regs + lr across C call */
    bl _sys_fork
arch_fork_exit:
    pop {r4 - r12, lr}
    b arch_syscall_exit
/* clone entry: same structure as sys_fork above; arch_clone_exit is the
 * resume point for the cloned thread. */
.global sys_clone
.global arch_clone_exit
sys_clone:
    push {r4 - r12, lr}             /* keep callee-saved regs + lr across C call */
    bl _sys_clone
arch_clone_exit:
    pop {r4 - r12, lr}
    b arch_syscall_exit
  129. /*
  130. void lwp_exec_user(void *args, void *kernel_stack, void *user_entry)
  131. */
  132. .global lwp_exec_user
  133. lwp_exec_user:
  134. cpsid i
  135. mov sp, r1
  136. mov lr, r2
  137. mov r2, #Mode_USR
  138. msr spsr_cxsf, r2
  139. ldr r3, =0x80000000
  140. b arch_ret_to_user
  141. /*
  142. * void SVC_Handler(void);
  143. */
  144. .global vector_swi
  145. .type vector_swi, % function
  146. START_POINT(vector_swi)
  147. push {lr}
  148. mrs lr, spsr
  149. push {r4, r5, lr}
  150. cpsie i
  151. push {r0 - r3, r12}
  152. bl rt_thread_self
  153. bl lwp_user_setting_save
  154. and r0, r7, #0xf000
  155. cmp r0, #0xe000
  156. beq arch_signal_quit
  157. cmp r0, #0xf000
  158. beq ret_from_user
  159. and r0, r7, #0xff
  160. bl lwp_get_sys_api
  161. cmp r0, #0 /* r0 = api */
  162. mov lr, r0
  163. pop {r0 - r3, r12}
  164. beq arch_syscall_exit
  165. blx lr
  166. START_POINT_END(vector_swi)
/* Common syscall epilogue: unstack what vector_swi pushed, then run the
 * return-to-user checks (debug, exit request, pending signals) before
 * the exception return. */
.global arch_syscall_exit
arch_syscall_exit:
    cpsid i
    pop {r4, r5, lr}                /* r4, r5, saved user CPSR */
    msr spsr_cxsf, lr
    pop {lr}                        /* user return address */
.global arch_ret_to_user
arch_ret_to_user:
    /* save all context for signal handler */
    push {r0-r12, lr}               /* the "exp frame" handed to C below */
    bl lwp_check_debug
    bl lwp_check_exit_request
    cmp r0, #0
    beq 1f
    /* exit was requested: terminate instead of returning to user */
    mov r0, #0
    b sys_exit
1:
    mov r0, sp
    /* r0 -> exp frame */
    bl lwp_thread_signal_catch      /* may redirect the return into a handler */
    /* if a debugger backend is registered, notify it */
    ldr r0, =rt_dbg_ops
    ldr r0, [r0]
    cmp r0, #0
    beq 2f
    mov r0, lr
    bl dbg_attach_req
2:
    pop {r0-r12, lr}
    movs pc, lr                     /* exception return to user mode */
#ifdef RT_USING_SMART
/* lwp_check_debug: if a debugger is attached and requests a suspend,
 * copy the 2-instruction lwp_debugreturn stub onto the user stack and
 * exception-return into it; the stub's svc (r7 = 0xf000) brings control
 * back in at ret_from_user, which undoes the stack changes and resumes
 * the normal path at lwp_check_debug_quit. Otherwise: plain return. */
.global lwp_check_debug
lwp_check_debug:
    ldr r0, =rt_dbg_ops
    ldr r0, [r0]
    cmp r0, #0
    bne 1f
    bx lr                           /* no debugger backend registered */
1:
    push {lr}
    bl dbg_check_suspend
    cmp r0, #0
    beq lwp_check_debug_quit        /* no suspend requested */
    cps #Mode_SYS
    /* copy the 8-byte lwp_debugreturn stub onto the user stack */
    sub sp, #8
    ldr r0, =lwp_debugreturn
    ldr r1, [r0]
    str r1, [sp]
    ldr r1, [r0, #4]
    str r1, [sp, #4]
    /* make the copied code visible to instruction fetch */
    mov r1, sp
    mcr p15, 0, r1, c7, c11, 1 ;//dc cmvau
    add r1, #4
    mcr p15, 0, r1, c7, c11, 1 ;//dc cmvau
    dsb
    isb
    mcr p15, 0, r0, c7, c5, 0 ;//iciallu
    dsb
    isb
    mov r0, sp /* lwp_debugreturn */
    cps #Mode_SVC
    mrs r1, spsr                    /* keep the original return CPSR */
    push {r1}
    mov r1, #Mode_USR
    msr spsr_cxsf, r1
    movs pc, r0                     /* run the stub in user mode */
ret_from_user:
    /* back from the debug stub (svc 0xf000): drop the stub bytes */
    cps #Mode_SYS
    add sp, #8
    cps #Mode_SVC
    /*
    pop {r0 - r3, r12}
    pop {r4 - r6, lr}
    */
    add sp, #(4*9)                  /* discard the frame vector_swi pushed */
    pop {r4}
    msr spsr_cxsf, r4               /* restore the CPSR saved before the stub */
lwp_check_debug_quit:
    pop {pc}
/* arch_signal_quit: entered from vector_swi when user code executes the
 * lwp_sigreturn stub (r7 = 0xe000). Drops the signal handler's syscall
 * frame, asks C to restore the saved ucontext from the user stack, then
 * reloads the pre-signal registers and leaves via arch_ret_to_user. */
arch_signal_quit:
    cpsid i
    /* drop context of signal handler */
    pop {r0 - r3, r12}
    pop {r4, r5, lr}
    pop {lr}
    /* restore context */
    cps #Mode_SYS
    mov r0, sp                      /* r0 = user sp -> saved ucontext */
    cps #Mode_SVC
    bl arch_signal_ucontext_restore /* returns frame pointer in r0 */
    /* lr <- *(&frame.ip) */
    ldr lr, [r0]
    cps #Mode_SYS
    mov sp, r0                      /* user sp back to the saved frame */
    /* drop ip in the frame and restore cpsr */
    pop {r0}
    pop {r0}
    msr spsr_cxsf, r0
    pop {r0-r12, lr}                /* pre-signal user registers */
    cps #Mode_SVC
    b arch_ret_to_user
  267. /**
  268. * rt_noreturn
  269. * void arch_thread_signal_enter(
  270. * int signo, -> r0
  271. * siginfo_t *psiginfo, -> r1
  272. * void *exp_frame, -> r2
  273. * void *entry_uaddr, -> r3
  274. * lwp_sigset_t *save_sig_mask, -> ??
  275. * )
  276. */
  277. .global arch_thread_signal_enter
  278. arch_thread_signal_enter:
  279. mov r4, r0
  280. mov r5, r3
  281. mov r6, r2
  282. cps #Mode_SYS
  283. mov r0, lr
  284. mov r3, sp
  285. cps #Mode_SVC
  286. bl arch_signal_ucontext_save
  287. /* drop volatile frame {r0-r12, lr} */
  288. add sp, r6, #14*4
  289. /* reset user sp */
  290. cps #Mode_SYS
  291. mov sp, r0
  292. mov lr, r0
  293. cps #Mode_SVC
  294. /* r1,r2 <- new_user_sp */
  295. mov r1, r0
  296. mov r2, r0
  297. mcr p15, 0, r0, c7, c11, 1 ;//dc cmvau
  298. add r0, #4
  299. mcr p15, 0, r0, c7, c11, 1 ;//dc cmvau
  300. dsb
  301. isb
  302. mcr p15, 0, r1, c7, c5, 0 ;//iciallu
  303. dsb
  304. isb
  305. /* r0 <- signo */
  306. mov r0, r4
  307. /* r4 <- &sigreturn */
  308. mov r4, r2
  309. /* lr <- user_handler() */
  310. mov lr, r5
  311. cmp lr, #0
  312. moveq lr, r4
  313. /* r1 <- siginfo */
  314. mov r1, r2
  315. add r1, #8
  316. /* handler(signo, siginfo, ucontext) */
  317. movs pc, lr
/* User-mode trampolines. These few instructions are *copied onto the
 * user stack* (see arch_crt_start_umode / lwp_check_debug) and executed
 * in USR mode; each traps back into vector_swi with a magic r7 value. */
lwp_debugreturn:
    mov r7, #0xf000                 /* -> ret_from_user in vector_swi */
    svc #0

.global lwp_sigreturn
lwp_sigreturn:
    mov r7, #0xe000                 /* -> arch_signal_quit in vector_swi */
    svc #0

lwp_thread_return:
    mov r0, #0                      /* exit code 0 */
    mov r7, #0x01                   /* syscall #1 — presumably exit; confirm */
    svc #0
#endif
/* int check_vfp(void);
 * r0 = FPEXC.EN (bit 30): 1 if the VFP/NEON unit is enabled,
 * always 0 when built without FPU support. */
.global check_vfp
check_vfp:
#ifdef RT_USING_FPU
    vmrs r0, fpexc
    ubfx r0, r0, #30, #1            /* extract the EN bit */
#else
    mov r0, #0
#endif
    mov pc, lr
/* void get_vfp(void *buf);
 * Store the full VFP state at r0: d0-d31 (32 x 8 bytes) followed by
 * FPSCR. No-op without FPU support. */
.global get_vfp
get_vfp:
#ifdef RT_USING_FPU
    vstmia r0!, {d0-d15}
    vstmia r0!, {d16-d31}
    vmrs r1, fpscr
    str r1, [r0]
#endif
    mov pc, lr
/* uint32_t arch_get_tidr(void);
 * r0 = TPIDRURO (CP15 c13,c0,3), the user-read-only thread ID / TLS reg. */
.globl arch_get_tidr
arch_get_tidr:
    mrc p15, 0, r0, c13, c0, 3
    bx lr
/* void arch_set_thread_area(void *p); / void arch_set_tidr(uint32_t v);
 * Both write r0 into TPIDRURO (CP15 c13,c0,3) — the TLS pointer that
 * user code reads via __kuser_get_tls / arch_get_tidr. */
.global arch_set_thread_area
arch_set_thread_area:
.globl arch_set_tidr
arch_set_tidr:
    mcr p15, 0, r0, c13, c0, 3
    bx lr
/* kuser support */
/* kuser_pad: pad from \sym up to \size bytes, first word-aligning with
 * zero bytes, then filling whole words with 0xe7fddef1 (a permanently
 * undefined instruction) so a stray jump into the padding traps. */
.macro kuser_pad, sym, size
.if (. - \sym) & 3
.rept 4 - (. - \sym) & 3
.byte 0
.endr
.endif
.rept (\size - (. - \sym)) / 4
.word 0xe7fddef1
.endr
.endm
/* Kernel user helpers, laid out to match the Linux kuser helper page
 * (fixed entry points at 0xffff0fxx; the page is presumably mapped at
 * 0xffff0000 by the MMU setup — not visible in this file). */
.align 5
.globl __kuser_helper_start
__kuser_helper_start:

/* int __kuser_cmpxchg64(const i64 *old, const i64 *new, volatile i64 *ptr)
 * Returns 0 with C set on success (r0 = -r3 via rsbs).
 * NOTE(review): plain ldm/stm, no ldrexd/strexd — not atomic against
 * other cores on SMP; safe only if used uniprocessor/IRQ-atomic. */
__kuser_cmpxchg64: @ 0xffff0f60
    stmfd sp!, {r4, r5, r6, lr}
    ldmia r0, {r4, r5} @ load old val
    ldmia r1, {r6, lr} @ load new val
1:  ldmia r2, {r0, r1} @ load current val
    eors r3, r0, r4 @ compare with oldval (1)
    eorseq r3, r1, r5 @ compare with oldval (2)
2:  stmiaeq r2, {r6, lr} @ store newval if eq
    rsbs r0, r3, #0 @ set return val and C flag
    ldmfd sp!, {r4, r5, r6, pc}

kuser_pad __kuser_cmpxchg64, 64

__kuser_memory_barrier: @ 0xffff0fa0
    dmb
    mov pc, lr

kuser_pad __kuser_memory_barrier, 32

/* int __kuser_cmpxchg(u32 old, u32 new, volatile u32 *ptr)
 * NOTE(review): same non-exclusive load/store caveat as cmpxchg64. */
__kuser_cmpxchg: @ 0xffff0fc0
1:  ldr r3, [r2] @ load current val
    subs r3, r3, r0 @ compare with oldval
2:  streq r1, [r2] @ store newval if eq
    rsbs r0, r3, #0 @ set return val and C flag
    mov pc, lr

kuser_pad __kuser_cmpxchg, 32

/* void *__kuser_get_tls(void) — reads TPIDRURO; the trailing ldr is the
 * (unreachable here) software-TLS fallback slot kept for Linux layout
 * compatibility. */
__kuser_get_tls: @ 0xffff0fe0
    mrc p15, 0, r0, c13, c0, 3 @ 0xffff0fe8 hardware TLS code
    mov pc, lr
    ldr r0, [pc, #(16 - 8)] @ read TLS, set in kuser_get_tls_init

kuser_pad __kuser_get_tls, 16

.rep 3
.word 0 @ 0xffff0ff0 software TLS value, then
.endr @ pad up to __kuser_helper_version

__kuser_helper_version: @ 0xffff0ffc
.word ((__kuser_helper_end - __kuser_helper_start) >> 5)
.globl __kuser_helper_end
__kuser_helper_end: