/* context_gcc.S */
/*
 * Copyright (c) 2006-2018, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2013-07-05     Bernard      the first version
 */
  10. #include "rtconfig.h"
  11. .section .text, "ax"
  12. #ifdef RT_USING_SMP
  13. #define rt_hw_interrupt_disable rt_hw_local_irq_disable
  14. #define rt_hw_interrupt_enable rt_hw_local_irq_enable
  15. #endif
  16. /*
  17. * rt_base_t rt_hw_interrupt_disable();
  18. */
  19. .globl rt_hw_interrupt_disable
  20. rt_hw_interrupt_disable:
  21. mrs r0, cpsr
  22. cpsid i
  23. bx lr
  24. /*
  25. * void rt_hw_interrupt_enable(rt_base_t level);
  26. */
  27. .globl rt_hw_interrupt_enable
  28. rt_hw_interrupt_enable:
  29. msr cpsr, r0
  30. bx lr
  31. /*
  32. * void rt_hw_context_switch_to(rt_uint32 to, struct rt_thread *to_thread);
  33. * r0 --> to (thread stack)
  34. * r1 --> to_thread
  35. */
  36. .globl rt_hw_context_switch_to
  37. rt_hw_context_switch_to:
  38. clrex
  39. ldr sp, [r0] @ get new task stack pointer
  40. #ifdef RT_USING_SMP
  41. mov r0, r1
  42. bl rt_cpus_lock_status_restore
  43. #ifdef RT_USING_SMART
  44. bl rt_thread_self
  45. bl lwp_user_setting_restore
  46. #endif
  47. #else
  48. #ifdef RT_USING_SMART
  49. bl rt_thread_self
  50. mov r4, r0
  51. bl lwp_aspace_switch
  52. mov r0, r4
  53. bl lwp_user_setting_restore
  54. #endif
  55. #endif /*RT_USING_SMP*/
  56. b rt_hw_context_switch_exit
  57. .section .bss.share.isr
  58. _guest_switch_lvl:
  59. .word 0
  60. .globl vmm_virq_update
  61. .section .text.isr, "ax"
  62. /*
  63. * void rt_hw_context_switch(rt_uint32 from, rt_uint32 to, struct rt_thread *to_thread);
  64. * r0 --> from (from_thread stack)
  65. * r1 --> to (to_thread stack)
  66. * r2 --> to_thread
  67. */
  68. .globl rt_hw_context_switch
  69. rt_hw_context_switch:
  70. clrex
  71. stmfd sp!, {lr} @ push pc (lr should be pushed in place of PC)
  72. stmfd sp!, {r0-r12, lr} @ push lr & register file
  73. mrs r4, cpsr
  74. tst lr, #0x01
  75. orrne r4, r4, #0x20 @ it's thumb code
  76. stmfd sp!, {r4} @ push cpsr
  77. #ifdef RT_USING_SMART
  78. stmfd sp, {r13, r14}^ @ push usr_sp usr_lr
  79. sub sp, #8
  80. #endif
  81. #ifdef RT_USING_FPU
  82. /* fpu context */
  83. vmrs r6, fpexc
  84. tst r6, #(1<<30)
  85. beq 1f
  86. vstmdb sp!, {d0-d15}
  87. vstmdb sp!, {d16-d31}
  88. vmrs r5, fpscr
  89. stmfd sp!, {r5}
  90. 1:
  91. stmfd sp!, {r6}
  92. #endif
  93. str sp, [r0] @ store sp in preempted tasks TCB
  94. ldr sp, [r1] @ get new task stack pointer
  95. #ifdef RT_USING_SMP
  96. mov r0, r2
  97. bl rt_cpus_lock_status_restore
  98. #ifdef RT_USING_SMART
  99. bl rt_thread_self
  100. bl lwp_user_setting_restore
  101. #endif
  102. #else
  103. #ifdef RT_USING_SMART
  104. bl rt_thread_self
  105. mov r4, r0
  106. bl lwp_aspace_switch
  107. mov r0, r4
  108. bl lwp_user_setting_restore
  109. #endif
  110. #endif /*RT_USING_SMP*/
  111. b rt_hw_context_switch_exit
  112. /*
  113. * void rt_hw_context_switch_interrupt(rt_uint32 from, rt_uint32 to);
  114. */
  115. .equ Mode_USR, 0x10
  116. .equ Mode_FIQ, 0x11
  117. .equ Mode_IRQ, 0x12
  118. .equ Mode_SVC, 0x13
  119. .equ Mode_ABT, 0x17
  120. .equ Mode_UND, 0x1B
  121. .equ Mode_SYS, 0x1F
  122. .equ I_Bit, 0x80 @ when I bit is set, IRQ is disabled
  123. .equ F_Bit, 0x40 @ when F bit is set, FIQ is disabled
  124. .globl rt_thread_switch_interrupt_flag
  125. .globl rt_interrupt_from_thread
  126. .globl rt_interrupt_to_thread
  127. .globl rt_hw_context_switch_interrupt
  128. rt_hw_context_switch_interrupt:
  129. clrex
  130. #ifdef RT_USING_SMP
  131. /* r0 :svc_mod context
  132. * r1 :addr of from_thread's sp
  133. * r2 :addr of to_thread's sp
  134. * r3 :to_thread's tcb
  135. */
  136. #ifdef RT_USING_SMART
  137. push {r0 - r3, lr}
  138. #ifdef RT_USING_SMART
  139. bl rt_thread_self
  140. bl lwp_user_setting_save
  141. #endif
  142. pop {r0 - r3, lr}
  143. #endif
  144. str r0, [r1]
  145. ldr sp, [r2]
  146. mov r0, r3
  147. #ifdef RT_USING_SMART
  148. mov r4, r0
  149. #endif
  150. bl rt_cpus_lock_status_restore
  151. #ifdef RT_USING_SMART
  152. mov r0, r4
  153. bl lwp_user_setting_restore
  154. #endif
  155. b rt_hw_context_switch_exit
  156. #else /*RT_USING_SMP*/
  157. /* r0 :addr of from_thread's sp
  158. * r1 :addr of to_thread's sp
  159. * r2 :from_thread's tcb
  160. * r3 :to_thread's tcb
  161. */
  162. #ifdef RT_USING_SMART
  163. /* now to_thread(r3) not used */
  164. ldr ip, =rt_thread_switch_interrupt_flag
  165. ldr r3, [ip]
  166. cmp r3, #1
  167. beq _reswitch
  168. ldr r3, =rt_interrupt_from_thread @ set rt_interrupt_from_thread
  169. str r0, [r3]
  170. mov r3, #1 @ set rt_thread_switch_interrupt_flag to 1
  171. str r3, [ip]
  172. #ifdef RT_USING_SMART
  173. push {r1, lr}
  174. mov r0, r2
  175. bl lwp_user_setting_save
  176. pop {r1, lr}
  177. #endif
  178. _reswitch:
  179. ldr ip, =rt_interrupt_to_thread @ set rt_interrupt_to_thread
  180. str r1, [ip]
  181. bx lr
  182. #else
  183. /* now from_thread(r2) to_thread(r3) not used */
  184. ldr ip, =rt_thread_switch_interrupt_flag
  185. ldr r3, [ip]
  186. cmp r3, #1
  187. beq _reswitch
  188. ldr r3, =rt_interrupt_from_thread @ set rt_interrupt_from_thread
  189. str r0, [r3]
  190. mov r3, #1 @ set rt_thread_switch_interrupt_flag to 1
  191. str r3, [ip]
  192. _reswitch:
  193. ldr ip, =rt_interrupt_to_thread @ set rt_interrupt_to_thread
  194. str r1, [ip]
  195. bx lr
  196. #endif
  197. #endif /*RT_USING_SMP*/
  198. .global rt_hw_context_switch_exit
  199. rt_hw_context_switch_exit:
  200. #ifdef RT_USING_SMP
  201. #ifdef RT_USING_SIGNALS
  202. mov r0, sp
  203. cps #Mode_IRQ
  204. bl rt_signal_check
  205. cps #Mode_SVC
  206. mov sp, r0
  207. #endif
  208. #endif
  209. #ifdef RT_USING_FPU
  210. /* fpu context */
  211. ldmfd sp!, {r6}
  212. vmsr fpexc, r6
  213. tst r6, #(1<<30)
  214. beq 1f
  215. ldmfd sp!, {r5}
  216. vmsr fpscr, r5
  217. vldmia sp!, {d16-d31}
  218. vldmia sp!, {d0-d15}
  219. 1:
  220. #endif
  221. #ifdef RT_USING_SMART
  222. ldmfd sp, {r13, r14}^ /* usr_sp, usr_lr */
  223. add sp, #8
  224. #endif
  225. ldmfd sp!, {r1}
  226. msr spsr_cxsf, r1 /* original mode */
  227. #ifdef RT_USING_SMART
  228. and r1, #0x1f
  229. cmp r1, #0x10
  230. bne 1f
  231. ldmfd sp!, {r0-r12,lr}
  232. ldmfd sp!, {lr}
  233. b arch_ret_to_user
  234. 1:
  235. #endif
  236. ldmfd sp!, {r0-r12,lr,pc}^ /* irq return */
  237. #ifdef RT_USING_FPU
  238. .global set_fpexc
  239. set_fpexc:
  240. vmsr fpexc, r0
  241. bx lr
  242. #endif