context_gcc.S

/*
 * File      : context_gcc.S
 * This file is part of RT-Thread RTOS
 * COPYRIGHT (C) 2006, RT-Thread Development Team
 *
 * The license and distribution terms for this file may be
 * found in the file LICENSE in this distribution or at
 * http://www.rt-thread.org/license/LICENSE
 *
 * Change Logs:
 * Date           Author       Notes
 * 2009-01-20     Bernard      first version
 * 2011-07-22     Bernard      added thumb mode porting
 * 2013-05-24     Grissiom     port to CCS
 * 2013-05-26     Grissiom     optimize for ARMv7
 * 2013-10-20     Grissiom     port to GCC
 */
#include <rtconfig.h>

.text
.arm

.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread
.globl rt_interrupt_enter
.globl rt_interrupt_leave
.globl rt_hw_trap_irq
/*
 * rt_base_t rt_hw_interrupt_disable()
 */
.globl rt_hw_interrupt_disable
rt_hw_interrupt_disable:
    MRS     r0, cpsr
    CPSID   IF
    BX      lr
/*
 * void rt_hw_interrupt_enable(rt_base_t level)
 */
.globl rt_hw_interrupt_enable
rt_hw_interrupt_enable:
    MSR     cpsr_c, r0
    BX      lr
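
/*
 * Minimal usage sketch (C side, RT-Thread convention, for illustration only):
 * the value returned by rt_hw_interrupt_disable() is the previous CPSR and
 * must be handed back to rt_hw_interrupt_enable() to leave the critical
 * section.
 *
 *     rt_base_t level;
 *
 *     level = rt_hw_interrupt_disable();  // save CPSR, mask IRQ/FIQ
 *     // ... critical section ...
 *     rt_hw_interrupt_enable(level);      // restore the saved CPSR
 */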
/*
 * void rt_hw_context_switch(rt_uint32 from, rt_uint32 to)
 * r0 --> from
 * r1 --> to
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
    STMDB   sp!, {lr}               @ push pc (lr is pushed in place of pc)
    STMDB   sp!, {r0-r12, lr}       @ push lr & register file
    MRS     r4, cpsr
    TST     lr, #0x01
    ORRNE   r4, r4, #0x20           @ it's thumb code
    STMDB   sp!, {r4}               @ push cpsr
#if defined (__VFP_FP__) && !defined(__SOFTFP__) && defined(RT_VFP_LAZY_STACKING)
    VMRS    r4, fpexc
    TST     r4, #0x40000000
    BEQ     __no_vfp_frame1
    VSTMDB  sp!, {d0-d15}
    VMRS    r5, fpscr
    @ TODO: add support for Common VFPv3.
    @ Save registers like FPINST, FPINST2
    STMDB   sp!, {r5}
__no_vfp_frame1:
    STMDB   sp!, {r4}
#endif
    STR     sp, [r0]                @ store sp in preempted task's TCB
    LDR     sp, [r1]                @ get new task's stack pointer
#if defined (__VFP_FP__) && !defined(__SOFTFP__) && defined(RT_VFP_LAZY_STACKING)
    LDMIA   sp!, {r0}               @ get fpexc
    VMSR    fpexc, r0               @ restore fpexc
    TST     r0, #0x40000000
    BEQ     __no_vfp_frame2
    LDMIA   sp!, {r1}               @ get fpscr
    VMSR    fpscr, r1
    VLDMIA  sp!, {d0-d15}
__no_vfp_frame2:
#endif
    LDMIA   sp!, {r4}               @ pop new task's cpsr to spsr
    MSR     spsr_cxsf, r4
    LDMIA   sp!, {r0-r12, lr, pc}^  @ pop new task's r0-r12, lr & pc; copy spsr to cpsr
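
/*
 * Thread stack frame as saved above, from the saved sp towards higher
 * addresses:
 *
 *     [fpexc]             (only when built with VFP lazy stacking)
 *     [fpscr, d0-d15]     (only if the EN bit of fpexc was set)
 *     cpsr
 *     r0-r12
 *     lr
 *     pc                  (return address of the switched-out thread)
 *
 * Both the restore half of this routine and rt_hw_context_switch_to below
 * rely on exactly this layout.
 */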
/*
 * void rt_hw_context_switch_to(rt_uint32 to)
 * r0 --> to
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
    LDR     sp, [r0]                @ get new task's stack pointer
#if defined (__VFP_FP__) && !defined(__SOFTFP__) && defined(RT_VFP_LAZY_STACKING)
    LDMIA   sp!, {r0}               @ get fpexc
    VMSR    fpexc, r0
    TST     r0, #0x40000000
    BEQ     __no_vfp_frame_to
    LDMIA   sp!, {r1}               @ get fpscr
    VMSR    fpscr, r1
    VLDMIA  sp!, {d0-d15}
__no_vfp_frame_to:
#endif
    LDMIA   sp!, {r4}               @ pop new task's cpsr to spsr
    MSR     spsr_cxsf, r4
    LDMIA   sp!, {r0-r12, lr, pc}^  @ pop new task's r0-r12, lr & pc; copy spsr to cpsr
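
/*
 * Note: rt_hw_context_switch_to() is typically used by the scheduler to start
 * the first thread, so the target stack is not one saved by the code above but
 * one pre-built by the C part of the port (usually rt_hw_stack_init()). That
 * initial frame must follow the same layout: [fpexc when VFP lazy stacking is
 * enabled,] cpsr, r0-r12, lr, pc.
 */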
/*
 * void rt_hw_context_switch_interrupt(rt_uint32 from, rt_uint32 to)
 */
.globl rt_hw_context_switch_interrupt
rt_hw_context_switch_interrupt:
    LDR     r2, =rt_thread_switch_interrupt_flag
    LDR     r3, [r2]
    CMP     r3, #1
    BEQ     _reswitch
    MOV     r3, #1                  @ set rt_thread_switch_interrupt_flag to 1
    STR     r3, [r2]
    LDR     r2, =rt_interrupt_from_thread   @ set rt_interrupt_from_thread
    STR     r0, [r2]
_reswitch:
    LDR     r2, =rt_interrupt_to_thread     @ set rt_interrupt_to_thread
    STR     r1, [r2]
    BX      lr
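
/*
 * Equivalent C sketch of the routine above (the names are the real global
 * variables referenced by this file). The switch itself is deferred to the
 * IRQ exit path; only the request is recorded here:
 *
 *     if (rt_thread_switch_interrupt_flag != 1)
 *     {
 *         rt_thread_switch_interrupt_flag = 1;
 *         rt_interrupt_from_thread = from;   // only the first request wins
 *     }
 *     rt_interrupt_to_thread = to;           // always track the latest target
 */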
.globl IRQ_Handler
IRQ_Handler:
    STMDB   sp!, {r0-r12, lr}
#if defined (__VFP_FP__) && !defined(__SOFTFP__) && defined(RT_VFP_LAZY_STACKING)
    VMRS    r0, fpexc
    TST     r0, #0x40000000
    BEQ     __no_vfp_frame_str_irq
    VSTMDB  sp!, {d0-d15}
    VMRS    r1, fpscr
    @ TODO: add support for Common VFPv3.
    @ Save registers like FPINST, FPINST2
    STMDB   sp!, {r1}
__no_vfp_frame_str_irq:
    STMDB   sp!, {r0}
#endif
    BL      rt_interrupt_enter
    BL      rt_hw_trap_irq
    BL      rt_interrupt_leave
    @ if rt_thread_switch_interrupt_flag is set, jump to
    @ rt_hw_context_switch_interrupt_do and don't return
    LDR     r0, =rt_thread_switch_interrupt_flag
    LDR     r1, [r0]
    CMP     r1, #1
    BEQ     rt_hw_context_switch_interrupt_do
#if defined (__VFP_FP__) && !defined(__SOFTFP__) && defined(RT_VFP_LAZY_STACKING)
    LDMIA   sp!, {r0}               @ get fpexc
    VMSR    fpexc, r0
    TST     r0, #0x40000000
    BEQ     __no_vfp_frame_ldr_irq
    LDMIA   sp!, {r1}               @ get fpscr
    VMSR    fpscr, r1
    VLDMIA  sp!, {d0-d15}
__no_vfp_frame_ldr_irq:
#endif
    LDMIA   sp!, {r0-r12, lr}
    SUBS    pc, lr, #4
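
/*
 * On IRQ entry the core banks lr_irq as the return address + 4 (ARM state),
 * so the SUBS pc, lr, #4 above resumes the interrupted code; the S-form with
 * pc as destination also copies SPSR_irq back into CPSR, restoring the
 * interrupted mode and its I/F/T bits.
 */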
/*
 * void rt_hw_context_switch_interrupt_do(rt_base_t flag)
 */
.globl rt_hw_context_switch_interrupt_do
rt_hw_context_switch_interrupt_do:
    MOV     r1, #0                  @ clear flag
    STR     r1, [r0]
#if defined (__VFP_FP__) && !defined(__SOFTFP__) && defined(RT_VFP_LAZY_STACKING)
    LDMIA   sp!, {r0}               @ get fpexc
    VMSR    fpexc, r0
    TST     r0, #0x40000000
    BEQ     __no_vfp_frame_do1
    LDMIA   sp!, {r1}               @ get fpscr
    VMSR    fpscr, r1
    VLDMIA  sp!, {d0-d15}
__no_vfp_frame_do1:
#endif
    LDMIA   sp!, {r0-r12, lr}       @ reload saved registers
    STMDB   sp, {r0-r3}             @ save r0-r3; they are restored in SVC mode,
                                    @ so there is no need to update sp
    SUB     r1, sp, #16             @ r1 points at the saved r0-r3 so they can be reloaded later
    SUB     r2, lr, #4              @ save old task's pc to r2
    MRS     r3, spsr                @ get cpsr of interrupted thread
    @ switch to SVC mode with interrupts disabled
    CPSID   IF, #0x13
    STMDB   sp!, {r2}               @ push old task's pc
    STMDB   sp!, {r4-r12, lr}       @ push old task's r4-r12 and lr
    LDMIA   r1!, {r4-r7}            @ fetch r0-r3 of the interrupted thread
    STMDB   sp!, {r4-r7}            @ push old task's r0-r3; only the data is
                                    @ transferred, there is no need to move it
                                    @ back into r0-r3 here
    STMDB   sp!, {r3}               @ push old task's cpsr
#if defined (__VFP_FP__) && !defined(__SOFTFP__) && defined(RT_VFP_LAZY_STACKING)
    VMRS    r0, fpexc
    TST     r0, #0x40000000
    BEQ     __no_vfp_frame_do2
    VSTMDB  sp!, {d0-d15}
    VMRS    r1, fpscr
    @ TODO: add support for Common VFPv3.
    @ Save registers like FPINST, FPINST2
    STMDB   sp!, {r1}
__no_vfp_frame_do2:
    STMDB   sp!, {r0}
#endif
    LDR     r4, =rt_interrupt_from_thread
    LDR     r5, [r4]
    STR     sp, [r5]                @ store sp in preempted task's TCB
    LDR     r6, =rt_interrupt_to_thread
    LDR     r6, [r6]
    LDR     sp, [r6]                @ get new task's stack pointer
#if defined (__VFP_FP__) && !defined(__SOFTFP__) && defined(RT_VFP_LAZY_STACKING)
    LDMIA   sp!, {r0}               @ get fpexc
    VMSR    fpexc, r0
    TST     r0, #0x40000000
    BEQ     __no_vfp_frame_do3
    LDMIA   sp!, {r1}               @ get fpscr
    VMSR    fpscr, r1
    VLDMIA  sp!, {d0-d15}
__no_vfp_frame_do3:
#endif
    LDMIA   sp!, {r4}               @ pop new task's cpsr to spsr
    MSR     spsr_cxsf, r4
    LDMIA   sp!, {r0-r12, lr, pc}^  @ pop new task's r0-r12, lr & pc; copy spsr to cpsr
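
/*
 * Summary of the path above: the IRQ-mode frame pushed by IRQ_Handler is
 * unwound, an ordinary thread frame ([VFP area,] cpsr, r0-r12, lr, pc) is
 * rebuilt on the preempted thread's own SVC-mode stack, that sp is saved
 * through the pointer held in rt_interrupt_from_thread (the preempted task's
 * TCB sp field), and the thread named by rt_interrupt_to_thread is resumed
 * exactly as in rt_hw_context_switch.
 */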