/*
 * Copyright (c) 2006-2018, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2013-07-05     Bernard      the first version
 */

#include "rtconfig.h"

.section .text, "ax"

#ifdef RT_USING_SMP
#define rt_hw_interrupt_disable  rt_hw_local_irq_disable
#define rt_hw_interrupt_enable   rt_hw_local_irq_enable
#endif

/*
 * rt_base_t rt_hw_interrupt_disable();
 */
.globl rt_hw_interrupt_disable
rt_hw_interrupt_disable:
    mrs     r0, cpsr
    cpsid   i
    bx      lr

/*
 * void rt_hw_interrupt_enable(rt_base_t level);
 */
.globl rt_hw_interrupt_enable
rt_hw_interrupt_enable:
    msr     cpsr, r0
    bx      lr
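
/*
 * Usage sketch (C side, not part of this file; a minimal illustration
 * assuming the usual <rthw.h> declarations): the value returned by
 * rt_hw_interrupt_disable() is the previous CPSR, so critical sections
 * nest safely by restoring exactly what was saved.
 *
 *     rt_base_t level;
 *
 *     level = rt_hw_interrupt_disable();   // save CPSR, mask IRQ
 *     // ... critical section ...
 *     rt_hw_interrupt_enable(level);       // restore the saved CPSR
 */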

/*
 * void rt_hw_context_switch_to(rt_uint32 to);
 * r0 --> to
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
    ldr     sp, [r0]                @ get new task stack pointer

#ifdef RT_USING_SMP
    mov     r0, r1
    bl      rt_cpus_lock_status_restore
#endif /*RT_USING_SMP*/

#ifdef RT_USING_LWP
    ldmfd   sp, {r13, r14}^         @ pop usr_sp usr_lr
    add     sp, #8
#endif

    ldmfd   sp!, {r4}               @ pop new task spsr
    msr     spsr_cxsf, r4
    ldmfd   sp!, {r0-r12, lr, pc}^  @ pop new task r0-r12, lr & pc
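
/*
 * Note: the frame popped above must match the one laid down when a thread
 * is created (rt_hw_stack_init() in this libcpu port is assumed to build
 * it). Seen from the thread's saved sp, low address to high:
 *
 *     [usr_sp, usr_lr]    only when RT_USING_LWP is defined
 *     spsr                initial CPSR for the thread
 *     r0-r12, lr, pc      pc holds the thread entry point
 *
 * rt_hw_context_switch_to() does not return; it is typically used once by
 * the scheduler to start the first thread.
 */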

.section .bss.share.isr
_guest_switch_lvl:
    .word 0

.globl vmm_virq_update

.section .text.isr, "ax"

/*
 * void rt_hw_context_switch(rt_uint32 from, rt_uint32 to);
 * r0 --> from
 * r1 --> to
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
    stmfd   sp!, {lr}               @ push pc (lr should be pushed in place of PC)
    stmfd   sp!, {r0-r12, lr}       @ push lr & register file
    mrs     r4, cpsr
    tst     lr, #0x01
    orrne   r4, r4, #0x20           @ it's thumb code
    stmfd   sp!, {r4}               @ push cpsr

#ifdef RT_USING_LWP
    stmfd   sp, {r13, r14}^         @ push usr_sp usr_lr
    sub     sp, #8
#endif

    str     sp, [r0]                @ store sp in preempted task's TCB
    ldr     sp, [r1]                @ get new task's stack pointer

#ifdef RT_USING_SMP
    mov     r0, r2
    bl      rt_cpus_lock_status_restore
#endif /*RT_USING_SMP*/

#ifdef RT_USING_LWP
    ldmfd   sp, {r13, r14}^         @ pop usr_sp usr_lr
    add     sp, #8
#endif

    ldmfd   sp!, {r4}               @ pop new task cpsr to spsr
    msr     spsr_cxsf, r4
    ldmfd   sp!, {r0-r12, lr, pc}^  @ pop new task r0-r12, lr & pc, copy spsr to cpsr
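
/*
 * Caller-side sketch (not part of this file; a hedged illustration assuming
 * the rt_thread 'sp' field used elsewhere in RT-Thread): because this routine
 * does 'str sp, [r0]' and 'ldr sp, [r1]', the scheduler passes the addresses
 * of the two threads' saved stack pointers, not the thread handles:
 *
 *     rt_hw_context_switch((rt_uint32)&from_thread->sp,
 *                          (rt_uint32)&to_thread->sp);
 */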

/*
 * void rt_hw_context_switch_interrupt(rt_uint32 from, rt_uint32 to);
 */
.equ Mode_USR,  0x10
.equ Mode_FIQ,  0x11
.equ Mode_IRQ,  0x12
.equ Mode_SVC,  0x13
.equ Mode_ABT,  0x17
.equ Mode_UND,  0x1B
.equ Mode_SYS,  0x1F

.equ I_Bit,     0x80                @ when I bit is set, IRQ is disabled
.equ F_Bit,     0x40                @ when F bit is set, FIQ is disabled

.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread
.globl rt_hw_context_switch_interrupt
rt_hw_context_switch_interrupt:
#ifdef RT_USING_SMP
    /* r0 : IRQ-mode context, pointing to the {r0-r3} block saved on the IRQ stack
     * r1 : address of from_thread's sp
     * r2 : address of to_thread's sp
     * r3 : to_thread's TCB
     */
    push    {r1 - r3}               @ save args; r1-r3 are reused as scratch below
    mov     r1, r0
    add     r0, r0, #4*4
    ldmfd   r0!, {r4-r12, lr}       @ reload the registers saved on the IRQ stack
    mrs     r3, spsr                @ get cpsr of the interrupted thread
    sub     r2, lr, #4              @ save old task's pc to r2

    msr     cpsr_c, #I_Bit|F_Bit|Mode_SVC
    stmfd   sp!, {r2}               @ push old task's pc
    stmfd   sp!, {r4-r12, lr}       @ push old task's r4-r12, lr
    ldmfd   r1, {r4-r7}             @ load old task's r0-r3 from the IRQ stack
    stmfd   sp!, {r4-r7}            @ push old task's r0-r3
    stmfd   sp!, {r3}               @ push old task's cpsr

#ifdef RT_USING_LWP
    stmfd   sp, {r13, r14}^         @ push usr_sp usr_lr
    sub     sp, #8
#endif

    msr     cpsr_c, #I_Bit|F_Bit|Mode_IRQ
    pop     {r1 - r3}               @ restore r1-r3 saved at entry
    mov     sp, r0                  @ drop the saved context from the IRQ stack
    msr     cpsr_c, #I_Bit|F_Bit|Mode_SVC
    str     sp, [r1]                @ store old task's sp (r1 = &from_thread->sp)

    ldr     sp, [r2]                @ switch to the new task's stack
    mov     r0, r3
    bl      rt_cpus_lock_status_restore

#ifdef RT_USING_LWP
    ldmfd   sp, {r13, r14}^         @ pop usr_sp usr_lr
    add     sp, #8
#endif

    ldmfd   sp!, {r4}               @ pop new task's cpsr to spsr
    msr     spsr_cxsf, r4
    ldmfd   sp!, {r0-r12, lr, pc}^  @ pop new task's r0-r12, lr & pc, copy spsr to cpsr

#else /*RT_USING_SMP*/
    ldr     r2, =rt_thread_switch_interrupt_flag
    ldr     r3, [r2]
    cmp     r3, #1
    beq     _reswitch
    ldr     ip, =rt_interrupt_from_thread   @ set rt_interrupt_from_thread
    mov     r3, #1                          @ set rt_thread_switch_interrupt_flag to 1
    str     r0, [ip]
    str     r3, [r2]
_reswitch:
    ldr     r2, =rt_interrupt_to_thread     @ set rt_interrupt_to_thread
    str     r1, [r2]
    bx      lr
#endif /*RT_USING_SMP*/
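
/*
 * Note (non-SMP path): rt_hw_context_switch_interrupt() does not switch
 * context by itself. It only records the from/to stack-pointer addresses
 * and sets rt_thread_switch_interrupt_flag; the port's IRQ exception-return
 * code (typically the vector_irq handler in start_gcc.S for this port; an
 * assumption, not shown in this file) checks the flag once the ISR has
 * finished and performs the real save/restore there. A hedged C-like view:
 *
 *     if (rt_thread_switch_interrupt_flag == 1)
 *     {
 *         rt_thread_switch_interrupt_flag = 0;
 *         // save current context to *rt_interrupt_from_thread,
 *         // then restore context from *rt_interrupt_to_thread
 *     }
 */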