/* context_gcc.S */
  1. /*
  2. * Copyright (c) 2006-2018, RT-Thread Development Team
  3. *
  4. * SPDX-License-Identifier: Apache-2.0
  5. *
  6. * Change Logs:
  7. * Date Author Notes
  8. * 2018/10/28 Bernard The unify RISC-V porting implementation
  9. * 2018/12/27 Jesven Add SMP support
  10. */
  11. #include "cpuport.h"
  12. #ifdef RT_USING_SMP
  13. #define rt_hw_interrupt_disable rt_hw_local_irq_disable
  14. #define rt_hw_interrupt_enable rt_hw_local_irq_enable
  15. #endif
  16. /*
  17. * rt_base_t rt_hw_interrupt_disable(void);
  18. */
  19. .globl rt_hw_interrupt_disable
  20. rt_hw_interrupt_disable:
  21. csrrci a0, mstatus, 8
  22. ret
  23. /*
  24. * void rt_hw_interrupt_enable(rt_base_t level);
  25. */
  26. .globl rt_hw_interrupt_enable
  27. rt_hw_interrupt_enable:
  28. csrw mstatus, a0
  29. ret
  30. /*
  31. * #ifdef RT_USING_SMP
  32. * void rt_hw_context_switch_to(rt_ubase_t to, stuct rt_thread *to_thread);
  33. * #else
  34. * void rt_hw_context_switch_to(rt_ubase_t to);
  35. * #endif
  36. * a0 --> to
  37. * a1 --> to_thread
  38. */
  39. .globl rt_hw_context_switch_to
  40. rt_hw_context_switch_to:
  41. LOAD sp, (a0)
  42. #ifdef RT_USING_SMP
  43. mv a0, a1
  44. jal rt_cpus_lock_status_restore
  45. #endif
  46. LOAD a0, 2 * REGBYTES(sp)
  47. csrw mstatus, a0
  48. j rt_hw_context_switch_exit
  49. /*
  50. * #ifdef RT_USING_SMP
  51. * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to, struct rt_thread *to_thread);
  52. * #else
  53. * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to);
  54. * #endif
  55. *
  56. * a0 --> from
  57. * a1 --> to
  58. * a2 --> to_thread
  59. */
  60. .globl rt_hw_context_switch
  61. rt_hw_context_switch:
  62. /* saved from thread context
  63. * x1/ra -> sp(0)
  64. * x1/ra -> sp(1)
  65. * mstatus.mie -> sp(2)
  66. * x(i) -> sp(i-4)
  67. */
  68. addi sp, sp, -32 * REGBYTES
  69. STORE sp, (a0)
  70. STORE x1, 0 * REGBYTES(sp)
  71. STORE x1, 1 * REGBYTES(sp)
  72. csrr a0, mstatus
  73. andi a0, a0, 8
  74. beqz a0, save_mpie
  75. li a0, 0x80
  76. save_mpie:
  77. STORE a0, 2 * REGBYTES(sp)
  78. STORE x4, 4 * REGBYTES(sp)
  79. STORE x5, 5 * REGBYTES(sp)
  80. STORE x6, 6 * REGBYTES(sp)
  81. STORE x7, 7 * REGBYTES(sp)
  82. STORE x8, 8 * REGBYTES(sp)
  83. STORE x9, 9 * REGBYTES(sp)
  84. STORE x10, 10 * REGBYTES(sp)
  85. STORE x11, 11 * REGBYTES(sp)
  86. STORE x12, 12 * REGBYTES(sp)
  87. STORE x13, 13 * REGBYTES(sp)
  88. STORE x14, 14 * REGBYTES(sp)
  89. STORE x15, 15 * REGBYTES(sp)
  90. STORE x16, 16 * REGBYTES(sp)
  91. STORE x17, 17 * REGBYTES(sp)
  92. STORE x18, 18 * REGBYTES(sp)
  93. STORE x19, 19 * REGBYTES(sp)
  94. STORE x20, 20 * REGBYTES(sp)
  95. STORE x21, 21 * REGBYTES(sp)
  96. STORE x22, 22 * REGBYTES(sp)
  97. STORE x23, 23 * REGBYTES(sp)
  98. STORE x24, 24 * REGBYTES(sp)
  99. STORE x25, 25 * REGBYTES(sp)
  100. STORE x26, 26 * REGBYTES(sp)
  101. STORE x27, 27 * REGBYTES(sp)
  102. STORE x28, 28 * REGBYTES(sp)
  103. STORE x29, 29 * REGBYTES(sp)
  104. STORE x30, 30 * REGBYTES(sp)
  105. STORE x31, 31 * REGBYTES(sp)
  106. /* restore to thread context
  107. * sp(0) -> epc;
  108. * sp(1) -> ra;
  109. * sp(i) -> x(i+2)
  110. */
  111. LOAD sp, (a1)
  112. #ifdef RT_USING_SMP
  113. mv a0, a2
  114. jal rt_cpus_lock_status_restore
  115. #endif /*RT_USING_SMP*/
  116. j rt_hw_context_switch_exit
  117. #ifdef RT_USING_SMP
  118. /*
  119. * void rt_hw_context_switch_interrupt(void *context, rt_ubase_t from, rt_ubase_t to, struct rt_thread *to_thread);
  120. *
  121. * a0 --> context
  122. * a1 --> from
  123. * a2 --> to
  124. * a3 --> to_thread
  125. */
  126. .globl rt_hw_context_switch_interrupt
  127. rt_hw_context_switch_interrupt:
  128. STORE a0, 0(a1)
  129. LOAD sp, 0(a2)
  130. move a0, a3
  131. call rt_cpus_lock_status_restore
  132. j rt_hw_context_switch_exit
  133. #endif
  134. .global rt_hw_context_switch_exit
  135. rt_hw_context_switch_exit:
  136. #ifdef RT_USING_SMP
  137. #ifdef RT_USING_SIGNALS
  138. mv a0, sp
  139. csrr t0, mhartid
  140. /* switch interrupt stack of current cpu */
  141. la sp, __stack_start__
  142. addi t1, t0, 1
  143. li t2, __STACKSIZE__
  144. mul t1, t1, t2
  145. add sp, sp, t1 /* sp = (cpuid + 1) * __STACKSIZE__ + __stack_start__ */
  146. call rt_signal_check
  147. mv sp, a0
  148. #endif
  149. #endif
  150. /* resw ra to mepc */
  151. LOAD a0, 0 * REGBYTES(sp)
  152. csrw mepc, a0
  153. LOAD x1, 1 * REGBYTES(sp)
  154. li t0, 0x00001800
  155. csrw mstatus, t0
  156. LOAD a0, 2 * REGBYTES(sp)
  157. csrs mstatus, a0
  158. LOAD x4, 4 * REGBYTES(sp)
  159. LOAD x5, 5 * REGBYTES(sp)
  160. LOAD x6, 6 * REGBYTES(sp)
  161. LOAD x7, 7 * REGBYTES(sp)
  162. LOAD x8, 8 * REGBYTES(sp)
  163. LOAD x9, 9 * REGBYTES(sp)
  164. LOAD x10, 10 * REGBYTES(sp)
  165. LOAD x11, 11 * REGBYTES(sp)
  166. LOAD x12, 12 * REGBYTES(sp)
  167. LOAD x13, 13 * REGBYTES(sp)
  168. LOAD x14, 14 * REGBYTES(sp)
  169. LOAD x15, 15 * REGBYTES(sp)
  170. LOAD x16, 16 * REGBYTES(sp)
  171. LOAD x17, 17 * REGBYTES(sp)
  172. LOAD x18, 18 * REGBYTES(sp)
  173. LOAD x19, 19 * REGBYTES(sp)
  174. LOAD x20, 20 * REGBYTES(sp)
  175. LOAD x21, 21 * REGBYTES(sp)
  176. LOAD x22, 22 * REGBYTES(sp)
  177. LOAD x23, 23 * REGBYTES(sp)
  178. LOAD x24, 24 * REGBYTES(sp)
  179. LOAD x25, 25 * REGBYTES(sp)
  180. LOAD x26, 26 * REGBYTES(sp)
  181. LOAD x27, 27 * REGBYTES(sp)
  182. LOAD x28, 28 * REGBYTES(sp)
  183. LOAD x29, 29 * REGBYTES(sp)
  184. LOAD x30, 30 * REGBYTES(sp)
  185. LOAD x31, 31 * REGBYTES(sp)
  186. addi sp, sp, 32 * REGBYTES
  187. mret