/*
 * Copyright (c) 2006-2020, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2021-05-18     Jesven       the first version
 * 2023-06-24     Shell        Support backtrace for user thread
 * 2024-01-06     Shell        Fix barrier on irq_disable/enable
 * 2024-03-28     Shell        Move vector handling codes from context_gcc.S
 */

#ifndef __ASSEMBLY__
#define __ASSEMBLY__
#endif

#include "context_gcc.h"
#include "../include/vector_gcc.h"

#include <rtconfig.h>
#include <asm-generic.h>
#include <asm-fpu.h>
#include <armv8.h>

/**
 * Context switch status
 */
.section .bss
rt_interrupt_from_thread:
    .quad 0

rt_interrupt_to_thread:
    .quad 0

rt_thread_switch_interrupt_flag:
    .quad 0
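
/*
 * These slots are consumed by rt_hw_context_switch_interrupt{,_do} below:
 * rt_interrupt_from_thread / rt_interrupt_to_thread hold the address of the
 * outgoing / incoming thread's saved-sp slot, and rt_thread_switch_interrupt_flag
 * is set to 1 once a switch has been requested from interrupt context, so that
 * later requests only update the destination until the switch is carried out.
 */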

.section .text

/*
 * void rt_hw_context_switch_to(rt_ubase_t to);
 *
 * x0 --> to sp (address of the incoming thread's saved sp)
 */
.globl rt_hw_context_switch_to
rt_hw_context_switch_to:
    clrex
    ldr     x0, [x0]                    /* fetch the incoming thread's saved sp */
    RESTORE_CONTEXT_SWITCH x0
    NEVER_RETURN
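
/*
 * Caller-side sketch (illustrative, not part of this file): the scheduler
 * start-up path is assumed to pass the address of the first thread's saved
 * sp, e.g.
 *
 *     rt_hw_context_switch_to((rt_ubase_t)&to_thread->sp);
 *
 * Once the new context is restored, control never returns to the caller.
 */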

/*
 * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to);
 *
 * x0 --> from sp (address of the outgoing thread's saved sp)
 * x1 --> to sp   (address of the incoming thread's saved sp)
 * x2 --> to thread
 */
.globl rt_hw_context_switch
rt_hw_context_switch:
    clrex
    SAVE_CONTEXT_SWITCH x19, x20
    mov     x2, sp
    str     x2, [x0]                    /* store sp in preempted task's TCB */
    ldr     x0, [x1]                    /* get new task's stack pointer */
    RESTORE_CONTEXT_SWITCH x0
    NEVER_RETURN
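
/*
 * Caller-side sketch (illustrative assumption): the thread-mode scheduler is
 * expected to pass the addresses of the saved-sp slots of both threads, e.g.
 *
 *     rt_hw_context_switch((rt_ubase_t)&from_thread->sp,
 *                          (rt_ubase_t)&to_thread->sp);
 */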

.globl rt_thread_switch_interrupt_flag
.globl rt_interrupt_from_thread
.globl rt_interrupt_to_thread
.globl rt_hw_context_switch_interrupt

/*
 * void rt_hw_context_switch_interrupt(rt_ubase_t from, rt_ubase_t to,
 *                                     rt_thread_t from_thread, rt_thread_t to_thread);
 */
rt_hw_context_switch_interrupt:
    ldr     x6, =rt_thread_switch_interrupt_flag
    ldr     x7, [x6]
    cmp     x7, #1
    b.eq    _reswitch

    /* set rt_interrupt_from_thread */
    ldr     x4, =rt_interrupt_from_thread
    str     x0, [x4]

    /* set rt_thread_switch_interrupt_flag to 1 */
    mov     x7, #1
    str     x7, [x6]

    /* preserve x1 ('to') and the link register across the C call below */
    stp     x1, x30, [sp, #-0x10]!
#ifdef RT_USING_SMART
    mov     x0, x2                      /* x2: 'from_thread' argument */
    bl      lwp_user_setting_save
#endif
    ldp     x1, x30, [sp], #0x10
_reswitch:
    ldr     x6, =rt_interrupt_to_thread /* set rt_interrupt_to_thread */
    str     x1, [x6]
    ret
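
/*
 * Equivalent C-style pseudocode of the handler above (a reading aid only;
 * the names mirror the symbols in this file):
 *
 *     if (rt_thread_switch_interrupt_flag != 1)
 *     {
 *         rt_interrupt_from_thread = from;     // remember the outgoing sp slot
 *         rt_thread_switch_interrupt_flag = 1;
 *     #ifdef RT_USING_SMART
 *         lwp_user_setting_save(from_thread);  // 'from_thread' arrives in x2
 *     #endif
 *     }
 *     rt_interrupt_to_thread = to;             // destination is always updated
 */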

.globl rt_hw_context_switch_interrupt_do

/**
 * rt_hw_context_switch_interrupt_do(void)
 */
rt_hw_context_switch_interrupt_do:
    clrex
    SAVE_CONTEXT_SWITCH_FAST

    ldr     x3, =rt_interrupt_from_thread
    ldr     x4, [x3]
    mov     x0, sp
    str     x0, [x4]                    /* store sp in preempted task's TCB */

    ldr     x3, =rt_interrupt_to_thread
    ldr     x4, [x3]
    ldr     x0, [x4]                    /* get new task's stack pointer */

    RESTORE_CONTEXT_SWITCH x0
    NEVER_RETURN
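
/*
 * Note (assumption based on the naming and the flag handling above): this
 * routine is expected to run on the interrupt exit path once
 * rt_thread_switch_interrupt_flag is set, performing the switch that
 * rt_hw_context_switch_interrupt only recorded.
 */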