/* context_gcc.S — AArch64 context-switch routines */
/*
 * Copyright (c) 2006-2024, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2021-05-18     Jesven       the first version
 * 2023-06-24     Shell        Support backtrace for user thread
 * 2024-01-06     Shell        Fix barrier on irq_disable/enable
 * 2024-03-28     Shell        Move vector handling codes from context_gcc.S
 */
#ifndef __ASSEMBLY__
#define __ASSEMBLY__
#endif

#include "context_gcc.h"
#include "../include/vector_gcc.h"

#include <rtconfig.h>
#include <asm-generic.h>
#include <asm-fpu.h>
#include <armv8.h>
.section .text

.globl rt_hw_context_switch_to

/*
 * void rt_hw_context_switch_to(rt_ubase_t to, struct rt_thread *to_thread);
 * (NOTE(review): original comment read "rt_uint3 to" — garbled; the value is
 *  used as the address of the to-thread's saved stack pointer. Confirm the
 *  exact C typedef against the rt_hw_* prototype in the port headers.)
 *
 * First switch into a thread: there is no outgoing context to save.
 * X0 --> to        (address of the to-thread's saved SP slot)
 * X1 --> to_thread (struct rt_thread * of the target thread)
 *
 * Does not return to the caller: control falls through to
 * _context_switch_exit, which restores the target thread's full context.
 */
rt_hw_context_switch_to:
    /* load the thread's saved SP (x0 holds the address of that slot)
     * and adopt it as our stack */
    ldr     x0, [x0]
    mov     sp, x0

    /* keep to_thread in a callee-saved register (x19) so it survives
     * the function calls below */
    mov     x19, x1

    /* update per-CPU lock ownership bookkeeping for the incoming thread */
    mov     x0, x19
    bl      rt_cpus_lock_status_restore
#ifdef RT_USING_SMART
    /* restore the incoming thread's user-mode (lwp) settings */
    mov     x0, x19
    bl      lwp_user_setting_restore
#endif

    /* restore the target thread's saved context and resume it */
    b       _context_switch_exit
.globl rt_hw_context_switch

/*
 * void rt_hw_context_switch(rt_ubase_t from, rt_ubase_t to,
 *                           struct rt_thread *to_thread);
 * (NOTE(review): original comment read "rt_uint32" — garbled in the scrape;
 *  both values are used as addresses of saved-SP slots. Confirm the typedef
 *  against the port headers.)
 *
 * Thread-to-thread switch outside of interrupt context.
 * X0 --> from      (address of the from-thread's saved SP slot)
 * X1 --> to        (address of the to-thread's saved SP slot)
 * X2 --> to_thread (struct rt_thread * of the target thread)
 */
rt_hw_context_switch:
    /* push the outgoing thread's context onto its stack; x19/x20 are
     * handed to the macro as working registers (macro defined in
     * context_gcc.h — confirm clobbers there) */
    SAVE_CONTEXT_SWITCH x19, x20

    mov     x3, sp
    str     x3, [x0]            /* store sp in preempted task's TCB slot */
    ldr     x0, [x1]            /* get new task's saved stack pointer */
    mov     sp, x0

    /* backup thread self in callee-saved x19 so it survives the calls below */
    mov     x19, x2

    /* update per-CPU lock ownership bookkeeping for the incoming thread */
    mov     x0, x19
    bl      rt_cpus_lock_status_restore
#ifdef RT_USING_SMART
    /* restore the incoming thread's user-mode (lwp) settings */
    mov     x0, x19
    bl      lwp_user_setting_restore
#endif

    /* restore the target thread's saved context and resume it */
    b       _context_switch_exit
.globl rt_hw_irq_exit
.globl rt_hw_context_switch_interrupt

/* Register aliases: x19-x22 are callee-saved under AAPCS64, so the values
 * parked here survive the `bl` calls made below. */
#define EXP_FRAME   x19     /* interrupt (exception) context pointer */
#define FROM_SPP    x20     /* address of from-thread's saved-SP slot */
#define TO_SPP      x21     /* address of to-thread's saved-SP slot */
#define TO_TCB      x22     /* to-thread's TCB (struct rt_thread *) */

/*
 * void rt_hw_context_switch_interrupt(context, from sp, to sp, to tcb)
 *
 * Context switch requested from interrupt context: the outgoing thread's
 * state lives in the exception frame passed in X0.
 * X0 : interrupt context (exception frame of the preempted thread)
 * X1 : addr of from_thread's sp
 * X2 : addr of to_thread's sp
 * X3 : to_thread's tcb
 */
rt_hw_context_switch_interrupt:
#ifdef RT_USING_DEBUG
    /* debug frame for backtrace */
    stp     x29, x30, [sp, #-0x10]!
#endif /* RT_USING_DEBUG */

    /* we can discard all the previous ABI here: park the four arguments
     * in callee-saved registers so they survive the calls below */
    mov     EXP_FRAME, x0
    mov     FROM_SPP, x1
    mov     TO_SPP, x2
    mov     TO_TCB, x3

#ifdef RT_USING_SMART
    /* GET_THREAD_SELF loads the current (outgoing) thread into x0;
     * save its user-mode (lwp) settings before we leave it */
    GET_THREAD_SELF x0
    bl      lwp_user_setting_save
#endif /* RT_USING_SMART */

    /* reset SP of from-thread to its exception frame */
    mov     sp, EXP_FRAME

    /* push context for switch; when this context is later restored,
     * execution resumes at rt_hw_irq_exit */
    adr     lr, rt_hw_irq_exit
    SAVE_CONTEXT_SWITCH_FAST

    /* save SP of from-thread into its TCB slot */
    mov     x0, sp
    str     x0, [FROM_SPP]

    /* setup SP to the to-thread's saved SP */
    ldr     x0, [TO_SPP]
    mov     sp, x0

    /* update per-CPU lock ownership bookkeeping for the incoming thread */
    mov     x0, TO_TCB
    bl      rt_cpus_lock_status_restore
#ifdef RT_USING_SMART
    /* restore the incoming thread's user-mode (lwp) settings */
    mov     x0, TO_TCB
    bl      lwp_user_setting_restore
#endif /* RT_USING_SMART */

    /* restore the to-thread's saved context and resume it */
    b       _context_switch_exit
  109. _context_switch_exit:
  110. .local _context_switch_exit
  111. clrex
  112. RESTORE_CONTEXT_SWITCH