
[libcpu] discard rt_current_thread (#8976)

* [libcpu] rv64: discard rt_current_thread

* arm: use rt_thread_self to fetch the current TCB
Shell committed 11 months ago
commit 397cdcd132
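
Every hunk below follows the same pattern: instead of resolving the UP-only rt_current_thread symbol from assembly, the port calls the public accessor rt_thread_self() and uses its return value (a0 on RISC-V, r0 on ARM) as the current TCB. A minimal C sketch of the equivalent lookup, assuming only RT-Thread's public API; the helper name is illustrative, and the SMP remark is the presumed motivation rather than something stated in the patch itself:

    #include <rtthread.h>

    /* Illustrative helper (not part of this patch): fetch the current TCB
     * through the public accessor rather than a direct load of a global.
     * rt_thread_self() also works on SMP builds, where the current thread
     * is tracked per CPU instead of in a single rt_current_thread symbol. */
    static rt_thread_t current_tcb(void)
    {
        return rt_thread_self();
    }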

+ 4 - 5
components/lwp/arch/risc-v/rv64/lwp_gcc.S

@@ -35,7 +35,7 @@ arch_start_umode:
     // load kstack for user process
     csrw sscratch, a3
     li t0, SSTATUS_SPP | SSTATUS_SIE    // set as user mode, close interrupt
-    csrc sstatus, t0 
+    csrc sstatus, t0
     li t0, SSTATUS_SPIE // enable interrupt when return to user mode
     csrs sstatus, t0
 
@@ -112,7 +112,7 @@ arch_signal_quit:
     mv sp, a0
 
     /* restore user sp before enter trap */
-    addi a0, sp, CTX_REG_NR * REGBYTES 
+    addi a0, sp, CTX_REG_NR * REGBYTES
     csrw sscratch, a0
 
 
@@ -234,8 +234,7 @@ __restore_sp_from_sscratch: // from kernel
     j __move_stack_context
 
 __restore_sp_from_tcb: // from user
-    la a0, rt_current_thread
-    LOAD a0, 0(a0)
+    jal rt_thread_self
     jal get_thread_kernel_stack_top
     mv t0, a0
 
@@ -272,7 +271,7 @@ copy_context_loop:
     call syscall_handler
     j arch_syscall_exit
 START_POINT_END(syscall_entry)
-    
+
 .global arch_syscall_exit
 arch_syscall_exit:
     CLOSE_INTERRUPT

+ 1 - 2
libcpu/arm/cortex-m33/context_gcc.S

@@ -196,8 +196,7 @@ contex_ns_load:
 
 #if defined (RT_USING_MEM_PROTECTION)
     PUSH    {r0-r3, r12, lr}
-    LDR     r1, =rt_current_thread
-    LDR     r0, [r1]
+    BL      rt_thread_self
     BL      rt_hw_mpu_table_switch
     POP     {r0-r3, r12, lr}
 #endif

+ 1 - 2
libcpu/arm/cortex-m7/context_gcc.S

@@ -156,8 +156,7 @@ switch_to_thread:
 
 #if defined (RT_USING_MEM_PROTECTION)
     PUSH    {r0-r3, r12, lr}
-    LDR     r1, =rt_current_thread
-    LDR     r0, [r1]
+    BL      rt_thread_self
     BL      rt_hw_mpu_table_switch
     POP     {r0-r3, r12, lr}
 #endif

+ 4 - 6
libcpu/risc-v/t-head/c906/context_gcc.S

@@ -17,11 +17,10 @@
 rt_hw_context_switch_to:
     LOAD sp, (a0)
 
-    la s0, rt_current_thread
-    LOAD s1, (s0)
+    jal rt_thread_self
+    mv s1, a0
 
     #ifdef RT_USING_SMART
-        mv a0, s1
         jal lwp_aspace_switch
     #endif
 
@@ -50,11 +49,10 @@ rt_hw_context_switch:
     //restore to thread context
     LOAD sp, (a1)
 
-    la s0, rt_current_thread
-    LOAD s1, (s0)
+    jal rt_thread_self
+    mv s1, a0
 
     #ifdef RT_USING_SMART
-        mv a0, s1
         jal lwp_aspace_switch
     #endif
 

+ 4 - 6
libcpu/risc-v/virt64/context_gcc.S

@@ -75,11 +75,10 @@
 rt_hw_context_switch_to:
     LOAD sp, (a0)
 
-    la s0, rt_current_thread
-    LOAD s1, (s0)
+    jal rt_thread_self
+    mv s1, a0
 
     #ifdef RT_USING_SMART
-        mv a0, s1
         jal lwp_aspace_switch
     #endif
 
@@ -103,11 +102,10 @@ rt_hw_context_switch:
     LOAD sp, (a1)
 
     // restore Address Space
-    la s0, rt_current_thread
-    LOAD s1, (s0)
+    jal rt_thread_self
+    mv s1, a0
 
     #ifdef RT_USING_SMART
-        mv a0, s1
         jal lwp_aspace_switch
     #endif