vfp_entry_gcc.S

/*
 * File      : vfp_entry_gcc.S
 * This file is part of RT-Thread RTOS
 * COPYRIGHT (C) 2006, RT-Thread Development Team
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Change Logs:
 * Date           Author       Notes
 * 2014-11-07     weety        first version
 */

#include <rtconfig.h>

#ifdef RT_USING_VFP

#include "armv6.h"
#include "vfp.h"

//#define DEBUG
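
/*
 * Debug print helpers. The format string is embedded inline in the code
 * stream: r0 is loaded with the string's address via a pc-relative add,
 * rt_kprintf is called, and the "b 1f" skips execution over the string
 * data. These macros expand to nothing unless DEBUG is defined.
 */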
.macro PRINT, str
#ifdef DEBUG
    stmfd   sp!, {r0-r3, ip, lr}
    add     r0, pc, #4
    bl      rt_kprintf
    b       1f
    .asciz  "VFP: \str\n"
    .balign 4
1:  ldmfd   sp!, {r0-r3, ip, lr}
#endif
.endm

.macro PRINT1, str, arg
#ifdef DEBUG
    stmfd   sp!, {r0-r3, ip, lr}
    mov     r1, \arg
    add     r0, pc, #4
    bl      rt_kprintf
    b       1f
    .asciz  "VFP: \str\n"
    .balign 4
1:  ldmfd   sp!, {r0-r3, ip, lr}
#endif
.endm

.macro PRINT3, str, arg1, arg2, arg3
#ifdef DEBUG
    stmfd   sp!, {r0-r3, ip, lr}
    mov     r3, \arg3
    mov     r2, \arg2
    mov     r1, \arg1
    add     r0, pc, #4
    bl      rt_kprintf
    b       1f
    .asciz  "VFP: \str\n"
    .balign 4
1:  ldmfd   sp!, {r0-r3, ip, lr}
#endif
.endm
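
/*
 * get_vfpregs_offset loads the byte offset of the per-thread VFP context
 * within struct rt_thread. vfpregs_offset is expected to be provided by
 * the C side of the VFP support; vfp_entry adds it to the rt_thread
 * pointer in r10 to reach the thread's VFP save area.
 */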
.macro get_vfpregs_offset, rd
    ldr     \rd, .vfp_offset
    ldr     \rd, [\rd]
.endm

.vfp_offset:
    .word   vfpregs_offset

.macro vfp_restore_working_reg, base, rd0
    vldmia  \base!, {d0-d15}
#ifdef RT_USING_VFPv3
    vmrs    \rd0, mvfr0
    and     \rd0, \rd0, #MVFR0_A_SIMD_MASK  @ A_SIMD registers
    cmp     \rd0, #2                        @ 0b0000 Not supported.
                                            @ 0b0001 Supported, 16 × 64-bit registers.
                                            @ 0b0010 Supported, 32 × 64-bit registers.
    vldmiaeq \base!, {d16-d31}
    addne   \base, \base, #32*4             @ skip unused registers
#endif
.endm

.macro vfp_save_working_reg, base, rd0
    vstmia  \base!, {d0-d15}                @ save the working registers
#ifdef RT_USING_VFPv3
    vmrs    \rd0, mvfr0
    and     \rd0, \rd0, #MVFR0_A_SIMD_MASK  @ A_SIMD registers
    cmp     \rd0, #2                        @ 0b0000 Not supported.
                                            @ 0b0001 Supported, 16 × 64-bit registers.
                                            @ 0b0010 Supported, 32 × 64-bit registers.
    vstmiaeq \base!, {d16-d31}
    addne   \base, \base, #32*4             @ skip unused registers
#endif
.endm

.macro vfp_restore_state, base, fpexc_rd, rd0, rd1, rd2
    ldmia   \base, {\fpexc_rd, \rd0, \rd1, \rd2}    @ load FPEXC, FPSCR, FPINST, FPINST2
    tst     \fpexc_rd, #FPEXC_EX                    @ is the VFP in the exceptional state?
    beq     1f
    vmsr    fpinst, \rd1                            @ restore FPINST
    tst     \fpexc_rd, #FPEXC_FP2V                  @ is FPINST2 valid?
    beq     1f
    vmsr    fpinst2, \rd2                           @ restore FPINST2
1:
    vmsr    fpscr, \rd0                             @ restore FPSCR
.endm

.macro vfp_save_state, base, fpexc_rd, rd0, rd1, rd2
    vmrs    \rd0, fpscr                             @ current status
    tst     \fpexc_rd, #FPEXC_EX                    @ is the VFP in the exceptional state?
    beq     1f
    vmrs    \rd1, fpinst                            @ get FPINST
    tst     \fpexc_rd, #FPEXC_FP2V                  @ is FPINST2 valid?
    beq     1f
    vmrs    \rd2, fpinst2                           @ get FPINST2
1:
    stmia   \base, {\fpexc_rd, \rd0, \rd1, \rd2}    @ save FPEXC, FPSCR, FPINST, FPINST2
.endm
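
/*
 * Layout of the per-thread VFP context used by the macros above:
 * d0-d15, then (with RT_USING_VFPv3) either d16-d31 or 32 unused words,
 * followed by FPEXC, FPSCR, FPINST and FPINST2.
 */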

/*
 * VFP hardware support entry point.
 *
 *  r0  = faulted instruction
 *  r2  = faulted PC + 4
 *  r9  = successful return
 *  r10 = rt_thread structure
 *  lr  = failure return
 */
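
/*
 * Lazy VFP context switching: last_vfp_context points to the VFP save
 * area of the thread whose registers currently live in the hardware.
 * When a different thread traps here, the hardware state is written back
 * to that area, the new thread's state is loaded, and last_vfp_context
 * is updated to point at the new owner.
 */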
.globl vfp_entry
vfp_entry:
    ldr     r1, =rt_interrupt_nest
    ldr     r1, [r1]                    @ get rt_interrupt_nest
    cmp     r1, #0                      @ rt_interrupt_nest == 0?
    bne     irq_vfp_entry               @ VFP used from an interrupt handler

    get_vfpregs_offset r11
    add     r10, r10, r11               @ r10 = vfpregs
    vmrs    r1, fpexc
    tst     r1, #FPEXC_EN
    bne     __lookup_vfp_exceptions     @ VFP already enabled: check for exceptions

    ldr     r3, last_vfp_context_address
    orr     r1, r1, #FPEXC_EN           @ set VFP enable bit
    ldr     r4, [r3]                    @ get last_vfp_context pointer
    bic     r5, r1, #FPEXC_EX           @ clear exception status
    cmp     r4, r10
    beq     __switch_to_the_same_thread @ same thread still owns the VFP: only check pending exceptions
    vmsr    fpexc, r5                   @ enable VFP, clear any pending exceptions

    /* Save the current VFP registers to the old thread context */
    cmp     r4, #0
    beq     __no_last_vfp_context
    vfp_save_working_reg r4, r5         @ save the working registers
    vfp_save_state r4, r1, r5, r6, r8   @ save the VFP state registers

__no_last_vfp_context:
    str     r10, [r3]                   @ update the last_vfp_context pointer
    vfp_restore_working_reg r10, r5     @ restore the working registers
    vfp_restore_state r10, r1, r5, r6, r8 @ restore the VFP state registers

__switch_to_the_same_thread:
    tst     r1, #FPEXC_EX
    bne     __do_exception
    vmsr    fpexc, r1                   @ restore FPEXC last
    sub     r2, r2, #4
    str     r2, [sp, #S_PC]             @ retry the faulted instruction
    PRINT1  "return instr=0x%08x", r2
    mov     pc, r9
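
/*
 * The VFP was already enabled when the undefined-instruction trap was
 * taken: either an exception is pending (FPEXC.EX/DEX, or the FPSCR.IXE
 * trap enable is set) and is passed to the C handler, or the trap is not
 * for the VFP and control returns through lr (the failure return).
 */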
__lookup_vfp_exceptions:
    tst     r1, #FPEXC_EX | FPEXC_DEX   @ check for synchronous or asynchronous exception
    bne     __do_exception
    vmrs    r5, fpscr
    tst     r5, #FPSCR_IXE
    bne     __do_exception
    PRINT   "__lookup_vfp_exceptions"
    mov     pc, lr
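
/*
 * A VFP exception is pending: take the VFP out of the exceptional state
 * by clearing FPEXC.EX, then call the C handler vfp_exception() with
 * r0 = faulted instruction and r1 = the original FPEXC value.
 */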
__do_exception:
    PRINT   "__do_exception"
    push    {lr}
    mov     r5, r1
    bic     r5, #FPEXC_EX               @ clear exception
    vmsr    fpexc, r5
    bl      vfp_exception               @ r0 = faulted instruction, r1 = fpexc
    pop     {pc}
    @mov    pc, lr
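
/*
 * VFP touched from interrupt context: save the current owner's registers
 * (if any) and clear last_vfp_context, so the owning thread reloads its
 * state the next time it uses the VFP. The interrupt handler then retries
 * the faulted instruction with the VFP enabled.
 */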
irq_vfp_entry:
    vmrs    r1, fpexc
    tst     r1, #FPEXC_EN
    bne     __lookup_vfp_exceptions     @ VFP already enabled: check for exceptions

    ldr     r3, last_vfp_context_address
    orr     r1, r1, #FPEXC_EN           @ set VFP enable bit
    ldr     r4, [r3]                    @ get last_vfp_context pointer
    bic     r5, r1, #FPEXC_EX           @ clear exception status
    vmsr    fpexc, r5                   @ enable VFP, clear any pending exceptions

    /* Save the current VFP registers to the old thread context */
    cmp     r4, #0                      @ last_vfp_context != NULL ?
    beq     __no_save_vfp_context
    vfp_save_working_reg r4, r5         @ save the working registers
    vfp_save_state r4, r1, r5, r6, r8   @ save the VFP state registers
    mov     r4, #0
    str     r4, [r3]                    @ last_vfp_context = NULL

__no_save_vfp_context:
    sub     r2, r2, #4
    str     r2, [sp, #S_PC]             @ retry the faulted instruction
    PRINT1  "return instr=0x%08x", r2
    mov     pc, r9

    .align
last_vfp_context_address:
    .word   last_vfp_context

#endif