atomic_arm.c

/*
 * Copyright (c) 2006-2023, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2022-07-27     flybreak     the first version
 */

#include <rtthread.h>

#if defined (__IAR_SYSTEMS_ICC__) /* for IAR Compiler */
#include <intrinsics.h>
#include <iccarm_builtin.h>
#endif

/**
  \brief   LDR Exclusive (32 bit)
  \details Executes an exclusive LDR (LDREX) instruction on a 32-bit value.
  \param [in]    ptr  Pointer to data
  \return        value of type rt_atomic_t at (*ptr)
 */
#if defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050) /* ARM Compiler V6 */
#define __LDREXW        (rt_atomic_t)__builtin_arm_ldrex
#elif defined (__ARMCC_VERSION) /* ARM Compiler V5 */
#if __ARMCC_VERSION < 5060020
#define __LDREXW(ptr)   ((rt_atomic_t)__ldrex(ptr))
#else
#define __LDREXW(ptr)   _Pragma("push") _Pragma("diag_suppress 3731") ((rt_atomic_t)__ldrex(ptr)) _Pragma("pop")
#endif
#elif defined (__IAR_SYSTEMS_ICC__) /* for IAR Compiler */
_Pragma("inline=forced") __intrinsic rt_atomic_t __LDREXW(volatile rt_atomic_t *ptr)
{
    return __iar_builtin_LDREX((volatile unsigned int *)ptr);
}
#elif defined (__GNUC__) /* GNU GCC Compiler */
__attribute__((always_inline)) static inline rt_atomic_t __LDREXW(volatile rt_atomic_t *addr)
{
    rt_atomic_t result;

    __asm volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr));
    return result;
}
#endif

/**
  \brief   STR Exclusive (32 bit)
  \details Executes an exclusive STR (STREX) instruction on a 32-bit value.
  \param [in]    value  Value to store
  \param [in]    ptr    Pointer to location
  \return          0    Function succeeded
  \return          1    Function failed
 */
#if defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050) /* ARM Compiler V6 */
#define __STREXW        (rt_atomic_t)__builtin_arm_strex
#elif defined (__ARMCC_VERSION) /* ARM Compiler V5 */
#if __ARMCC_VERSION < 5060020
#define __STREXW(value, ptr)    __strex(value, ptr)
#else
#define __STREXW(value, ptr)    _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr) _Pragma("pop")
#endif
#elif defined (__IAR_SYSTEMS_ICC__) /* for IAR Compiler */
_Pragma("inline=forced") __intrinsic rt_atomic_t __STREXW(rt_atomic_t value, volatile rt_atomic_t *ptr)
{
    return __STREX(value, (unsigned int *)ptr);
}
#elif defined (__GNUC__) /* GNU GCC Compiler */
__attribute__((always_inline)) static inline rt_atomic_t __STREXW(volatile rt_atomic_t value, volatile rt_atomic_t *addr)
{
    rt_atomic_t result;

    __asm volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value));
    return result;
}
#endif
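
/**
  \brief   Atomic load (32 bit)
  \details Reads the value at *ptr inside an LDREX/STREX retry loop, so the
           value returned is one that was also written back unchanged.
  \param [in]    ptr  Pointer to data
  \return        value observed at (*ptr)
 */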
rt_atomic_t rt_hw_atomic_load(volatile rt_atomic_t *ptr)
{
    rt_atomic_t oldval;
    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(oldval, ptr)) != 0U);
    return oldval;
}
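
/**
  \brief   Atomic store (32 bit)
  \details Writes val to *ptr. The leading LDREX only arms the exclusive
           monitor so the STREX can detect contention and retry.
  \param [in]    ptr  Pointer to data
  \param [in]    val  Value to store
 */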
void rt_hw_atomic_store(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    do
    {
        __LDREXW(ptr);
    } while ((__STREXW(val, ptr)) != 0U);
}
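
/**
  \brief   Atomic add (32 bit)
  \param [in]    ptr  Pointer to data
  \param [in]    val  Value to add
  \return        value at (*ptr) before the addition
 */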
rt_atomic_t rt_hw_atomic_add(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    rt_atomic_t oldval;
    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(oldval + val, ptr)) != 0U);
    return oldval;
}
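
/**
  \brief   Atomic subtract (32 bit)
  \param [in]    ptr  Pointer to data
  \param [in]    val  Value to subtract
  \return        value at (*ptr) before the subtraction
 */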
rt_atomic_t rt_hw_atomic_sub(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    rt_atomic_t oldval;
    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(oldval - val, ptr)) != 0U);
    return oldval;
}
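
/**
  \brief   Atomic bitwise AND (32 bit)
  \param [in]    ptr  Pointer to data
  \param [in]    val  Mask to AND with
  \return        value at (*ptr) before the operation
 */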
rt_atomic_t rt_hw_atomic_and(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    rt_atomic_t oldval;
    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(oldval & val, ptr)) != 0U);
    return oldval;
}
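
/**
  \brief   Atomic bitwise OR (32 bit)
  \param [in]    ptr  Pointer to data
  \param [in]    val  Mask to OR with
  \return        value at (*ptr) before the operation
 */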
rt_atomic_t rt_hw_atomic_or(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    rt_atomic_t oldval;
    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(oldval | val, ptr)) != 0U);
    return oldval;
}
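
/**
  \brief   Atomic bitwise XOR (32 bit)
  \param [in]    ptr  Pointer to data
  \param [in]    val  Mask to XOR with
  \return        value at (*ptr) before the operation
 */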
rt_atomic_t rt_hw_atomic_xor(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    rt_atomic_t oldval;
    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(oldval ^ val, ptr)) != 0U);
    return oldval;
}
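
/**
  \brief   Atomic exchange (32 bit)
  \details Stores val to *ptr and returns the value it replaced.
  \param [in]    ptr  Pointer to data
  \param [in]    val  New value
  \return        value at (*ptr) before the exchange
 */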
rt_atomic_t rt_hw_atomic_exchange(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    rt_atomic_t oldval;
    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(val, ptr)) != 0U);
    return oldval;
}
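
/**
  \brief   Atomic flag clear
  \details Atomically writes 0 to *ptr.
  \param [in]    ptr  Pointer to the flag
 */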
void rt_hw_atomic_flag_clear(volatile rt_atomic_t *ptr)
{
    do
    {
        __LDREXW(ptr);
    } while ((__STREXW(0, ptr)) != 0U);
}
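
/**
  \brief   Atomic flag test and set
  \details Atomically writes 1 to *ptr and returns the previous value, which
           is nonzero if the flag was already set.
  \param [in]    ptr  Pointer to the flag
  \return        value at (*ptr) before the set
 */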
rt_atomic_t rt_hw_atomic_flag_test_and_set(volatile rt_atomic_t *ptr)
{
    rt_atomic_t oldval;
    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(1, ptr)) != 0U);
    return oldval;
}
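
/**
  \brief   Atomic compare and exchange (strong, 32 bit)
  \details If *ptr equals *old, writes new to *ptr; otherwise copies the
           observed value into *old.
  \param [in]     ptr  Pointer to data
  \param [in,out] old  Expected value, updated with the observed value on failure
  \param [in]     new  Value to store on success
  \return         1 if the exchange happened, 0 otherwise
 */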
rt_atomic_t rt_hw_atomic_compare_exchange_strong(volatile rt_atomic_t *ptr, rt_atomic_t *old, rt_atomic_t new)
{
    rt_atomic_t result;
    rt_atomic_t temp = *old;
    do
    {
        result = __LDREXW(ptr);
        if (result != temp)
        {
            /* Comparison failed: report the observed value back to the
             * caller, and store the unchanged value so the pending
             * exclusive access is relinquished. */
            *old = result;
            __STREXW(result, ptr);
            break;
        }
    } while ((__STREXW(new, ptr)) != 0U);
    return (result == temp);
}
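
/*
 * Usage sketch (illustrative only, not part of the original file): in
 * RT-Thread these rt_hw_atomic_* primitives typically back the rt_atomic_*
 * API when RT_USING_HW_ATOMIC is enabled, but they can also be called
 * directly. The reference counter below is a hypothetical example, not an
 * RT-Thread API.
 *
 *     static volatile rt_atomic_t ref_cnt = 0;
 *
 *     void ref_get(void)
 *     {
 *         rt_hw_atomic_add(&ref_cnt, 1);
 *     }
 *
 *     rt_bool_t ref_put(void)
 *     {
 *         // rt_hw_atomic_sub() returns the value *before* subtracting,
 *         // so the caller that sees 1 has dropped the last reference.
 *         return (rt_hw_atomic_sub(&ref_cnt, 1) == 1) ? RT_TRUE : RT_FALSE;
 *     }
 */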