atomic_arm.c 5.0 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187
  1. /*
  2. * Copyright (c) 2006-2023, RT-Thread Development Team
  3. *
  4. * SPDX-License-Identifier: Apache-2.0
  5. *
  6. * Change Logs:
  7. * Date Author Notes
  8. * 2022-07-27 flybreak the first version
  9. */
  10. #include <rtthread.h>
  11. #if defined (__IAR_SYSTEMS_ICC__) /* for IAR Compiler */
  12. #include <intrinsics.h>
  13. #endif
  14. /**
  15. \brief LDR Exclusive (32 bit)
  16. \details Executes a exclusive LDR instruction for 32 bit values.
  17. \param [in] ptr Pointer to data
  18. \return value of type uint32_t at (*ptr)
  19. */
  20. #if defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050) /* ARM Compiler V6 */
  21. #define __LDREXW (rt_atomic_t)__builtin_arm_ldrex
  22. #elif defined(__ARMCC_VERSION) /* ARM Compiler V5 */
  23. #if __ARMCC_VERSION < 5060020
  24. #define __LDREXW(ptr) ((rt_atomic_t ) __ldrex(ptr))
  25. #else
  26. #define __LDREXW(ptr) _Pragma("push") _Pragma("diag_suppress 3731") ((rt_atomic_t ) __ldrex(ptr)) _Pragma("pop")
  27. #endif
  28. #elif defined (__IAR_SYSTEMS_ICC__) /* for IAR Compiler */
  29. _Pragma("inline=forced") __intrinsic rt_atomic_t __LDREXW(volatile rt_atomic_t *ptr)
  30. {
  31. return __LDREX((unsigned int *)ptr);
  32. }
  33. #elif defined (__GNUC__) /* GNU GCC Compiler */
  34. __attribute__((always_inline)) static inline rt_atomic_t __LDREXW(volatile rt_atomic_t *addr)
  35. {
  36. rt_atomic_t result;
  37. __asm volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr) );
  38. return result;
  39. }
  40. #endif
  41. /**
  42. \brief STR Exclusive (32 bit)
  43. \details Executes a exclusive STR instruction for 32 bit values.
  44. \param [in] value Value to store
  45. \param [in] ptr Pointer to location
  46. \return 0 Function succeeded
  47. \return 1 Function failed
  48. */
  49. #if defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050) /* ARM Compiler V6 */
  50. #define __STREXW (rt_atomic_t)__builtin_arm_strex
  51. #elif defined(__ARMCC_VERSION) /* ARM Compiler V5 */
  52. #if __ARMCC_VERSION < 5060020
  53. #define __STREXW(value, ptr) __strex(value, ptr)
  54. #else
  55. #define __STREXW(value, ptr) _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr) _Pragma("pop")
  56. #endif
  57. #elif defined (__IAR_SYSTEMS_ICC__) /* for IAR Compiler */
  58. _Pragma("inline=forced") __intrinsic rt_atomic_t __STREXW(rt_atomic_t value, volatile rt_atomic_t *ptr)
  59. {
  60. return __STREX(value, (unsigned int *)ptr);
  61. }
  62. #elif defined (__GNUC__) /* GNU GCC Compiler */
  63. __attribute__((always_inline)) static inline rt_atomic_t __STREXW(volatile rt_atomic_t value, volatile rt_atomic_t *addr)
  64. {
  65. rt_atomic_t result;
  66. __asm volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value) );
  67. return result;
  68. }
  69. #endif
  70. rt_atomic_t rt_hw_atomic_load(volatile rt_atomic_t *ptr)
  71. {
  72. rt_atomic_t oldval;
  73. do
  74. {
  75. oldval = __LDREXW(ptr);
  76. } while ((__STREXW(oldval, ptr)) != 0U);
  77. return oldval;
  78. }
  79. void rt_hw_atomic_store(volatile rt_atomic_t *ptr, rt_atomic_t val)
  80. {
  81. do
  82. {
  83. __LDREXW(ptr);
  84. } while ((__STREXW(val, ptr)) != 0U);
  85. }
  86. rt_atomic_t rt_hw_atomic_add(volatile rt_atomic_t *ptr, rt_atomic_t val)
  87. {
  88. rt_atomic_t oldval;
  89. do
  90. {
  91. oldval = __LDREXW(ptr);
  92. } while ((__STREXW(oldval + val, ptr)) != 0U);
  93. return oldval;
  94. }
  95. rt_atomic_t rt_hw_atomic_sub(volatile rt_atomic_t *ptr, rt_atomic_t val)
  96. {
  97. rt_atomic_t oldval;
  98. do
  99. {
  100. oldval = __LDREXW(ptr);
  101. } while ((__STREXW(oldval - val, ptr)) != 0U);
  102. return oldval;
  103. }
  104. rt_atomic_t rt_hw_atomic_and(volatile rt_atomic_t *ptr, rt_atomic_t val)
  105. {
  106. rt_atomic_t oldval;
  107. do
  108. {
  109. oldval = __LDREXW(ptr);
  110. } while ((__STREXW(oldval & val, ptr)) != 0U);
  111. return oldval;
  112. }
  113. rt_atomic_t rt_hw_atomic_or(volatile rt_atomic_t *ptr, rt_atomic_t val)
  114. {
  115. rt_atomic_t oldval;
  116. do
  117. {
  118. oldval = __LDREXW(ptr);
  119. } while ((__STREXW(oldval | val, ptr)) != 0U);
  120. return oldval;
  121. }
  122. rt_atomic_t rt_hw_atomic_xor(volatile rt_atomic_t *ptr, rt_atomic_t val)
  123. {
  124. rt_atomic_t oldval;
  125. do
  126. {
  127. oldval = __LDREXW(ptr);
  128. } while ((__STREXW(oldval ^ val, ptr)) != 0U);
  129. return oldval;
  130. }
  131. rt_atomic_t rt_hw_atomic_exchange(volatile rt_atomic_t *ptr, rt_atomic_t val)
  132. {
  133. rt_atomic_t oldval;
  134. do
  135. {
  136. oldval = __LDREXW(ptr);
  137. } while ((__STREXW(val, ptr)) != 0U);
  138. return oldval;
  139. }
  140. void rt_hw_atomic_flag_clear(volatile rt_atomic_t *ptr)
  141. {
  142. do
  143. {
  144. __LDREXW(ptr);
  145. } while ((__STREXW(0, ptr)) != 0U);
  146. }
  147. rt_atomic_t rt_hw_atomic_flag_test_and_set(volatile rt_atomic_t *ptr)
  148. {
  149. rt_atomic_t oldval;
  150. do
  151. {
  152. oldval = __LDREXW(ptr);
  153. } while ((__STREXW(1, ptr)) != 0U);
  154. return oldval;
  155. }
/**
 * Atomic compare-and-exchange (strong).
 *
 * If *ptr equals *old, store new into *ptr and return nonzero (1).
 * Otherwise leave *ptr unchanged, write the observed value back into
 * *old, and return 0 — matching C11 atomic_compare_exchange_strong
 * semantics for the caller-visible contract.
 *
 * @param ptr  address of the word to update
 * @param old  in: expected value; out: actual value on mismatch
 * @param new  value to install on a successful comparison
 * @return nonzero if the exchange happened, 0 otherwise
 */
rt_atomic_t rt_hw_atomic_compare_exchange_strong(volatile rt_atomic_t *ptr, rt_atomic_t *old, rt_atomic_t new)
{
    rt_atomic_t result;
    rt_atomic_t temp = *old;
    do
    {
        result = __LDREXW(ptr);
        if (result != temp)
        {
            /* Mismatch: report the value actually seen ... */
            *old = result;
            /* ... and store it back unchanged; this STREX exists to release
             * the exclusive monitor opened by __LDREXW, not to modify *ptr. */
            __STREXW(result, ptr);
            break;
        }
    } while ((__STREXW(new, ptr)) != 0U);  /* retry if exclusivity was lost */
    return (result == temp);
}