/*
 * Copyright (c) 2006-2023, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2022-07-27     flybreak     the first version
 */

#include <rtthread.h>
/**
  \brief   LDR Exclusive (32 bit)
  \details Executes an exclusive LDR instruction for 32 bit values.
  \param [in]    ptr  Pointer to data
  \return        value of type rt_atomic_t at (*ptr)
*/
#if defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050) /* ARM Compiler V6 */
#define __LDREXW        (rt_atomic_t)__builtin_arm_ldrex
#elif defined (__ARMCC_VERSION) /* ARM Compiler V5 */
#if __ARMCC_VERSION < 5060020
#define __LDREXW(ptr)   ((rt_atomic_t)__ldrex(ptr))
#else
#define __LDREXW(ptr)   _Pragma("push") _Pragma("diag_suppress 3731") ((rt_atomic_t)__ldrex(ptr)) _Pragma("pop")
#endif
#elif defined (__IAR_SYSTEMS_ICC__) /* for IAR Compiler */
_Pragma("inline=forced") __intrinsic rt_atomic_t __LDREXW(volatile rt_atomic_t *ptr)
{
    return __LDREX((unsigned long *)ptr);
}
#elif defined (__GNUC__) /* GNU GCC Compiler */
__attribute__((always_inline)) static inline rt_atomic_t __LDREXW(volatile rt_atomic_t *addr)
{
    rt_atomic_t result;

    __asm volatile ("ldrex %0, %1" : "=r" (result) : "Q" (*addr));
    return result;
}
#endif
/**
  \brief   STR Exclusive (32 bit)
  \details Executes an exclusive STR instruction for 32 bit values.
  \param [in]    value  Value to store
  \param [in]    ptr    Pointer to location
  \return          0  Function succeeded
  \return          1  Function failed
*/
#if defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050) /* ARM Compiler V6 */
#define __STREXW        (rt_atomic_t)__builtin_arm_strex
#elif defined (__ARMCC_VERSION) /* ARM Compiler V5 */
#if __ARMCC_VERSION < 5060020
#define __STREXW(value, ptr)    __strex(value, ptr)
#else
#define __STREXW(value, ptr)    _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr) _Pragma("pop")
#endif
#elif defined (__IAR_SYSTEMS_ICC__) /* for IAR Compiler */
_Pragma("inline=forced") __intrinsic rt_atomic_t __STREXW(rt_atomic_t value, volatile rt_atomic_t *ptr)
{
    return __STREX(value, (unsigned long *)ptr);
}
#elif defined (__GNUC__) /* GNU GCC Compiler */
__attribute__((always_inline)) static inline rt_atomic_t __STREXW(rt_atomic_t value, volatile rt_atomic_t *addr)
{
    rt_atomic_t result;

    __asm volatile ("strex %0, %2, %1" : "=&r" (result), "=Q" (*addr) : "r" (value));
    return result;
}
#endif
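
/*
 * Every operation below uses the same load-linked/store-conditional retry
 * loop: __LDREXW reads the current value and marks the address for
 * exclusive access, and __STREXW writes the new value only if no other
 * access to that location occurred in between, returning 0 on success and
 * 1 on failure, in which case the loop retries. Where the loaded value is
 * not needed (store, flag_clear), __LDREXW is still issued to arm the
 * exclusive monitor so the subsequent __STREXW can succeed.
 */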
rt_atomic_t rt_hw_atomic_load(volatile rt_atomic_t *ptr)
{
    rt_atomic_t oldval;

    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(oldval, ptr)) != 0U);
    return oldval;
}

void rt_hw_atomic_store(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    do
    {
        __LDREXW(ptr);
    } while ((__STREXW(val, ptr)) != 0U);
}
rt_atomic_t rt_hw_atomic_add(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    rt_atomic_t oldval;

    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(oldval + val, ptr)) != 0U);
    return oldval;
}

rt_atomic_t rt_hw_atomic_sub(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    rt_atomic_t oldval;

    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(oldval - val, ptr)) != 0U);
    return oldval;
}

rt_atomic_t rt_hw_atomic_and(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    rt_atomic_t oldval;

    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(oldval & val, ptr)) != 0U);
    return oldval;
}

rt_atomic_t rt_hw_atomic_or(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    rt_atomic_t oldval;

    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(oldval | val, ptr)) != 0U);
    return oldval;
}

rt_atomic_t rt_hw_atomic_xor(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    rt_atomic_t oldval;

    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(oldval ^ val, ptr)) != 0U);
    return oldval;
}
rt_atomic_t rt_hw_atomic_exchange(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    rt_atomic_t oldval;

    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(val, ptr)) != 0U);
    return oldval;
}
void rt_hw_atomic_flag_clear(volatile rt_atomic_t *ptr)
{
    do
    {
        __LDREXW(ptr);
    } while ((__STREXW(0, ptr)) != 0U);
}

rt_atomic_t rt_hw_atomic_flag_test_and_set(volatile rt_atomic_t *ptr)
{
    rt_atomic_t oldval;

    do
    {
        oldval = __LDREXW(ptr);
    } while ((__STREXW(1, ptr)) != 0U);
    return oldval;
}
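
/*
 * Compare-and-swap with atomic_compare_exchange_strong() semantics: if
 * *ptr equals *old, write new and return nonzero; otherwise copy the
 * observed value into *old and return 0. The extra __STREXW on the
 * failure path stores the just-read value back unchanged, which also
 * clears the exclusive monitor before leaving the loop.
 */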
rt_atomic_t rt_hw_atomic_compare_exchange_strong(volatile rt_atomic_t *ptr, volatile rt_atomic_t *old, rt_atomic_t new)
{
    rt_atomic_t result;
    rt_atomic_t temp = *old;

    do
    {
        result = __LDREXW(ptr);
        if (result != temp)
        {
            *old = result;
            __STREXW(result, ptr);
            break;
        }
    } while ((__STREXW(new, ptr)) != 0U);
    return (result == temp);
}
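
/*
 * Usage sketch (illustrative only, not part of this file): a minimal
 * spinlock built on the flag operations above; the names spin_lock,
 * spin_unlock and lock_flag are hypothetical.
 *
 *   static volatile rt_atomic_t lock_flag = 0;
 *
 *   static void spin_lock(void)
 *   {
 *       while (rt_hw_atomic_flag_test_and_set(&lock_flag) != 0)
 *           ;   // spin until the flag was previously clear
 *   }
 *
 *   static void spin_unlock(void)
 *   {
 *       rt_hw_atomic_flag_clear(&lock_flag);
 *   }
 */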