  1. /*
  2. * Copyright (c) 2006-2021, RT-Thread Development Team
  3. *
  4. * SPDX-License-Identifier: Apache-2.0
  5. *
  6. * Change Logs:
  7. * Date Author Notes
  8. * 2021-04-27 peterfan Add copyright header.
  9. */
  10. #include <rthw.h>
  11. #include <stdint.h>
  12. #include <stdbool.h>
  13. #include <rtthread.h>
  14. /*
  15. * override gcc builtin atomic function for std::atomic<int64_t>, std::atomic<uint64_t>
  16. * @see https://gcc.gnu.org/onlinedocs/gcc/_005f_005fatomic-Builtins.html
  17. */
  18. uint64_t __atomic_load_8(volatile void *ptr, int memorder)
  19. {
  20. volatile uint64_t *val_ptr = (volatile uint64_t *)ptr;
  21. rt_base_t level;
  22. uint64_t tmp;
  23. level = rt_hw_interrupt_disable();
  24. tmp = *val_ptr;
  25. rt_hw_interrupt_enable(level);
  26. return tmp;
  27. }
  28. void __atomic_store_8(volatile void *ptr, uint64_t val, int memorder)
  29. {
  30. volatile uint64_t *val_ptr = (volatile uint64_t *)ptr;
  31. rt_base_t level;
  32. level = rt_hw_interrupt_disable();
  33. *val_ptr = val;
  34. rt_hw_interrupt_enable(level);
  35. }
  36. uint64_t __atomic_exchange_8(volatile void *ptr, uint64_t val, int memorder)
  37. {
  38. volatile uint64_t *val_ptr = (volatile uint64_t *)ptr;
  39. rt_base_t level;
  40. uint64_t tmp;
  41. level = rt_hw_interrupt_disable();
  42. tmp = *val_ptr;
  43. *val_ptr = val;
  44. rt_hw_interrupt_enable(level);
  45. return tmp;
  46. }
  47. bool __atomic_compare_exchange_8(volatile void *ptr, volatile void *expected, uint64_t desired, bool weak, int success_memorder, int failure_memorder)
  48. {
  49. volatile uint64_t *val_ptr = (volatile uint64_t *)ptr;
  50. volatile uint64_t *expected_ptr = (volatile uint64_t *)expected;
  51. rt_base_t level;
  52. bool exchanged;
  53. level = rt_hw_interrupt_disable();
  54. if (*val_ptr == *expected_ptr)
  55. {
  56. *val_ptr = desired;
  57. exchanged = true;
  58. }
  59. else
  60. {
  61. *expected_ptr = *val_ptr;
  62. exchanged = false;
  63. }
  64. rt_hw_interrupt_enable(level);
  65. return exchanged;
  66. }
  67. /**
  68. * @param size is the length of the value to load.
  69. *
  70. * @param mem is the source memory to load the value from.
  71. *
  72. * @param _return is the pointer to the space where the loaded value will be stored.
  73. */
  74. void __atomic_load(size_t size, void *mem, void *_return, int model)
  75. {
  76. rt_base_t level;
  77. level = rt_hw_interrupt_disable();
  78. rt_memcpy(_return, mem, size);
  79. rt_hw_interrupt_enable(level);
  80. }
  81. /**
  82. * @param size is the length of the value to store.
  83. *
  84. * @param mem is the destination memory space to store the value.
  85. *
  86. * @param val is the pointer to the value to store.
  87. */
  88. void __atomic_store(size_t size, void *mem, void *val, int model)
  89. {
  90. rt_base_t level;
  91. level = rt_hw_interrupt_disable();
  92. rt_memcpy(mem, val, size);
  93. rt_hw_interrupt_enable(level);
  94. }
  95. /**
  96. * @param size is the length of value to exchange.
  97. *
  98. * @param mem is the destination space to exchange.
  99. *
  100. * @param val is the pointer of value to exchange.
  101. *
  102. * @param _return gives back the the value before exchanging.
  103. */
  104. void __atomic_exchange(size_t size, void *mem, void *val, void *_return, int model)
  105. {
  106. rt_base_t level;
  107. level = rt_hw_interrupt_disable();
  108. rt_memcpy(_return, mem, size);
  109. rt_memcpy(mem, val, size);
  110. rt_hw_interrupt_enable(level);
  111. }
  112. /**
  113. * @param size is the length of value to operate.
  114. *
  115. * @param obj is the destination value space to operate.
  116. *
  117. * @param expected is the value to be compared with obj.
  118. *
  119. * @param desired is the value pointer to be written into obj, under the condition that *expected equals *obj.
  120. *
  121. * @return true if succeed in writing *desired into *obj; false if not.
  122. */
  123. bool __atomic_compare_exchange(size_t size, void *obj, void *expected, void *desired, int success, int failure)
  124. {
  125. rt_base_t level;
  126. volatile bool exchanged = false;
  127. level = rt_hw_interrupt_disable();
  128. if (rt_memcmp(obj, expected, size) == 0)
  129. {
  130. rt_memcpy(obj, desired, size);
  131. exchanged = true;
  132. }
  133. else
  134. {
  135. rt_memcpy(expected, obj, size);
  136. exchanged = false;
  137. }
  138. rt_hw_interrupt_enable(level);
  139. return exchanged;
  140. }
  141. #define __atomic_fetch_op_8(OPNAME, OP) \
  142. uint64_t __atomic_fetch_##OPNAME##_8(volatile void *ptr, uint64_t val, int memorder) {\
  143. volatile uint64_t* val_ptr = (volatile uint64_t*)ptr;\
  144. rt_base_t level;\
  145. uint64_t tmp;\
  146. level = rt_hw_interrupt_disable();\
  147. tmp = *val_ptr;\
  148. *val_ptr OP##= val;\
  149. rt_hw_interrupt_enable(level);\
  150. return tmp;\
  151. }
  152. __atomic_fetch_op_8(add, +)
  153. __atomic_fetch_op_8(sub, -)
  154. __atomic_fetch_op_8( and, &)
  155. __atomic_fetch_op_8( or, |)
  156. __atomic_fetch_op_8(xor, ^)