atomic_aarch64.c

/*
 * Copyright (c) 2006-2023, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2023-05-18     GuEe-GUI     first version
 */

#include <rthw.h>
#include <rtatomic.h>
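
/*
 * Atomic load: a plain ldr followed by a dmb ish barrier so the loaded
 * value is ordered before any subsequent memory access.
 */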
rt_atomic_t rt_hw_atomic_load(volatile rt_atomic_t *ptr)
{
    rt_atomic_t ret;

    __asm__ volatile (
        "   ldr     %0, %1\n"
        "   dmb     ish"
        : "=r" (ret)
        : "Q" (*ptr)
        : "memory");

    return ret;
}
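
/*
 * Atomic store: a plain str followed by a dmb ish barrier so the store is
 * ordered before any subsequent memory access.
 */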
void rt_hw_atomic_store(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    __asm__ volatile (
        "   str     %1, %0\n"
        "   dmb     ish"
        : "=Q" (*ptr)
        : "r" (val)
        : "memory");
}
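
/*
 * Generate fetch-and-<op> primitives with an LL/SC retry loop: prfm
 * prefetches the line for store, ldxr takes an exclusive load, the given
 * instruction computes the updated value, and stlxr attempts a
 * store-release; if the exclusive monitor was lost (%w2 != 0) the loop
 * retries. Each generated function returns the value observed before the
 * update.
 */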
#define AARCH64_ATOMIC_OP_RETURN(op, ins, constraint) \
rt_atomic_t rt_hw_atomic_##op(volatile rt_atomic_t *ptr, rt_atomic_t in_val) \
{ \
    rt_atomic_t tmp, val, result; \
 \
    __asm__ volatile ( \
        "   prfm    pstl1strm, %3\n" \
        "1: ldxr    %0, %3\n" \
        "   "#ins"  %1, %0, %4\n" \
        "   stlxr   %w2, %1, %3\n" \
        "   cbnz    %w2, 1b\n" \
        "   dmb     ish" \
        : "=&r" (result), "=&r" (val), "=&r" (tmp), "+Q" (*ptr) \
        : __RT_STRINGIFY(constraint) "r" (in_val) \
        : "memory"); \
 \
    return result; \
}

AARCH64_ATOMIC_OP_RETURN(add, add, I)
AARCH64_ATOMIC_OP_RETURN(sub, sub, J)
AARCH64_ATOMIC_OP_RETURN(and, and, K)
AARCH64_ATOMIC_OP_RETURN(or, orr, K)
AARCH64_ATOMIC_OP_RETURN(xor, eor, K)
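
/* Atomically replace *ptr with val and return the previous value. */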
rt_atomic_t rt_hw_atomic_exchange(volatile rt_atomic_t *ptr, rt_atomic_t val)
{
    rt_atomic_t ret, tmp;

    __asm__ volatile (
        "   prfm    pstl1strm, %2\n"
        "1: ldxr    %0, %2\n"
        "   stlxr   %w1, %3, %2\n"
        "   cbnz    %w1, 1b\n"
        "   dmb     ish"
        : "=&r" (ret), "=&r" (tmp), "+Q" (*ptr)
        : "r" (val)
        : "memory");

    return ret;
}
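
/* Clear the flag by atomically AND-ing *ptr with 0. */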
void rt_hw_atomic_flag_clear(volatile rt_atomic_t *ptr)
{
    rt_hw_atomic_and(ptr, 0);
}
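
/* Set the flag to 1 and return its previous value (nonzero if it was already set). */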
rt_atomic_t rt_hw_atomic_flag_test_and_set(volatile rt_atomic_t *ptr)
{
    return rt_hw_atomic_or(ptr, 1);
}
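
/*
 * Compare-and-swap: if *ptr equals *old, store new and return 1, otherwise
 * return 0. On mismatch the eor/cbnz pair branches past the store and the
 * barrier; note that *old is left unchanged on failure.
 */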
rt_atomic_t rt_hw_atomic_compare_exchange_strong(volatile rt_atomic_t *ptr, rt_atomic_t *old, rt_atomic_t new)
{
    rt_atomic_t tmp, oldval;

    __asm__ volatile (
        "   prfm    pstl1strm, %2\n"
        "1: ldxr    %0, %2\n"
        "   eor     %1, %0, %3\n"
        "   cbnz    %1, 2f\n"
        "   stlxr   %w1, %4, %2\n"
        "   cbnz    %w1, 1b\n"
        "   dmb     ish\n"
        "2:"
        : "=&r" (oldval), "=&r" (tmp), "+Q" (*ptr)
        : "Kr" (*old), "r" (new)
        : "memory");

    return oldval == *old;
}
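
/*
 * Illustrative sketch only (not part of the upstream file; the helper name
 * is hypothetical): a CAS retry loop built on the primitives above. Since
 * rt_hw_atomic_compare_exchange_strong() leaves *old unchanged on failure,
 * the loop reloads the current value explicitly before retrying.
 */
#if 0
static rt_atomic_t example_fetch_add_cas(volatile rt_atomic_t *ptr, rt_atomic_t delta)
{
    rt_atomic_t expected = rt_hw_atomic_load(ptr);

    /* Retry until no other CPU updates *ptr between the load and the CAS. */
    while (!rt_hw_atomic_compare_exchange_strong(ptr, &expected, expected + delta))
    {
        expected = rt_hw_atomic_load(ptr);
    }

    return expected; /* value observed before the increment */
}
#endif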