/*
 * Copyright (c) 2006-2025 RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2025-04-22     ScuDays      Add VDSO functionality under the riscv64 architecture.
 */
  10. #ifndef ASM_VDSO_SYS_H
  11. #define ASM_VDSO_SYS_H
  12. #include <time.h>
  13. #include <unistd.h>
  14. #include <sys/types.h>
  15. #include <vdso_config.h>
  16. #include <vdso_datapage.h>
  17. #define __always_unused __attribute__((__unused__))
  18. #define __maybe_unused __attribute__((__unused__))
  19. #define likely(x) __builtin_expect(!!(x), 1)
  20. #define unlikely(x) __builtin_expect(!!(x), 0)
  21. #define arch_counter_enforce_ordering \
  22. __asm__ volatile("fence rw, rw" ::: "memory")
  23. static inline uint64_t __arch_get_hw_counter(void)
  24. {
  25. uint64_t res;
  26. __asm__ volatile("rdtime %0" : "=r"(res));
  27. arch_counter_enforce_ordering;
  28. return res;
  29. }
  30. static inline uint32_t
  31. __iter_div_u64_rem(uint64_t dividend, uint32_t divisor, uint64_t *remainder)
  32. {
  33. uint32_t ret = 0;
  34. while (dividend >= divisor)
  35. {
  36. /* The following asm() prevents the compiler from
  37. optimising this loop into a modulo operation. */
  38. __asm__("" : "+rm"(dividend));
  39. dividend -= divisor;
  40. ret++;
  41. }
  42. *remainder = dividend;
  43. return ret;
  44. }
/* __RT_STRINGIFY quotes its (variadic) argument list verbatim;
 * RT_STRINGIFY adds one extra macro-expansion pass so arguments that
 * are themselves macros get expanded before being stringified. */
#define __RT_STRINGIFY(x...) #x
#define RT_STRINGIFY(x...) __RT_STRINGIFY(x)

/* Assemble one barrier instruction from a mnemonic plus operands,
 * e.g. rt_hw_barrier(fence, rw, rw) emits "fence rw, rw".  The
 * "memory" clobber makes every use a compiler barrier as well. */
#define rt_hw_barrier(cmd, ...) \
    __asm__ volatile(RT_STRINGIFY(cmd) " " RT_STRINGIFY(__VA_ARGS__)::: "memory")

/* Instruction-stream and data memory barriers built from RISC-V
 * fences: isb = fence.i, rmb/wmb = read/write-only fences, and
 * dmb/dsb both map to a full rw,rw fence on this architecture. */
#define rt_hw_isb() rt_hw_barrier(fence.i)
#define rt_hw_dmb() rt_hw_barrier(fence, rw, rw)
#define rt_hw_wmb() rt_hw_barrier(fence, w, w)
#define rt_hw_rmb() rt_hw_barrier(fence, r, r)
#define rt_hw_dsb() rt_hw_barrier(fence, rw, rw)

/* Generic full barrier, unless some included header already defines it. */
#ifndef barrier
#define barrier() __asm__ __volatile__("fence" : : : "memory")
#endif
  57. static inline void cpu_relax(void)
  58. {
  59. __asm__ volatile("nop" ::: "memory");
  60. }
/*
 * Core of READ_ONCE(): copy `size` bytes from the volatile source `p`
 * into `res`.  For machine-word sizes (1/2/4/8) a single volatile load
 * is used so the compiler can neither tear, repeat, nor elide the
 * access; any other size falls back to memcpy bracketed by compiler
 * barriers.  Expects `p`, `res` and `size` to be in scope at the
 * expansion site (see __read_once_size()).
 * NOTE(review): __u8/__u16/__u32/__u64 are not defined in this header;
 * presumably supplied by vdso_datapage.h -- confirm.
 */
#define __READ_ONCE_SIZE                                      \
({                                                            \
    switch (size)                                             \
    {                                                         \
    case 1:                                                   \
        *(__u8 *)res = *(volatile __u8 *)p;                   \
        break;                                                \
    case 2:                                                   \
        *(__u16 *)res = *(volatile __u16 *)p;                 \
        break;                                                \
    case 4:                                                   \
        *(__u32 *)res = *(volatile __u32 *)p;                 \
        break;                                                \
    case 8:                                                   \
        *(__u64 *)res = *(volatile __u64 *)p;                 \
        break;                                                \
    default:                                                  \
        barrier();                                            \
        __builtin_memcpy((void *)res, (const void *)p, size); \
        barrier();                                            \
    }                                                         \
})
/* Bind the names __READ_ONCE_SIZE expects (p, res, size) and perform
 * the single volatile read of `size` bytes from p into res. */
static inline void __read_once_size(const volatile void *p, void *res, int size)
{
    __READ_ONCE_SIZE;
}
/*
 * Read x exactly once, as if it were volatile, without requiring x to
 * be volatile-qualified: the value is loaded through __read_once_size()
 * into a union and yielded by the statement expression.
 * NOTE(review): when `check` is 0 the union is returned uninitialized;
 * every use in this header goes through READ_ONCE(), which passes 1.
 * NOTE(review): smp_read_barrier_depends() is not defined in this
 * header -- presumably provided by vdso_config.h; confirm.
 */
#define __READ_ONCE(x, check)                                            \
({                                                                       \
    union {                                                              \
        typeof(x) __val;                                                 \
        char __c[1];                                                     \
    } __u;                                                               \
    if (check)                                                           \
        __read_once_size(&(x), __u.__c, sizeof(x));                      \
    smp_read_barrier_depends(); /* Enforce dependency ordering from x */ \
    __u.__val;                                                           \
})

#define READ_ONCE(x) __READ_ONCE(x, 1)
/* Kernel-updated vdso data pages, one per clock base (CS_BASES); kept
 * hidden so references resolve within the vdso image itself. */
extern struct vdso_data _vdso_data[CS_BASES] __attribute__((visibility("hidden")));

/* Return the base of the vdso data pages for this architecture. */
static inline struct vdso_data *__arch_get_vdso_data(void)
{
    return _vdso_data;
}
/*
 * Seqlock reader entry: spin until no writer is mid-update (the
 * sequence count is even), then issue a read barrier so the following
 * data reads cannot be hoisted above the seq load.  Returns the
 * sequence snapshot to pass to rt_vdso_read_retry().
 */
static inline uint32_t rt_vdso_read_begin(const struct vdso_data *vd)
{
    uint32_t seq;

    /* An odd count means a writer is currently updating the data. */
    while (unlikely((seq = READ_ONCE(vd->seq)) & 1))
        cpu_relax();
    rt_hw_rmb();
    return seq;
}
  112. static inline uint32_t rt_vdso_read_retry(const struct vdso_data *vd,
  113. uint32_t start)
  114. {
  115. uint32_t seq;
  116. rt_hw_rmb();
  117. seq = READ_ONCE(vd->seq);
  118. return seq != start;
  119. }
  120. #endif