cache.c

/*
 * Copyright (c) 2006-2021, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2021-01-29     lizhirui     first version
 * 2021-11-05     JasonHu      add C908 cache inst
 * 2022-11-09     WangXiaoyao  Support cache coherence operations;
 *                             improve portability and make
 *                             no assumption on undefined behavior
 */
#include <rthw.h>
#include <rtdef.h>
#include <board.h>
#include <riscv.h>

#include "opcode.h"
#include "cache.h"

#define L1_CACHE_BYTES (64)
/**
 * The GCC toolchain in use does not support the T-Head cache-maintenance
 * instructions, so they are emitted as fixed opcodes instead.
 * The following functions must not be optimized.
 */
static void dcache_wb_range(unsigned long start, unsigned long end) __attribute__((optimize("O0")));
static void dcache_inv_range(unsigned long start, unsigned long end) __attribute__((optimize("O0")));
static void dcache_wbinv_range(unsigned long start, unsigned long end) __attribute__((optimize("O0")));
static void icache_inv_range(unsigned long start, unsigned long end) __attribute__((optimize("O0")));
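
/*
 * CACHE_OP_RANGE(instr) aligns `start` down to a cache-line boundary and
 * issues `instr` once per L1_CACHE_BYTES until `end` is reached.
 * CACHE_OP_RS1 is the inline-asm placeholder (%0) substituted into the
 * opcode macros; it is bound to the loop address by the "r"(i) operand.
 */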
#define CACHE_OP_RS1 %0

#define CACHE_OP_RANGE(instr)                                 \
    {                                                         \
        rt_ubase_t i = start & ~(L1_CACHE_BYTES - 1);         \
        for (; i < end; i += L1_CACHE_BYTES)                  \
        {                                                     \
            __asm__ volatile(instr ::"r"(i)                   \
                             : "memory");                     \
        }                                                     \
    }
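
/*
 * Range helpers built on the T-Head cache instructions, operating by
 * virtual address one cache line at a time: "wb" cleans (writes back),
 * "inv" invalidates, "wbinv" does both, and the "l1" variant cleans the
 * L1 data cache only.
 */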
static void dcache_wb_range(unsigned long start, unsigned long end)
{
    CACHE_OP_RANGE(OPC_DCACHE_CVA(CACHE_OP_RS1));
}

static void dcachel1_wb_range(unsigned long start, unsigned long end)
{
    CACHE_OP_RANGE(OPC_DCACHE_CVAL1(CACHE_OP_RS1));
}

static void dcache_inv_range(unsigned long start, unsigned long end)
{
    CACHE_OP_RANGE(OPC_DCACHE_IVA(CACHE_OP_RS1));
}

static void dcache_wbinv_range(unsigned long start, unsigned long end)
{
    CACHE_OP_RANGE(OPC_DCACHE_CIVA(CACHE_OP_RS1));
}

static void icache_inv_range(unsigned long start, unsigned long end)
{
    CACHE_OP_RANGE(OPC_ICACHE_IVA(CACHE_OP_RS1));
}
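
/* Both L1 caches use a fixed 64-byte line size on this core. */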
rt_inline rt_uint32_t rt_cpu_icache_line_size(void)
{
    return L1_CACHE_BYTES;
}

rt_inline rt_uint32_t rt_cpu_dcache_line_size(void)
{
    return L1_CACHE_BYTES;
}
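
/*
 * Per-hart ("local") cache maintenance entry points. Each operation covers
 * [addr, addr + size) and is followed by a synchronization barrier
 * (rt_hw_cpu_sync / rt_hw_cpu_sync_i) so it completes before later accesses.
 */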
void rt_hw_cpu_icache_invalidate_local(void *addr, int size)
{
    icache_inv_range((unsigned long)addr, (unsigned long)((unsigned char *)addr + size));
    rt_hw_cpu_sync_i();
}

void rt_hw_cpu_dcache_invalidate_local(void *addr, int size)
{
    dcache_inv_range((unsigned long)addr, (unsigned long)((unsigned char *)addr + size));
    rt_hw_cpu_sync();
}

void rt_hw_cpu_dcache_clean_local(void *addr, int size)
{
    dcache_wb_range((unsigned long)addr, (unsigned long)((unsigned char *)addr + size));
    rt_hw_cpu_sync();
}

void rt_hw_cpu_dcache_clean_invalidate_local(void *addr, int size)
{
    dcache_wbinv_range((unsigned long)addr, (unsigned long)((unsigned char *)addr + size));
    rt_hw_cpu_sync();
}
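
/*
 * Clean only the L1 data cache for the given range; used by
 * rt_hw_sync_cache_local below before invalidating the icache when
 * patching code on the local hart.
 */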
void rt_hw_cpu_dcachel1_clean_local(void *addr, int size)
{
    dcachel1_wb_range((unsigned long)addr, (unsigned long)((unsigned char *)addr + size));
    rt_hw_cpu_sync();
}

/**
 * =====================================================
 * Architecture Independent API
 * =====================================================
 */
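
/*
 * Dispatch generic RT_HW_CACHE_FLUSH / RT_HW_CACHE_INVALIDATE requests onto
 * the range operations above. For the data cache, any op other than FLUSH
 * falls through to invalidate.
 */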
void rt_hw_cpu_icache_ops(int ops, void *addr, int size)
{
    if (ops == RT_HW_CACHE_INVALIDATE)
    {
        rt_hw_cpu_icache_invalidate(addr, size);
    }
}
void rt_hw_cpu_dcache_ops(int ops, void *addr, int size)
{
    if (ops == RT_HW_CACHE_FLUSH)
    {
        rt_hw_cpu_dcache_clean(addr, size);
    }
    else
    {
        rt_hw_cpu_dcache_invalidate(addr, size);
    }
}
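
/*
 * Make newly written code visible to the local hart: clean the L1 dcache
 * for the range, then invalidate the icache for the same range.
 */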
void rt_hw_sync_cache_local(void *addr, int size)
{
    rt_hw_cpu_dcachel1_clean_local(addr, size);
    rt_hw_cpu_icache_invalidate_local(addr, size);
}
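
/*
 * Usage sketch (illustrative only, not part of this file): a DMA-capable
 * driver would typically clean a buffer before the device reads it and
 * invalidate it after the device writes it. The buffer names and lengths
 * below are hypothetical.
 *
 *     rt_hw_cpu_dcache_ops(RT_HW_CACHE_FLUSH, tx_buf, tx_len);
 *     // ... start DMA transfer that reads from tx_buf ...
 *
 *     // ... DMA writes into rx_buf, then completes ...
 *     rt_hw_cpu_dcache_ops(RT_HW_CACHE_INVALIDATE, rx_buf, rx_len);
 *     // the CPU may now read the data the device placed in rx_buf
 */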