cache.c 4.2 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155
  1. /*
  2. * Copyright (c) 2006-2021, RT-Thread Development Team
  3. *
  4. * SPDX-License-Identifier: Apache-2.0
  5. *
  6. * Change Logs:
  7. * Date Author Notes
  8. * 2019-03-29 quanzhao the first version
  9. */
  10. #include <rthw.h>
  11. #include <rtdef.h>
  12. rt_inline rt_uint32_t rt_cpu_icache_line_size(void)
  13. {
  14. rt_uint32_t ctr;
  15. asm volatile ("mrc p15, 0, %0, c0, c0, 1" : "=r"(ctr));
  16. return 4 << (ctr & 0xF);
  17. }
  18. rt_inline rt_uint32_t rt_cpu_dcache_line_size(void)
  19. {
  20. rt_uint32_t ctr;
  21. asm volatile ("mrc p15, 0, %0, c0, c0, 1" : "=r"(ctr));
  22. return 4 << ((ctr >> 16) & 0xF);
  23. }
  24. void rt_hw_cpu_icache_invalidate(void *addr, int size)
  25. {
  26. rt_uint32_t line_size = rt_cpu_icache_line_size();
  27. rt_uint32_t start_addr = (rt_uint32_t)addr;
  28. rt_uint32_t end_addr = (rt_uint32_t) addr + size + line_size - 1;
  29. asm volatile ("dmb":::"memory");
  30. start_addr &= ~(line_size - 1);
  31. end_addr &= ~(line_size - 1);
  32. while (start_addr < end_addr)
  33. {
  34. asm volatile ("mcr p15, 0, %0, c7, c5, 1" :: "r"(start_addr)); /* icimvau */
  35. start_addr += line_size;
  36. }
  37. asm volatile ("dsb\n\tisb":::"memory");
  38. }
/**
 * Invalidate (discard) data-cache lines covering [addr, addr + size).
 *
 * NOTE(review): the range is rounded outward to whole lines and then
 * invalidated with DCIMVAC, so any dirty data sharing the first/last
 * line with this range is discarded as well. Callers must pass
 * line-aligned buffers, or use rt_hw_cpu_dcache_inv_range() below,
 * which cleans the partial edge lines first.
 *
 * @param addr start address of the range
 * @param size length of the range in bytes
 */
void rt_hw_cpu_dcache_invalidate(void *addr, int size)
{
    rt_uint32_t line_size = rt_cpu_dcache_line_size();
    rt_uint32_t start_addr = (rt_uint32_t)addr;
    /* Round the end up so a partial trailing line is still covered. */
    rt_uint32_t end_addr = (rt_uint32_t) addr + size + line_size - 1;

    /* Order prior memory accesses before the maintenance loop. */
    asm volatile ("dmb":::"memory");
    /* Align both bounds down to a line boundary. */
    start_addr &= ~(line_size - 1);
    end_addr &= ~(line_size - 1);
    while (start_addr < end_addr)
    {
        /* DCIMVAC: invalidate d-cache line by MVA to PoC. */
        asm volatile ("mcr p15, 0, %0, c7, c6, 1" :: "r"(start_addr)); /* dcimvac */
        start_addr += line_size;
    }
    /* Wait until all invalidations have completed. */
    asm volatile ("dsb":::"memory");
}
/**
 * Safely invalidate data-cache lines covering [addr, addr + size).
 *
 * Unlike rt_hw_cpu_dcache_invalidate(), partial lines at either end of
 * the range are cleaned-and-invalidated (DCCIMVAC) so that unrelated
 * dirty data sharing those lines is written back instead of lost; only
 * the fully-covered interior lines are invalidated outright (DCIMVAC).
 *
 * @param addr start address of the range
 * @param size length of the range in bytes
 */
void rt_hw_cpu_dcache_inv_range(void *addr, int size)
{
    rt_uint32_t line_size = rt_cpu_dcache_line_size();
    rt_uint32_t start_addr = (rt_uint32_t)addr;
    rt_uint32_t end_addr = (rt_uint32_t)addr + size;

    /* Order prior memory accesses before the maintenance sequence. */
    asm volatile ("dmb":::"memory");
    /* Leading partial line: clean+invalidate so bytes before addr survive. */
    if ((start_addr & (line_size - 1)) != 0)
    {
        start_addr &= ~(line_size - 1);
        /* DCCIMVAC: clean and invalidate line by MVA to PoC. */
        asm volatile ("mcr p15, 0, %0, c7, c14, 1" :: "r"(start_addr));
        start_addr += line_size;
        asm volatile ("dsb":::"memory");
    }
    /* Trailing partial line: same clean+invalidate treatment. */
    if ((end_addr & (line_size - 1)) != 0)
    {
        end_addr &= ~(line_size - 1);
        asm volatile ("mcr p15, 0, %0, c7, c14, 1" :: "r"(end_addr));
        asm volatile ("dsb":::"memory");
    }
    /* Interior lines are fully covered by the range: plain invalidate. */
    while (start_addr < end_addr)
    {
        /* DCIMVAC: invalidate d-cache line by MVA to PoC. */
        asm volatile ("mcr p15, 0, %0, c7, c6, 1" :: "r"(start_addr)); /* dcimvac */
        start_addr += line_size;
    }
    /* Wait until all maintenance operations have completed. */
    asm volatile ("dsb":::"memory");
}
  80. void rt_hw_cpu_dcache_clean(void *addr, int size)
  81. {
  82. rt_uint32_t line_size = rt_cpu_dcache_line_size();
  83. rt_uint32_t start_addr = (rt_uint32_t)addr;
  84. rt_uint32_t end_addr = (rt_uint32_t) addr + size + line_size - 1;
  85. asm volatile ("dmb":::"memory");
  86. start_addr &= ~(line_size - 1);
  87. end_addr &= ~(line_size - 1);
  88. while (start_addr < end_addr)
  89. {
  90. asm volatile ("mcr p15, 0, %0, c7, c10, 1" :: "r"(start_addr)); /* dccmvac */
  91. start_addr += line_size;
  92. }
  93. asm volatile ("dsb":::"memory");
  94. }
  95. void rt_hw_cpu_dcache_clean_and_invalidate(void *addr, int size)
  96. {
  97. rt_uint32_t line_size = rt_cpu_dcache_line_size();
  98. rt_uint32_t start_addr = (rt_uint32_t)addr;
  99. rt_uint32_t end_addr = (rt_uint32_t) addr + size + line_size - 1;
  100. asm volatile ("dmb":::"memory");
  101. start_addr &= ~(line_size - 1);
  102. end_addr &= ~(line_size - 1);
  103. while (start_addr < end_addr)
  104. {
  105. asm volatile ("mcr p15, 0, %0, c7, c10, 1" :: "r"(start_addr)); /* dccmvac */
  106. asm volatile ("mcr p15, 0, %0, c7, c6, 1" :: "r"(start_addr)); /* dcimvac */
  107. start_addr += line_size;
  108. }
  109. asm volatile ("dsb":::"memory");
  110. }
  111. void rt_hw_cpu_icache_ops(int ops, void *addr, int size)
  112. {
  113. if (ops == RT_HW_CACHE_INVALIDATE)
  114. {
  115. rt_hw_cpu_icache_invalidate(addr, size);
  116. }
  117. }
  118. void rt_hw_cpu_dcache_ops(int ops, void *addr, int size)
  119. {
  120. if (ops == RT_HW_CACHE_FLUSH)
  121. {
  122. rt_hw_cpu_dcache_clean(addr, size);
  123. }
  124. else if (ops == RT_HW_CACHE_INVALIDATE)
  125. {
  126. rt_hw_cpu_dcache_invalidate(addr, size);
  127. }
  128. }
  129. rt_base_t rt_hw_cpu_icache_status(void)
  130. {
  131. return 0;
  132. }
  133. rt_base_t rt_hw_cpu_dcache_status(void)
  134. {
  135. return 0;
  136. }