/*
 * Copyright (c) 2006-2021, RT-Thread Development Team
 * Copyright (c) 2014 - 2020 Xilinx, Inc. All rights reserved.
 * Copyright (c) 2021 WangHuachen. All rights reserved.
 * SPDX-License-Identifier: MIT
 *
 * Change Logs:
 * Date           Author        Notes
 * 2020-03-19     WangHuachen   first version
 * 2021-05-10     WangHuachen   add more functions
 */

#include <rthw.h>
#include <rtdef.h>

#include "xpseudo_asm_gcc.h"
#include "xreg_cortexr5.h"

#define IRQ_FIQ_MASK 0xC0 /* Mask IRQ and FIQ interrupts in cpsr */

typedef intptr_t     INTPTR;
typedef rt_uint32_t  u32;

#if defined (__GNUC__)
#define asm_inval_dc_line_mva_poc(param)        __asm__ __volatile__("mcr " \
        XREG_CP15_INVAL_DC_LINE_MVA_POC :: "r" (param))

#define asm_clean_inval_dc_line_sw(param)       __asm__ __volatile__("mcr " \
        XREG_CP15_CLEAN_INVAL_DC_LINE_SW :: "r" (param))

#define asm_clean_inval_dc_line_mva_poc(param)  __asm__ __volatile__("mcr " \
        XREG_CP15_CLEAN_INVAL_DC_LINE_MVA_POC :: "r" (param))

#define asm_inval_ic_line_mva_pou(param)        __asm__ __volatile__("mcr " \
        XREG_CP15_INVAL_IC_LINE_MVA_POU :: "r" (param))
#elif defined (__ICCARM__)
#define asm_inval_dc_line_mva_poc(param)        __asm volatile("mcr " \
        XREG_CP15_INVAL_DC_LINE_MVA_POC :: "r" (param))

#define asm_clean_inval_dc_line_sw(param)       __asm volatile("mcr " \
        XREG_CP15_CLEAN_INVAL_DC_LINE_SW :: "r" (param))

#define asm_clean_inval_dc_line_mva_poc(param)  __asm volatile("mcr " \
        XREG_CP15_CLEAN_INVAL_DC_LINE_MVA_POC :: "r" (param))

#define asm_inval_ic_line_mva_pou(param)        __asm volatile("mcr " \
        XREG_CP15_INVAL_IC_LINE_MVA_POU :: "r" (param))
#endif

void Xil_DCacheEnable(void);
void Xil_DCacheDisable(void);
void Xil_DCacheInvalidate(void);
void Xil_DCacheInvalidateRange(INTPTR adr, u32 len);
void Xil_DCacheFlush(void);
void Xil_DCacheFlushRange(INTPTR adr, u32 len);
void Xil_DCacheInvalidateLine(INTPTR adr);
void Xil_DCacheFlushLine(INTPTR adr);
void Xil_DCacheStoreLine(INTPTR adr);

void Xil_ICacheEnable(void);
void Xil_ICacheDisable(void);
void Xil_ICacheInvalidate(void);
void Xil_ICacheInvalidateRange(INTPTR adr, u32 len);
void Xil_ICacheInvalidateLine(INTPTR adr);
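
/*
 * Xil_DCacheEnable: enable the L1 data cache. The cache is invalidated first
 * so no stale lines become visible, then the C bit is set in the CP15 system
 * control register (SCTLR).
 */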
void Xil_DCacheEnable(void)
{
    register u32 CtrlReg;

    /* enable caches only if they are disabled */
#if defined (__GNUC__)
    CtrlReg = mfcp(XREG_CP15_SYS_CONTROL);
#elif defined (__ICCARM__)
    mfcp(XREG_CP15_SYS_CONTROL, CtrlReg);
#endif
    if ((CtrlReg & XREG_CP15_CONTROL_C_BIT) == 0x00000000U) {
        /* invalidate the Data cache */
        Xil_DCacheInvalidate();

        /* enable the Data cache */
        CtrlReg |= (XREG_CP15_CONTROL_C_BIT);
        mtcp(XREG_CP15_SYS_CONTROL, CtrlReg);
    }
}
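
/*
 * Xil_DCacheDisable: flush (clean and invalidate) the L1 data cache, then
 * clear the C bit in the CP15 system control register.
 */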
void Xil_DCacheDisable(void)
{
    register u32 CtrlReg;

    /* clean and invalidate the Data cache */
    Xil_DCacheFlush();

    /* disable the Data cache */
#if defined (__GNUC__)
    CtrlReg = mfcp(XREG_CP15_SYS_CONTROL);
#elif defined (__ICCARM__)
    mfcp(XREG_CP15_SYS_CONTROL, CtrlReg);
#endif
    CtrlReg &= ~(XREG_CP15_CONTROL_C_BIT);
    mtcp(XREG_CP15_SYS_CONTROL, CtrlReg);
}
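
/*
 * Xil_DCacheInvalidate: invalidate the entire L1 data cache. Dirty lines are
 * discarded, not written back, so unwritten data in the cache is lost.
 */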
void Xil_DCacheInvalidate(void)
{
    u32 currmask;

    currmask = mfcpsr();
    mtcpsr(currmask | IRQ_FIQ_MASK);

    mtcp(XREG_CP15_CACHE_SIZE_SEL, 0);

    /* invalidate all D cache */
    mtcp(XREG_CP15_INVAL_DC_ALL, 0);

    mtcpsr(currmask);
}
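
/*
 * Xil_DCacheInvalidateLine: invalidate the single 32-byte data cache line
 * containing the given address (the address is aligned down internally).
 */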
void Xil_DCacheInvalidateLine(INTPTR adr)
{
    u32 currmask;

    currmask = mfcpsr();
    mtcpsr(currmask | IRQ_FIQ_MASK);

    mtcp(XREG_CP15_CACHE_SIZE_SEL, 0);
    mtcp(XREG_CP15_INVAL_DC_LINE_MVA_POC, (adr & (~0x1F)));

    /* Wait for invalidate to complete */
    dsb();
    mtcpsr(currmask);
}
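
/*
 * Xil_DCacheInvalidateRange: invalidate the data cache lines covering
 * [adr, adr + len). Partially covered lines at either end of the range are
 * flushed rather than invalidated, so adjacent data sharing those lines is
 * not lost.
 */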
void Xil_DCacheInvalidateRange(INTPTR adr, u32 len)
{
    const u32 cacheline = 32U;
    u32 end;
    u32 tempadr = adr;
    u32 tempend;
    u32 currmask;

    currmask = mfcpsr();
    mtcpsr(currmask | IRQ_FIQ_MASK);

    if (len != 0U) {
        end = tempadr + len;
        tempend = end;

        /* Select L1 Data cache in CSSR */
        mtcp(XREG_CP15_CACHE_SIZE_SEL, 0U);

        if ((tempadr & (cacheline - 1U)) != 0U) {
            tempadr &= (~(cacheline - 1U));
            Xil_DCacheFlushLine(tempadr);
        }
        if ((tempend & (cacheline - 1U)) != 0U) {
            tempend &= (~(cacheline - 1U));
            Xil_DCacheFlushLine(tempend);
        }

        while (tempadr < tempend) {
            /* Invalidate Data cache line */
            asm_inval_dc_line_mva_poc(tempadr);
            tempadr += cacheline;
        }
    }

    dsb();
    mtcpsr(currmask);
}
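
/*
 * Xil_DCacheFlush: clean and invalidate the entire L1 data cache by set/way.
 * The cache geometry (line size, number of ways and sets) is read from the
 * cache size ID register (CCSIDR) selected through CSSELR.
 */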
void Xil_DCacheFlush(void)
{
    register u32 CsidReg, C7Reg;
    u32 CacheSize, LineSize, NumWays;
    u32 Way, WayIndex, Set, SetIndex, NumSet;
    u32 currmask;

    currmask = mfcpsr();
    mtcpsr(currmask | IRQ_FIQ_MASK);

    /* Select cache level 0 and D cache in CSSR */
    mtcp(XREG_CP15_CACHE_SIZE_SEL, 0);

#if defined (__GNUC__)
    CsidReg = mfcp(XREG_CP15_CACHE_SIZE_ID);
#elif defined (__ICCARM__)
    mfcp(XREG_CP15_CACHE_SIZE_ID, CsidReg);
#endif

    /* Determine Cache Size */
    CacheSize = (CsidReg >> 13U) & 0x000001FFU;
    CacheSize += 0x00000001U;
    CacheSize *= (u32)128; /* to get number of bytes */

    /* Number of Ways */
    NumWays = (CsidReg & 0x000003ffU) >> 3U;
    NumWays += 0x00000001U;

    /* Get the cacheline size, way size, index size from csidr */
    LineSize = (CsidReg & 0x00000007U) + 0x00000004U;

    NumSet = CacheSize / NumWays;
    NumSet /= (0x00000001U << LineSize);

    Way = 0U;
    Set = 0U;

    /* Clean and invalidate all the cachelines */
    for (WayIndex = 0U; WayIndex < NumWays; WayIndex++) {
        for (SetIndex = 0U; SetIndex < NumSet; SetIndex++) {
            C7Reg = Way | Set;
            /* Flush by Set/Way */
            asm_clean_inval_dc_line_sw(C7Reg);
            Set += (0x00000001U << LineSize);
        }
        Set = 0U;
        Way += 0x40000000U;
    }

    /* Wait for flush to complete */
    dsb();
    mtcpsr(currmask);
}
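
/*
 * Xil_DCacheFlushLine: clean and invalidate the single 32-byte data cache
 * line containing the given address.
 */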
void Xil_DCacheFlushLine(INTPTR adr)
{
    u32 currmask;

    currmask = mfcpsr();
    mtcpsr(currmask | IRQ_FIQ_MASK);

    mtcp(XREG_CP15_CACHE_SIZE_SEL, 0);
    mtcp(XREG_CP15_CLEAN_INVAL_DC_LINE_MVA_POC, (adr & (~0x1F)));

    /* Wait for flush to complete */
    dsb();
    mtcpsr(currmask);
}
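
/*
 * Xil_DCacheFlushRange: clean and invalidate the data cache lines covering
 * [adr, adr + len), writing any dirty data back to memory.
 */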
void Xil_DCacheFlushRange(INTPTR adr, u32 len)
{
    u32 LocalAddr = adr;
    const u32 cacheline = 32U;
    u32 end;
    u32 currmask;

    currmask = mfcpsr();
    mtcpsr(currmask | IRQ_FIQ_MASK);

    if (len != 0x00000000U) {
        /* Back the starting address up to the start of a cache line
         * perform cache operations until adr+len
         */
        end = LocalAddr + len;
        LocalAddr &= ~(cacheline - 1U);

        while (LocalAddr < end) {
            /* Flush Data cache line */
            asm_clean_inval_dc_line_mva_poc(LocalAddr);
            LocalAddr += cacheline;
        }
    }

    dsb();
    mtcpsr(currmask);
}
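
/*
 * Xil_DCacheStoreLine: clean (write back without invalidating) the data
 * cache line containing the given address.
 */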
void Xil_DCacheStoreLine(INTPTR adr)
{
    u32 currmask;

    currmask = mfcpsr();
    mtcpsr(currmask | IRQ_FIQ_MASK);

    mtcp(XREG_CP15_CACHE_SIZE_SEL, 0);
    mtcp(XREG_CP15_CLEAN_DC_LINE_MVA_POC, (adr & (~0x1F)));

    /* Wait for store to complete */
    dsb();
    isb();
    mtcpsr(currmask);
}
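
/*
 * Xil_ICacheEnable: invalidate the instruction cache and set the I bit in
 * the CP15 system control register if it is not already set.
 */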
void Xil_ICacheEnable(void)
{
    register u32 CtrlReg;

    /* enable caches only if they are disabled */
#if defined (__GNUC__)
    CtrlReg = mfcp(XREG_CP15_SYS_CONTROL);
#elif defined (__ICCARM__)
    mfcp(XREG_CP15_SYS_CONTROL, CtrlReg);
#endif
    if ((CtrlReg & XREG_CP15_CONTROL_I_BIT) == 0x00000000U) {
        /* invalidate the instruction cache */
        mtcp(XREG_CP15_INVAL_IC_POU, 0);

        /* enable the instruction cache */
        CtrlReg |= (XREG_CP15_CONTROL_I_BIT);
        mtcp(XREG_CP15_SYS_CONTROL, CtrlReg);
    }
}
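
/*
 * Xil_ICacheDisable: invalidate the instruction cache and clear the I bit in
 * the CP15 system control register.
 */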
void Xil_ICacheDisable(void)
{
    register u32 CtrlReg;

    dsb();

    /* invalidate the instruction cache */
    mtcp(XREG_CP15_INVAL_IC_POU, 0);

    /* disable the instruction cache */
#if defined (__GNUC__)
    CtrlReg = mfcp(XREG_CP15_SYS_CONTROL);
#elif defined (__ICCARM__)
    mfcp(XREG_CP15_SYS_CONTROL, CtrlReg);
#endif
    CtrlReg &= ~(XREG_CP15_CONTROL_I_BIT);
    mtcp(XREG_CP15_SYS_CONTROL, CtrlReg);
}
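
/*
 * Xil_ICacheInvalidate: invalidate the entire L1 instruction cache.
 */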
void Xil_ICacheInvalidate(void)
{
    u32 currmask;

    currmask = mfcpsr();
    mtcpsr(currmask | IRQ_FIQ_MASK);

    mtcp(XREG_CP15_CACHE_SIZE_SEL, 1);

    /* invalidate the instruction cache */
    mtcp(XREG_CP15_INVAL_IC_POU, 0);

    /* Wait for invalidate to complete */
    dsb();
    mtcpsr(currmask);
}
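
/*
 * Xil_ICacheInvalidateLine: invalidate the single 32-byte instruction cache
 * line containing the given address.
 */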
void Xil_ICacheInvalidateLine(INTPTR adr)
{
    u32 currmask;

    currmask = mfcpsr();
    mtcpsr(currmask | IRQ_FIQ_MASK);

    mtcp(XREG_CP15_CACHE_SIZE_SEL, 1);
    mtcp(XREG_CP15_INVAL_IC_LINE_MVA_POU, (adr & (~0x1F)));

    /* Wait for invalidate to complete */
    dsb();
    mtcpsr(currmask);
}
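
/*
 * Xil_ICacheInvalidateRange: invalidate the instruction cache lines covering
 * [adr, adr + len).
 */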
void Xil_ICacheInvalidateRange(INTPTR adr, u32 len)
{
    u32 LocalAddr = adr;
    const u32 cacheline = 32U;
    u32 end;
    u32 currmask;

    currmask = mfcpsr();
    mtcpsr(currmask | IRQ_FIQ_MASK);

    if (len != 0x00000000U) {
        /* Back the starting address up to the start of a cache line
         * perform cache operations until adr+len
         */
        end = LocalAddr + len;
        LocalAddr = LocalAddr & ~(cacheline - 1U);

        /* Select L1 I-cache in CSSR */
        mtcp(XREG_CP15_CACHE_SIZE_SEL, 1U);

        while (LocalAddr < end) {
            /* Invalidate L1 I-cache line */
            asm_inval_ic_line_mva_pou(LocalAddr);
            LocalAddr += cacheline;
        }
    }

    /* Wait for invalidate to complete */
    dsb();
    mtcpsr(currmask);
}
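
/*
 * RT-Thread cache maintenance interface: map the generic
 * rt_hw_cpu_icache_ops/rt_hw_cpu_dcache_ops requests onto the Xilinx range
 * operations above.
 */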
void rt_hw_cpu_icache_ops(int ops, void *addr, int size)
{
    if (ops == RT_HW_CACHE_INVALIDATE)
        Xil_ICacheInvalidateRange((INTPTR)addr, size);
}

void rt_hw_cpu_dcache_ops(int ops, void *addr, int size)
{
    if (ops == RT_HW_CACHE_FLUSH)
        Xil_DCacheFlushRange((INTPTR)addr, size);
    else if (ops == RT_HW_CACHE_INVALIDATE)
        Xil_DCacheInvalidateRange((INTPTR)addr, size);
}
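
/*
 * Cache status queries for RT-Thread: return the I and C enable bits from the
 * CP15 system control register (non-zero means the cache is enabled).
 */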
rt_base_t rt_hw_cpu_icache_status(void)
{
    register u32 CtrlReg;

#if defined (__GNUC__)
    CtrlReg = mfcp(XREG_CP15_SYS_CONTROL);
#elif defined (__ICCARM__)
    mfcp(XREG_CP15_SYS_CONTROL, CtrlReg);
#endif
    return CtrlReg & XREG_CP15_CONTROL_I_BIT;
}

rt_base_t rt_hw_cpu_dcache_status(void)
{
    register u32 CtrlReg;

#if defined (__GNUC__)
    CtrlReg = mfcp(XREG_CP15_SYS_CONTROL);
#elif defined (__ICCARM__)
    mfcp(XREG_CP15_SYS_CONTROL, CtrlReg);
#endif
    return CtrlReg & XREG_CP15_CONTROL_C_BIT;
}
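
/*
 * Usage sketch (illustrative only, not part of this port): a driver that
 * hands a buffer to a DMA engine would typically flush the source buffer
 * before starting the transfer and invalidate the destination buffer after
 * it completes, e.g.
 *
 *     rt_hw_cpu_dcache_ops(RT_HW_CACHE_FLUSH, tx_buf, tx_len);
 *     ... start DMA transfer ...
 *     rt_hw_cpu_dcache_ops(RT_HW_CACHE_INVALIDATE, rx_buf, rx_len);
 *
 * where tx_buf, rx_buf, tx_len and rx_len are hypothetical driver variables.
 */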