hpm_l1c_drv.h

/*
 * Copyright (c) 2021 HPMicro
 *
 * SPDX-License-Identifier: BSD-3-Clause
 *
 */
#ifndef _HPM_L1_CACHE_H
#define _HPM_L1_CACHE_H

#include "riscv/riscv_core.h"
#include "hpm_common.h"
#include "hpm_soc.h"

/**
 *
 * @brief L1CACHE driver APIs
 * @defgroup l1cache_interface L1CACHE driver APIs
 * @{
 */

/* cache size is 32KB */
#define HPM_L1C_CACHE_SIZE (uint32_t)(32 * SIZE_1KB)
#define HPM_L1C_ICACHE_SIZE (HPM_L1C_CACHE_SIZE)
#define HPM_L1C_DCACHE_SIZE (HPM_L1C_CACHE_SIZE)

/* cache line size is 64B */
#define HPM_L1C_CACHELINE_SIZE (64)
/* 128 cache lines per way */
#define HPM_L1C_CACHELINES_PER_WAY (128)
/* mcache_ctl register */
/*
 * Controls if the instruction cache is enabled or not.
 *
 * 0 I-Cache is disabled
 * 1 I-Cache is enabled
 */
#define HPM_MCACHE_CTL_IC_EN_SHIFT (0UL)
#define HPM_MCACHE_CTL_IC_EN_MASK (1UL << HPM_MCACHE_CTL_IC_EN_SHIFT)
#define HPM_MCACHE_CTL_IC_EN(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_IC_EN_SHIFT) & HPM_MCACHE_CTL_IC_EN_MASK)

/*
 * Controls if the data cache is enabled or not.
 *
 * 0 D-Cache is disabled
 * 1 D-Cache is enabled
 */
#define HPM_MCACHE_CTL_DC_EN_SHIFT (1UL)
#define HPM_MCACHE_CTL_DC_EN_MASK (1UL << HPM_MCACHE_CTL_DC_EN_SHIFT)
#define HPM_MCACHE_CTL_DC_EN(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_DC_EN_SHIFT) & HPM_MCACHE_CTL_DC_EN_MASK)

/*
 * Parity/ECC error checking enable control for the instruction cache.
 *
 * 0 Disable parity/ECC
 * 1 Reserved
 * 2 Generate exceptions only on uncorrectable parity/ECC errors
 * 3 Generate exceptions on any type of parity/ECC errors
 */
#define HPM_MCACHE_CTL_IC_ECCEN_SHIFT (0x2UL)
#define HPM_MCACHE_CTL_IC_ECCEN_MASK (0x3UL << HPM_MCACHE_CTL_IC_ECCEN_SHIFT)
#define HPM_MCACHE_CTL_IC_ECCEN(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_IC_ECCEN_SHIFT) & HPM_MCACHE_CTL_IC_ECCEN_MASK)

/*
 * Parity/ECC error checking enable control for the data cache.
 *
 * 0 Disable parity/ECC
 * 1 Reserved
 * 2 Generate exceptions only on uncorrectable parity/ECC errors
 * 3 Generate exceptions on any type of parity/ECC errors
 */
#define HPM_MCACHE_CTL_DC_ECCEN_SHIFT (0x4UL)
#define HPM_MCACHE_CTL_DC_ECCEN_MASK (0x3UL << HPM_MCACHE_CTL_DC_ECCEN_SHIFT)
#define HPM_MCACHE_CTL_DC_ECCEN(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_DC_ECCEN_SHIFT) & HPM_MCACHE_CTL_DC_ECCEN_MASK)

/*
 * Controls diagnostic accesses of ECC codes of the instruction cache RAMs.
 * It is set to enable CCTL operations to access the ECC codes. This bit
 * can be set for injecting ECC errors to test the ECC handler.
 *
 * 0 Disable diagnostic accesses of ECC codes
 * 1 Enable diagnostic accesses of ECC codes
 */
#define HPM_MCACHE_CTL_IC_RWECC_SHIFT (0x6UL)
#define HPM_MCACHE_CTL_IC_RWECC_MASK (0x1UL << HPM_MCACHE_CTL_IC_RWECC_SHIFT)
#define HPM_MCACHE_CTL_IC_RWECC(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_IC_RWECC_SHIFT) & HPM_MCACHE_CTL_IC_RWECC_MASK)

/*
 * Controls diagnostic accesses of ECC codes of the data cache RAMs. It is
 * set to enable CCTL operations to access the ECC codes. This bit can be
 * set for injecting ECC errors to test the ECC handler.
 *
 * 0 Disable diagnostic accesses of ECC codes
 * 1 Enable diagnostic accesses of ECC codes
 */
#define HPM_MCACHE_CTL_DC_RWECC_SHIFT (0x7UL)
#define HPM_MCACHE_CTL_DC_RWECC_MASK (0x1UL << HPM_MCACHE_CTL_DC_RWECC_SHIFT)
#define HPM_MCACHE_CTL_DC_RWECC(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_DC_RWECC_SHIFT) & HPM_MCACHE_CTL_DC_RWECC_MASK)
/*
 * Enable bit for Superuser-mode and User-mode software to access
 * ucctlbeginaddr and ucctlcommand CSRs.
 *
 * 0 Disable ucctlbeginaddr and ucctlcommand accesses in S/U mode
 * 1 Enable ucctlbeginaddr and ucctlcommand accesses in S/U mode
 */
#define HPM_MCACHE_CTL_CCTL_SUEN_SHIFT (0x8UL)
#define HPM_MCACHE_CTL_CCTL_SUEN_MASK (0x1UL << HPM_MCACHE_CTL_CCTL_SUEN_SHIFT)
#define HPM_MCACHE_CTL_CCTL_SUEN(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_CCTL_SUEN_SHIFT) & HPM_MCACHE_CTL_CCTL_SUEN_MASK)

/*
 * This bit controls hardware prefetch for instruction fetches to cacheable
 * memory regions when I-Cache size is not 0.
 *
 * 0 Disable hardware prefetch on instruction fetches
 * 1 Enable hardware prefetch on instruction fetches
 */
#define HPM_MCACHE_CTL_IPREF_EN_SHIFT (0x9UL)
#define HPM_MCACHE_CTL_IPREF_EN_MASK (0x1UL << HPM_MCACHE_CTL_IPREF_EN_SHIFT)
#define HPM_MCACHE_CTL_IPREF_EN(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_IPREF_EN_SHIFT) & HPM_MCACHE_CTL_IPREF_EN_MASK)

/*
 * This bit controls hardware prefetch for load/store accesses to cacheable
 * memory regions when D-Cache size is not 0.
 *
 * 0 Disable hardware prefetch on load/store memory accesses.
 * 1 Enable hardware prefetch on load/store memory accesses.
 */
#define HPM_MCACHE_CTL_DPREF_EN_SHIFT (0x10UL)
#define HPM_MCACHE_CTL_DPREF_EN_MASK (0x1UL << HPM_MCACHE_CTL_DPREF_EN_SHIFT)
#define HPM_MCACHE_CTL_DPREF_EN(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_DPREF_EN_SHIFT) & HPM_MCACHE_CTL_DPREF_EN_MASK)

/*
 * I-Cache miss allocation filling policy
 *
 * 0 Cache line data is returned critical (double) word first
 * 1 Cache line data is returned the lowest address (double) word first
 */
#define HPM_MCACHE_CTL_IC_FIRST_WORD_SHIFT (0x11UL)
#define HPM_MCACHE_CTL_IC_FIRST_WORD_MASK (0x1UL << HPM_MCACHE_CTL_IC_FIRST_WORD_SHIFT)
#define HPM_MCACHE_CTL_IC_FIRST_WORD(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_IC_FIRST_WORD_SHIFT) & HPM_MCACHE_CTL_IC_FIRST_WORD_MASK)

/*
 * D-Cache miss allocation filling policy
 *
 * 0 Cache line data is returned critical (double) word first
 * 1 Cache line data is returned the lowest address (double) word first
 */
#define HPM_MCACHE_CTL_DC_FIRST_WORD_SHIFT (0x12UL)
#define HPM_MCACHE_CTL_DC_FIRST_WORD_MASK (0x1UL << HPM_MCACHE_CTL_DC_FIRST_WORD_SHIFT)
#define HPM_MCACHE_CTL_DC_FIRST_WORD(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_DC_FIRST_WORD_SHIFT) & HPM_MCACHE_CTL_DC_FIRST_WORD_MASK)

/*
 * D-Cache Write-Around threshold
 *
 * 0 Disables streaming. All cacheable write misses allocate a cache line
 *   according to PMA settings.
 * 1 Override PMA setting and do not allocate D-Cache entries after
 *   consecutive stores to 4 cache lines.
 * 2 Override PMA setting and do not allocate D-Cache entries after
 *   consecutive stores to 64 cache lines.
 * 3 Override PMA setting and do not allocate D-Cache entries after
 *   consecutive stores to 128 cache lines.
 */
#define HPM_MCACHE_CTL_DC_WAROUND_SHIFT (0x13UL)
#define HPM_MCACHE_CTL_DC_WAROUND_MASK (0x3UL << HPM_MCACHE_CTL_DC_WAROUND_SHIFT)
#define HPM_MCACHE_CTL_DC_WAROUND(x) \
    (uint32_t)(((x) << HPM_MCACHE_CTL_DC_WAROUND_SHIFT) & HPM_MCACHE_CTL_DC_WAROUND_MASK)
/* CCTL command list */
#define HPM_L1C_CCTL_CMD_L1D_VA_INVAL (0UL)
#define HPM_L1C_CCTL_CMD_L1D_VA_WB (1UL)
#define HPM_L1C_CCTL_CMD_L1D_VA_WBINVAL (2UL)
#define HPM_L1C_CCTL_CMD_L1D_VA_LOCK (3UL)
#define HPM_L1C_CCTL_CMD_L1D_VA_UNLOCK (4UL)
#define HPM_L1C_CCTL_CMD_L1D_WBINVAL_ALL (6UL)
#define HPM_L1C_CCTL_CMD_L1D_WB_ALL (7UL)
#define HPM_L1C_CCTL_CMD_L1I_VA_INVAL (8UL)
#define HPM_L1C_CCTL_CMD_L1I_VA_LOCK (11UL)
#define HPM_L1C_CCTL_CMD_L1I_VA_UNLOCK (12UL)
#define HPM_L1C_CCTL_CMD_L1D_IX_INVAL (16UL)
#define HPM_L1C_CCTL_CMD_L1D_IX_WB (17UL)
#define HPM_L1C_CCTL_CMD_L1D_IX_WBINVAL (18UL)
#define HPM_L1C_CCTL_CMD_L1D_IX_RTAG (19UL)
#define HPM_L1C_CCTL_CMD_L1D_IX_RDATA (20UL)
#define HPM_L1C_CCTL_CMD_L1D_IX_WTAG (21UL)
#define HPM_L1C_CCTL_CMD_L1D_IX_WDATA (22UL)
#define HPM_L1C_CCTL_CMD_L1D_INVAL_ALL (23UL)
#define HPM_L1C_CCTL_CMD_L1I_IX_INVAL (24UL)
#define HPM_L1C_CCTL_CMD_L1I_IX_RTAG (27UL)
#define HPM_L1C_CCTL_CMD_L1I_IX_RDATA (28UL)
#define HPM_L1C_CCTL_CMD_L1I_IX_WTAG (29UL)
#define HPM_L1C_CCTL_CMD_L1I_IX_WDATA (30UL)
#define HPM_L1C_CCTL_CMD_SUCCESS (1UL)
#define HPM_L1C_CCTL_CMD_FAIL (0UL)
#ifdef __cplusplus
extern "C" {
#endif

/* get cache control register value */
__attribute__((always_inline)) static inline uint32_t l1c_get_control(void)
{
    return read_csr(CSR_MCACHE_CTL);
}

__attribute__((always_inline)) static inline bool l1c_dc_is_enabled(void)
{
    return l1c_get_control() & HPM_MCACHE_CTL_DC_EN_MASK;
}

__attribute__((always_inline)) static inline bool l1c_ic_is_enabled(void)
{
    return l1c_get_control() & HPM_MCACHE_CTL_IC_EN_MASK;
}
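
/*
 * Illustrative sketch (not part of the driver API): the two helpers above only
 * test the IC_EN/DC_EN bits of mcache_ctl, so callers can use them to guard
 * maintenance work that is pointless while the D-cache is disabled. The helper
 * name and its buffer parameters are hypothetical:
 *
 *     static void writeback_if_cached(uint32_t addr, uint32_t size)
 *     {
 *         if (l1c_dc_is_enabled()) {
 *             l1c_dc_writeback(addr, size);  // declared further down in this header
 *         }
 *     }
 */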
/* mcctlbeginaddr register bitfield layout for CCTL IX type command */
#define HPM_MCCTLBEGINADDR_OFFSET_SHIFT (2UL)
#define HPM_MCCTLBEGINADDR_OFFSET_MASK ((uint32_t) 0xF << HPM_MCCTLBEGINADDR_OFFSET_SHIFT)
#define HPM_MCCTLBEGINADDR_OFFSET(x) \
    (uint32_t)(((x) << HPM_MCCTLBEGINADDR_OFFSET_SHIFT) & HPM_MCCTLBEGINADDR_OFFSET_MASK)
#define HPM_MCCTLBEGINADDR_INDEX_SHIFT (6UL)
#define HPM_MCCTLBEGINADDR_INDEX_MASK ((uint32_t) 0x3F << HPM_MCCTLBEGINADDR_INDEX_SHIFT)
#define HPM_MCCTLBEGINADDR_INDEX(x) \
    (uint32_t)(((x) << HPM_MCCTLBEGINADDR_INDEX_SHIFT) & HPM_MCCTLBEGINADDR_INDEX_MASK)
#define HPM_MCCTLBEGINADDR_WAY_SHIFT (13UL)
#define HPM_MCCTLBEGINADDR_WAY_MASK ((uint32_t) 0x3 << HPM_MCCTLBEGINADDR_WAY_SHIFT)
#define HPM_MCCTLBEGINADDR_WAY(x) \
    (uint32_t)(((x) << HPM_MCCTLBEGINADDR_WAY_SHIFT) & HPM_MCCTLBEGINADDR_WAY_MASK)
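
/*
 * Illustrative sketch: for IX-type CCTL commands, the value written to
 * mcctlbeginaddr is not a memory address but a {way, index, word-offset}
 * selector composed with the macros above. The way/index values here are
 * hypothetical:
 *
 *     uint32_t ix = HPM_MCCTLBEGINADDR_WAY(1)
 *                 | HPM_MCCTLBEGINADDR_INDEX(20)
 *                 | HPM_MCCTLBEGINADDR_OFFSET(0);
 */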
/* set CCTL begin address */
__attribute__((always_inline)) static inline void l1c_cctl_address(uint32_t address)
{
    write_csr(CSR_MCCTLBEGINADDR, address);
}

/* send command */
__attribute__((always_inline)) static inline void l1c_cctl_cmd(uint8_t cmd)
{
    write_csr(CSR_MCCTLCOMMAND, cmd);
}

__attribute__((always_inline)) static inline uint32_t l1c_cctl_get_address(void)
{
    return read_csr(CSR_MCCTLBEGINADDR);
}

/* set target address, then send command */
__attribute__((always_inline)) static inline
void l1c_cctl_address_cmd(uint8_t cmd, uint32_t address)
{
    write_csr(CSR_MCCTLBEGINADDR, address);
    write_csr(CSR_MCCTLCOMMAND, cmd);
}
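
/*
 * Illustrative sketch (assuming M-mode execution; roughly how the VA-based
 * maintenance functions declared later in this header can be built on these
 * primitives): a VA-type CCTL operation is issued one cache line at a time by
 * writing the line address and the command code. The helper name and its
 * parameters are hypothetical; the alignment macros are defined further down:
 *
 *     static void dc_writeback_range(uint32_t start, uint32_t size)
 *     {
 *         uint32_t addr = HPM_L1C_CACHELINE_ALIGN_DOWN(start);
 *         uint32_t end = HPM_L1C_CACHELINE_ALIGN_UP(start + size);
 *         while (addr < end) {
 *             l1c_cctl_address_cmd(HPM_L1C_CCTL_CMD_L1D_VA_WB, addr);
 *             addr += HPM_L1C_CACHELINE_SIZE;
 *         }
 *     }
 */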
#define HPM_MCCTLDATA_I_TAG_ADDRESS_SHIFT (2UL)
#define HPM_MCCTLDATA_I_TAG_ADDRESS_MASK (uint32_t)(0xFFFFF << HPM_MCCTLDATA_I_TAG_ADDRESS_SHIFT)
#define HPM_MCCTLDATA_I_TAG_ADDRESS(x) \
    (uint32_t)(((x) << HPM_MCCTLDATA_I_TAG_ADDRESS_SHIFT) & HPM_MCCTLDATA_I_TAG_ADDRESS_MASK)
#define HPM_MCCTLDATA_I_TAG_LOCK_DUP_SHIFT (29UL)
#define HPM_MCCTLDATA_I_TAG_LOCK_DUP_MASK (uint32_t)(1 << HPM_MCCTLDATA_I_TAG_LOCK_DUP_SHIFT)
#define HPM_MCCTLDATA_I_TAG_LOCK_DUP(x) \
    (uint32_t)(((x) << HPM_MCCTLDATA_I_TAG_LOCK_DUP_SHIFT) & HPM_MCCTLDATA_I_TAG_LOCK_DUP_MASK)
#define HPM_MCCTLDATA_I_TAG_LOCK_SHIFT (30UL)
#define HPM_MCCTLDATA_I_TAG_LOCK_MASK (uint32_t)(1 << HPM_MCCTLDATA_I_TAG_LOCK_SHIFT)
#define HPM_MCCTLDATA_I_TAG_LOCK(x) \
    (uint32_t)(((x) << HPM_MCCTLDATA_I_TAG_LOCK_SHIFT) & HPM_MCCTLDATA_I_TAG_LOCK_MASK)
#define HPM_MCCTLDATA_I_TAG_VALID_SHIFT (31UL)
#define HPM_MCCTLDATA_I_TAG_VALID_MASK (uint32_t)(1 << HPM_MCCTLDATA_I_TAG_VALID_SHIFT)
#define HPM_MCCTLDATA_I_TAG_VALID(x) \
    (uint32_t)(((x) << HPM_MCCTLDATA_I_TAG_VALID_SHIFT) & HPM_MCCTLDATA_I_TAG_VALID_MASK)
#define HPM_MCCTLDATA_D_TAG_MESI_SHIFT (0UL)
#define HPM_MCCTLDATA_D_TAG_MESI_MASK (uint32_t)(0x3 << HPM_MCCTLDATA_D_TAG_MESI_SHIFT)
#define HPM_MCCTLDATA_D_TAG_MESI(x) \
    (uint32_t)(((x) << HPM_MCCTLDATA_D_TAG_MESI_SHIFT) & HPM_MCCTLDATA_D_TAG_MESI_MASK)
#define HPM_MCCTLDATA_D_TAG_LOCK_SHIFT (3UL)
#define HPM_MCCTLDATA_D_TAG_LOCK_MASK (uint32_t)(0x1 << HPM_MCCTLDATA_D_TAG_LOCK_SHIFT)
#define HPM_MCCTLDATA_D_TAG_LOCK(x) \
    (uint32_t)(((x) << HPM_MCCTLDATA_D_TAG_LOCK_SHIFT) & HPM_MCCTLDATA_D_TAG_LOCK_MASK)
#define HPM_MCCTLDATA_D_TAG_TAG_SHIFT (4UL)
#define HPM_MCCTLDATA_D_TAG_TAG_MASK (uint32_t)(0xFFFF << HPM_MCCTLDATA_D_TAG_TAG_SHIFT)
#define HPM_MCCTLDATA_D_TAG_TAG(x) \
    (uint32_t)(((x) << HPM_MCCTLDATA_D_TAG_TAG_SHIFT) & HPM_MCCTLDATA_D_TAG_TAG_MASK)

/**
 * @brief Cache control command read
 *
 * Send IX read tag/data command and return the data read
 * @param[in] cmd Command code
 * @param[in] address Target address
 * @param[out] ecc_data ECC value read back
 * @return data read
 */
ATTR_ALWAYS_INLINE static inline
uint32_t l1c_cctl_address_cmd_read(uint8_t cmd, uint32_t address, uint32_t *ecc_data)
{
    write_csr(CSR_MCCTLBEGINADDR, address);
    write_csr(CSR_MCCTLCOMMAND, cmd);
    *ecc_data = read_csr(CSR_MECC_CODE);
    return read_csr(CSR_MCCTLDATA);
}

/**
 * @brief Cache control command write
 *
 * Send IX write tag/data command
 * @param[in] cmd Command code
 * @param[in] address Target address
 * @param[in] data Data to be written
 * @param[in] ecc_data ECC of data
 */
ATTR_ALWAYS_INLINE static inline
void l1c_cctl_address_cmd_write(uint8_t cmd, uint32_t address, uint32_t data, uint32_t ecc_data)
{
    write_csr(CSR_MCCTLBEGINADDR, address);
    write_csr(CSR_MCCTLCOMMAND, cmd);
    write_csr(CSR_MCCTLDATA, data);
    write_csr(CSR_MECC_CODE, ecc_data);
}
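
/*
 * Illustrative sketch (hypothetical way/index values): reading a D-cache tag
 * entry through an IX command and decoding it with the HPM_MCCTLDATA_D_TAG_*
 * fields defined above:
 *
 *     uint32_t ecc;
 *     uint32_t ix = HPM_MCCTLBEGINADDR_WAY(0) | HPM_MCCTLBEGINADDR_INDEX(0);
 *     uint32_t tag = l1c_cctl_address_cmd_read(HPM_L1C_CCTL_CMD_L1D_IX_RTAG, ix, &ecc);
 *     uint32_t mesi = (tag & HPM_MCCTLDATA_D_TAG_MESI_MASK) >> HPM_MCCTLDATA_D_TAG_MESI_SHIFT;
 *     bool locked = (tag & HPM_MCCTLDATA_D_TAG_LOCK_MASK) != 0;
 */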
#define HPM_L1C_CFG_SET_SHIFT (0UL)
#define HPM_L1C_CFG_SET_MASK (uint32_t)(0x7 << HPM_L1C_CFG_SET_SHIFT)
#define HPM_L1C_CFG_WAY_SHIFT (3UL)
#define HPM_L1C_CFG_WAY_MASK (uint32_t)(0x7 << HPM_L1C_CFG_WAY_SHIFT)
#define HPM_L1C_CFG_SIZE_SHIFT (6UL)
#define HPM_L1C_CFG_SIZE_MASK (uint32_t)(0x7 << HPM_L1C_CFG_SIZE_SHIFT)
#define HPM_L1C_CFG_LOCK_SHIFT (9UL)
#define HPM_L1C_CFG_LOCK_MASK (uint32_t)(0x1 << HPM_L1C_CFG_LOCK_SHIFT)
#define HPM_L1C_CFG_ECC_SHIFT (10UL)
#define HPM_L1C_CFG_ECC_MASK (uint32_t)(0x3 << HPM_L1C_CFG_ECC_SHIFT)
#define HPM_L1C_CFG_LMB_SHIFT (12UL)
#define HPM_L1C_CFG_LMB_MASK (uint32_t)(0x7 << HPM_L1C_CFG_LMB_SHIFT)
#define HPM_L1C_CFG_LM_SIZE_SHIFT (15UL)
#define HPM_L1C_CFG_LM_SIZE_MASK (uint32_t)(0x1F << HPM_L1C_CFG_LM_SIZE_SHIFT)
#define HPM_L1C_CFG_LM_ECC_SHIFT (21UL)
#define HPM_L1C_CFG_LM_ECC_MASK (uint32_t)(0x3 << HPM_L1C_CFG_LM_ECC_SHIFT)
#define HPM_L1C_CFG_SETH_SHIFT (24UL)
#define HPM_L1C_CFG_SETH_MASK (uint32_t)(0x1 << HPM_L1C_CFG_SETH_SHIFT)

/**
 * @brief Align down based on cache line size
 */
#define HPM_L1C_CACHELINE_ALIGN_DOWN(n) ((uint32_t)(n) & ~(HPM_L1C_CACHELINE_SIZE - 1U))

/**
 * @brief Align up based on cache line size
 */
#define HPM_L1C_CACHELINE_ALIGN_UP(n) HPM_L1C_CACHELINE_ALIGN_DOWN((uint32_t)(n) + HPM_L1C_CACHELINE_SIZE - 1U)
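
/*
 * Illustrative sketch (hypothetical buffer): cache maintenance works on whole
 * cache lines, so byte ranges are widened to cache-line boundaries with the
 * macros above. For invalidation in particular, the widened range should cover
 * only the buffer itself, otherwise adjacent data sharing a cache line may be
 * discarded:
 *
 *     uint32_t start = HPM_L1C_CACHELINE_ALIGN_DOWN((uint32_t)rx_buf);
 *     uint32_t end = HPM_L1C_CACHELINE_ALIGN_UP((uint32_t)rx_buf + len);
 *     l1c_dc_invalidate(start, end - start);
 */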
/**
 * @brief Get I-cache configuration
 *
 * @return I-cache config register
 */
ATTR_ALWAYS_INLINE static inline uint32_t l1c_ic_get_config(void)
{
    return read_csr(CSR_MICM_CFG);
}

/**
 * @brief Get D-cache configuration
 *
 * @return D-cache config register
 */
ATTR_ALWAYS_INLINE static inline uint32_t l1c_dc_get_config(void)
{
    return read_csr(CSR_MDCM_CFG);
}
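
/*
 * Illustrative sketch: the HPM_L1C_CFG_* masks defined above apply to the raw
 * micm_cfg/mdcm_cfg value returned by these two helpers, e.g. extracting the
 * way and size fields of the D-cache configuration:
 *
 *     uint32_t cfg = l1c_dc_get_config();
 *     uint32_t way = (cfg & HPM_L1C_CFG_WAY_MASK) >> HPM_L1C_CFG_WAY_SHIFT;
 *     uint32_t size = (cfg & HPM_L1C_CFG_SIZE_MASK) >> HPM_L1C_CFG_SIZE_SHIFT;
 */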

/**
 * @brief D-cache disable
 */
void l1c_dc_disable(void);

/**
 * @brief D-cache enable
 */
void l1c_dc_enable(void);

/**
 * @brief D-cache invalidate by address
 * @param[in] address Start address to be invalidated
 * @param[in] size Size of memory to be invalidated
 */
void l1c_dc_invalidate(uint32_t address, uint32_t size);

/**
 * @brief D-cache writeback by address
 * @param[in] address Start address to be written back
 * @param[in] size Size of memory to be written back
 */
void l1c_dc_writeback(uint32_t address, uint32_t size);

/**
 * @brief D-cache invalidate and writeback by address
 * @param[in] address Start address to be invalidated and written back
 * @param[in] size Size of memory to be invalidated and written back
 */
void l1c_dc_flush(uint32_t address, uint32_t size);

/**
 * @brief D-cache fill and lock by address
 * @param[in] address Start address to be filled and locked
 * @param[in] size Size of memory to be filled and locked
 */
void l1c_dc_fill_lock(uint32_t address, uint32_t size);
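
/*
 * Illustrative sketch (hypothetical DMA driver and buffers): a typical use of
 * the maintenance functions above for non-cache-coherent DMA is to write back
 * the TX buffer before starting a transfer and to invalidate the RX buffer
 * before the CPU reads data that the DMA engine has written:
 *
 *     l1c_dc_writeback((uint32_t)tx_buf, tx_len);   // push CPU writes to memory
 *     dma_start_transfer(tx_buf, rx_buf, tx_len);   // hypothetical DMA call
 *     dma_wait_done();                              // hypothetical
 *     l1c_dc_invalidate((uint32_t)rx_buf, rx_len);  // drop stale cached copies
 */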

/**
 * @brief I-cache disable
 */
void l1c_ic_disable(void);

/**
 * @brief I-cache enable
 */
void l1c_ic_enable(void);

/**
 * @brief I-cache invalidate by address
 * @param[in] address Start address to be invalidated
 * @param[in] size Size of memory to be invalidated
 */
void l1c_ic_invalidate(uint32_t address, uint32_t size);

/**
 * @brief I-cache fill and lock by address
 * @param[in] address Start address to be locked
 * @param[in] size Size of memory to be locked
 */
void l1c_ic_fill_lock(uint32_t address, uint32_t size);

/**
 * @brief Invalidate all I-cache and writeback all D-cache
 */
void l1c_fence_i(void);

/**
 * @brief Invalidate all D-cache
 */
void l1c_dc_invalidate_all(void);

/**
 * @brief Writeback all D-cache
 */
void l1c_dc_writeback_all(void);

/**
 * @brief Flush all D-cache
 */
void l1c_dc_flush_all(void);
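
/*
 * Illustrative sketch (hypothetical RAM-code copy): after writing instructions
 * to memory, for example when copying a routine from flash to RAM, the D-cache
 * must be written back and the I-cache invalidated before jumping to the new
 * code; l1c_fence_i() performs both steps:
 *
 *     memcpy(ram_code, flash_code, code_size);
 *     l1c_fence_i();
 *     ((void (*)(void))ram_code)();
 */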

#ifdef __cplusplus
}
#endif

/**
 * @}
 */
#endif /* _HPM_L1_CACHE_H */