/*
 * Cache operations for the cache instruction.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * (C) Copyright 1996, 97, 99, 2002, 03 Ralf Baechle
 * (C) Copyright 1999 Silicon Graphics, Inc.
 */
#ifndef __CACHE_H__
#define __CACHE_H__

/*
 * MIPS32 virtual address segments.
 * KSEG0 is unmapped/cached, KSEG1 is unmapped/uncached; the index-type
 * blast loops below walk KSEG0 because index ops only need any legal
 * address that selects the right cache index.
 */
#define KUSEG 0x00000000
#define KSEG0 0x80000000
#define KSEG1 0xa0000000
#define KSEG2 0xc0000000
#define KSEG3 0xe0000000
/*
 * Cache operations available on all MIPS processors with R4000-style
 * caches.  The value is the 5-bit op field of the `cache` instruction.
 */
#define Index_Invalidate_I 0x00
#define Index_Writeback_Inv_D 0x01
#define Index_Load_Tag_I 0x04
#define Index_Load_Tag_D 0x05
#define Index_Store_Tag_I 0x08
#define Index_Store_Tag_D 0x09
#define Hit_Invalidate_I 0x10
#define Hit_Invalidate_D 0x11
#define Hit_Writeback_Inv_D 0x15
#define Hit_Writeback_I 0x18
#define Hit_Writeback_D 0x19

/*
 * The lock state is cleared by executing an Index Invalidate,
 * Index Writeback Invalidate, Hit Invalidate, or Hit Writeback
 * Invalidate operation to the locked line, or via an Index Store
 * Tag operation with the lock bit reset in the TagLo register.
 */
#define Fetch_And_Lock_I 0x1c
#define Fetch_And_Lock_D 0x1d
/*
 * R4000-specific cacheops
 */
#define Create_Dirty_Excl_D 0x0d
#define Fill 0x14

/*
 * R4000SC and R4400SC-specific cacheops (secondary cache: _SI / _SD)
 */
#define Index_Invalidate_SI 0x02
#define Index_Writeback_Inv_SD 0x03
#define Index_Load_Tag_SI 0x06
#define Index_Load_Tag_SD 0x07
#define Index_Store_Tag_SI 0x0A
#define Index_Store_Tag_SD 0x0B
#define Create_Dirty_Excl_SD 0x0f
#define Hit_Invalidate_SI 0x12
#define Hit_Invalidate_SD 0x13
#define Hit_Writeback_Inv_SD 0x17
#define Hit_Writeback_SD 0x1b
#define Hit_Set_Virtual_SI 0x1e
#define Hit_Set_Virtual_SD 0x1f
/*
 * R5000-specific cacheops
 */
#define R5K_Page_Invalidate_S 0x17

/*
 * RM7000-specific cacheops
 */
#define Page_Invalidate_T 0x16

/*
 * R10000-specific cacheops
 *
 * Cacheops 0x02, 0x06, 0x0a, 0x0c-0x0e, 0x16, 0x1a and 0x1e are unused.
 * Most of the _S cacheops are identical to the R4000SC _SD cacheops.
 */
#define Index_Writeback_Inv_S 0x03
#define Index_Load_Tag_S 0x07
#define Index_Store_Tag_S 0x0B
#define Hit_Invalidate_S 0x13
#define Cache_Barrier 0x14
#define Hit_Writeback_Inv_S 0x17
#define Index_Load_Data_I 0x18
#define Index_Load_Data_D 0x19
#define Index_Load_Data_S 0x1b
#define Index_Store_Data_I 0x1c
#define Index_Store_Data_D 0x1d
#define Index_Store_Data_S 0x1f
  89. #ifndef __ASSEMBLER__
  90. #ifndef dcache_size
  91. #define dcache_size (g_mips_core.dcache_ways * g_mips_core.dcache_lines_per_way * g_mips_core.dcache_line_size)
  92. #endif
  93. #ifndef icache_size
  94. #define icache_size (g_mips_core.dcache_ways * g_mips_core.dcache_lines_per_way * g_mips_core.dcache_line_size)
  95. #endif
  96. #ifndef cpu_dcache_line_size
  97. #define cpu_dcache_line_size() g_mips_core.icache_line_size
  98. #endif
  99. #ifndef cpu_icache_line_size
  100. #define cpu_icache_line_size() g_mips_core.icache_line_size
  101. #endif
  102. #define cache_op(op, addr) \
  103. __asm__ __volatile__( \
  104. " .set noreorder \n" \
  105. " .set mips3\n\t \n" \
  106. " cache %0, %1 \n" \
  107. " .set mips0 \n" \
  108. " .set reorder" \
  109. : \
  110. : "i" (op), "m" (*(unsigned char *)(addr)))
  111. #define cache16_unroll32(base, op) \
  112. __asm__ __volatile__( \
  113. " .set noreorder \n" \
  114. " .set mips3 \n" \
  115. " cache %1, 0x000(%0); cache %1, 0x010(%0) \n" \
  116. " cache %1, 0x020(%0); cache %1, 0x030(%0) \n" \
  117. " cache %1, 0x040(%0); cache %1, 0x050(%0) \n" \
  118. " cache %1, 0x060(%0); cache %1, 0x070(%0) \n" \
  119. " cache %1, 0x080(%0); cache %1, 0x090(%0) \n" \
  120. " cache %1, 0x0a0(%0); cache %1, 0x0b0(%0) \n" \
  121. " cache %1, 0x0c0(%0); cache %1, 0x0d0(%0) \n" \
  122. " cache %1, 0x0e0(%0); cache %1, 0x0f0(%0) \n" \
  123. " cache %1, 0x100(%0); cache %1, 0x110(%0) \n" \
  124. " cache %1, 0x120(%0); cache %1, 0x130(%0) \n" \
  125. " cache %1, 0x140(%0); cache %1, 0x150(%0) \n" \
  126. " cache %1, 0x160(%0); cache %1, 0x170(%0) \n" \
  127. " cache %1, 0x180(%0); cache %1, 0x190(%0) \n" \
  128. " cache %1, 0x1a0(%0); cache %1, 0x1b0(%0) \n" \
  129. " cache %1, 0x1c0(%0); cache %1, 0x1d0(%0) \n" \
  130. " cache %1, 0x1e0(%0); cache %1, 0x1f0(%0) \n" \
  131. " .set mips0 \n" \
  132. " .set reorder \n" \
  133. : \
  134. : "r" (base), \
  135. "i" (op));
/* Invalidate the I-cache line selected by index (Index_Invalidate_I). */
static inline void flush_icache_line_indexed(rt_ubase_t addr)
{
    cache_op(Index_Invalidate_I, addr);
}
/* Write back and invalidate the D-cache line selected by index. */
static inline void flush_dcache_line_indexed(rt_ubase_t addr)
{
    cache_op(Index_Writeback_Inv_D, addr);
}
/* Invalidate the I-cache line containing addr, if present (hit op). */
static inline void flush_icache_line(rt_ubase_t addr)
{
    cache_op(Hit_Invalidate_I, addr);
}
/* Fetch the line containing addr into the I-cache and lock it there. */
static inline void lock_icache_line(rt_ubase_t addr)
{
    cache_op(Fetch_And_Lock_I, addr);
}
/* Fetch the line containing addr into the D-cache and lock it there. */
static inline void lock_dcache_line(rt_ubase_t addr)
{
    cache_op(Fetch_And_Lock_D, addr);
}
/* Write back and invalidate the D-cache line containing addr (hit op). */
static inline void flush_dcache_line(rt_ubase_t addr)
{
    cache_op(Hit_Writeback_Inv_D, addr);
}
/* Invalidate (without writeback) the D-cache line containing addr. */
static inline void invalidate_dcache_line(rt_ubase_t addr)
{
    cache_op(Hit_Invalidate_D, addr);
}
  164. static inline void blast_dcache16(void)
  165. {
  166. rt_ubase_t start = KSEG0;
  167. rt_ubase_t end = start + dcache_size;
  168. rt_ubase_t addr;
  169. for (addr = start; addr < end; addr += 0x200)
  170. cache16_unroll32(addr, Index_Writeback_Inv_D);
  171. }
  172. static inline void inv_dcache16(void)
  173. {
  174. rt_ubase_t start = KSEG0;
  175. rt_ubase_t end = start + dcache_size;
  176. rt_ubase_t addr;
  177. for (addr = start; addr < end; addr += 0x200)
  178. cache16_unroll32(addr, Hit_Invalidate_D);
  179. }
  180. static inline void blast_icache16(void)
  181. {
  182. rt_ubase_t start = KSEG0;
  183. rt_ubase_t end = start + icache_size;
  184. rt_ubase_t addr;
  185. for (addr = start; addr < end; addr += 0x200)
  186. cache16_unroll32(addr, Index_Invalidate_I);
  187. }
/*
 * Public r4k cache maintenance API; implementations live in the
 * corresponding .c file (not visible here — semantics inferred from
 * names, confirm against the implementation).
 */
void r4k_cache_init(void);
void r4k_cache_flush_all(void);
void r4k_icache_flush_all(void);
void r4k_icache_flush_range(rt_ubase_t addr, rt_ubase_t size);
void r4k_icache_lock_range(rt_ubase_t addr, rt_ubase_t size);
void r4k_dcache_inv(rt_ubase_t addr, rt_ubase_t size);
void r4k_dcache_wback_inv(rt_ubase_t addr, rt_ubase_t size);
  195. #endif /*end of __ASSEMBLER__ */
  196. #endif /* end of __CACHE_H__ */