/*
 * (C) Copyright 2010
 * Texas Instruments, <www.ti.com>
 * Aneesh V <aneesh@ti.com>
 *
 * See file CREDITS for list of people who contributed to this
 * project.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */
#include <linux/types.h>
#include <common.h>
#include <asm/armv7.h>
#include <asm/utils.h>

#define ARMV7_DCACHE_INVAL_ALL		1
#define ARMV7_DCACHE_CLEAN_INVAL_ALL	2
#define ARMV7_DCACHE_INVAL_RANGE	3
#define ARMV7_DCACHE_CLEAN_INVAL_RANGE	4
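
/*
 * Internal operation selectors: the *_ALL operations walk an entire
 * cache level by set/way, while the *_RANGE operations act on virtual
 * addresses (MVAs) between a start and a stop address.
 */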
#ifndef CONFIG_SYS_DCACHE_OFF

/*
 * Write the level and type you want to the Cache Size Selection Register
 * (CSSELR) so that size details can be read back from the Current Cache
 * Size ID Register (CCSIDR).
 */
static void set_csselr(u32 level, u32 type)
{
	u32 csselr = level << 1 | type;

	/* Write to Cache Size Selection Register (CSSELR) */
	asm volatile ("mcr p15, 2, %0, c0, c0, 0" : : "r" (csselr));
}
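
/*
 * CCSIDR layout (ARMv7 ARM): LineSize in bits [2:0], encoded as
 * log2(words per line) - 2; Associativity in bits [12:3], encoded as
 * (number of ways - 1); NumSets in bits [27:13], encoded as
 * (number of sets - 1). get_ccsidr() returns the CCSIDR of whichever
 * cache CSSELR currently selects.
 */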
static u32 get_ccsidr(void)
{
	u32 ccsidr;

	/* Read current CP15 Cache Size ID Register */
	asm volatile ("mrc p15, 1, %0, c0, c0, 0" : "=r" (ccsidr));
	return ccsidr;
}

static u32 get_clidr(void)
{
	u32 clidr;

	/* Read current CP15 Cache Level ID Register */
	asm volatile ("mrc p15, 1, %0, c0, c0, 1" : "=r" (clidr));
	return clidr;
}
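
/*
 * Set/way operand format for DCISW/DCCISW (ARMv7 ARM): the way index
 * occupies the top bits (hence way_shift = 32 - log2(ways)), the set
 * index starts at bit log2(line length in bytes), and the cache level
 * sits in bits [3:1] (hence level << 1).
 */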
static void v7_inval_dcache_level_setway(u32 level, u32 num_sets,
					 u32 num_ways, u32 way_shift,
					 u32 log2_line_len)
{
	int way, set, setway;

	/*
	 * For optimal assembly code:
	 * a. count down
	 * b. have bigger loop inside
	 */
	for (way = num_ways - 1; way >= 0; way--) {
		for (set = num_sets - 1; set >= 0; set--) {
			setway = (level << 1) | (set << log2_line_len) |
				 (way << way_shift);
			/* Invalidate data/unified cache line by set/way */
			asm volatile ("mcr p15, 0, %0, c7, c6, 2"
				      : : "r" (setway));
		}
	}
	/* DSB to make sure the operation is complete */
	CP15DSB;
}
static void v7_clean_inval_dcache_level_setway(u32 level, u32 num_sets,
					       u32 num_ways, u32 way_shift,
					       u32 log2_line_len)
{
	int way, set, setway;

	/*
	 * For optimal assembly code:
	 * a. count down
	 * b. have bigger loop inside
	 */
	for (way = num_ways - 1; way >= 0; way--) {
		for (set = num_sets - 1; set >= 0; set--) {
			setway = (level << 1) | (set << log2_line_len) |
				 (way << way_shift);
			/*
			 * Clean & Invalidate data/unified
			 * cache line by set/way
			 */
			asm volatile ("mcr p15, 0, %0, c7, c14, 2"
				      : : "r" (setway));
		}
	}
	/* DSB to make sure the operation is complete */
	CP15DSB;
}
static void v7_maint_dcache_level_setway(u32 level, u32 operation)
{
	u32 ccsidr;
	u32 num_sets, num_ways, log2_line_len, log2_num_ways;
	u32 way_shift;

	set_csselr(level, ARMV7_CSSELR_IND_DATA_UNIFIED);

	ccsidr = get_ccsidr();

	/* LineSize field is log2(words per line) - 2 */
	log2_line_len = ((ccsidr & CCSIDR_LINE_SIZE_MASK) >>
				CCSIDR_LINE_SIZE_OFFSET) + 2;
	/* Converting from words to bytes */
	log2_line_len += 2;

	num_ways = ((ccsidr & CCSIDR_ASSOCIATIVITY_MASK) >>
			CCSIDR_ASSOCIATIVITY_OFFSET) + 1;
	num_sets = ((ccsidr & CCSIDR_NUM_SETS_MASK) >>
			CCSIDR_NUM_SETS_OFFSET) + 1;
	/*
	 * According to the ARMv7 ARM, the number of sets and the number
	 * of ways need not be powers of 2.
	 */
	log2_num_ways = log_2_n_round_up(num_ways);

	way_shift = (32 - log2_num_ways);
	if (operation == ARMV7_DCACHE_INVAL_ALL) {
		v7_inval_dcache_level_setway(level, num_sets, num_ways,
					     way_shift, log2_line_len);
	} else if (operation == ARMV7_DCACHE_CLEAN_INVAL_ALL) {
		v7_clean_inval_dcache_level_setway(level, num_sets, num_ways,
						   way_shift, log2_line_len);
	}
}
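
/*
 * CLIDR holds a 3-bit cache type field (Ctype) per level, starting at
 * bit 0 for level 1. Walk all seven possible levels and maintain every
 * level that contains a data or unified cache.
 */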
static void v7_maint_dcache_all(u32 operation)
{
	u32 level, cache_type, level_start_bit = 0;
	u32 clidr = get_clidr();

	for (level = 0; level < 7; level++) {
		cache_type = (clidr >> level_start_bit) & 0x7;
		if ((cache_type == ARMV7_CLIDR_CTYPE_DATA_ONLY) ||
		    (cache_type == ARMV7_CLIDR_CTYPE_INSTRUCTION_DATA) ||
		    (cache_type == ARMV7_CLIDR_CTYPE_UNIFIED))
			v7_maint_dcache_level_setway(level, operation);
		level_start_bit += 3;
	}
}
static void v7_dcache_clean_inval_range(u32 start,
					u32 stop, u32 line_len)
{
	u32 mva;

	/* Align start to cache line boundary */
	start &= ~(line_len - 1);
	for (mva = start; mva < stop; mva = mva + line_len) {
		/* DCCIMVAC - Clean & Invalidate data cache by MVA to PoC */
		asm volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" (mva));
	}
}
static void v7_dcache_inval_range(u32 start, u32 stop, u32 line_len)
{
	u32 mva;

	/*
	 * If the start address is not aligned to a cache line, do not
	 * invalidate the first cache line: it may hold dirty data that
	 * belongs to memory outside the requested range.
	 */
	if (start & (line_len - 1)) {
		printf("ERROR: %s - start address is not aligned - 0x%08x\n",
			__func__, start);
		/* move to next cache line */
		start = (start + line_len - 1) & ~(line_len - 1);
	}

	/*
	 * If the stop address is not aligned to a cache line, do not
	 * invalidate the last cache line, for the same reason.
	 */
	if (stop & (line_len - 1)) {
		printf("ERROR: %s - stop address is not aligned - 0x%08x\n",
			__func__, stop);
		/* align to the beginning of this cache line */
		stop &= ~(line_len - 1);
	}

	for (mva = start; mva < stop; mva = mva + line_len) {
		/* DCIMVAC - Invalidate data cache by MVA to PoC */
		asm volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" (mva));
	}
}
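
/*
 * Note: get_ccsidr() below reflects whichever cache CSSELR last
 * selected, so line_len is derived from the most recently selected
 * level rather than being fixed to L1.
 */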
static void v7_dcache_maint_range(u32 start, u32 stop, u32 range_op)
{
	u32 line_len, ccsidr;

	ccsidr = get_ccsidr();
	line_len = ((ccsidr & CCSIDR_LINE_SIZE_MASK) >>
			CCSIDR_LINE_SIZE_OFFSET) + 2;
	/* Converting from words to bytes */
	line_len += 2;
	/* converting from log2(linelen) to linelen */
	line_len = 1 << line_len;

	switch (range_op) {
	case ARMV7_DCACHE_CLEAN_INVAL_RANGE:
		v7_dcache_clean_inval_range(start, stop, line_len);
		break;
	case ARMV7_DCACHE_INVAL_RANGE:
		v7_dcache_inval_range(start, stop, line_len);
		break;
	}

	/* DSB to make sure the operation is complete */
	CP15DSB;
}
/* Invalidate TLB */
static void v7_inval_tlb(void)
{
	/* Invalidate entire unified TLB */
	asm volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
	/* Invalidate entire data TLB */
	asm volatile ("mcr p15, 0, %0, c8, c6, 0" : : "r" (0));
	/* Invalidate entire instruction TLB */
	asm volatile ("mcr p15, 0, %0, c8, c5, 0" : : "r" (0));
	/* Full system DSB - make sure that the invalidation is complete */
	CP15DSB;
	/* Full system ISB - make sure the instruction stream sees it */
	CP15ISB;
}
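
/*
 * Public API. "Invalidate" discards cached data without writing it
 * back; "flush" first cleans (writes back) dirty lines and then
 * invalidates them.
 */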
void invalidate_dcache_all(void)
{
	v7_maint_dcache_all(ARMV7_DCACHE_INVAL_ALL);

	v7_outer_cache_inval_all();
}

/*
 * Performs a clean & invalidation of the entire data cache
 * at all levels.
 */
void flush_dcache_all(void)
{
	v7_maint_dcache_all(ARMV7_DCACHE_CLEAN_INVAL_ALL);

	v7_outer_cache_flush_all();
}

/*
 * Invalidates the range in all levels of D-cache/unified cache used.
 * Affects the range [start, stop - 1].
 */
void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_INVAL_RANGE);

	v7_outer_cache_inval_range(start, stop);
}

/*
 * Flushes (cleans & invalidates) the range from all levels of
 * D-cache/unified cache used.
 * Affects the range [start, stop - 1].
 */
void flush_dcache_range(unsigned long start, unsigned long stop)
{
	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_CLEAN_INVAL_RANGE);

	v7_outer_cache_flush_range(start, stop);
}
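
/*
 * Typical usage with DMA (illustrative sketch; "buf" and "size" are
 * hypothetical names, not part of this file):
 *
 *	// CPU -> device: make the CPU's writes visible to the device
 *	flush_dcache_range((ulong)buf, (ulong)buf + size);
 *
 *	// device -> CPU: discard stale cached copies before reading
 *	invalidate_dcache_range((ulong)buf, (ulong)buf + size);
 *
 * Both boundaries should be cache-line aligned; otherwise
 * v7_dcache_inval_range() prints an error and shrinks the range.
 */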
void arm_init_before_mmu(void)
{
	v7_outer_cache_enable();
	invalidate_dcache_all();
	v7_inval_tlb();
}

void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
	flush_dcache_range(start, stop);
	v7_inval_tlb();
}

/*
 * Flushes the range from all levels of D-cache/unified cache used.
 * Affects the range [start, start + size - 1].
 */
void flush_cache(unsigned long start, unsigned long size)
{
	flush_dcache_range(start, start + size);
}
#else /* #ifndef CONFIG_SYS_DCACHE_OFF */
void invalidate_dcache_all(void)
{
}

void flush_dcache_all(void)
{
}

void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
}

void flush_dcache_range(unsigned long start, unsigned long stop)
{
}

void arm_init_before_mmu(void)
{
}

void flush_cache(unsigned long start, unsigned long size)
{
}

void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
}
#endif /* #ifndef CONFIG_SYS_DCACHE_OFF */
#ifndef CONFIG_SYS_ICACHE_OFF
/* Invalidate entire I-cache and branch predictor array */
void invalidate_icache_all(void)
{
	/*
	 * Invalidate all instruction caches to PoU.
	 * Also flushes branch target cache.
	 */
	asm volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));

	/* Invalidate entire branch predictor array */
	asm volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));

	/* Full system DSB - make sure that the invalidation is complete */
	CP15DSB;

	/* ISB - make sure the instruction stream sees it */
	CP15ISB;
}
#else
void invalidate_icache_all(void)
{
}
#endif
/*
 * Stub implementations for outer cache operations
 */
void __v7_outer_cache_enable(void)
{
}
void v7_outer_cache_enable(void)
	__attribute__((weak, alias("__v7_outer_cache_enable")));

void __v7_outer_cache_disable(void)
{
}
void v7_outer_cache_disable(void)
	__attribute__((weak, alias("__v7_outer_cache_disable")));

void __v7_outer_cache_flush_all(void)
{
}
void v7_outer_cache_flush_all(void)
	__attribute__((weak, alias("__v7_outer_cache_flush_all")));

void __v7_outer_cache_inval_all(void)
{
}
void v7_outer_cache_inval_all(void)
	__attribute__((weak, alias("__v7_outer_cache_inval_all")));

void __v7_outer_cache_flush_range(u32 start, u32 end)
{
}
void v7_outer_cache_flush_range(u32 start, u32 end)
	__attribute__((weak, alias("__v7_outer_cache_flush_range")));

void __v7_outer_cache_inval_range(u32 start, u32 end)
{
}
void v7_outer_cache_inval_range(u32 start, u32 end)
	__attribute__((weak, alias("__v7_outer_cache_inval_range")));
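
/*
 * A platform with an outer cache (e.g. a PL310 L2 controller) provides
 * strong definitions that override the weak aliases above. A minimal
 * sketch (the PLAT_L2_CLEAN_INV_WAY register name is hypothetical, for
 * illustration only):
 *
 *	void v7_outer_cache_flush_all(void)
 *	{
 *		// kick the controller's clean & invalidate by way
 *		// register, then poll until the hardware clears it
 *		writel(0xff, PLAT_L2_CLEAN_INV_WAY);
 *		while (readl(PLAT_L2_CLEAN_INV_WAY))
 *			;
 *	}
 */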