#ifndef XTENSA_CACHEASM_H
#define XTENSA_CACHEASM_H

/*
 * THIS FILE IS GENERATED -- DO NOT MODIFY BY HAND
 *
 * include/asm-xtensa/xtensa/cacheasm.h -- assembler-specific cache
 * related definitions that depend on CORE configuration.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2002 Tensilica Inc.
 */

#include <xtensa/coreasm.h>

/*
 * This header file defines assembler macros of the form:
 *	<x>cache_<func>
 * where <x> is 'i' or 'd' for instruction and data caches,
 * and <func> indicates the function of the macro.
 *
 * The following functions <func> are defined,
 * and apply only to the specified cache (I or D):
 *
 * reset
 *	Resets the cache.
 *
 * sync
 *	Makes sure any previous cache instructions have been completed;
 *	ie. makes sure any previous cache control operations
 *	have had full effect and been synchronized to memory.
 *	Eg. any invalidate completed [so as not to generate a hit],
 *	any writebacks or other pipelined writes written to memory, etc.
 *
 * invalidate_line		(single cache line)
 * invalidate_region		(specified memory range)
 * invalidate_all		(entire cache)
 *	Invalidates all cache entries that cache
 *	data from the specified memory range.
 *	NOTE: locked entries are not invalidated.
 *
 * writeback_line		(single cache line)
 * writeback_region		(specified memory range)
 * writeback_all		(entire cache)
 *	Writes back to memory all dirty cache entries
 *	that cache data from the specified memory range,
 *	and marks these entries as clean.
 *	NOTE: on some future implementations, this might
 *	also invalidate.
 *	NOTE: locked entries are written back, but never invalidated.
 *	NOTE: instruction caches never implement writeback.
 *
 * writeback_inv_line		(single cache line)
 * writeback_inv_region		(specified memory range)
 * writeback_inv_all		(entire cache)
 *	Writes back to memory all dirty cache entries
 *	that cache data from the specified memory range,
 *	and invalidates these entries (including all clean
 *	cache entries that cache data from that range).
 *	NOTE: locked entries are written back but not invalidated.
 *	NOTE: instruction caches never implement writeback.
 *
 * lock_line			(single cache line)
 * lock_region			(specified memory range)
 *	Prefetch and lock the specified memory range into cache.
 *	NOTE: if any part of the specified memory range cannot
 *	be locked, a ??? exception occurs.  These macros don't
 *	do anything special (yet anyway) to handle this situation.
 *
 * unlock_line			(single cache line)
 * unlock_region		(specified memory range)
 * unlock_all			(entire cache)
 *	Unlock cache entries that cache the specified memory range.
 *	Entries not already locked are unaffected.
 */
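/*
 * Usage sketch (illustrative only; the buffer symbol, its size constant,
 * and the a2..a4 register choices are assumptions, not part of this API):
 * handing a freshly written buffer to an external bus master, then
 * reading back a buffer that master has filled:
 *
 *	movi	a2, dma_buf		// hypothetical buffer start
 *	movi	a3, DMA_BUF_SIZE	// hypothetical size in bytes
 *	dcache_writeback_region	a2, a3, a4	// push dirty lines to memory
 *	...
 *	movi	a2, dma_buf		// reload: the macro clobbered a2/a3
 *	movi	a3, DMA_BUF_SIZE
 *	dcache_invalidate_region	a2, a3, a4	// drop stale cached copies
 */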
/*************************** GENERIC -- ALL CACHES ***************************/

/*
 * The following macros assume the following cache size/parameter limits
 * in the current Xtensa core implementation:
 *	cache size:	1024 bytes minimum
 *	line size:	16 - 64 bytes
 *	way count:	1 - 4
 *
 * Minimum entries per way (ie. per associativity) = 1024 / 64 / 4 = 4
 * Hence the assumption that each loop can execute four cache instructions.
 *
 * Correspondingly, the offset range of instructions is assumed able to cover
 * four lines, ie. offsets {0,1,2,3} * line_size are assumed valid for
 * both hit and indexed cache instructions.  Ie. these offsets are all
 * valid:  0, 16, 32, 48, 64, 96, 128, 192  (for line sizes 16, 32, 64).
 * This is true of all original cache instructions
 * (dhi, ihi, dhwb, dhwbi, dii, iii) which have offsets
 * of 0 to 1020 in multiples of 4 (ie. 8 bits shifted by 2).
 * This is also true of subsequent cache instructions
 * (dhu, ihu, diu, iiu, diwb, diwbi, dpfl, ipfl) which have offsets
 * of 0 to 240 in multiples of 16 (ie. 4 bits shifted by 4).
 *
 * (Maximum cache size, currently 32k, doesn't affect the following macros.
 * Cache ways > MMU min page size cause aliasing, but that's another matter.)
 */
/*
 * Macro to apply an 'indexed' cache instruction to the entire cache.
 *
 * Parameters:
 *	cainst		instruction/macro that takes an address register parameter
 *			and an offset parameter (in range 0 .. 3*linesize)
 *	size		size of the cache in bytes
 *	linesize	size of a cache line in bytes
 *	assoc_or1	number of associativities (ways) in the cache
 *			if all ways are affected by cainst,
 *			or 1 if only one way (or not all ways) of the cache
 *			is affected by cainst (eg. DIWB or DIWBI [not yet ISA defined])
 *	aa, ab		unique address registers (temporaries)
 */
	.macro	cache_index_all		cainst, size, linesize, assoc_or1, aa, ab

	//  Sanity-check on cache parameters:
	.ifne	(\size % (\linesize * \assoc_or1 * 4))
	.err	//  cache configuration outside expected/supported range!
	.endif

	//  \size byte cache, \linesize byte lines, \assoc_or1 way(s) affected by each \cainst.
	movi	\aa, (\size / (\linesize * \assoc_or1 * 4))
	//  Possible improvement: need only loop if \aa > 1 ;
	//  however that particular condition is highly unlikely.
	movi	\ab, 0		// to iterate over cache
	floop	\aa, cachex\@
	\cainst	\ab, 0*\linesize
	\cainst	\ab, 1*\linesize
	\cainst	\ab, 2*\linesize
	\cainst	\ab, 3*\linesize
	addi	\ab, \ab, 4*\linesize	// move to next line
	floopend	\aa, cachex\@

	.endm
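/*
 * Illustrative expansion (the figures are assumed, not required):
 * for a 16KB, 4-way cache with 32-byte lines,
 *	cache_index_all	dii, 16384, 32, 4, a2, a3
 * loops 16384 / (32 * 4 * 4) = 32 times, issuing four 'dii' instructions
 * per iteration at offsets 0, 32, 64, 96 from the index register \ab.
 */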
/*
 * Macro to apply a 'hit' cache instruction to a memory region,
 * ie. to any cache entries that cache a specified portion (region) of memory.
 * Takes care of the unaligned cases, ie. may apply to one
 * more cache line than \asize / linesize if \addr is not aligned.
 *
 * Parameters are:
 *	cainst		instruction/macro that takes an address register parameter
 *			and an offset parameter (currently always zero)
 *			and generates a cache instruction (eg. "dhi", "dhwb", "ihi", etc.)
 *	linesize_log2	log2(size of cache line in bytes)
 *	addr		register containing start address of region (clobbered)
 *	asize		register containing size of the region in bytes (clobbered)
 *	askew		unique register used as temporary
 *
 * !?!?! 2DO: optimization: iterate min(cache_size, \asize) / linesize
 */
	.macro	cache_hit_region	cainst, linesize_log2, addr, asize, askew

	//  Make \asize the number of iterations:
	extui	\askew, \addr, 0, \linesize_log2	// get unalignment amount of \addr
	add	\asize, \asize, \askew			// ... and add it to \asize
	addi	\asize, \asize, (1 << \linesize_log2) - 1	// round up!
	srli	\asize, \asize, \linesize_log2

	//  Iterate over region:
	floopnez	\asize, cacheh\@
	\cainst		\addr, 0
	addi		\addr, \addr, (1 << \linesize_log2)	// move to next line
	floopend	\asize, cacheh\@

	.endm
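/*
 * Worked example of the rounding above (numbers are illustrative):
 * with 32-byte lines (linesize_log2 = 5), a 64-byte region starting
 * 28 bytes into a line spans three lines, not 64/32 = 2:
 * (64 + 28 + 31) >> 5 = 3 iterations.
 */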
/*************************** INSTRUCTION CACHE ***************************/

/*
 * Reset/initialize the instruction cache by simply invalidating it
 * (unlocking it first, if cache locking is implemented):
 *
 * Parameters:
 *	aa, ab		unique address registers (temporaries)
 */
	.macro	icache_reset	aa, ab
	icache_unlock_all	\aa, \ab
	icache_invalidate_all	\aa, \ab
	.endm
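/*
 * Minimal reset-time sketch (the scenario is assumed; a2/a3 are arbitrary
 * temporaries, and neither macro requires a stack):
 *
 *	icache_reset	a2, a3
 *	dcache_reset	a2, a3
 */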
/*
 * Synchronize after an instruction cache operation,
 * to be sure everything is in sync with memory as expected
 * following any previous instruction cache control operations.
 *
 * Parameters are:
 *	ar	an address register (temporary) (currently unused, but may be used in future)
 */
	.macro	icache_sync	ar
#if XCHAL_ICACHE_SIZE > 0
	isync
#endif
	.endm
/*
 * Invalidate a single line of the instruction cache.
 * Parameters are:
 *	ar	address register that contains (virtual) address to invalidate
 *		(may get clobbered in a future implementation, but not currently)
 *	offset	(optional) offset to add to \ar to compute effective address to invalidate
 *		(note: some number of lsbits are ignored)
 */
	.macro	icache_invalidate_line	ar, offset
#if XCHAL_ICACHE_SIZE > 0
	ihi	\ar, \offset		// invalidate icache line
	/*
	 * NOTE: in some version of the silicon [!!!SHOULD HAVE BEEN DOCUMENTED!!!]
	 * 'ihi' doesn't work, so it has been replaced with 'iii'
	 * (which just invalidates more than it should;
	 * that should be okay other than the performance hit,
	 * because cache locking did not exist in that version,
	 * unless the user somehow relies on something being cached).
	 * [WHAT VERSION IS IT!!?!?
	 * IS THERE ANY WAY TO TEST FOR THAT HERE, TO OUTPUT 'III' ONLY IF NEEDED!?!?]
	 *
	 *	iii	\ar, \offset
	 */
	icache_sync	\ar
#endif
	.endm
/*
 * Invalidate instruction cache entries that cache a specified portion of memory.
 * Parameters are:
 *	astart	start address (register gets clobbered)
 *	asize	size of the region in bytes (register gets clobbered)
 *	ac	unique register used as temporary
 */
	.macro	icache_invalidate_region	astart, asize, ac
#if XCHAL_ICACHE_SIZE > 0
	//  Instruction cache region invalidation:
	cache_hit_region	ihi, XCHAL_ICACHE_LINEWIDTH, \astart, \asize, \ac
	icache_sync	\ac
	//  End of instruction cache region invalidation
#endif
	.endm
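/*
 * Usage sketch (an assumed code-loading scenario; symbols and registers
 * are illustrative): after copying code to RAM through the data cache,
 * write the data back and invalidate any stale icache lines before
 * executing it.  Note that both region macros clobber their address/size
 * registers, hence the reloads:
 *
 *	movi	a2, loaded_code		// hypothetical destination
 *	movi	a3, loaded_code_size	// hypothetical byte count
 *	dcache_writeback_region	a2, a3, a4
 *	movi	a2, loaded_code
 *	movi	a3, loaded_code_size
 *	icache_invalidate_region	a2, a3, a4
 */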
/*
 * Invalidate entire instruction cache.
 *
 * Parameters:
 *	aa, ab	unique address registers (temporaries)
 */
	.macro	icache_invalidate_all	aa, ab
#if XCHAL_ICACHE_SIZE > 0
	//  Instruction cache invalidation:
	cache_index_all	iii, XCHAL_ICACHE_SIZE, XCHAL_ICACHE_LINESIZE, XCHAL_ICACHE_WAYS, \aa, \ab
	icache_sync	\aa
	//  End of instruction cache invalidation
#endif
	.endm
/*
 * Lock (prefetch & lock) a single line of the instruction cache.
 *
 * Parameters are:
 *	ar	address register that contains (virtual) address to lock
 *		(may get clobbered in a future implementation, but not currently)
 *	offset	offset to add to \ar to compute effective address to lock
 *		(note: some number of lsbits are ignored)
 */
	.macro	icache_lock_line	ar, offset
#if XCHAL_ICACHE_SIZE > 0 && XCHAL_ICACHE_LINE_LOCKABLE
	ipfl	\ar, \offset	/* prefetch and lock icache line */
	icache_sync	\ar
#endif
	.endm

/*
 * Lock (prefetch & lock) a specified portion of memory into the instruction cache.
 * Parameters are:
 *	astart	start address (register gets clobbered)
 *	asize	size of the region in bytes (register gets clobbered)
 *	ac	unique register used as temporary
 */
	.macro	icache_lock_region	astart, asize, ac
#if XCHAL_ICACHE_SIZE > 0 && XCHAL_ICACHE_LINE_LOCKABLE
	//  Instruction cache region lock:
	cache_hit_region	ipfl, XCHAL_ICACHE_LINEWIDTH, \astart, \asize, \ac
	icache_sync	\ac
	//  End of instruction cache region lock
#endif
	.endm
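/*
 * Illustrative use (the handler symbols are hypothetical; requires a core
 * configured with XCHAL_ICACHE_LINE_LOCKABLE): pin a time-critical
 * interrupt handler into the instruction cache so it never misses:
 *
 *	movi	a2, isr_fast		// hypothetical handler start
 *	movi	a3, isr_fast_size	// hypothetical handler size in bytes
 *	icache_lock_region	a2, a3, a4
 */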
/*
 * Unlock a single line of the instruction cache.
 *
 * Parameters are:
 *	ar	address register that contains (virtual) address to unlock
 *		(may get clobbered in a future implementation, but not currently)
 *	offset	offset to add to \ar to compute effective address to unlock
 *		(note: some number of lsbits are ignored)
 */
	.macro	icache_unlock_line	ar, offset
#if XCHAL_ICACHE_SIZE > 0 && XCHAL_ICACHE_LINE_LOCKABLE
	ihu	\ar, \offset	/* unlock icache line */
	icache_sync	\ar
#endif
	.endm

/*
 * Unlock a specified portion of memory from the instruction cache.
 * Parameters are:
 *	astart	start address (register gets clobbered)
 *	asize	size of the region in bytes (register gets clobbered)
 *	ac	unique register used as temporary
 */
	.macro	icache_unlock_region	astart, asize, ac
#if XCHAL_ICACHE_SIZE > 0 && XCHAL_ICACHE_LINE_LOCKABLE
	//  Instruction cache region unlock:
	cache_hit_region	ihu, XCHAL_ICACHE_LINEWIDTH, \astart, \asize, \ac
	icache_sync	\ac
	//  End of instruction cache region unlock
#endif
	.endm

/*
 * Unlock entire instruction cache.
 *
 * Parameters:
 *	aa, ab	unique address registers (temporaries)
 */
	.macro	icache_unlock_all	aa, ab
#if XCHAL_ICACHE_SIZE > 0 && XCHAL_ICACHE_LINE_LOCKABLE
	//  Instruction cache unlock:
	cache_index_all	iiu, XCHAL_ICACHE_SIZE, XCHAL_ICACHE_LINESIZE, 1, \aa, \ab
	icache_sync	\aa
	//  End of instruction cache unlock
#endif
	.endm
/*************************** DATA CACHE ***************************/

/*
 * Reset/initialize the data cache by simply invalidating it
 * (unlocking it first, if cache locking is implemented):
 *
 * Parameters:
 *	aa, ab	unique address registers (temporaries)
 */
	.macro	dcache_reset	aa, ab
	dcache_unlock_all	\aa, \ab
	dcache_invalidate_all	\aa, \ab
	.endm
/*
 * Synchronize after a data cache operation,
 * to be sure everything is in sync with memory as expected
 * following any previous data cache control operations.
 *
 * Parameters are:
 *	ar	an address register (temporary) (currently unused, but may be used in future)
 */
	.macro	dcache_sync	ar
#if XCHAL_DCACHE_SIZE > 0
	//  The sequence below (commented out) errs on the conservative side
	//  (too much so); a DSYNC should be sufficient:
	//memw		// synchronize data cache changes relative to subsequent memory accesses
	//isync		// be conservative and ISYNC as well (just to be sure)
	dsync
#endif
	.endm
/*
 * Synchronize after a data store operation,
 * to be sure the stored data is completely off the processor
 * (and, assuming there is no buffering outside the processor,
 * that the data is in memory).  This may be required to
 * ensure that the processor's write buffers are emptied.
 * A MEMW followed by a read guarantees this, by definition.
 * We also try to make sure the read itself completes.
 *
 * Parameters are:
 *	ar	an address register (temporary)
 */
	.macro	write_sync	ar
	memw			// ensure previous memory accesses are complete prior to subsequent memory accesses
	l32i	\ar, sp, 0	// completing this read ensures any previous write has completed, because of MEMW
	//slot
	add	\ar, \ar, \ar	// use the result of the read to help ensure the read completes (in future architectures)
	.endm
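/*
 * Illustrative use (the device address is hypothetical, and sp must
 * point to readable memory, since write_sync loads from (sp+0)):
 * make sure a store to a memory-mapped register has left the processor
 * before continuing:
 *
 *	movi	a2, 0xFD000000		// hypothetical device register
 *	movi	a3, 1
 *	s32i	a3, a2, 0		// kick off the device
 *	write_sync	a3		// wait for the store to drain
 */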
/*
 * Invalidate a single line of the data cache.
 * Parameters are:
 *	ar	address register that contains (virtual) address to invalidate
 *		(may get clobbered in a future implementation, but not currently)
 *	offset	(optional) offset to add to \ar to compute effective address to invalidate
 *		(note: some number of lsbits are ignored)
 */
	.macro	dcache_invalidate_line	ar, offset
#if XCHAL_DCACHE_SIZE > 0
	dhi	\ar, \offset
	dcache_sync	\ar
#endif
	.endm

/*
 * Invalidate data cache entries that cache a specified portion of memory.
 * Parameters are:
 *	astart	start address (register gets clobbered)
 *	asize	size of the region in bytes (register gets clobbered)
 *	ac	unique register used as temporary
 */
	.macro	dcache_invalidate_region	astart, asize, ac
#if XCHAL_DCACHE_SIZE > 0
	//  Data cache region invalidation:
	cache_hit_region	dhi, XCHAL_DCACHE_LINEWIDTH, \astart, \asize, \ac
	dcache_sync	\ac
	//  End of data cache region invalidation
#endif
	.endm
#if 0
/*
 * This is a work-around for a bug in SiChip1 (???).
 * There should be a proper mechanism for not outputting
 * these instructions when not needed.
 * To enable the work-around, uncomment this and replace 'dii'
 * with 'dii_s1' everywhere, eg. in the dcache_invalidate_all
 * macro below.
 */
	.macro	dii_s1	ar, offset
	dii	\ar, \offset
	or	\ar, \ar, \ar
	or	\ar, \ar, \ar
	or	\ar, \ar, \ar
	or	\ar, \ar, \ar
	.endm
#endif
/*
 * Invalidate entire data cache.
 *
 * Parameters:
 *	aa, ab	unique address registers (temporaries)
 */
	.macro	dcache_invalidate_all	aa, ab
#if XCHAL_DCACHE_SIZE > 0
	//  Data cache invalidation:
	cache_index_all	dii, XCHAL_DCACHE_SIZE, XCHAL_DCACHE_LINESIZE, XCHAL_DCACHE_WAYS, \aa, \ab
	dcache_sync	\aa
	//  End of data cache invalidation
#endif
	.endm
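/*
 * Note: on a write-back data cache, the invalidation above discards any
 * dirty lines without writing them back.  Where dirty data may still be
 * live, a sketch of the safer call (defined further below) is:
 *
 *	dcache_writeback_inv_all	a2, a3	// flush, then invalidate
 */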
/*
 * Writeback a single line of the data cache.
 * Parameters are:
 *	ar	address register that contains (virtual) address to writeback
 *		(may get clobbered in a future implementation, but not currently)
 *	offset	offset to add to \ar to compute effective address to writeback
 *		(note: some number of lsbits are ignored)
 */
	.macro	dcache_writeback_line	ar, offset
#if XCHAL_DCACHE_SIZE > 0 && XCHAL_DCACHE_IS_WRITEBACK
	dhwb	\ar, \offset
	dcache_sync	\ar
#endif
	.endm

/*
 * Writeback dirty data cache entries that cache a specified portion of memory.
 * Parameters are:
 *	astart	start address (register gets clobbered)
 *	asize	size of the region in bytes (register gets clobbered)
 *	ac	unique register used as temporary
 */
	.macro	dcache_writeback_region	astart, asize, ac
#if XCHAL_DCACHE_SIZE > 0 && XCHAL_DCACHE_IS_WRITEBACK
	//  Data cache region writeback:
	cache_hit_region	dhwb, XCHAL_DCACHE_LINEWIDTH, \astart, \asize, \ac
	dcache_sync	\ac
	//  End of data cache region writeback
#endif
	.endm
/*
 * Writeback entire data cache.
 * Parameters:
 *	aa, ab	unique address registers (temporaries)
 */
	.macro	dcache_writeback_all	aa, ab
#if XCHAL_DCACHE_SIZE > 0 && XCHAL_DCACHE_IS_WRITEBACK
	//  Data cache writeback:
	cache_index_all	diwb, XCHAL_DCACHE_SIZE, XCHAL_DCACHE_LINESIZE, 1, \aa, \ab
	dcache_sync	\aa
	//  End of data cache writeback
#endif
	.endm
/*
 * Writeback and invalidate a single line of the data cache.
 * Parameters are:
 *	ar	address register that contains (virtual) address to writeback and invalidate
 *		(may get clobbered in a future implementation, but not currently)
 *	offset	offset to add to \ar to compute effective address to writeback and invalidate
 *		(note: some number of lsbits are ignored)
 */
	.macro	dcache_writeback_inv_line	ar, offset
#if XCHAL_DCACHE_SIZE > 0
	dhwbi	\ar, \offset	/* writeback and invalidate dcache line */
	dcache_sync	\ar
#endif
	.endm

/*
 * Writeback and invalidate data cache entries that cache a specified portion of memory.
 * Parameters are:
 *	astart	start address (register gets clobbered)
 *	asize	size of the region in bytes (register gets clobbered)
 *	ac	unique register used as temporary
 */
	.macro	dcache_writeback_inv_region	astart, asize, ac
#if XCHAL_DCACHE_SIZE > 0
	//  Data cache region writeback and invalidate:
	cache_hit_region	dhwbi, XCHAL_DCACHE_LINEWIDTH, \astart, \asize, \ac
	dcache_sync	\ac
	//  End of data cache region writeback and invalidate
#endif
	.endm
/*
 * Writeback and invalidate entire data cache.
 * Parameters:
 *	aa, ab	unique address registers (temporaries)
 */
	.macro	dcache_writeback_inv_all	aa, ab
#if XCHAL_DCACHE_SIZE > 0
	//  Data cache writeback and invalidate:
#if XCHAL_DCACHE_IS_WRITEBACK
	cache_index_all	diwbi, XCHAL_DCACHE_SIZE, XCHAL_DCACHE_LINESIZE, 1, \aa, \ab
	dcache_sync	\aa
#else /*writeback*/
	//  Data cache does not support writeback, so just invalidate:
	dcache_invalidate_all	\aa, \ab
#endif /*writeback*/
	//  End of data cache writeback and invalidate
#endif
	.endm
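/*
 * Hedged example (the scenario is assumed, not prescribed by this header):
 * before disabling the cache or powering down, flush and invalidate
 * everything so that memory holds the definitive copy:
 *
 *	dcache_writeback_inv_all	a2, a3
 *	write_sync	a2		// drain write buffers (needs a valid sp)
 */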
/*
 * Lock (prefetch & lock) a single line of the data cache.
 *
 * Parameters are:
 *	ar	address register that contains (virtual) address to lock
 *		(may get clobbered in a future implementation, but not currently)
 *	offset	offset to add to \ar to compute effective address to lock
 *		(note: some number of lsbits are ignored)
 */
	.macro	dcache_lock_line	ar, offset
#if XCHAL_DCACHE_SIZE > 0 && XCHAL_DCACHE_LINE_LOCKABLE
	dpfl	\ar, \offset	/* prefetch and lock dcache line */
	dcache_sync	\ar
#endif
	.endm

/*
 * Lock (prefetch & lock) a specified portion of memory into the data cache.
 * Parameters are:
 *	astart	start address (register gets clobbered)
 *	asize	size of the region in bytes (register gets clobbered)
 *	ac	unique register used as temporary
 */
	.macro	dcache_lock_region	astart, asize, ac
#if XCHAL_DCACHE_SIZE > 0 && XCHAL_DCACHE_LINE_LOCKABLE
	//  Data cache region lock:
	cache_hit_region	dpfl, XCHAL_DCACHE_LINEWIDTH, \astart, \asize, \ac
	dcache_sync	\ac
	//  End of data cache region lock
#endif
	.endm
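/*
 * Illustrative use (the symbols are hypothetical; requires a core
 * configured with XCHAL_DCACHE_LINE_LOCKABLE): pin a latency-critical
 * table in the data cache so accesses to it never miss:
 *
 *	movi	a2, lookup_table	// hypothetical table address
 *	movi	a3, lookup_table_size	// hypothetical size in bytes
 *	dcache_lock_region	a2, a3, a4
 */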
/*
 * Unlock a single line of the data cache.
 *
 * Parameters are:
 *	ar	address register that contains (virtual) address to unlock
 *		(may get clobbered in a future implementation, but not currently)
 *	offset	offset to add to \ar to compute effective address to unlock
 *		(note: some number of lsbits are ignored)
 */
	.macro	dcache_unlock_line	ar, offset
#if XCHAL_DCACHE_SIZE > 0 && XCHAL_DCACHE_LINE_LOCKABLE
	dhu	\ar, \offset	/* unlock dcache line */
	dcache_sync	\ar
#endif
	.endm

/*
 * Unlock a specified portion of memory from the data cache.
 * Parameters are:
 *	astart	start address (register gets clobbered)
 *	asize	size of the region in bytes (register gets clobbered)
 *	ac	unique register used as temporary
 */
	.macro	dcache_unlock_region	astart, asize, ac
#if XCHAL_DCACHE_SIZE > 0 && XCHAL_DCACHE_LINE_LOCKABLE
	//  Data cache region unlock:
	cache_hit_region	dhu, XCHAL_DCACHE_LINEWIDTH, \astart, \asize, \ac
	dcache_sync	\ac
	//  End of data cache region unlock
#endif
	.endm

/*
 * Unlock entire data cache.
 *
 * Parameters:
 *	aa, ab	unique address registers (temporaries)
 */
	.macro	dcache_unlock_all	aa, ab
#if XCHAL_DCACHE_SIZE > 0 && XCHAL_DCACHE_LINE_LOCKABLE
	//  Data cache unlock:
	cache_index_all	diu, XCHAL_DCACHE_SIZE, XCHAL_DCACHE_LINESIZE, 1, \aa, \ab
	dcache_sync	\aa
	//  End of data cache unlock
#endif
	.endm

#endif /*XTENSA_CACHEASM_H*/