
/*
 * arch/xtensa/mm/misc.S
 *
 * Miscellaneous assembly functions.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2001 - 2005 Tensilica Inc.
 *
 * Chris Zankel <chris@zankel.net>
 */

/* Note: we might want to implement some of the loops as zero-overhead-loops,
 *       where applicable and if supported by the processor.
 */

#include <linux/linkage.h>
#include <asm/page.h>
#include <asm/pgtable.h>

#include <xtensa/cacheasm.h>
#include <xtensa/cacheattrasm.h>

/* clear_page (page) */
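/*
 * Zero one PAGE_SIZE page. a2 holds the page address; each loop
 * iteration stores eight zero words (32 bytes) until a2 reaches the
 * end of the page.
 */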
ENTRY(clear_page)
	entry	a1, 16

	addi	a4, a2, PAGE_SIZE
	movi	a3, 0

1:	s32i	a3, a2, 0
	s32i	a3, a2, 4
	s32i	a3, a2, 8
	s32i	a3, a2, 12
	s32i	a3, a2, 16
	s32i	a3, a2, 20
	s32i	a3, a2, 24
	s32i	a3, a2, 28
	addi	a2, a2, 32
	blt	a2, a4, 1b

	retw
/*
 * copy_page (void *to, void *from)
 *                a2       a3
 */
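/*
 * Copy one PAGE_SIZE page from a3 to a2, 32 bytes per iteration.
 * Loads and stores are grouped in batches of three, presumably so a
 * loaded value is not stored in the immediately following cycle.
 */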
ENTRY(copy_page)
	entry	a1, 16

	addi	a4, a2, PAGE_SIZE

1:	l32i	a5, a3, 0
	l32i	a6, a3, 4
	l32i	a7, a3, 8
	s32i	a5, a2, 0
	s32i	a6, a2, 4
	s32i	a7, a2, 8
	l32i	a5, a3, 12
	l32i	a6, a3, 16
	l32i	a7, a3, 20
	s32i	a5, a2, 12
	s32i	a6, a2, 16
	s32i	a7, a2, 20
	l32i	a5, a3, 24
	l32i	a6, a3, 28
	s32i	a5, a2, 24
	s32i	a6, a2, 28
	addi	a2, a2, 32
	addi	a3, a3, 32
	blt	a2, a4, 1b

	retw
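/*
 * The following entry points are thin wrappers around the cache
 * maintenance macros from <xtensa/cacheasm.h>. The "_all" variants
 * operate on the entire cache; the "_range" variants take a start
 * address in a2 and a size in a3; the "_page" variants take a start
 * address in a2 and load PAGE_SIZE into a3 themselves. The extra
 * register passed to each region macro is a scratch register.
 */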
/*
 * void __flush_invalidate_cache_all(void)
 */

ENTRY(__flush_invalidate_cache_all)
	entry	sp, 16
	dcache_writeback_inv_all a2, a3
	icache_invalidate_all a2, a3
	retw

/*
 * void __invalidate_icache_all(void)
 */

ENTRY(__invalidate_icache_all)
	entry	sp, 16
	icache_invalidate_all a2, a3
	retw

/*
 * void __flush_invalidate_dcache_all(void)
 */

ENTRY(__flush_invalidate_dcache_all)
	entry	sp, 16
	dcache_writeback_inv_all a2, a3
	retw
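/*
 * The combined range flush below writes back and invalidates the
 * dcache region, then invalidates the icache region. The start/size
 * arguments are copied to a4/a5 for the dcache pass, evidently because
 * the region macros consume the registers they are given and the
 * original a2/a3 are still needed for the icache pass.
 */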
/*
 * void __flush_invalidate_cache_range(ulong start, ulong size)
 */

ENTRY(__flush_invalidate_cache_range)
	entry	sp, 16
	mov	a4, a2
	mov	a5, a3
	dcache_writeback_inv_region a4, a5, a6
	icache_invalidate_region a2, a3, a4
	retw

/*
 * void __invalidate_icache_page(ulong start)
 */

ENTRY(__invalidate_icache_page)
	entry	sp, 16
	movi	a3, PAGE_SIZE
	icache_invalidate_region a2, a3, a4
	retw

/*
 * void __invalidate_dcache_page(ulong start)
 */

ENTRY(__invalidate_dcache_page)
	entry	sp, 16
	movi	a3, PAGE_SIZE
	dcache_invalidate_region a2, a3, a4
	retw

/*
 * void __invalidate_icache_range(ulong start, ulong size)
 */

ENTRY(__invalidate_icache_range)
	entry	sp, 16
	icache_invalidate_region a2, a3, a4
	retw

/*
 * void __invalidate_dcache_range(ulong start, ulong size)
 */

ENTRY(__invalidate_dcache_range)
	entry	sp, 16
	dcache_invalidate_region a2, a3, a4
	retw

/*
 * void __flush_dcache_page(ulong start)
 */

ENTRY(__flush_dcache_page)
	entry	sp, 16
	movi	a3, PAGE_SIZE
	dcache_writeback_region a2, a3, a4
	retw

/*
 * void __flush_invalidate_dcache_page(ulong start)
 */

ENTRY(__flush_invalidate_dcache_page)
	entry	sp, 16
	movi	a3, PAGE_SIZE
	dcache_writeback_inv_region a2, a3, a4
	retw

/*
 * void __flush_invalidate_dcache_range(ulong start, ulong size)
 */

ENTRY(__flush_invalidate_dcache_range)
	entry	sp, 16
	dcache_writeback_inv_region a2, a3, a4
	retw

/*
 * void __invalidate_dcache_all(void)
 */

ENTRY(__invalidate_dcache_all)
	entry	sp, 16
	dcache_invalidate_all a2, a3
	retw
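/*
 * The *_page_phys routines take a physical page address. Rather than
 * using the virtual-address region macros, the routine below walks the
 * dcache line by line (a3 counts down from XCHAL_DCACHE_SIZE), reads
 * each line's tag with ldct, masks it with PAGE_MASK | 1 (page-frame
 * bits plus what is evidently the valid bit, which "addi a2, a2, 1"
 * also sets in the comparison value), and writes back and invalidates
 * matching lines with diwbi.
 */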
/*
 * void __flush_invalidate_dcache_page_phys(ulong start)
 */

ENTRY(__flush_invalidate_dcache_page_phys)
	entry	sp, 16

	movi	a3, XCHAL_DCACHE_SIZE
	movi	a4, PAGE_MASK | 1
	addi	a2, a2, 1

1:	addi	a3, a3, -XCHAL_DCACHE_LINESIZE

	ldct	a6, a3
	dsync
	and	a6, a6, a4
	beq	a6, a2, 2f
	bgeui	a3, 2, 1b
	retw

2:	diwbi	a3, 0
	bgeui	a3, 2, 1b
	retw
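/*
 * check_dcache_low0/high0/low1/high1 appear to be debugging aids: each
 * scans one quarter of the dcache tags for a line belonging to the
 * given physical page and spins forever ("2: j 2b") if it finds one,
 * so a stale line shows up as a hang at a known label.
 */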
ENTRY(check_dcache_low0)
	entry	sp, 16

	movi	a3, XCHAL_DCACHE_SIZE / 4
	movi	a4, PAGE_MASK | 1
	addi	a2, a2, 1

1:	addi	a3, a3, -XCHAL_DCACHE_LINESIZE

	ldct	a6, a3
	dsync
	and	a6, a6, a4
	beq	a6, a2, 2f
	bgeui	a3, 2, 1b
	retw

2:	j	2b

ENTRY(check_dcache_high0)
	entry	sp, 16

	movi	a5, XCHAL_DCACHE_SIZE / 4
	movi	a3, XCHAL_DCACHE_SIZE / 2
	movi	a4, PAGE_MASK | 1
	addi	a2, a2, 1

1:	addi	a3, a3, -XCHAL_DCACHE_LINESIZE
	addi	a5, a5, -XCHAL_DCACHE_LINESIZE

	ldct	a6, a3
	dsync
	and	a6, a6, a4
	beq	a6, a2, 2f
	bgeui	a5, 2, 1b
	retw

2:	j	2b

ENTRY(check_dcache_low1)
	entry	sp, 16

	movi	a5, XCHAL_DCACHE_SIZE / 4
	movi	a3, XCHAL_DCACHE_SIZE * 3 / 4
	movi	a4, PAGE_MASK | 1
	addi	a2, a2, 1

1:	addi	a3, a3, -XCHAL_DCACHE_LINESIZE
	addi	a5, a5, -XCHAL_DCACHE_LINESIZE

	ldct	a6, a3
	dsync
	and	a6, a6, a4
	beq	a6, a2, 2f
	bgeui	a5, 2, 1b
	retw

2:	j	2b

ENTRY(check_dcache_high1)
	entry	sp, 16

	movi	a5, XCHAL_DCACHE_SIZE / 4
	movi	a3, XCHAL_DCACHE_SIZE
	movi	a4, PAGE_MASK | 1
	addi	a2, a2, 1

1:	addi	a3, a3, -XCHAL_DCACHE_LINESIZE
	addi	a5, a5, -XCHAL_DCACHE_LINESIZE

	ldct	a6, a3
	dsync
	and	a6, a6, a4
	beq	a6, a2, 2f
	bgeui	a5, 2, 1b
	retw

2:	j	2b
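/*
 * Instruction-cache counterpart of __flush_invalidate_dcache_page_phys:
 * the same tag scan, but reading tags with lict and invalidating
 * matching lines with iii (no writeback is needed for the icache).
 */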
/*
 * void __invalidate_icache_page_phys(ulong start)
 */

ENTRY(__invalidate_icache_page_phys)
	entry	sp, 16

	movi	a3, XCHAL_ICACHE_SIZE
	movi	a4, PAGE_MASK | 1
	addi	a2, a2, 1

1:	addi	a3, a3, -XCHAL_ICACHE_LINESIZE

	lict	a6, a3
	isync
	and	a6, a6, a4
	beq	a6, a2, 2f
	bgeui	a3, 2, 1b
	retw

2:	iii	a3, 0
	bgeui	a3, 2, 1b
	retw
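/*
 * The block below is disabled. It sketches an alternative, index-based
 * approach: instead of matching tags, it invalidates every way at each
 * cache index covered by the page, four lines per inner iteration, and
 * steps a2 by one way size per outer iteration.
 */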
#if 0

	movi	a3, XCHAL_DCACHE_WAYS - 1
	movi	a4, PAGE_SIZE

1:	mov	a5, a2
	add	a6, a2, a4

2:	diwbi	a5, 0
	diwbi	a5, XCHAL_DCACHE_LINESIZE
	diwbi	a5, XCHAL_DCACHE_LINESIZE * 2
	diwbi	a5, XCHAL_DCACHE_LINESIZE * 3
	addi	a5, a5, XCHAL_DCACHE_LINESIZE * 4
	blt	a5, a6, 2b

	addi	a3, a3, -1
	addi	a2, a2, XCHAL_DCACHE_SIZE / XCHAL_DCACHE_WAYS
	bgez	a3, 1b

	retw

ENTRY(__invalidate_icache_page_index)
	entry	sp, 16

	movi	a3, XCHAL_ICACHE_WAYS - 1
	movi	a4, PAGE_SIZE

1:	mov	a5, a2
	add	a6, a2, a4

2:	iii	a5, 0
	iii	a5, XCHAL_ICACHE_LINESIZE
	iii	a5, XCHAL_ICACHE_LINESIZE * 2
	iii	a5, XCHAL_ICACHE_LINESIZE * 3
	addi	a5, a5, XCHAL_ICACHE_LINESIZE * 4
	blt	a5, a6, 2b

	addi	a3, a3, -1
	addi	a2, a2, XCHAL_ICACHE_SIZE / XCHAL_ICACHE_WAYS
	bgez	a3, 1b

	retw

#endif