evergreen.c

/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
#include <linux/firmware.h>
#include <linux/platform_device.h>
#include <linux/slab.h>
#include "drmP.h"
#include "radeon.h"
#include "radeon_asic.h"
#include "radeon_drm.h"
#include "rv770d.h"
#include "atom.h"
#include "avivod.h"
#include "evergreen_reg.h"

static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);

bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
	bool connected = false;

	/* XXX */
	return connected;
}

void evergreen_hpd_set_polarity(struct radeon_device *rdev,
				enum radeon_hpd_id hpd)
{
	/* XXX */
}

void evergreen_hpd_init(struct radeon_device *rdev)
{
	/* XXX */
}

void evergreen_bandwidth_update(struct radeon_device *rdev)
{
	/* XXX */
}

void evergreen_hpd_fini(struct radeon_device *rdev)
{
	/* XXX */
}

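/*
 * Poll SRBM_STATUS until the memory controller goes idle.  The 0x1F00
 * mask is assumed to cover the MC busy bits; returns 0 once they clear,
 * or -1 if rdev->usec_timeout microseconds pass without the MC idling.
 */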
static int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read MC_STATUS */
		tmp = RREG32(SRBM_STATUS) & 0x1F00;
		if (!tmp)
			return 0;
		udelay(1);
	}
	return -1;
}

/*
 * GART
 */
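/*
 * Bring up the PCIE GART: pin the page table in VRAM, program the VM L2
 * cache and L1 TLB controls, point VM context 0 at the GTT range and
 * page table base, route protection faults to the dummy page, then
 * flush the TLB and mark the GART ready.
 */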
int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r, i;

	if (rdev->gart.table.vram.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	radeon_gart_restore(rdev);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
		ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
		EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
		RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
		(u32)(rdev->dummy_page.addr >> 12));
	for (i = 1; i < 7; i++)
		WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);

	r600_pcie_gart_tlb_flush(rdev);
	rdev->gart.ready = true;
	return 0;
}

void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;
	int i, r;

	/* Disable all tables */
	for (i = 0; i < 7; i++)
		WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
		EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	if (rdev->gart.table.vram.robj) {
		r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
		if (likely(r == 0)) {
			radeon_bo_kunmap(rdev->gart.table.vram.robj);
			radeon_bo_unpin(rdev->gart.table.vram.robj);
			radeon_bo_unreserve(rdev->gart.table.vram.robj);
		}
	}
}

void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}

void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int i;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
		ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
		EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	for (i = 0; i < 7; i++)
		WREG32(VM_CONTEXT0_CNTL + (i * 4), 0);
}

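/*
 * evergreen_mc_stop()/evergreen_mc_resume() bracket MC reprogramming:
 * stop saves the VGA and CRTC control state and blanks all six display
 * controllers so nothing scans out of VRAM while the apertures move;
 * resume points every scanout surface at the new VRAM start and then
 * restores the saved state.
 */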
static void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	save->vga_control[0] = RREG32(D1VGA_CONTROL);
	save->vga_control[1] = RREG32(D2VGA_CONTROL);
	save->vga_control[2] = RREG32(EVERGREEN_D3VGA_CONTROL);
	save->vga_control[3] = RREG32(EVERGREEN_D4VGA_CONTROL);
	save->vga_control[4] = RREG32(EVERGREEN_D5VGA_CONTROL);
	save->vga_control[5] = RREG32(EVERGREEN_D6VGA_CONTROL);
	save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
	save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
	save->crtc_control[0] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
	save->crtc_control[1] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
	save->crtc_control[2] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
	save->crtc_control[3] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
	save->crtc_control[4] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
	save->crtc_control[5] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);

	/* Stop all video */
	WREG32(VGA_RENDER_CONTROL, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	WREG32(D1VGA_CONTROL, 0);
	WREG32(D2VGA_CONTROL, 0);
	WREG32(EVERGREEN_D3VGA_CONTROL, 0);
	WREG32(EVERGREEN_D4VGA_CONTROL, 0);
	WREG32(EVERGREEN_D5VGA_CONTROL, 0);
	WREG32(EVERGREEN_D6VGA_CONTROL, 0);
}

static void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
		upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
		upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
		(u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
		(u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
		upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
		upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
		(u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
		(u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
		upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
		upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
		(u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
		(u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
		upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
		upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
		(u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
		(u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
		upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
		upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
		(u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
		(u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
		upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
		upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
		(u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
		(u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);

	/* Unlock host access */
	WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
	mdelay(1);
	/* Restore video state */
	WREG32(D1VGA_CONTROL, save->vga_control[0]);
	WREG32(D2VGA_CONTROL, save->vga_control[1]);
	WREG32(EVERGREEN_D3VGA_CONTROL, save->vga_control[2]);
	WREG32(EVERGREEN_D4VGA_CONTROL, save->vga_control[3]);
	WREG32(EVERGREEN_D5VGA_CONTROL, save->vga_control[4]);
	WREG32(EVERGREEN_D6VGA_CONTROL, save->vga_control[5]);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, save->crtc_control[0]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, save->crtc_control[1]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, save->crtc_control[2]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, save->crtc_control[3]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, save->crtc_control[4]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, save->crtc_control[5]);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
}

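/*
 * Reprogram the memory controller: clear the HDP registers, stop the
 * displays, lock out VGA aperture access, set the system aperture and
 * FB location (and the AGP window when present), then restore the
 * displays and disable the VGA renderer so it cannot touch driver-owned
 * VRAM.
 */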
static void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	/* Lockout access through VGA aperture */
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, 0);
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7));
	WREG32(HDP_NONSURFACE_SIZE, (rdev->mc.mc_vram_size - 1) | 0x3FF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}

#if 0
/*
 * CP.
 */
static void evergreen_cp_stop(struct radeon_device *rdev)
{
	/* XXX */
}

static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	/* XXX */
	return 0;
}

/*
 * Core functions
 */
static u32 evergreen_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
						  u32 num_backends,
						  u32 backend_disable_mask)
{
	u32 backend_map = 0;

	return backend_map;
}
#endif

static void evergreen_gpu_init(struct radeon_device *rdev)
{
	/* XXX */
}

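/*
 * Probe the VRAM configuration: channel width from MC_ARB_RAMCFG,
 * channel count from MC_SHARED_CHMAP, and total size from CONFIG_MEMSIZE
 * (reported in MB on evergreen), then place the VRAM and GTT apertures
 * via r600_vram_gtt_location().
 */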
int evergreen_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;

	/* Get VRAM information */
	rdev->mc.vram_is_ddr = true;
	tmp = RREG32(MC_ARB_RAMCFG);
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	tmp = RREG32(MC_SHARED_CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0? */
	rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
	rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
	/* Setup GPU memory space */
	/* size in MB on evergreen */
	rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
	rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	/* FIXME remove this once we support unmappable VRAM */
	if (rdev->mc.mc_vram_size > rdev->mc.aper_size) {
		rdev->mc.mc_vram_size = rdev->mc.aper_size;
		rdev->mc.real_vram_size = rdev->mc.aper_size;
	}
	r600_vram_gtt_location(rdev, &rdev->mc);
	radeon_update_bandwidth_info(rdev);
	return 0;
}

int evergreen_gpu_reset(struct radeon_device *rdev)
{
	/* FIXME: implement for evergreen */
	return 0;
}

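/*
 * Common hardware bring-up shared by init and resume.  At this stage
 * only the MC programming and the (stubbed) GPU init actually run;
 * firmware loading, GART/AGP enable, the blitter, IRQs and the CP are
 * still compiled out under #if 0.
 */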
static int evergreen_startup(struct radeon_device *rdev)
{
#if 0
	int r;

	if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
		r = r600_init_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load firmware!\n");
			return r;
		}
	}
#endif
	evergreen_mc_program(rdev);
#if 0
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
#endif
	evergreen_gpu_init(rdev);
#if 0
	if (!rdev->r600_blit.shader_obj) {
		r = r600_blit_init(rdev);
		if (r) {
			DRM_ERROR("radeon: failed blitter (%d).\n", r);
			return r;
		}
	}

	r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
	if (unlikely(r != 0))
		return r;
	r = radeon_bo_pin(rdev->r600_blit.shader_obj, RADEON_GEM_DOMAIN_VRAM,
			&rdev->r600_blit.shader_gpu_addr);
	radeon_bo_unreserve(rdev->r600_blit.shader_obj);
	if (r) {
		DRM_ERROR("failed to pin blit object %d\n", r);
		return r;
	}

	/* Enable IRQ */
	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	r600_irq_set(rdev);

	r = radeon_ring_init(rdev, rdev->cp.ring_size);
	if (r)
		return r;
	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = r600_cp_resume(rdev);
	if (r)
		return r;
	/* write back buffer is not vital so don't worry about failure */
	r600_wb_enable(rdev);
#endif
	return 0;
}

int evergreen_resume(struct radeon_device *rdev)
{
	int r;

	/* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
	 * posting will perform necessary task to bring back GPU into good
	 * shape.
	 */
	/* post card */
	atom_asic_init(rdev->mode_info.atom_context);
	/* Initialize clocks */
	r = radeon_clocks_init(rdev);
	if (r) {
		return r;
	}

	r = evergreen_startup(rdev);
	if (r) {
		DRM_ERROR("evergreen startup failed on resume\n");
		return r;
	}
#if 0
	r = r600_ib_test(rdev);
	if (r) {
		DRM_ERROR("radeon: failed testing IB (%d).\n", r);
		return r;
	}
#endif
	return r;
}

int evergreen_suspend(struct radeon_device *rdev)
{
#if 0
	int r;

	/* FIXME: we should wait for ring to be empty */
	r700_cp_stop(rdev);
	rdev->cp.ready = false;
	r600_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	/* unpin shaders bo */
	r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
	if (likely(r == 0)) {
		radeon_bo_unpin(rdev->r600_blit.shader_obj);
		radeon_bo_unreserve(rdev->r600_blit.shader_obj);
	}
#endif
	return 0;
}

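/*
 * Heuristic POST check: treat the card as posted if any CRTC has its
 * master enable bit set, or if the BIOS has already written a non-zero
 * memory size into CONFIG_MEMSIZE.
 */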
static bool evergreen_card_posted(struct radeon_device *rdev)
{
	u32 reg;

	/* first check CRTCs */
	reg = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
	if (reg & EVERGREEN_CRTC_MASTER_EN)
		return true;

	/* then check MEM_SIZE, in case the crtcs are off */
	if (RREG32(CONFIG_MEMSIZE))
		return true;

	return false;
}

/* Plan is to move initialization into that function and use
 * helper functions so that radeon_device_init pretty much
 * does nothing more than call asic-specific functions. This
 * should also allow us to remove a bunch of callback functions
 * like vram_info.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	r = radeon_dummy_page_init(rdev);
	if (r)
		return r;
	/* This doesn't do much */
	r = radeon_gem_init(rdev);
	if (r)
		return r;
	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* Post card if necessary */
	if (!evergreen_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	r = radeon_clocks_init(rdev);
	if (r)
		return r;
	/* Initialize power management */
	radeon_pm_init(rdev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;
#if 0
	r = radeon_irq_kms_init(rdev);
	if (r)
		return r;

	rdev->cp.ring_obj = NULL;
	r600_ring_init(rdev, 1024 * 1024);

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;
#endif
	rdev->accel_working = false;
	r = evergreen_startup(rdev);
	if (r) {
		evergreen_suspend(rdev);
		/*r600_wb_fini(rdev);*/
		/*radeon_ring_fini(rdev);*/
		/*evergreen_pcie_gart_fini(rdev);*/
		rdev->accel_working = false;
	}
	if (rdev->accel_working) {
		r = radeon_ib_pool_init(rdev);
		if (r) {
			DRM_ERROR("radeon: failed initializing IB pool (%d).\n", r);
			rdev->accel_working = false;
		}
		r = r600_ib_test(rdev);
		if (r) {
			DRM_ERROR("radeon: failed testing IB (%d).\n", r);
			rdev->accel_working = false;
		}
	}
	return 0;
}

void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	evergreen_suspend(rdev);
#if 0
	r600_blit_fini(rdev);
	r600_irq_fini(rdev);
	radeon_irq_kms_fini(rdev);
	radeon_ring_fini(rdev);
	r600_wb_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
#endif
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_clocks_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
	radeon_dummy_page_fini(rdev);
}