/* radeon_asic.h - per-ASIC function tables for the radeon DRM driver */
  1. /*
  2. * Copyright 2008 Advanced Micro Devices, Inc.
  3. * Copyright 2008 Red Hat Inc.
  4. * Copyright 2009 Jerome Glisse.
  5. *
  6. * Permission is hereby granted, free of charge, to any person obtaining a
  7. * copy of this software and associated documentation files (the "Software"),
  8. * to deal in the Software without restriction, including without limitation
  9. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  10. * and/or sell copies of the Software, and to permit persons to whom the
  11. * Software is furnished to do so, subject to the following conditions:
  12. *
  13. * The above copyright notice and this permission notice shall be included in
  14. * all copies or substantial portions of the Software.
  15. *
  16. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  17. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  18. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  19. * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  20. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  21. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  22. * OTHER DEALINGS IN THE SOFTWARE.
  23. *
  24. * Authors: Dave Airlie
  25. * Alex Deucher
  26. * Jerome Glisse
  27. */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__
/*
 * common functions
 *
 * Clock-control helpers shared by several ASIC tables below.  The
 * "legacy" variants program the hardware directly; the "atom" variants
 * go through the ATOM BIOS interpreter.
 */
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);
void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
/*
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 */
/* r100-family support routines, implemented elsewhere in the driver. */
int r100_init(struct radeon_device *rdev);
int r200_init(struct radeon_device *rdev);
/* MMIO and PLL register accessors */
uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void r100_errata(struct radeon_device *rdev);
void r100_vram_info(struct radeon_device *rdev);
int r100_gpu_reset(struct radeon_device *rdev);
/* memory controller + writeback setup/teardown */
int r100_mc_init(struct radeon_device *rdev);
void r100_mc_fini(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
int r100_wb_init(struct radeon_device *rdev);
void r100_wb_fini(struct radeon_device *rdev);
/* PCI GART backend */
int r100_pci_gart_init(struct radeon_device *rdev);
void r100_pci_gart_fini(struct radeon_device *rdev);
int r100_pci_gart_enable(struct radeon_device *rdev);
void r100_pci_gart_disable(struct radeon_device *rdev);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
/* command processor (CP) ring management */
int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
void r100_cp_fini(struct radeon_device *rdev);
void r100_cp_disable(struct radeon_device *rdev);
void r100_cp_commit(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
/* interrupt + fence handling */
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
/* blit-engine copy of num_pages pages, fenced on completion */
int r100_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset,
		   uint64_t dst_offset,
		   unsigned num_pages,
		   struct radeon_fence *fence);
/* surface (tiling) register management */
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
/* indirect buffer (IB) execution and self-tests */
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ib_test(struct radeon_device *rdev);
int r100_ring_test(struct radeon_device *rdev);
  84. static struct radeon_asic r100_asic = {
  85. .init = &r100_init,
  86. .errata = &r100_errata,
  87. .vram_info = &r100_vram_info,
  88. .gpu_reset = &r100_gpu_reset,
  89. .mc_init = &r100_mc_init,
  90. .mc_fini = &r100_mc_fini,
  91. .wb_init = &r100_wb_init,
  92. .wb_fini = &r100_wb_fini,
  93. .gart_init = &r100_pci_gart_init,
  94. .gart_fini = &r100_pci_gart_fini,
  95. .gart_enable = &r100_pci_gart_enable,
  96. .gart_disable = &r100_pci_gart_disable,
  97. .gart_tlb_flush = &r100_pci_gart_tlb_flush,
  98. .gart_set_page = &r100_pci_gart_set_page,
  99. .cp_init = &r100_cp_init,
  100. .cp_fini = &r100_cp_fini,
  101. .cp_disable = &r100_cp_disable,
  102. .cp_commit = &r100_cp_commit,
  103. .ring_start = &r100_ring_start,
  104. .ring_test = &r100_ring_test,
  105. .ring_ib_execute = &r100_ring_ib_execute,
  106. .ib_test = &r100_ib_test,
  107. .irq_set = &r100_irq_set,
  108. .irq_process = &r100_irq_process,
  109. .get_vblank_counter = &r100_get_vblank_counter,
  110. .fence_ring_emit = &r100_fence_ring_emit,
  111. .cs_parse = &r100_cs_parse,
  112. .copy_blit = &r100_copy_blit,
  113. .copy_dma = NULL,
  114. .copy = &r100_copy_blit,
  115. .set_engine_clock = &radeon_legacy_set_engine_clock,
  116. .set_memory_clock = NULL,
  117. .set_pcie_lanes = NULL,
  118. .set_clock_gating = &radeon_legacy_set_clock_gating,
  119. .set_surface_reg = r100_set_surface_reg,
  120. .clear_surface_reg = r100_clear_surface_reg,
  121. .bandwidth_update = &r100_bandwidth_update,
  122. };
/*
 * r300,r350,rv350,rv380
 */
int r300_init(struct radeon_device *rdev);
void r300_errata(struct radeon_device *rdev);
void r300_vram_info(struct radeon_device *rdev);
int r300_gpu_reset(struct radeon_device *rdev);
int r300_mc_init(struct radeon_device *rdev);
void r300_mc_fini(struct radeon_device *rdev);
void r300_ring_start(struct radeon_device *rdev);
void r300_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r300_cs_parse(struct radeon_cs_parser *p);
/* rv370 PCIE GART backend (shared by r300+ PCIE parts) */
int rv370_pcie_gart_init(struct radeon_device *rdev);
void rv370_pcie_gart_fini(struct radeon_device *rdev);
int rv370_pcie_gart_enable(struct radeon_device *rdev);
void rv370_pcie_gart_disable(struct radeon_device *rdev);
void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
/* PCIE indirect register accessors + lane configuration */
uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
/* DMA-engine copy of num_pages pages, fenced on completion */
int r300_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);
  150. static struct radeon_asic r300_asic = {
  151. .init = &r300_init,
  152. .errata = &r300_errata,
  153. .vram_info = &r300_vram_info,
  154. .gpu_reset = &r300_gpu_reset,
  155. .mc_init = &r300_mc_init,
  156. .mc_fini = &r300_mc_fini,
  157. .wb_init = &r100_wb_init,
  158. .wb_fini = &r100_wb_fini,
  159. .gart_init = &r100_pci_gart_init,
  160. .gart_fini = &r100_pci_gart_fini,
  161. .gart_enable = &r100_pci_gart_enable,
  162. .gart_disable = &r100_pci_gart_disable,
  163. .gart_tlb_flush = &r100_pci_gart_tlb_flush,
  164. .gart_set_page = &r100_pci_gart_set_page,
  165. .cp_init = &r100_cp_init,
  166. .cp_fini = &r100_cp_fini,
  167. .cp_disable = &r100_cp_disable,
  168. .cp_commit = &r100_cp_commit,
  169. .ring_start = &r300_ring_start,
  170. .ring_test = &r100_ring_test,
  171. .ring_ib_execute = &r100_ring_ib_execute,
  172. .ib_test = &r100_ib_test,
  173. .irq_set = &r100_irq_set,
  174. .irq_process = &r100_irq_process,
  175. .get_vblank_counter = &r100_get_vblank_counter,
  176. .fence_ring_emit = &r300_fence_ring_emit,
  177. .cs_parse = &r300_cs_parse,
  178. .copy_blit = &r100_copy_blit,
  179. .copy_dma = &r300_copy_dma,
  180. .copy = &r100_copy_blit,
  181. .set_engine_clock = &radeon_legacy_set_engine_clock,
  182. .set_memory_clock = NULL,
  183. .set_pcie_lanes = &rv370_set_pcie_lanes,
  184. .set_clock_gating = &radeon_legacy_set_clock_gating,
  185. .set_surface_reg = r100_set_surface_reg,
  186. .clear_surface_reg = r100_clear_surface_reg,
  187. .bandwidth_update = &r100_bandwidth_update,
  188. };
/*
 * r420,r423,rv410
 *
 * r420 uses the newer init/fini/suspend/resume entry points; most
 * other hooks in its table below are inherited or left NULL.
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
  196. static struct radeon_asic r420_asic = {
  197. .init = &r420_init,
  198. .fini = &r420_fini,
  199. .suspend = &r420_suspend,
  200. .resume = &r420_resume,
  201. .errata = NULL,
  202. .vram_info = NULL,
  203. .gpu_reset = &r300_gpu_reset,
  204. .mc_init = NULL,
  205. .mc_fini = NULL,
  206. .wb_init = NULL,
  207. .wb_fini = NULL,
  208. .gart_enable = NULL,
  209. .gart_disable = NULL,
  210. .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
  211. .gart_set_page = &rv370_pcie_gart_set_page,
  212. .cp_init = NULL,
  213. .cp_fini = NULL,
  214. .cp_disable = NULL,
  215. .cp_commit = &r100_cp_commit,
  216. .ring_start = &r300_ring_start,
  217. .ring_test = &r100_ring_test,
  218. .ring_ib_execute = &r100_ring_ib_execute,
  219. .ib_test = NULL,
  220. .irq_set = &r100_irq_set,
  221. .irq_process = &r100_irq_process,
  222. .get_vblank_counter = &r100_get_vblank_counter,
  223. .fence_ring_emit = &r300_fence_ring_emit,
  224. .cs_parse = &r300_cs_parse,
  225. .copy_blit = &r100_copy_blit,
  226. .copy_dma = &r300_copy_dma,
  227. .copy = &r100_copy_blit,
  228. .set_engine_clock = &radeon_atom_set_engine_clock,
  229. .set_memory_clock = &radeon_atom_set_memory_clock,
  230. .set_pcie_lanes = &rv370_set_pcie_lanes,
  231. .set_clock_gating = &radeon_atom_set_clock_gating,
  232. .set_surface_reg = r100_set_surface_reg,
  233. .clear_surface_reg = r100_clear_surface_reg,
  234. .bandwidth_update = &r100_bandwidth_update,
  235. };
/*
 * rs400,rs480
 */
void rs400_errata(struct radeon_device *rdev);
void rs400_vram_info(struct radeon_device *rdev);
int rs400_mc_init(struct radeon_device *rdev);
void rs400_mc_fini(struct radeon_device *rdev);
/* rs400 IGP GART backend */
int rs400_gart_init(struct radeon_device *rdev);
void rs400_gart_fini(struct radeon_device *rdev);
int rs400_gart_enable(struct radeon_device *rdev);
void rs400_gart_disable(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
/* memory-controller indirect register accessors */
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  251. static struct radeon_asic rs400_asic = {
  252. .init = &r300_init,
  253. .errata = &rs400_errata,
  254. .vram_info = &rs400_vram_info,
  255. .gpu_reset = &r300_gpu_reset,
  256. .mc_init = &rs400_mc_init,
  257. .mc_fini = &rs400_mc_fini,
  258. .wb_init = &r100_wb_init,
  259. .wb_fini = &r100_wb_fini,
  260. .gart_init = &rs400_gart_init,
  261. .gart_fini = &rs400_gart_fini,
  262. .gart_enable = &rs400_gart_enable,
  263. .gart_disable = &rs400_gart_disable,
  264. .gart_tlb_flush = &rs400_gart_tlb_flush,
  265. .gart_set_page = &rs400_gart_set_page,
  266. .cp_init = &r100_cp_init,
  267. .cp_fini = &r100_cp_fini,
  268. .cp_disable = &r100_cp_disable,
  269. .cp_commit = &r100_cp_commit,
  270. .ring_start = &r300_ring_start,
  271. .ring_test = &r100_ring_test,
  272. .ring_ib_execute = &r100_ring_ib_execute,
  273. .ib_test = &r100_ib_test,
  274. .irq_set = &r100_irq_set,
  275. .irq_process = &r100_irq_process,
  276. .get_vblank_counter = &r100_get_vblank_counter,
  277. .fence_ring_emit = &r300_fence_ring_emit,
  278. .cs_parse = &r300_cs_parse,
  279. .copy_blit = &r100_copy_blit,
  280. .copy_dma = &r300_copy_dma,
  281. .copy = &r100_copy_blit,
  282. .set_engine_clock = &radeon_legacy_set_engine_clock,
  283. .set_memory_clock = NULL,
  284. .set_pcie_lanes = NULL,
  285. .set_clock_gating = &radeon_legacy_set_clock_gating,
  286. .set_surface_reg = r100_set_surface_reg,
  287. .clear_surface_reg = r100_clear_surface_reg,
  288. .bandwidth_update = &r100_bandwidth_update,
  289. };
/*
 * rs600.
 */
int rs600_init(struct radeon_device *rdev);
void rs600_errata(struct radeon_device *rdev);
void rs600_vram_info(struct radeon_device *rdev);
int rs600_mc_init(struct radeon_device *rdev);
void rs600_mc_fini(struct radeon_device *rdev);
/* rs600 introduces its own interrupt + vblank handling */
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
/* rs600 GART backend */
int rs600_gart_init(struct radeon_device *rdev);
void rs600_gart_fini(struct radeon_device *rdev);
int rs600_gart_enable(struct radeon_device *rdev);
void rs600_gart_disable(struct radeon_device *rdev);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
/* memory-controller indirect register accessors */
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
  310. static struct radeon_asic rs600_asic = {
  311. .init = &rs600_init,
  312. .errata = &rs600_errata,
  313. .vram_info = &rs600_vram_info,
  314. .gpu_reset = &r300_gpu_reset,
  315. .mc_init = &rs600_mc_init,
  316. .mc_fini = &rs600_mc_fini,
  317. .wb_init = &r100_wb_init,
  318. .wb_fini = &r100_wb_fini,
  319. .gart_init = &rs600_gart_init,
  320. .gart_fini = &rs600_gart_fini,
  321. .gart_enable = &rs600_gart_enable,
  322. .gart_disable = &rs600_gart_disable,
  323. .gart_tlb_flush = &rs600_gart_tlb_flush,
  324. .gart_set_page = &rs600_gart_set_page,
  325. .cp_init = &r100_cp_init,
  326. .cp_fini = &r100_cp_fini,
  327. .cp_disable = &r100_cp_disable,
  328. .cp_commit = &r100_cp_commit,
  329. .ring_start = &r300_ring_start,
  330. .ring_test = &r100_ring_test,
  331. .ring_ib_execute = &r100_ring_ib_execute,
  332. .ib_test = &r100_ib_test,
  333. .irq_set = &rs600_irq_set,
  334. .irq_process = &rs600_irq_process,
  335. .get_vblank_counter = &rs600_get_vblank_counter,
  336. .fence_ring_emit = &r300_fence_ring_emit,
  337. .cs_parse = &r300_cs_parse,
  338. .copy_blit = &r100_copy_blit,
  339. .copy_dma = &r300_copy_dma,
  340. .copy = &r100_copy_blit,
  341. .set_engine_clock = &radeon_atom_set_engine_clock,
  342. .set_memory_clock = &radeon_atom_set_memory_clock,
  343. .set_pcie_lanes = NULL,
  344. .set_clock_gating = &radeon_atom_set_clock_gating,
  345. .bandwidth_update = &rs600_bandwidth_update,
  346. };
/*
 * rs690,rs740
 */
void rs690_errata(struct radeon_device *rdev);
void rs690_vram_info(struct radeon_device *rdev);
int rs690_mc_init(struct radeon_device *rdev);
void rs690_mc_fini(struct radeon_device *rdev);
/* memory-controller indirect register accessors */
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
  357. static struct radeon_asic rs690_asic = {
  358. .init = &rs600_init,
  359. .errata = &rs690_errata,
  360. .vram_info = &rs690_vram_info,
  361. .gpu_reset = &r300_gpu_reset,
  362. .mc_init = &rs690_mc_init,
  363. .mc_fini = &rs690_mc_fini,
  364. .wb_init = &r100_wb_init,
  365. .wb_fini = &r100_wb_fini,
  366. .gart_init = &rs400_gart_init,
  367. .gart_fini = &rs400_gart_fini,
  368. .gart_enable = &rs400_gart_enable,
  369. .gart_disable = &rs400_gart_disable,
  370. .gart_tlb_flush = &rs400_gart_tlb_flush,
  371. .gart_set_page = &rs400_gart_set_page,
  372. .cp_init = &r100_cp_init,
  373. .cp_fini = &r100_cp_fini,
  374. .cp_disable = &r100_cp_disable,
  375. .cp_commit = &r100_cp_commit,
  376. .ring_start = &r300_ring_start,
  377. .ring_test = &r100_ring_test,
  378. .ring_ib_execute = &r100_ring_ib_execute,
  379. .ib_test = &r100_ib_test,
  380. .irq_set = &rs600_irq_set,
  381. .irq_process = &rs600_irq_process,
  382. .get_vblank_counter = &rs600_get_vblank_counter,
  383. .fence_ring_emit = &r300_fence_ring_emit,
  384. .cs_parse = &r300_cs_parse,
  385. .copy_blit = &r100_copy_blit,
  386. .copy_dma = &r300_copy_dma,
  387. .copy = &r300_copy_dma,
  388. .set_engine_clock = &radeon_atom_set_engine_clock,
  389. .set_memory_clock = &radeon_atom_set_memory_clock,
  390. .set_pcie_lanes = NULL,
  391. .set_clock_gating = &radeon_atom_set_clock_gating,
  392. .set_surface_reg = r100_set_surface_reg,
  393. .clear_surface_reg = r100_clear_surface_reg,
  394. .bandwidth_update = &rs690_bandwidth_update,
  395. };
/*
 * rv515
 */
int rv515_init(struct radeon_device *rdev);
void rv515_errata(struct radeon_device *rdev);
void rv515_vram_info(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
int rv515_mc_init(struct radeon_device *rdev);
void rv515_mc_fini(struct radeon_device *rdev);
/* MC and PCIE indirect register accessors */
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
  411. static struct radeon_asic rv515_asic = {
  412. .init = &rv515_init,
  413. .errata = &rv515_errata,
  414. .vram_info = &rv515_vram_info,
  415. .gpu_reset = &rv515_gpu_reset,
  416. .mc_init = &rv515_mc_init,
  417. .mc_fini = &rv515_mc_fini,
  418. .wb_init = &r100_wb_init,
  419. .wb_fini = &r100_wb_fini,
  420. .gart_init = &rv370_pcie_gart_init,
  421. .gart_fini = &rv370_pcie_gart_fini,
  422. .gart_enable = &rv370_pcie_gart_enable,
  423. .gart_disable = &rv370_pcie_gart_disable,
  424. .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
  425. .gart_set_page = &rv370_pcie_gart_set_page,
  426. .cp_init = &r100_cp_init,
  427. .cp_fini = &r100_cp_fini,
  428. .cp_disable = &r100_cp_disable,
  429. .cp_commit = &r100_cp_commit,
  430. .ring_start = &rv515_ring_start,
  431. .ring_test = &r100_ring_test,
  432. .ring_ib_execute = &r100_ring_ib_execute,
  433. .ib_test = &r100_ib_test,
  434. .irq_set = &rs600_irq_set,
  435. .irq_process = &rs600_irq_process,
  436. .get_vblank_counter = &rs600_get_vblank_counter,
  437. .fence_ring_emit = &r300_fence_ring_emit,
  438. .cs_parse = &r300_cs_parse,
  439. .copy_blit = &r100_copy_blit,
  440. .copy_dma = &r300_copy_dma,
  441. .copy = &r100_copy_blit,
  442. .set_engine_clock = &radeon_atom_set_engine_clock,
  443. .set_memory_clock = &radeon_atom_set_memory_clock,
  444. .set_pcie_lanes = &rv370_set_pcie_lanes,
  445. .set_clock_gating = &radeon_atom_set_clock_gating,
  446. .set_surface_reg = r100_set_surface_reg,
  447. .clear_surface_reg = r100_clear_surface_reg,
  448. .bandwidth_update = &rv515_bandwidth_update,
  449. };
/*
 * r520,rv530,rv560,rv570,r580
 */
void r520_errata(struct radeon_device *rdev);
void r520_vram_info(struct radeon_device *rdev);
int r520_mc_init(struct radeon_device *rdev);
void r520_mc_fini(struct radeon_device *rdev);
void r520_bandwidth_update(struct radeon_device *rdev);
  458. static struct radeon_asic r520_asic = {
  459. .init = &rv515_init,
  460. .errata = &r520_errata,
  461. .vram_info = &r520_vram_info,
  462. .gpu_reset = &rv515_gpu_reset,
  463. .mc_init = &r520_mc_init,
  464. .mc_fini = &r520_mc_fini,
  465. .wb_init = &r100_wb_init,
  466. .wb_fini = &r100_wb_fini,
  467. .gart_init = &rv370_pcie_gart_init,
  468. .gart_fini = &rv370_pcie_gart_fini,
  469. .gart_enable = &rv370_pcie_gart_enable,
  470. .gart_disable = &rv370_pcie_gart_disable,
  471. .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
  472. .gart_set_page = &rv370_pcie_gart_set_page,
  473. .cp_init = &r100_cp_init,
  474. .cp_fini = &r100_cp_fini,
  475. .cp_disable = &r100_cp_disable,
  476. .cp_commit = &r100_cp_commit,
  477. .ring_start = &rv515_ring_start,
  478. .ring_test = &r100_ring_test,
  479. .ring_ib_execute = &r100_ring_ib_execute,
  480. .ib_test = &r100_ib_test,
  481. .irq_set = &rs600_irq_set,
  482. .irq_process = &rs600_irq_process,
  483. .get_vblank_counter = &rs600_get_vblank_counter,
  484. .fence_ring_emit = &r300_fence_ring_emit,
  485. .cs_parse = &r300_cs_parse,
  486. .copy_blit = &r100_copy_blit,
  487. .copy_dma = &r300_copy_dma,
  488. .copy = &r100_copy_blit,
  489. .set_engine_clock = &radeon_atom_set_engine_clock,
  490. .set_memory_clock = &radeon_atom_set_memory_clock,
  491. .set_pcie_lanes = &rv370_set_pcie_lanes,
  492. .set_clock_gating = &radeon_atom_set_clock_gating,
  493. .set_surface_reg = r100_set_surface_reg,
  494. .clear_surface_reg = r100_clear_surface_reg,
  495. .bandwidth_update = &r520_bandwidth_update,
  496. };
/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
/* PCIE port indirect register accessors */
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
/* DMA copy of num_pages pages, fenced on completion */
int r600_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
/* surface (tiling) register management */
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
/* IB execution, self-tests and blit copy */
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ib_test(struct radeon_device *rdev);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_pages, struct radeon_fence *fence);
  531. static struct radeon_asic r600_asic = {
  532. .errata = NULL,
  533. .init = &r600_init,
  534. .fini = &r600_fini,
  535. .suspend = &r600_suspend,
  536. .resume = &r600_resume,
  537. .cp_commit = &r600_cp_commit,
  538. .vram_info = NULL,
  539. .gpu_reset = &r600_gpu_reset,
  540. .mc_init = NULL,
  541. .mc_fini = NULL,
  542. .wb_init = &r600_wb_init,
  543. .wb_fini = &r600_wb_fini,
  544. .gart_enable = NULL,
  545. .gart_disable = NULL,
  546. .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
  547. .gart_set_page = &rs600_gart_set_page,
  548. .cp_init = NULL,
  549. .cp_fini = NULL,
  550. .cp_disable = NULL,
  551. .ring_start = NULL,
  552. .ring_test = &r600_ring_test,
  553. .ring_ib_execute = &r600_ring_ib_execute,
  554. .ib_test = &r600_ib_test,
  555. .irq_set = &r600_irq_set,
  556. .irq_process = &r600_irq_process,
  557. .fence_ring_emit = &r600_fence_ring_emit,
  558. .cs_parse = &r600_cs_parse,
  559. .copy_blit = &r600_copy_blit,
  560. .copy_dma = &r600_copy_blit,
  561. .copy = &r600_copy_blit,
  562. .set_engine_clock = &radeon_atom_set_engine_clock,
  563. .set_memory_clock = &radeon_atom_set_memory_clock,
  564. .set_pcie_lanes = NULL,
  565. .set_clock_gating = &radeon_atom_set_clock_gating,
  566. .set_surface_reg = r600_set_surface_reg,
  567. .clear_surface_reg = r600_clear_surface_reg,
  568. .bandwidth_update = &r520_bandwidth_update,
  569. };
/*
 * rv770,rv730,rv710,rv740
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);
  578. static struct radeon_asic rv770_asic = {
  579. .errata = NULL,
  580. .init = &rv770_init,
  581. .fini = &rv770_fini,
  582. .suspend = &rv770_suspend,
  583. .resume = &rv770_resume,
  584. .cp_commit = &r600_cp_commit,
  585. .vram_info = NULL,
  586. .gpu_reset = &rv770_gpu_reset,
  587. .mc_init = NULL,
  588. .mc_fini = NULL,
  589. .wb_init = &r600_wb_init,
  590. .wb_fini = &r600_wb_fini,
  591. .gart_enable = NULL,
  592. .gart_disable = NULL,
  593. .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
  594. .gart_set_page = &rs600_gart_set_page,
  595. .cp_init = NULL,
  596. .cp_fini = NULL,
  597. .cp_disable = NULL,
  598. .ring_start = NULL,
  599. .ring_test = &r600_ring_test,
  600. .ring_ib_execute = &r600_ring_ib_execute,
  601. .ib_test = &r600_ib_test,
  602. .irq_set = &r600_irq_set,
  603. .irq_process = &r600_irq_process,
  604. .fence_ring_emit = &r600_fence_ring_emit,
  605. .cs_parse = &r600_cs_parse,
  606. .copy_blit = &r600_copy_blit,
  607. .copy_dma = &r600_copy_blit,
  608. .copy = &r600_copy_blit,
  609. .set_engine_clock = &radeon_atom_set_engine_clock,
  610. .set_memory_clock = &radeon_atom_set_memory_clock,
  611. .set_pcie_lanes = NULL,
  612. .set_clock_gating = &radeon_atom_set_clock_gating,
  613. .set_surface_reg = r600_set_surface_reg,
  614. .clear_surface_reg = r600_clear_surface_reg,
  615. .bandwidth_update = &r520_bandwidth_update,
  616. };
  617. #endif