/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
  28. #ifndef __RADEON_ASIC_H__
  29. #define __RADEON_ASIC_H__
  30. /*
  31. * common functions
  32. */
  33. uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
  34. void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
  35. void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);
  36. uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
  37. void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
  38. uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
  39. void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
  40. void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
  41. /*
  42. * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
  43. */
  44. extern int r100_init(struct radeon_device *rdev);
  45. extern void r100_fini(struct radeon_device *rdev);
  46. extern int r100_suspend(struct radeon_device *rdev);
  47. extern int r100_resume(struct radeon_device *rdev);
  48. uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
  49. void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  50. void r100_vga_set_state(struct radeon_device *rdev, bool state);
  51. int r100_gpu_reset(struct radeon_device *rdev);
  52. u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
  53. void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
  54. int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
  55. void r100_cp_commit(struct radeon_device *rdev);
  56. void r100_ring_start(struct radeon_device *rdev);
  57. int r100_irq_set(struct radeon_device *rdev);
  58. int r100_irq_process(struct radeon_device *rdev);
  59. void r100_fence_ring_emit(struct radeon_device *rdev,
  60. struct radeon_fence *fence);
  61. int r100_cs_parse(struct radeon_cs_parser *p);
  62. void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  63. uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
  64. int r100_copy_blit(struct radeon_device *rdev,
  65. uint64_t src_offset,
  66. uint64_t dst_offset,
  67. unsigned num_pages,
  68. struct radeon_fence *fence);
  69. int r100_set_surface_reg(struct radeon_device *rdev, int reg,
  70. uint32_t tiling_flags, uint32_t pitch,
  71. uint32_t offset, uint32_t obj_size);
  72. int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
  73. void r100_bandwidth_update(struct radeon_device *rdev);
  74. void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
  75. int r100_ring_test(struct radeon_device *rdev);
  76. static struct radeon_asic r100_asic = {
  77. .init = &r100_init,
  78. .fini = &r100_fini,
  79. .suspend = &r100_suspend,
  80. .resume = &r100_resume,
  81. .vga_set_state = &r100_vga_set_state,
  82. .gpu_reset = &r100_gpu_reset,
  83. .gart_tlb_flush = &r100_pci_gart_tlb_flush,
  84. .gart_set_page = &r100_pci_gart_set_page,
  85. .cp_commit = &r100_cp_commit,
  86. .ring_start = &r100_ring_start,
  87. .ring_test = &r100_ring_test,
  88. .ring_ib_execute = &r100_ring_ib_execute,
  89. .irq_set = &r100_irq_set,
  90. .irq_process = &r100_irq_process,
  91. .get_vblank_counter = &r100_get_vblank_counter,
  92. .fence_ring_emit = &r100_fence_ring_emit,
  93. .cs_parse = &r100_cs_parse,
  94. .copy_blit = &r100_copy_blit,
  95. .copy_dma = NULL,
  96. .copy = &r100_copy_blit,
  97. .get_engine_clock = &radeon_legacy_get_engine_clock,
  98. .set_engine_clock = &radeon_legacy_set_engine_clock,
  99. .get_memory_clock = NULL,
  100. .set_memory_clock = NULL,
  101. .set_pcie_lanes = NULL,
  102. .set_clock_gating = &radeon_legacy_set_clock_gating,
  103. .set_surface_reg = r100_set_surface_reg,
  104. .clear_surface_reg = r100_clear_surface_reg,
  105. .bandwidth_update = &r100_bandwidth_update,
  106. };
  107. /*
  108. * r300,r350,rv350,rv380
  109. */
  110. extern int r300_init(struct radeon_device *rdev);
  111. extern void r300_fini(struct radeon_device *rdev);
  112. extern int r300_suspend(struct radeon_device *rdev);
  113. extern int r300_resume(struct radeon_device *rdev);
  114. extern int r300_gpu_reset(struct radeon_device *rdev);
  115. extern void r300_ring_start(struct radeon_device *rdev);
  116. extern void r300_fence_ring_emit(struct radeon_device *rdev,
  117. struct radeon_fence *fence);
  118. extern int r300_cs_parse(struct radeon_cs_parser *p);
  119. extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
  120. extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
  121. extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
  122. extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  123. extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
  124. extern int r300_copy_dma(struct radeon_device *rdev,
  125. uint64_t src_offset,
  126. uint64_t dst_offset,
  127. unsigned num_pages,
  128. struct radeon_fence *fence);
  129. static struct radeon_asic r300_asic = {
  130. .init = &r300_init,
  131. .fini = &r300_fini,
  132. .suspend = &r300_suspend,
  133. .resume = &r300_resume,
  134. .vga_set_state = &r100_vga_set_state,
  135. .gpu_reset = &r300_gpu_reset,
  136. .gart_tlb_flush = &r100_pci_gart_tlb_flush,
  137. .gart_set_page = &r100_pci_gart_set_page,
  138. .cp_commit = &r100_cp_commit,
  139. .ring_start = &r300_ring_start,
  140. .ring_test = &r100_ring_test,
  141. .ring_ib_execute = &r100_ring_ib_execute,
  142. .irq_set = &r100_irq_set,
  143. .irq_process = &r100_irq_process,
  144. .get_vblank_counter = &r100_get_vblank_counter,
  145. .fence_ring_emit = &r300_fence_ring_emit,
  146. .cs_parse = &r300_cs_parse,
  147. .copy_blit = &r100_copy_blit,
  148. .copy_dma = &r300_copy_dma,
  149. .copy = &r100_copy_blit,
  150. .get_engine_clock = &radeon_legacy_get_engine_clock,
  151. .set_engine_clock = &radeon_legacy_set_engine_clock,
  152. .get_memory_clock = NULL,
  153. .set_memory_clock = NULL,
  154. .set_pcie_lanes = &rv370_set_pcie_lanes,
  155. .set_clock_gating = &radeon_legacy_set_clock_gating,
  156. .set_surface_reg = r100_set_surface_reg,
  157. .clear_surface_reg = r100_clear_surface_reg,
  158. .bandwidth_update = &r100_bandwidth_update,
  159. };
  160. /*
  161. * r420,r423,rv410
  162. */
  163. extern int r420_init(struct radeon_device *rdev);
  164. extern void r420_fini(struct radeon_device *rdev);
  165. extern int r420_suspend(struct radeon_device *rdev);
  166. extern int r420_resume(struct radeon_device *rdev);
  167. static struct radeon_asic r420_asic = {
  168. .init = &r420_init,
  169. .fini = &r420_fini,
  170. .suspend = &r420_suspend,
  171. .resume = &r420_resume,
  172. .vga_set_state = &r100_vga_set_state,
  173. .gpu_reset = &r300_gpu_reset,
  174. .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
  175. .gart_set_page = &rv370_pcie_gart_set_page,
  176. .cp_commit = &r100_cp_commit,
  177. .ring_start = &r300_ring_start,
  178. .ring_test = &r100_ring_test,
  179. .ring_ib_execute = &r100_ring_ib_execute,
  180. .irq_set = &r100_irq_set,
  181. .irq_process = &r100_irq_process,
  182. .get_vblank_counter = &r100_get_vblank_counter,
  183. .fence_ring_emit = &r300_fence_ring_emit,
  184. .cs_parse = &r300_cs_parse,
  185. .copy_blit = &r100_copy_blit,
  186. .copy_dma = &r300_copy_dma,
  187. .copy = &r100_copy_blit,
  188. .get_engine_clock = &radeon_atom_get_engine_clock,
  189. .set_engine_clock = &radeon_atom_set_engine_clock,
  190. .get_memory_clock = &radeon_atom_get_memory_clock,
  191. .set_memory_clock = &radeon_atom_set_memory_clock,
  192. .set_pcie_lanes = &rv370_set_pcie_lanes,
  193. .set_clock_gating = &radeon_atom_set_clock_gating,
  194. .set_surface_reg = r100_set_surface_reg,
  195. .clear_surface_reg = r100_clear_surface_reg,
  196. .bandwidth_update = &r100_bandwidth_update,
  197. };
  198. /*
  199. * rs400,rs480
  200. */
  201. extern int rs400_init(struct radeon_device *rdev);
  202. extern void rs400_fini(struct radeon_device *rdev);
  203. extern int rs400_suspend(struct radeon_device *rdev);
  204. extern int rs400_resume(struct radeon_device *rdev);
  205. void rs400_gart_tlb_flush(struct radeon_device *rdev);
  206. int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
  207. uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
  208. void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  209. static struct radeon_asic rs400_asic = {
  210. .init = &rs400_init,
  211. .fini = &rs400_fini,
  212. .suspend = &rs400_suspend,
  213. .resume = &rs400_resume,
  214. .vga_set_state = &r100_vga_set_state,
  215. .gpu_reset = &r300_gpu_reset,
  216. .gart_tlb_flush = &rs400_gart_tlb_flush,
  217. .gart_set_page = &rs400_gart_set_page,
  218. .cp_commit = &r100_cp_commit,
  219. .ring_start = &r300_ring_start,
  220. .ring_test = &r100_ring_test,
  221. .ring_ib_execute = &r100_ring_ib_execute,
  222. .irq_set = &r100_irq_set,
  223. .irq_process = &r100_irq_process,
  224. .get_vblank_counter = &r100_get_vblank_counter,
  225. .fence_ring_emit = &r300_fence_ring_emit,
  226. .cs_parse = &r300_cs_parse,
  227. .copy_blit = &r100_copy_blit,
  228. .copy_dma = &r300_copy_dma,
  229. .copy = &r100_copy_blit,
  230. .get_engine_clock = &radeon_legacy_get_engine_clock,
  231. .set_engine_clock = &radeon_legacy_set_engine_clock,
  232. .get_memory_clock = NULL,
  233. .set_memory_clock = NULL,
  234. .set_pcie_lanes = NULL,
  235. .set_clock_gating = &radeon_legacy_set_clock_gating,
  236. .set_surface_reg = r100_set_surface_reg,
  237. .clear_surface_reg = r100_clear_surface_reg,
  238. .bandwidth_update = &r100_bandwidth_update,
  239. };
  240. /*
  241. * rs600.
  242. */
  243. extern int rs600_init(struct radeon_device *rdev);
  244. extern void rs600_fini(struct radeon_device *rdev);
  245. extern int rs600_suspend(struct radeon_device *rdev);
  246. extern int rs600_resume(struct radeon_device *rdev);
  247. int rs600_irq_set(struct radeon_device *rdev);
  248. int rs600_irq_process(struct radeon_device *rdev);
  249. u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
  250. void rs600_gart_tlb_flush(struct radeon_device *rdev);
  251. int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
  252. uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
  253. void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  254. void rs600_bandwidth_update(struct radeon_device *rdev);
  255. static struct radeon_asic rs600_asic = {
  256. .init = &rs600_init,
  257. .fini = &rs600_fini,
  258. .suspend = &rs600_suspend,
  259. .resume = &rs600_resume,
  260. .vga_set_state = &r100_vga_set_state,
  261. .gpu_reset = &r300_gpu_reset,
  262. .gart_tlb_flush = &rs600_gart_tlb_flush,
  263. .gart_set_page = &rs600_gart_set_page,
  264. .cp_commit = &r100_cp_commit,
  265. .ring_start = &r300_ring_start,
  266. .ring_test = &r100_ring_test,
  267. .ring_ib_execute = &r100_ring_ib_execute,
  268. .irq_set = &rs600_irq_set,
  269. .irq_process = &rs600_irq_process,
  270. .get_vblank_counter = &rs600_get_vblank_counter,
  271. .fence_ring_emit = &r300_fence_ring_emit,
  272. .cs_parse = &r300_cs_parse,
  273. .copy_blit = &r100_copy_blit,
  274. .copy_dma = &r300_copy_dma,
  275. .copy = &r100_copy_blit,
  276. .get_engine_clock = &radeon_atom_get_engine_clock,
  277. .set_engine_clock = &radeon_atom_set_engine_clock,
  278. .get_memory_clock = &radeon_atom_get_memory_clock,
  279. .set_memory_clock = &radeon_atom_set_memory_clock,
  280. .set_pcie_lanes = NULL,
  281. .set_clock_gating = &radeon_atom_set_clock_gating,
  282. .bandwidth_update = &rs600_bandwidth_update,
  283. };
  284. /*
  285. * rs690,rs740
  286. */
  287. int rs690_init(struct radeon_device *rdev);
  288. void rs690_fini(struct radeon_device *rdev);
  289. int rs690_resume(struct radeon_device *rdev);
  290. int rs690_suspend(struct radeon_device *rdev);
  291. uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
  292. void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  293. void rs690_bandwidth_update(struct radeon_device *rdev);
  294. static struct radeon_asic rs690_asic = {
  295. .init = &rs690_init,
  296. .fini = &rs690_fini,
  297. .suspend = &rs690_suspend,
  298. .resume = &rs690_resume,
  299. .vga_set_state = &r100_vga_set_state,
  300. .gpu_reset = &r300_gpu_reset,
  301. .gart_tlb_flush = &rs400_gart_tlb_flush,
  302. .gart_set_page = &rs400_gart_set_page,
  303. .cp_commit = &r100_cp_commit,
  304. .ring_start = &r300_ring_start,
  305. .ring_test = &r100_ring_test,
  306. .ring_ib_execute = &r100_ring_ib_execute,
  307. .irq_set = &rs600_irq_set,
  308. .irq_process = &rs600_irq_process,
  309. .get_vblank_counter = &rs600_get_vblank_counter,
  310. .fence_ring_emit = &r300_fence_ring_emit,
  311. .cs_parse = &r300_cs_parse,
  312. .copy_blit = &r100_copy_blit,
  313. .copy_dma = &r300_copy_dma,
  314. .copy = &r300_copy_dma,
  315. .get_engine_clock = &radeon_atom_get_engine_clock,
  316. .set_engine_clock = &radeon_atom_set_engine_clock,
  317. .get_memory_clock = &radeon_atom_get_memory_clock,
  318. .set_memory_clock = &radeon_atom_set_memory_clock,
  319. .set_pcie_lanes = NULL,
  320. .set_clock_gating = &radeon_atom_set_clock_gating,
  321. .set_surface_reg = r100_set_surface_reg,
  322. .clear_surface_reg = r100_clear_surface_reg,
  323. .bandwidth_update = &rs690_bandwidth_update,
  324. };
  325. /*
  326. * rv515
  327. */
  328. int rv515_init(struct radeon_device *rdev);
  329. void rv515_fini(struct radeon_device *rdev);
  330. int rv515_gpu_reset(struct radeon_device *rdev);
  331. uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
  332. void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  333. void rv515_ring_start(struct radeon_device *rdev);
  334. uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
  335. void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  336. void rv515_bandwidth_update(struct radeon_device *rdev);
  337. int rv515_resume(struct radeon_device *rdev);
  338. int rv515_suspend(struct radeon_device *rdev);
  339. static struct radeon_asic rv515_asic = {
  340. .init = &rv515_init,
  341. .fini = &rv515_fini,
  342. .suspend = &rv515_suspend,
  343. .resume = &rv515_resume,
  344. .vga_set_state = &r100_vga_set_state,
  345. .gpu_reset = &rv515_gpu_reset,
  346. .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
  347. .gart_set_page = &rv370_pcie_gart_set_page,
  348. .cp_commit = &r100_cp_commit,
  349. .ring_start = &rv515_ring_start,
  350. .ring_test = &r100_ring_test,
  351. .ring_ib_execute = &r100_ring_ib_execute,
  352. .irq_set = &rs600_irq_set,
  353. .irq_process = &rs600_irq_process,
  354. .get_vblank_counter = &rs600_get_vblank_counter,
  355. .fence_ring_emit = &r300_fence_ring_emit,
  356. .cs_parse = &r300_cs_parse,
  357. .copy_blit = &r100_copy_blit,
  358. .copy_dma = &r300_copy_dma,
  359. .copy = &r100_copy_blit,
  360. .get_engine_clock = &radeon_atom_get_engine_clock,
  361. .set_engine_clock = &radeon_atom_set_engine_clock,
  362. .get_memory_clock = &radeon_atom_get_memory_clock,
  363. .set_memory_clock = &radeon_atom_set_memory_clock,
  364. .set_pcie_lanes = &rv370_set_pcie_lanes,
  365. .set_clock_gating = &radeon_atom_set_clock_gating,
  366. .set_surface_reg = r100_set_surface_reg,
  367. .clear_surface_reg = r100_clear_surface_reg,
  368. .bandwidth_update = &rv515_bandwidth_update,
  369. };
  370. /*
  371. * r520,rv530,rv560,rv570,r580
  372. */
  373. int r520_init(struct radeon_device *rdev);
  374. int r520_resume(struct radeon_device *rdev);
  375. static struct radeon_asic r520_asic = {
  376. .init = &r520_init,
  377. .fini = &rv515_fini,
  378. .suspend = &rv515_suspend,
  379. .resume = &r520_resume,
  380. .vga_set_state = &r100_vga_set_state,
  381. .gpu_reset = &rv515_gpu_reset,
  382. .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
  383. .gart_set_page = &rv370_pcie_gart_set_page,
  384. .cp_commit = &r100_cp_commit,
  385. .ring_start = &rv515_ring_start,
  386. .ring_test = &r100_ring_test,
  387. .ring_ib_execute = &r100_ring_ib_execute,
  388. .irq_set = &rs600_irq_set,
  389. .irq_process = &rs600_irq_process,
  390. .get_vblank_counter = &rs600_get_vblank_counter,
  391. .fence_ring_emit = &r300_fence_ring_emit,
  392. .cs_parse = &r300_cs_parse,
  393. .copy_blit = &r100_copy_blit,
  394. .copy_dma = &r300_copy_dma,
  395. .copy = &r100_copy_blit,
  396. .get_engine_clock = &radeon_atom_get_engine_clock,
  397. .set_engine_clock = &radeon_atom_set_engine_clock,
  398. .get_memory_clock = &radeon_atom_get_memory_clock,
  399. .set_memory_clock = &radeon_atom_set_memory_clock,
  400. .set_pcie_lanes = &rv370_set_pcie_lanes,
  401. .set_clock_gating = &radeon_atom_set_clock_gating,
  402. .set_surface_reg = r100_set_surface_reg,
  403. .clear_surface_reg = r100_clear_surface_reg,
  404. .bandwidth_update = &rv515_bandwidth_update,
  405. };
  406. /*
  407. * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
  408. */
  409. int r600_init(struct radeon_device *rdev);
  410. void r600_fini(struct radeon_device *rdev);
  411. int r600_suspend(struct radeon_device *rdev);
  412. int r600_resume(struct radeon_device *rdev);
  413. void r600_vga_set_state(struct radeon_device *rdev, bool state);
  414. int r600_wb_init(struct radeon_device *rdev);
  415. void r600_wb_fini(struct radeon_device *rdev);
  416. void r600_cp_commit(struct radeon_device *rdev);
  417. void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
  418. uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
  419. void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  420. int r600_cs_parse(struct radeon_cs_parser *p);
  421. void r600_fence_ring_emit(struct radeon_device *rdev,
  422. struct radeon_fence *fence);
  423. int r600_copy_dma(struct radeon_device *rdev,
  424. uint64_t src_offset,
  425. uint64_t dst_offset,
  426. unsigned num_pages,
  427. struct radeon_fence *fence);
  428. int r600_irq_process(struct radeon_device *rdev);
  429. int r600_irq_set(struct radeon_device *rdev);
  430. int r600_gpu_reset(struct radeon_device *rdev);
  431. int r600_set_surface_reg(struct radeon_device *rdev, int reg,
  432. uint32_t tiling_flags, uint32_t pitch,
  433. uint32_t offset, uint32_t obj_size);
  434. int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
  435. void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
  436. int r600_ring_test(struct radeon_device *rdev);
  437. int r600_copy_blit(struct radeon_device *rdev,
  438. uint64_t src_offset, uint64_t dst_offset,
  439. unsigned num_pages, struct radeon_fence *fence);
  440. static struct radeon_asic r600_asic = {
  441. .init = &r600_init,
  442. .fini = &r600_fini,
  443. .suspend = &r600_suspend,
  444. .resume = &r600_resume,
  445. .cp_commit = &r600_cp_commit,
  446. .vga_set_state = &r600_vga_set_state,
  447. .gpu_reset = &r600_gpu_reset,
  448. .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
  449. .gart_set_page = &rs600_gart_set_page,
  450. .ring_test = &r600_ring_test,
  451. .ring_ib_execute = &r600_ring_ib_execute,
  452. .irq_set = &r600_irq_set,
  453. .irq_process = &r600_irq_process,
  454. .fence_ring_emit = &r600_fence_ring_emit,
  455. .cs_parse = &r600_cs_parse,
  456. .copy_blit = &r600_copy_blit,
  457. .copy_dma = &r600_copy_blit,
  458. .copy = &r600_copy_blit,
  459. .get_engine_clock = &radeon_atom_get_engine_clock,
  460. .set_engine_clock = &radeon_atom_set_engine_clock,
  461. .get_memory_clock = &radeon_atom_get_memory_clock,
  462. .set_memory_clock = &radeon_atom_set_memory_clock,
  463. .set_pcie_lanes = NULL,
  464. .set_clock_gating = &radeon_atom_set_clock_gating,
  465. .set_surface_reg = r600_set_surface_reg,
  466. .clear_surface_reg = r600_clear_surface_reg,
  467. .bandwidth_update = &rv515_bandwidth_update,
  468. };
  469. /*
  470. * rv770,rv730,rv710,rv740
  471. */
  472. int rv770_init(struct radeon_device *rdev);
  473. void rv770_fini(struct radeon_device *rdev);
  474. int rv770_suspend(struct radeon_device *rdev);
  475. int rv770_resume(struct radeon_device *rdev);
  476. int rv770_gpu_reset(struct radeon_device *rdev);
  477. static struct radeon_asic rv770_asic = {
  478. .init = &rv770_init,
  479. .fini = &rv770_fini,
  480. .suspend = &rv770_suspend,
  481. .resume = &rv770_resume,
  482. .cp_commit = &r600_cp_commit,
  483. .gpu_reset = &rv770_gpu_reset,
  484. .vga_set_state = &r600_vga_set_state,
  485. .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
  486. .gart_set_page = &rs600_gart_set_page,
  487. .ring_test = &r600_ring_test,
  488. .ring_ib_execute = &r600_ring_ib_execute,
  489. .irq_set = &r600_irq_set,
  490. .irq_process = &r600_irq_process,
  491. .fence_ring_emit = &r600_fence_ring_emit,
  492. .cs_parse = &r600_cs_parse,
  493. .copy_blit = &r600_copy_blit,
  494. .copy_dma = &r600_copy_blit,
  495. .copy = &r600_copy_blit,
  496. .get_engine_clock = &radeon_atom_get_engine_clock,
  497. .set_engine_clock = &radeon_atom_set_engine_clock,
  498. .get_memory_clock = &radeon_atom_get_memory_clock,
  499. .set_memory_clock = &radeon_atom_set_memory_clock,
  500. .set_pcie_lanes = NULL,
  501. .set_clock_gating = &radeon_atom_set_clock_gating,
  502. .set_surface_reg = r600_set_surface_reg,
  503. .clear_surface_reg = r600_clear_surface_reg,
  504. .bandwidth_update = &rv515_bandwidth_update,
  505. };
  506. #endif