  1. /*
  2. * Copyright 2008 Advanced Micro Devices, Inc.
  3. * Copyright 2008 Red Hat Inc.
  4. * Copyright 2009 Jerome Glisse.
  5. *
  6. * Permission is hereby granted, free of charge, to any person obtaining a
  7. * copy of this software and associated documentation files (the "Software"),
  8. * to deal in the Software without restriction, including without limitation
  9. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  10. * and/or sell copies of the Software, and to permit persons to whom the
  11. * Software is furnished to do so, subject to the following conditions:
  12. *
  13. * The above copyright notice and this permission notice shall be included in
  14. * all copies or substantial portions of the Software.
  15. *
  16. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  17. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  18. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  19. * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  20. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  21. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  22. * OTHER DEALINGS IN THE SOFTWARE.
  23. *
  24. * Authors: Dave Airlie
  25. * Alex Deucher
  26. * Jerome Glisse
  27. */
  28. #ifndef __RADEON_ASIC_H__
  29. #define __RADEON_ASIC_H__
/*
 * common functions
 */
/* Legacy clock helpers, used by pre-ATOM-BIOS parts (r100/r300 family tables
 * below select these). */
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);
/* ATOM-based clock helpers, selected by the newer ASIC tables below.
 * NOTE(review): presumably routed through the ATOM BIOS interpreter --
 * confirm against the atombios code. */
void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
  38. /*
  39. * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
  40. */
  41. extern int r100_init(struct radeon_device *rdev);
  42. extern void r100_fini(struct radeon_device *rdev);
  43. extern int r100_suspend(struct radeon_device *rdev);
  44. extern int r100_resume(struct radeon_device *rdev);
  45. uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
  46. void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  47. int r100_gpu_reset(struct radeon_device *rdev);
  48. u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
  49. void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
  50. int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
  51. void r100_cp_commit(struct radeon_device *rdev);
  52. void r100_ring_start(struct radeon_device *rdev);
  53. int r100_irq_set(struct radeon_device *rdev);
  54. int r100_irq_process(struct radeon_device *rdev);
  55. void r100_fence_ring_emit(struct radeon_device *rdev,
  56. struct radeon_fence *fence);
  57. int r100_cs_parse(struct radeon_cs_parser *p);
  58. void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  59. uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
  60. int r100_copy_blit(struct radeon_device *rdev,
  61. uint64_t src_offset,
  62. uint64_t dst_offset,
  63. unsigned num_pages,
  64. struct radeon_fence *fence);
  65. int r100_set_surface_reg(struct radeon_device *rdev, int reg,
  66. uint32_t tiling_flags, uint32_t pitch,
  67. uint32_t offset, uint32_t obj_size);
  68. int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
  69. void r100_bandwidth_update(struct radeon_device *rdev);
  70. void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
  71. int r100_ib_test(struct radeon_device *rdev);
  72. int r100_ring_test(struct radeon_device *rdev);
  73. static struct radeon_asic r100_asic = {
  74. .init = &r100_init,
  75. .fini = &r100_fini,
  76. .suspend = &r100_suspend,
  77. .resume = &r100_resume,
  78. .errata = NULL,
  79. .vram_info = NULL,
  80. .gpu_reset = &r100_gpu_reset,
  81. .mc_init = NULL,
  82. .mc_fini = NULL,
  83. .wb_init = NULL,
  84. .wb_fini = NULL,
  85. .gart_init = NULL,
  86. .gart_fini = NULL,
  87. .gart_enable = NULL,
  88. .gart_disable = NULL,
  89. .gart_tlb_flush = &r100_pci_gart_tlb_flush,
  90. .gart_set_page = &r100_pci_gart_set_page,
  91. .cp_init = NULL,
  92. .cp_fini = NULL,
  93. .cp_disable = NULL,
  94. .cp_commit = &r100_cp_commit,
  95. .ring_start = &r100_ring_start,
  96. .ring_test = &r100_ring_test,
  97. .ring_ib_execute = &r100_ring_ib_execute,
  98. .ib_test = NULL,
  99. .irq_set = &r100_irq_set,
  100. .irq_process = &r100_irq_process,
  101. .get_vblank_counter = &r100_get_vblank_counter,
  102. .fence_ring_emit = &r100_fence_ring_emit,
  103. .cs_parse = &r100_cs_parse,
  104. .copy_blit = &r100_copy_blit,
  105. .copy_dma = NULL,
  106. .copy = &r100_copy_blit,
  107. .set_engine_clock = &radeon_legacy_set_engine_clock,
  108. .set_memory_clock = NULL,
  109. .set_pcie_lanes = NULL,
  110. .set_clock_gating = &radeon_legacy_set_clock_gating,
  111. .set_surface_reg = r100_set_surface_reg,
  112. .clear_surface_reg = r100_clear_surface_reg,
  113. .bandwidth_update = &r100_bandwidth_update,
  114. };
  115. /*
  116. * r300,r350,rv350,rv380
  117. */
  118. extern int r300_init(struct radeon_device *rdev);
  119. extern void r300_fini(struct radeon_device *rdev);
  120. extern int r300_suspend(struct radeon_device *rdev);
  121. extern int r300_resume(struct radeon_device *rdev);
  122. extern int r300_gpu_reset(struct radeon_device *rdev);
  123. extern void r300_ring_start(struct radeon_device *rdev);
  124. extern void r300_fence_ring_emit(struct radeon_device *rdev,
  125. struct radeon_fence *fence);
  126. extern int r300_cs_parse(struct radeon_cs_parser *p);
  127. extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
  128. extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
  129. extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
  130. extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  131. extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
  132. extern int r300_copy_dma(struct radeon_device *rdev,
  133. uint64_t src_offset,
  134. uint64_t dst_offset,
  135. unsigned num_pages,
  136. struct radeon_fence *fence);
  137. static struct radeon_asic r300_asic = {
  138. .init = &r300_init,
  139. .fini = &r300_fini,
  140. .suspend = &r300_suspend,
  141. .resume = &r300_resume,
  142. .errata = NULL,
  143. .vram_info = NULL,
  144. .gpu_reset = &r300_gpu_reset,
  145. .mc_init = NULL,
  146. .mc_fini = NULL,
  147. .wb_init = NULL,
  148. .wb_fini = NULL,
  149. .gart_init = NULL,
  150. .gart_fini = NULL,
  151. .gart_enable = NULL,
  152. .gart_disable = NULL,
  153. .gart_tlb_flush = &r100_pci_gart_tlb_flush,
  154. .gart_set_page = &r100_pci_gart_set_page,
  155. .cp_init = NULL,
  156. .cp_fini = NULL,
  157. .cp_disable = NULL,
  158. .cp_commit = &r100_cp_commit,
  159. .ring_start = &r300_ring_start,
  160. .ring_test = &r100_ring_test,
  161. .ring_ib_execute = &r100_ring_ib_execute,
  162. .ib_test = NULL,
  163. .irq_set = &r100_irq_set,
  164. .irq_process = &r100_irq_process,
  165. .get_vblank_counter = &r100_get_vblank_counter,
  166. .fence_ring_emit = &r300_fence_ring_emit,
  167. .cs_parse = &r300_cs_parse,
  168. .copy_blit = &r100_copy_blit,
  169. .copy_dma = &r300_copy_dma,
  170. .copy = &r100_copy_blit,
  171. .set_engine_clock = &radeon_legacy_set_engine_clock,
  172. .set_memory_clock = NULL,
  173. .set_pcie_lanes = &rv370_set_pcie_lanes,
  174. .set_clock_gating = &radeon_legacy_set_clock_gating,
  175. .set_surface_reg = r100_set_surface_reg,
  176. .clear_surface_reg = r100_clear_surface_reg,
  177. .bandwidth_update = &r100_bandwidth_update,
  178. };
  179. /*
  180. * r420,r423,rv410
  181. */
  182. extern int r420_init(struct radeon_device *rdev);
  183. extern void r420_fini(struct radeon_device *rdev);
  184. extern int r420_suspend(struct radeon_device *rdev);
  185. extern int r420_resume(struct radeon_device *rdev);
  186. static struct radeon_asic r420_asic = {
  187. .init = &r420_init,
  188. .fini = &r420_fini,
  189. .suspend = &r420_suspend,
  190. .resume = &r420_resume,
  191. .errata = NULL,
  192. .vram_info = NULL,
  193. .gpu_reset = &r300_gpu_reset,
  194. .mc_init = NULL,
  195. .mc_fini = NULL,
  196. .wb_init = NULL,
  197. .wb_fini = NULL,
  198. .gart_enable = NULL,
  199. .gart_disable = NULL,
  200. .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
  201. .gart_set_page = &rv370_pcie_gart_set_page,
  202. .cp_init = NULL,
  203. .cp_fini = NULL,
  204. .cp_disable = NULL,
  205. .cp_commit = &r100_cp_commit,
  206. .ring_start = &r300_ring_start,
  207. .ring_test = &r100_ring_test,
  208. .ring_ib_execute = &r100_ring_ib_execute,
  209. .ib_test = NULL,
  210. .irq_set = &r100_irq_set,
  211. .irq_process = &r100_irq_process,
  212. .get_vblank_counter = &r100_get_vblank_counter,
  213. .fence_ring_emit = &r300_fence_ring_emit,
  214. .cs_parse = &r300_cs_parse,
  215. .copy_blit = &r100_copy_blit,
  216. .copy_dma = &r300_copy_dma,
  217. .copy = &r100_copy_blit,
  218. .set_engine_clock = &radeon_atom_set_engine_clock,
  219. .set_memory_clock = &radeon_atom_set_memory_clock,
  220. .set_pcie_lanes = &rv370_set_pcie_lanes,
  221. .set_clock_gating = &radeon_atom_set_clock_gating,
  222. .set_surface_reg = r100_set_surface_reg,
  223. .clear_surface_reg = r100_clear_surface_reg,
  224. .bandwidth_update = &r100_bandwidth_update,
  225. };
  226. /*
  227. * rs400,rs480
  228. */
  229. extern int rs400_init(struct radeon_device *rdev);
  230. extern void rs400_fini(struct radeon_device *rdev);
  231. extern int rs400_suspend(struct radeon_device *rdev);
  232. extern int rs400_resume(struct radeon_device *rdev);
  233. void rs400_gart_tlb_flush(struct radeon_device *rdev);
  234. int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
  235. uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
  236. void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  237. static struct radeon_asic rs400_asic = {
  238. .init = &rs400_init,
  239. .fini = &rs400_fini,
  240. .suspend = &rs400_suspend,
  241. .resume = &rs400_resume,
  242. .errata = NULL,
  243. .vram_info = NULL,
  244. .gpu_reset = &r300_gpu_reset,
  245. .mc_init = NULL,
  246. .mc_fini = NULL,
  247. .wb_init = NULL,
  248. .wb_fini = NULL,
  249. .gart_init = NULL,
  250. .gart_fini = NULL,
  251. .gart_enable = NULL,
  252. .gart_disable = NULL,
  253. .gart_tlb_flush = &rs400_gart_tlb_flush,
  254. .gart_set_page = &rs400_gart_set_page,
  255. .cp_init = NULL,
  256. .cp_fini = NULL,
  257. .cp_disable = NULL,
  258. .cp_commit = &r100_cp_commit,
  259. .ring_start = &r300_ring_start,
  260. .ring_test = &r100_ring_test,
  261. .ring_ib_execute = &r100_ring_ib_execute,
  262. .ib_test = NULL,
  263. .irq_set = &r100_irq_set,
  264. .irq_process = &r100_irq_process,
  265. .get_vblank_counter = &r100_get_vblank_counter,
  266. .fence_ring_emit = &r300_fence_ring_emit,
  267. .cs_parse = &r300_cs_parse,
  268. .copy_blit = &r100_copy_blit,
  269. .copy_dma = &r300_copy_dma,
  270. .copy = &r100_copy_blit,
  271. .set_engine_clock = &radeon_legacy_set_engine_clock,
  272. .set_memory_clock = NULL,
  273. .set_pcie_lanes = NULL,
  274. .set_clock_gating = &radeon_legacy_set_clock_gating,
  275. .set_surface_reg = r100_set_surface_reg,
  276. .clear_surface_reg = r100_clear_surface_reg,
  277. .bandwidth_update = &r100_bandwidth_update,
  278. };
  279. /*
  280. * rs600.
  281. */
  282. int rs600_init(struct radeon_device *rdev);
  283. void rs600_errata(struct radeon_device *rdev);
  284. void rs600_vram_info(struct radeon_device *rdev);
  285. int rs600_mc_init(struct radeon_device *rdev);
  286. void rs600_mc_fini(struct radeon_device *rdev);
  287. int rs600_irq_set(struct radeon_device *rdev);
  288. int rs600_irq_process(struct radeon_device *rdev);
  289. u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
  290. int rs600_gart_init(struct radeon_device *rdev);
  291. void rs600_gart_fini(struct radeon_device *rdev);
  292. int rs600_gart_enable(struct radeon_device *rdev);
  293. void rs600_gart_disable(struct radeon_device *rdev);
  294. void rs600_gart_tlb_flush(struct radeon_device *rdev);
  295. int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
  296. uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
  297. void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  298. void rs600_bandwidth_update(struct radeon_device *rdev);
  299. static struct radeon_asic rs600_asic = {
  300. .init = &rs600_init,
  301. .errata = &rs600_errata,
  302. .vram_info = &rs600_vram_info,
  303. .gpu_reset = &r300_gpu_reset,
  304. .mc_init = &rs600_mc_init,
  305. .mc_fini = &rs600_mc_fini,
  306. .wb_init = &r100_wb_init,
  307. .wb_fini = &r100_wb_fini,
  308. .gart_init = &rs600_gart_init,
  309. .gart_fini = &rs600_gart_fini,
  310. .gart_enable = &rs600_gart_enable,
  311. .gart_disable = &rs600_gart_disable,
  312. .gart_tlb_flush = &rs600_gart_tlb_flush,
  313. .gart_set_page = &rs600_gart_set_page,
  314. .cp_init = &r100_cp_init,
  315. .cp_fini = &r100_cp_fini,
  316. .cp_disable = &r100_cp_disable,
  317. .cp_commit = &r100_cp_commit,
  318. .ring_start = &r300_ring_start,
  319. .ring_test = &r100_ring_test,
  320. .ring_ib_execute = &r100_ring_ib_execute,
  321. .ib_test = &r100_ib_test,
  322. .irq_set = &rs600_irq_set,
  323. .irq_process = &rs600_irq_process,
  324. .get_vblank_counter = &rs600_get_vblank_counter,
  325. .fence_ring_emit = &r300_fence_ring_emit,
  326. .cs_parse = &r300_cs_parse,
  327. .copy_blit = &r100_copy_blit,
  328. .copy_dma = &r300_copy_dma,
  329. .copy = &r100_copy_blit,
  330. .set_engine_clock = &radeon_atom_set_engine_clock,
  331. .set_memory_clock = &radeon_atom_set_memory_clock,
  332. .set_pcie_lanes = NULL,
  333. .set_clock_gating = &radeon_atom_set_clock_gating,
  334. .bandwidth_update = &rs600_bandwidth_update,
  335. };
  336. /*
  337. * rs690,rs740
  338. */
  339. int rs690_init(struct radeon_device *rdev);
  340. void rs690_fini(struct radeon_device *rdev);
  341. int rs690_resume(struct radeon_device *rdev);
  342. int rs690_suspend(struct radeon_device *rdev);
  343. uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
  344. void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  345. void rs690_bandwidth_update(struct radeon_device *rdev);
  346. static struct radeon_asic rs690_asic = {
  347. .init = &rs690_init,
  348. .fini = &rs690_fini,
  349. .suspend = &rs690_suspend,
  350. .resume = &rs690_resume,
  351. .errata = NULL,
  352. .vram_info = NULL,
  353. .gpu_reset = &r300_gpu_reset,
  354. .mc_init = NULL,
  355. .mc_fini = NULL,
  356. .wb_init = NULL,
  357. .wb_fini = NULL,
  358. .gart_init = NULL,
  359. .gart_fini = NULL,
  360. .gart_enable = NULL,
  361. .gart_disable = NULL,
  362. .gart_tlb_flush = &rs400_gart_tlb_flush,
  363. .gart_set_page = &rs400_gart_set_page,
  364. .cp_init = NULL,
  365. .cp_fini = NULL,
  366. .cp_disable = NULL,
  367. .cp_commit = &r100_cp_commit,
  368. .ring_start = &r300_ring_start,
  369. .ring_test = &r100_ring_test,
  370. .ring_ib_execute = &r100_ring_ib_execute,
  371. .ib_test = NULL,
  372. .irq_set = &rs600_irq_set,
  373. .irq_process = &rs600_irq_process,
  374. .get_vblank_counter = &rs600_get_vblank_counter,
  375. .fence_ring_emit = &r300_fence_ring_emit,
  376. .cs_parse = &r300_cs_parse,
  377. .copy_blit = &r100_copy_blit,
  378. .copy_dma = &r300_copy_dma,
  379. .copy = &r300_copy_dma,
  380. .set_engine_clock = &radeon_atom_set_engine_clock,
  381. .set_memory_clock = &radeon_atom_set_memory_clock,
  382. .set_pcie_lanes = NULL,
  383. .set_clock_gating = &radeon_atom_set_clock_gating,
  384. .set_surface_reg = r100_set_surface_reg,
  385. .clear_surface_reg = r100_clear_surface_reg,
  386. .bandwidth_update = &rs690_bandwidth_update,
  387. };
  388. /*
  389. * rv515
  390. */
  391. int rv515_init(struct radeon_device *rdev);
  392. void rv515_fini(struct radeon_device *rdev);
  393. int rv515_gpu_reset(struct radeon_device *rdev);
  394. uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
  395. void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  396. void rv515_ring_start(struct radeon_device *rdev);
  397. uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
  398. void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  399. void rv515_bandwidth_update(struct radeon_device *rdev);
  400. int rv515_resume(struct radeon_device *rdev);
  401. int rv515_suspend(struct radeon_device *rdev);
  402. static struct radeon_asic rv515_asic = {
  403. .init = &rv515_init,
  404. .fini = &rv515_fini,
  405. .suspend = &rv515_suspend,
  406. .resume = &rv515_resume,
  407. .errata = NULL,
  408. .vram_info = NULL,
  409. .gpu_reset = &rv515_gpu_reset,
  410. .mc_init = NULL,
  411. .mc_fini = NULL,
  412. .wb_init = NULL,
  413. .wb_fini = NULL,
  414. .gart_init = &rv370_pcie_gart_init,
  415. .gart_fini = &rv370_pcie_gart_fini,
  416. .gart_enable = NULL,
  417. .gart_disable = NULL,
  418. .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
  419. .gart_set_page = &rv370_pcie_gart_set_page,
  420. .cp_init = NULL,
  421. .cp_fini = NULL,
  422. .cp_disable = NULL,
  423. .cp_commit = &r100_cp_commit,
  424. .ring_start = &rv515_ring_start,
  425. .ring_test = &r100_ring_test,
  426. .ring_ib_execute = &r100_ring_ib_execute,
  427. .ib_test = NULL,
  428. .irq_set = &rs600_irq_set,
  429. .irq_process = &rs600_irq_process,
  430. .get_vblank_counter = &rs600_get_vblank_counter,
  431. .fence_ring_emit = &r300_fence_ring_emit,
  432. .cs_parse = &r300_cs_parse,
  433. .copy_blit = &r100_copy_blit,
  434. .copy_dma = &r300_copy_dma,
  435. .copy = &r100_copy_blit,
  436. .set_engine_clock = &radeon_atom_set_engine_clock,
  437. .set_memory_clock = &radeon_atom_set_memory_clock,
  438. .set_pcie_lanes = &rv370_set_pcie_lanes,
  439. .set_clock_gating = &radeon_atom_set_clock_gating,
  440. .set_surface_reg = r100_set_surface_reg,
  441. .clear_surface_reg = r100_clear_surface_reg,
  442. .bandwidth_update = &rv515_bandwidth_update,
  443. };
  444. /*
  445. * r520,rv530,rv560,rv570,r580
  446. */
  447. int r520_init(struct radeon_device *rdev);
  448. int r520_resume(struct radeon_device *rdev);
  449. static struct radeon_asic r520_asic = {
  450. .init = &r520_init,
  451. .fini = &rv515_fini,
  452. .suspend = &rv515_suspend,
  453. .resume = &r520_resume,
  454. .errata = NULL,
  455. .vram_info = NULL,
  456. .gpu_reset = &rv515_gpu_reset,
  457. .mc_init = NULL,
  458. .mc_fini = NULL,
  459. .wb_init = NULL,
  460. .wb_fini = NULL,
  461. .gart_init = NULL,
  462. .gart_fini = NULL,
  463. .gart_enable = NULL,
  464. .gart_disable = NULL,
  465. .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
  466. .gart_set_page = &rv370_pcie_gart_set_page,
  467. .cp_init = NULL,
  468. .cp_fini = NULL,
  469. .cp_disable = NULL,
  470. .cp_commit = &r100_cp_commit,
  471. .ring_start = &rv515_ring_start,
  472. .ring_test = &r100_ring_test,
  473. .ring_ib_execute = &r100_ring_ib_execute,
  474. .ib_test = NULL,
  475. .irq_set = &rs600_irq_set,
  476. .irq_process = &rs600_irq_process,
  477. .get_vblank_counter = &rs600_get_vblank_counter,
  478. .fence_ring_emit = &r300_fence_ring_emit,
  479. .cs_parse = &r300_cs_parse,
  480. .copy_blit = &r100_copy_blit,
  481. .copy_dma = &r300_copy_dma,
  482. .copy = &r100_copy_blit,
  483. .set_engine_clock = &radeon_atom_set_engine_clock,
  484. .set_memory_clock = &radeon_atom_set_memory_clock,
  485. .set_pcie_lanes = &rv370_set_pcie_lanes,
  486. .set_clock_gating = &radeon_atom_set_clock_gating,
  487. .set_surface_reg = r100_set_surface_reg,
  488. .clear_surface_reg = r100_clear_surface_reg,
  489. .bandwidth_update = &rv515_bandwidth_update,
  490. };
  491. /*
  492. * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
  493. */
  494. int r600_init(struct radeon_device *rdev);
  495. void r600_fini(struct radeon_device *rdev);
  496. int r600_suspend(struct radeon_device *rdev);
  497. int r600_resume(struct radeon_device *rdev);
  498. int r600_wb_init(struct radeon_device *rdev);
  499. void r600_wb_fini(struct radeon_device *rdev);
  500. void r600_cp_commit(struct radeon_device *rdev);
  501. void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
  502. uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
  503. void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  504. int r600_cs_parse(struct radeon_cs_parser *p);
  505. void r600_fence_ring_emit(struct radeon_device *rdev,
  506. struct radeon_fence *fence);
  507. int r600_copy_dma(struct radeon_device *rdev,
  508. uint64_t src_offset,
  509. uint64_t dst_offset,
  510. unsigned num_pages,
  511. struct radeon_fence *fence);
  512. int r600_irq_process(struct radeon_device *rdev);
  513. int r600_irq_set(struct radeon_device *rdev);
  514. int r600_gpu_reset(struct radeon_device *rdev);
  515. int r600_set_surface_reg(struct radeon_device *rdev, int reg,
  516. uint32_t tiling_flags, uint32_t pitch,
  517. uint32_t offset, uint32_t obj_size);
  518. int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
  519. void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
  520. int r600_ib_test(struct radeon_device *rdev);
  521. int r600_ring_test(struct radeon_device *rdev);
  522. int r600_copy_blit(struct radeon_device *rdev,
  523. uint64_t src_offset, uint64_t dst_offset,
  524. unsigned num_pages, struct radeon_fence *fence);
  525. static struct radeon_asic r600_asic = {
  526. .errata = NULL,
  527. .init = &r600_init,
  528. .fini = &r600_fini,
  529. .suspend = &r600_suspend,
  530. .resume = &r600_resume,
  531. .cp_commit = &r600_cp_commit,
  532. .vram_info = NULL,
  533. .gpu_reset = &r600_gpu_reset,
  534. .mc_init = NULL,
  535. .mc_fini = NULL,
  536. .wb_init = &r600_wb_init,
  537. .wb_fini = &r600_wb_fini,
  538. .gart_enable = NULL,
  539. .gart_disable = NULL,
  540. .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
  541. .gart_set_page = &rs600_gart_set_page,
  542. .cp_init = NULL,
  543. .cp_fini = NULL,
  544. .cp_disable = NULL,
  545. .ring_start = NULL,
  546. .ring_test = &r600_ring_test,
  547. .ring_ib_execute = &r600_ring_ib_execute,
  548. .ib_test = &r600_ib_test,
  549. .irq_set = &r600_irq_set,
  550. .irq_process = &r600_irq_process,
  551. .fence_ring_emit = &r600_fence_ring_emit,
  552. .cs_parse = &r600_cs_parse,
  553. .copy_blit = &r600_copy_blit,
  554. .copy_dma = &r600_copy_blit,
  555. .copy = &r600_copy_blit,
  556. .set_engine_clock = &radeon_atom_set_engine_clock,
  557. .set_memory_clock = &radeon_atom_set_memory_clock,
  558. .set_pcie_lanes = NULL,
  559. .set_clock_gating = &radeon_atom_set_clock_gating,
  560. .set_surface_reg = r600_set_surface_reg,
  561. .clear_surface_reg = r600_clear_surface_reg,
  562. .bandwidth_update = &rv515_bandwidth_update,
  563. };
  564. /*
  565. * rv770,rv730,rv710,rv740
  566. */
  567. int rv770_init(struct radeon_device *rdev);
  568. void rv770_fini(struct radeon_device *rdev);
  569. int rv770_suspend(struct radeon_device *rdev);
  570. int rv770_resume(struct radeon_device *rdev);
  571. int rv770_gpu_reset(struct radeon_device *rdev);
  572. static struct radeon_asic rv770_asic = {
  573. .errata = NULL,
  574. .init = &rv770_init,
  575. .fini = &rv770_fini,
  576. .suspend = &rv770_suspend,
  577. .resume = &rv770_resume,
  578. .cp_commit = &r600_cp_commit,
  579. .vram_info = NULL,
  580. .gpu_reset = &rv770_gpu_reset,
  581. .mc_init = NULL,
  582. .mc_fini = NULL,
  583. .wb_init = &r600_wb_init,
  584. .wb_fini = &r600_wb_fini,
  585. .gart_enable = NULL,
  586. .gart_disable = NULL,
  587. .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
  588. .gart_set_page = &rs600_gart_set_page,
  589. .cp_init = NULL,
  590. .cp_fini = NULL,
  591. .cp_disable = NULL,
  592. .ring_start = NULL,
  593. .ring_test = &r600_ring_test,
  594. .ring_ib_execute = &r600_ring_ib_execute,
  595. .ib_test = &r600_ib_test,
  596. .irq_set = &r600_irq_set,
  597. .irq_process = &r600_irq_process,
  598. .fence_ring_emit = &r600_fence_ring_emit,
  599. .cs_parse = &r600_cs_parse,
  600. .copy_blit = &r600_copy_blit,
  601. .copy_dma = &r600_copy_blit,
  602. .copy = &r600_copy_blit,
  603. .set_engine_clock = &radeon_atom_set_engine_clock,
  604. .set_memory_clock = &radeon_atom_set_memory_clock,
  605. .set_pcie_lanes = NULL,
  606. .set_clock_gating = &radeon_atom_set_clock_gating,
  607. .set_surface_reg = r600_set_surface_reg,
  608. .clear_surface_reg = r600_clear_surface_reg,
  609. .bandwidth_update = &rv515_bandwidth_update,
  610. };
  611. #endif