/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
  28. #ifndef __RADEON_ASIC_H__
  29. #define __RADEON_ASIC_H__
  30. /*
  31. * common functions
  32. */
  33. uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
  34. void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
  35. uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev);
  36. void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);
  37. uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
  38. void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
  39. uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
  40. void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
  41. void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
  42. /*
  43. * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
  44. */
  45. extern int r100_init(struct radeon_device *rdev);
  46. extern void r100_fini(struct radeon_device *rdev);
  47. extern int r100_suspend(struct radeon_device *rdev);
  48. extern int r100_resume(struct radeon_device *rdev);
  49. uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
  50. void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  51. void r100_vga_set_state(struct radeon_device *rdev, bool state);
  52. int r100_gpu_reset(struct radeon_device *rdev);
  53. u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
  54. void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
  55. int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
  56. void r100_cp_commit(struct radeon_device *rdev);
  57. void r100_ring_start(struct radeon_device *rdev);
  58. int r100_irq_set(struct radeon_device *rdev);
  59. int r100_irq_process(struct radeon_device *rdev);
  60. void r100_fence_ring_emit(struct radeon_device *rdev,
  61. struct radeon_fence *fence);
  62. int r100_cs_parse(struct radeon_cs_parser *p);
  63. void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  64. uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
  65. int r100_copy_blit(struct radeon_device *rdev,
  66. uint64_t src_offset,
  67. uint64_t dst_offset,
  68. unsigned num_pages,
  69. struct radeon_fence *fence);
  70. int r100_set_surface_reg(struct radeon_device *rdev, int reg,
  71. uint32_t tiling_flags, uint32_t pitch,
  72. uint32_t offset, uint32_t obj_size);
  73. int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
  74. void r100_bandwidth_update(struct radeon_device *rdev);
  75. void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
  76. int r100_ring_test(struct radeon_device *rdev);
  77. void r100_hpd_init(struct radeon_device *rdev);
  78. void r100_hpd_fini(struct radeon_device *rdev);
  79. bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
  80. void r100_hpd_set_polarity(struct radeon_device *rdev,
  81. enum radeon_hpd_id hpd);
  82. static struct radeon_asic r100_asic = {
  83. .init = &r100_init,
  84. .fini = &r100_fini,
  85. .suspend = &r100_suspend,
  86. .resume = &r100_resume,
  87. .vga_set_state = &r100_vga_set_state,
  88. .gpu_reset = &r100_gpu_reset,
  89. .gart_tlb_flush = &r100_pci_gart_tlb_flush,
  90. .gart_set_page = &r100_pci_gart_set_page,
  91. .cp_commit = &r100_cp_commit,
  92. .ring_start = &r100_ring_start,
  93. .ring_test = &r100_ring_test,
  94. .ring_ib_execute = &r100_ring_ib_execute,
  95. .irq_set = &r100_irq_set,
  96. .irq_process = &r100_irq_process,
  97. .get_vblank_counter = &r100_get_vblank_counter,
  98. .fence_ring_emit = &r100_fence_ring_emit,
  99. .cs_parse = &r100_cs_parse,
  100. .copy_blit = &r100_copy_blit,
  101. .copy_dma = NULL,
  102. .copy = &r100_copy_blit,
  103. .get_engine_clock = &radeon_legacy_get_engine_clock,
  104. .set_engine_clock = &radeon_legacy_set_engine_clock,
  105. .get_memory_clock = &radeon_legacy_get_memory_clock,
  106. .set_memory_clock = NULL,
  107. .get_pcie_lanes = NULL,
  108. .set_pcie_lanes = NULL,
  109. .set_clock_gating = &radeon_legacy_set_clock_gating,
  110. .set_surface_reg = r100_set_surface_reg,
  111. .clear_surface_reg = r100_clear_surface_reg,
  112. .bandwidth_update = &r100_bandwidth_update,
  113. .hpd_init = &r100_hpd_init,
  114. .hpd_fini = &r100_hpd_fini,
  115. .hpd_sense = &r100_hpd_sense,
  116. .hpd_set_polarity = &r100_hpd_set_polarity,
  117. .ioctl_wait_idle = NULL,
  118. };
  119. /*
  120. * r300,r350,rv350,rv380
  121. */
  122. extern int r300_init(struct radeon_device *rdev);
  123. extern void r300_fini(struct radeon_device *rdev);
  124. extern int r300_suspend(struct radeon_device *rdev);
  125. extern int r300_resume(struct radeon_device *rdev);
  126. extern int r300_gpu_reset(struct radeon_device *rdev);
  127. extern void r300_ring_start(struct radeon_device *rdev);
  128. extern void r300_fence_ring_emit(struct radeon_device *rdev,
  129. struct radeon_fence *fence);
  130. extern int r300_cs_parse(struct radeon_cs_parser *p);
  131. extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
  132. extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
  133. extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
  134. extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  135. extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
  136. extern int rv370_get_pcie_lanes(struct radeon_device *rdev);
  137. extern int r300_copy_dma(struct radeon_device *rdev,
  138. uint64_t src_offset,
  139. uint64_t dst_offset,
  140. unsigned num_pages,
  141. struct radeon_fence *fence);
  142. static struct radeon_asic r300_asic = {
  143. .init = &r300_init,
  144. .fini = &r300_fini,
  145. .suspend = &r300_suspend,
  146. .resume = &r300_resume,
  147. .vga_set_state = &r100_vga_set_state,
  148. .gpu_reset = &r300_gpu_reset,
  149. .gart_tlb_flush = &r100_pci_gart_tlb_flush,
  150. .gart_set_page = &r100_pci_gart_set_page,
  151. .cp_commit = &r100_cp_commit,
  152. .ring_start = &r300_ring_start,
  153. .ring_test = &r100_ring_test,
  154. .ring_ib_execute = &r100_ring_ib_execute,
  155. .irq_set = &r100_irq_set,
  156. .irq_process = &r100_irq_process,
  157. .get_vblank_counter = &r100_get_vblank_counter,
  158. .fence_ring_emit = &r300_fence_ring_emit,
  159. .cs_parse = &r300_cs_parse,
  160. .copy_blit = &r100_copy_blit,
  161. .copy_dma = &r300_copy_dma,
  162. .copy = &r100_copy_blit,
  163. .get_engine_clock = &radeon_legacy_get_engine_clock,
  164. .set_engine_clock = &radeon_legacy_set_engine_clock,
  165. .get_memory_clock = &radeon_legacy_get_memory_clock,
  166. .set_memory_clock = NULL,
  167. .get_pcie_lanes = &rv370_get_pcie_lanes,
  168. .set_pcie_lanes = &rv370_set_pcie_lanes,
  169. .set_clock_gating = &radeon_legacy_set_clock_gating,
  170. .set_surface_reg = r100_set_surface_reg,
  171. .clear_surface_reg = r100_clear_surface_reg,
  172. .bandwidth_update = &r100_bandwidth_update,
  173. .hpd_init = &r100_hpd_init,
  174. .hpd_fini = &r100_hpd_fini,
  175. .hpd_sense = &r100_hpd_sense,
  176. .hpd_set_polarity = &r100_hpd_set_polarity,
  177. .ioctl_wait_idle = NULL,
  178. };
  179. /*
  180. * r420,r423,rv410
  181. */
  182. extern int r420_init(struct radeon_device *rdev);
  183. extern void r420_fini(struct radeon_device *rdev);
  184. extern int r420_suspend(struct radeon_device *rdev);
  185. extern int r420_resume(struct radeon_device *rdev);
  186. static struct radeon_asic r420_asic = {
  187. .init = &r420_init,
  188. .fini = &r420_fini,
  189. .suspend = &r420_suspend,
  190. .resume = &r420_resume,
  191. .vga_set_state = &r100_vga_set_state,
  192. .gpu_reset = &r300_gpu_reset,
  193. .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
  194. .gart_set_page = &rv370_pcie_gart_set_page,
  195. .cp_commit = &r100_cp_commit,
  196. .ring_start = &r300_ring_start,
  197. .ring_test = &r100_ring_test,
  198. .ring_ib_execute = &r100_ring_ib_execute,
  199. .irq_set = &r100_irq_set,
  200. .irq_process = &r100_irq_process,
  201. .get_vblank_counter = &r100_get_vblank_counter,
  202. .fence_ring_emit = &r300_fence_ring_emit,
  203. .cs_parse = &r300_cs_parse,
  204. .copy_blit = &r100_copy_blit,
  205. .copy_dma = &r300_copy_dma,
  206. .copy = &r100_copy_blit,
  207. .get_engine_clock = &radeon_atom_get_engine_clock,
  208. .set_engine_clock = &radeon_atom_set_engine_clock,
  209. .get_memory_clock = &radeon_atom_get_memory_clock,
  210. .set_memory_clock = &radeon_atom_set_memory_clock,
  211. .get_pcie_lanes = &rv370_get_pcie_lanes,
  212. .set_pcie_lanes = &rv370_set_pcie_lanes,
  213. .set_clock_gating = &radeon_atom_set_clock_gating,
  214. .set_surface_reg = r100_set_surface_reg,
  215. .clear_surface_reg = r100_clear_surface_reg,
  216. .bandwidth_update = &r100_bandwidth_update,
  217. .hpd_init = &r100_hpd_init,
  218. .hpd_fini = &r100_hpd_fini,
  219. .hpd_sense = &r100_hpd_sense,
  220. .hpd_set_polarity = &r100_hpd_set_polarity,
  221. .ioctl_wait_idle = NULL,
  222. };
  223. /*
  224. * rs400,rs480
  225. */
  226. extern int rs400_init(struct radeon_device *rdev);
  227. extern void rs400_fini(struct radeon_device *rdev);
  228. extern int rs400_suspend(struct radeon_device *rdev);
  229. extern int rs400_resume(struct radeon_device *rdev);
  230. void rs400_gart_tlb_flush(struct radeon_device *rdev);
  231. int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
  232. uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
  233. void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  234. static struct radeon_asic rs400_asic = {
  235. .init = &rs400_init,
  236. .fini = &rs400_fini,
  237. .suspend = &rs400_suspend,
  238. .resume = &rs400_resume,
  239. .vga_set_state = &r100_vga_set_state,
  240. .gpu_reset = &r300_gpu_reset,
  241. .gart_tlb_flush = &rs400_gart_tlb_flush,
  242. .gart_set_page = &rs400_gart_set_page,
  243. .cp_commit = &r100_cp_commit,
  244. .ring_start = &r300_ring_start,
  245. .ring_test = &r100_ring_test,
  246. .ring_ib_execute = &r100_ring_ib_execute,
  247. .irq_set = &r100_irq_set,
  248. .irq_process = &r100_irq_process,
  249. .get_vblank_counter = &r100_get_vblank_counter,
  250. .fence_ring_emit = &r300_fence_ring_emit,
  251. .cs_parse = &r300_cs_parse,
  252. .copy_blit = &r100_copy_blit,
  253. .copy_dma = &r300_copy_dma,
  254. .copy = &r100_copy_blit,
  255. .get_engine_clock = &radeon_legacy_get_engine_clock,
  256. .set_engine_clock = &radeon_legacy_set_engine_clock,
  257. .get_memory_clock = &radeon_legacy_get_memory_clock,
  258. .set_memory_clock = NULL,
  259. .get_pcie_lanes = NULL,
  260. .set_pcie_lanes = NULL,
  261. .set_clock_gating = &radeon_legacy_set_clock_gating,
  262. .set_surface_reg = r100_set_surface_reg,
  263. .clear_surface_reg = r100_clear_surface_reg,
  264. .bandwidth_update = &r100_bandwidth_update,
  265. .hpd_init = &r100_hpd_init,
  266. .hpd_fini = &r100_hpd_fini,
  267. .hpd_sense = &r100_hpd_sense,
  268. .hpd_set_polarity = &r100_hpd_set_polarity,
  269. .ioctl_wait_idle = NULL,
  270. };
  271. /*
  272. * rs600.
  273. */
  274. extern int rs600_init(struct radeon_device *rdev);
  275. extern void rs600_fini(struct radeon_device *rdev);
  276. extern int rs600_suspend(struct radeon_device *rdev);
  277. extern int rs600_resume(struct radeon_device *rdev);
  278. int rs600_irq_set(struct radeon_device *rdev);
  279. int rs600_irq_process(struct radeon_device *rdev);
  280. u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
  281. void rs600_gart_tlb_flush(struct radeon_device *rdev);
  282. int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
  283. uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
  284. void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  285. void rs600_bandwidth_update(struct radeon_device *rdev);
  286. void rs600_hpd_init(struct radeon_device *rdev);
  287. void rs600_hpd_fini(struct radeon_device *rdev);
  288. bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
  289. void rs600_hpd_set_polarity(struct radeon_device *rdev,
  290. enum radeon_hpd_id hpd);
  291. static struct radeon_asic rs600_asic = {
  292. .init = &rs600_init,
  293. .fini = &rs600_fini,
  294. .suspend = &rs600_suspend,
  295. .resume = &rs600_resume,
  296. .vga_set_state = &r100_vga_set_state,
  297. .gpu_reset = &r300_gpu_reset,
  298. .gart_tlb_flush = &rs600_gart_tlb_flush,
  299. .gart_set_page = &rs600_gart_set_page,
  300. .cp_commit = &r100_cp_commit,
  301. .ring_start = &r300_ring_start,
  302. .ring_test = &r100_ring_test,
  303. .ring_ib_execute = &r100_ring_ib_execute,
  304. .irq_set = &rs600_irq_set,
  305. .irq_process = &rs600_irq_process,
  306. .get_vblank_counter = &rs600_get_vblank_counter,
  307. .fence_ring_emit = &r300_fence_ring_emit,
  308. .cs_parse = &r300_cs_parse,
  309. .copy_blit = &r100_copy_blit,
  310. .copy_dma = &r300_copy_dma,
  311. .copy = &r100_copy_blit,
  312. .get_engine_clock = &radeon_atom_get_engine_clock,
  313. .set_engine_clock = &radeon_atom_set_engine_clock,
  314. .get_memory_clock = &radeon_atom_get_memory_clock,
  315. .set_memory_clock = &radeon_atom_set_memory_clock,
  316. .get_pcie_lanes = NULL,
  317. .set_pcie_lanes = NULL,
  318. .set_clock_gating = &radeon_atom_set_clock_gating,
  319. .bandwidth_update = &rs600_bandwidth_update,
  320. .hpd_init = &rs600_hpd_init,
  321. .hpd_fini = &rs600_hpd_fini,
  322. .hpd_sense = &rs600_hpd_sense,
  323. .hpd_set_polarity = &rs600_hpd_set_polarity,
  324. .ioctl_wait_idle = NULL,
  325. };
  326. /*
  327. * rs690,rs740
  328. */
  329. int rs690_init(struct radeon_device *rdev);
  330. void rs690_fini(struct radeon_device *rdev);
  331. int rs690_resume(struct radeon_device *rdev);
  332. int rs690_suspend(struct radeon_device *rdev);
  333. uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
  334. void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  335. void rs690_bandwidth_update(struct radeon_device *rdev);
  336. static struct radeon_asic rs690_asic = {
  337. .init = &rs690_init,
  338. .fini = &rs690_fini,
  339. .suspend = &rs690_suspend,
  340. .resume = &rs690_resume,
  341. .vga_set_state = &r100_vga_set_state,
  342. .gpu_reset = &r300_gpu_reset,
  343. .gart_tlb_flush = &rs400_gart_tlb_flush,
  344. .gart_set_page = &rs400_gart_set_page,
  345. .cp_commit = &r100_cp_commit,
  346. .ring_start = &r300_ring_start,
  347. .ring_test = &r100_ring_test,
  348. .ring_ib_execute = &r100_ring_ib_execute,
  349. .irq_set = &rs600_irq_set,
  350. .irq_process = &rs600_irq_process,
  351. .get_vblank_counter = &rs600_get_vblank_counter,
  352. .fence_ring_emit = &r300_fence_ring_emit,
  353. .cs_parse = &r300_cs_parse,
  354. .copy_blit = &r100_copy_blit,
  355. .copy_dma = &r300_copy_dma,
  356. .copy = &r300_copy_dma,
  357. .get_engine_clock = &radeon_atom_get_engine_clock,
  358. .set_engine_clock = &radeon_atom_set_engine_clock,
  359. .get_memory_clock = &radeon_atom_get_memory_clock,
  360. .set_memory_clock = &radeon_atom_set_memory_clock,
  361. .get_pcie_lanes = NULL,
  362. .set_pcie_lanes = NULL,
  363. .set_clock_gating = &radeon_atom_set_clock_gating,
  364. .set_surface_reg = r100_set_surface_reg,
  365. .clear_surface_reg = r100_clear_surface_reg,
  366. .bandwidth_update = &rs690_bandwidth_update,
  367. .hpd_init = &rs600_hpd_init,
  368. .hpd_fini = &rs600_hpd_fini,
  369. .hpd_sense = &rs600_hpd_sense,
  370. .hpd_set_polarity = &rs600_hpd_set_polarity,
  371. .ioctl_wait_idle = NULL,
  372. };
  373. /*
  374. * rv515
  375. */
  376. int rv515_init(struct radeon_device *rdev);
  377. void rv515_fini(struct radeon_device *rdev);
  378. int rv515_gpu_reset(struct radeon_device *rdev);
  379. uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
  380. void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  381. void rv515_ring_start(struct radeon_device *rdev);
  382. uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
  383. void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  384. void rv515_bandwidth_update(struct radeon_device *rdev);
  385. int rv515_resume(struct radeon_device *rdev);
  386. int rv515_suspend(struct radeon_device *rdev);
  387. static struct radeon_asic rv515_asic = {
  388. .init = &rv515_init,
  389. .fini = &rv515_fini,
  390. .suspend = &rv515_suspend,
  391. .resume = &rv515_resume,
  392. .vga_set_state = &r100_vga_set_state,
  393. .gpu_reset = &rv515_gpu_reset,
  394. .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
  395. .gart_set_page = &rv370_pcie_gart_set_page,
  396. .cp_commit = &r100_cp_commit,
  397. .ring_start = &rv515_ring_start,
  398. .ring_test = &r100_ring_test,
  399. .ring_ib_execute = &r100_ring_ib_execute,
  400. .irq_set = &rs600_irq_set,
  401. .irq_process = &rs600_irq_process,
  402. .get_vblank_counter = &rs600_get_vblank_counter,
  403. .fence_ring_emit = &r300_fence_ring_emit,
  404. .cs_parse = &r300_cs_parse,
  405. .copy_blit = &r100_copy_blit,
  406. .copy_dma = &r300_copy_dma,
  407. .copy = &r100_copy_blit,
  408. .get_engine_clock = &radeon_atom_get_engine_clock,
  409. .set_engine_clock = &radeon_atom_set_engine_clock,
  410. .get_memory_clock = &radeon_atom_get_memory_clock,
  411. .set_memory_clock = &radeon_atom_set_memory_clock,
  412. .get_pcie_lanes = &rv370_get_pcie_lanes,
  413. .set_pcie_lanes = &rv370_set_pcie_lanes,
  414. .set_clock_gating = &radeon_atom_set_clock_gating,
  415. .set_surface_reg = r100_set_surface_reg,
  416. .clear_surface_reg = r100_clear_surface_reg,
  417. .bandwidth_update = &rv515_bandwidth_update,
  418. .hpd_init = &rs600_hpd_init,
  419. .hpd_fini = &rs600_hpd_fini,
  420. .hpd_sense = &rs600_hpd_sense,
  421. .hpd_set_polarity = &rs600_hpd_set_polarity,
  422. .ioctl_wait_idle = NULL,
  423. };
  424. /*
  425. * r520,rv530,rv560,rv570,r580
  426. */
  427. int r520_init(struct radeon_device *rdev);
  428. int r520_resume(struct radeon_device *rdev);
  429. static struct radeon_asic r520_asic = {
  430. .init = &r520_init,
  431. .fini = &rv515_fini,
  432. .suspend = &rv515_suspend,
  433. .resume = &r520_resume,
  434. .vga_set_state = &r100_vga_set_state,
  435. .gpu_reset = &rv515_gpu_reset,
  436. .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
  437. .gart_set_page = &rv370_pcie_gart_set_page,
  438. .cp_commit = &r100_cp_commit,
  439. .ring_start = &rv515_ring_start,
  440. .ring_test = &r100_ring_test,
  441. .ring_ib_execute = &r100_ring_ib_execute,
  442. .irq_set = &rs600_irq_set,
  443. .irq_process = &rs600_irq_process,
  444. .get_vblank_counter = &rs600_get_vblank_counter,
  445. .fence_ring_emit = &r300_fence_ring_emit,
  446. .cs_parse = &r300_cs_parse,
  447. .copy_blit = &r100_copy_blit,
  448. .copy_dma = &r300_copy_dma,
  449. .copy = &r100_copy_blit,
  450. .get_engine_clock = &radeon_atom_get_engine_clock,
  451. .set_engine_clock = &radeon_atom_set_engine_clock,
  452. .get_memory_clock = &radeon_atom_get_memory_clock,
  453. .set_memory_clock = &radeon_atom_set_memory_clock,
  454. .get_pcie_lanes = &rv370_get_pcie_lanes,
  455. .set_pcie_lanes = &rv370_set_pcie_lanes,
  456. .set_clock_gating = &radeon_atom_set_clock_gating,
  457. .set_surface_reg = r100_set_surface_reg,
  458. .clear_surface_reg = r100_clear_surface_reg,
  459. .bandwidth_update = &rv515_bandwidth_update,
  460. .hpd_init = &rs600_hpd_init,
  461. .hpd_fini = &rs600_hpd_fini,
  462. .hpd_sense = &rs600_hpd_sense,
  463. .hpd_set_polarity = &rs600_hpd_set_polarity,
  464. .ioctl_wait_idle = NULL,
  465. };
  466. /*
  467. * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
  468. */
  469. int r600_init(struct radeon_device *rdev);
  470. void r600_fini(struct radeon_device *rdev);
  471. int r600_suspend(struct radeon_device *rdev);
  472. int r600_resume(struct radeon_device *rdev);
  473. void r600_vga_set_state(struct radeon_device *rdev, bool state);
  474. int r600_wb_init(struct radeon_device *rdev);
  475. void r600_wb_fini(struct radeon_device *rdev);
  476. void r600_cp_commit(struct radeon_device *rdev);
  477. void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
  478. uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
  479. void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
  480. int r600_cs_parse(struct radeon_cs_parser *p);
  481. void r600_fence_ring_emit(struct radeon_device *rdev,
  482. struct radeon_fence *fence);
  483. int r600_copy_dma(struct radeon_device *rdev,
  484. uint64_t src_offset,
  485. uint64_t dst_offset,
  486. unsigned num_pages,
  487. struct radeon_fence *fence);
  488. int r600_irq_process(struct radeon_device *rdev);
  489. int r600_irq_set(struct radeon_device *rdev);
  490. int r600_gpu_reset(struct radeon_device *rdev);
  491. int r600_set_surface_reg(struct radeon_device *rdev, int reg,
  492. uint32_t tiling_flags, uint32_t pitch,
  493. uint32_t offset, uint32_t obj_size);
  494. int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
  495. void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
  496. int r600_ring_test(struct radeon_device *rdev);
  497. int r600_copy_blit(struct radeon_device *rdev,
  498. uint64_t src_offset, uint64_t dst_offset,
  499. unsigned num_pages, struct radeon_fence *fence);
  500. void r600_hpd_init(struct radeon_device *rdev);
  501. void r600_hpd_fini(struct radeon_device *rdev);
  502. bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
  503. void r600_hpd_set_polarity(struct radeon_device *rdev,
  504. enum radeon_hpd_id hpd);
  505. extern void r600_ioctl_wait_idle(struct radeon_device *rdev, struct radeon_bo *bo);
  506. static struct radeon_asic r600_asic = {
  507. .init = &r600_init,
  508. .fini = &r600_fini,
  509. .suspend = &r600_suspend,
  510. .resume = &r600_resume,
  511. .cp_commit = &r600_cp_commit,
  512. .vga_set_state = &r600_vga_set_state,
  513. .gpu_reset = &r600_gpu_reset,
  514. .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
  515. .gart_set_page = &rs600_gart_set_page,
  516. .ring_test = &r600_ring_test,
  517. .ring_ib_execute = &r600_ring_ib_execute,
  518. .irq_set = &r600_irq_set,
  519. .irq_process = &r600_irq_process,
  520. .get_vblank_counter = &rs600_get_vblank_counter,
  521. .fence_ring_emit = &r600_fence_ring_emit,
  522. .cs_parse = &r600_cs_parse,
  523. .copy_blit = &r600_copy_blit,
  524. .copy_dma = &r600_copy_blit,
  525. .copy = &r600_copy_blit,
  526. .get_engine_clock = &radeon_atom_get_engine_clock,
  527. .set_engine_clock = &radeon_atom_set_engine_clock,
  528. .get_memory_clock = &radeon_atom_get_memory_clock,
  529. .set_memory_clock = &radeon_atom_set_memory_clock,
  530. .get_pcie_lanes = NULL,
  531. .set_pcie_lanes = NULL,
  532. .set_clock_gating = NULL,
  533. .set_surface_reg = r600_set_surface_reg,
  534. .clear_surface_reg = r600_clear_surface_reg,
  535. .bandwidth_update = &rv515_bandwidth_update,
  536. .hpd_init = &r600_hpd_init,
  537. .hpd_fini = &r600_hpd_fini,
  538. .hpd_sense = &r600_hpd_sense,
  539. .hpd_set_polarity = &r600_hpd_set_polarity,
  540. .ioctl_wait_idle = r600_ioctl_wait_idle,
  541. };
  542. /*
  543. * rv770,rv730,rv710,rv740
  544. */
  545. int rv770_init(struct radeon_device *rdev);
  546. void rv770_fini(struct radeon_device *rdev);
  547. int rv770_suspend(struct radeon_device *rdev);
  548. int rv770_resume(struct radeon_device *rdev);
  549. int rv770_gpu_reset(struct radeon_device *rdev);
  550. static struct radeon_asic rv770_asic = {
  551. .init = &rv770_init,
  552. .fini = &rv770_fini,
  553. .suspend = &rv770_suspend,
  554. .resume = &rv770_resume,
  555. .cp_commit = &r600_cp_commit,
  556. .gpu_reset = &rv770_gpu_reset,
  557. .vga_set_state = &r600_vga_set_state,
  558. .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
  559. .gart_set_page = &rs600_gart_set_page,
  560. .ring_test = &r600_ring_test,
  561. .ring_ib_execute = &r600_ring_ib_execute,
  562. .irq_set = &r600_irq_set,
  563. .irq_process = &r600_irq_process,
  564. .get_vblank_counter = &rs600_get_vblank_counter,
  565. .fence_ring_emit = &r600_fence_ring_emit,
  566. .cs_parse = &r600_cs_parse,
  567. .copy_blit = &r600_copy_blit,
  568. .copy_dma = &r600_copy_blit,
  569. .copy = &r600_copy_blit,
  570. .get_engine_clock = &radeon_atom_get_engine_clock,
  571. .set_engine_clock = &radeon_atom_set_engine_clock,
  572. .get_memory_clock = &radeon_atom_get_memory_clock,
  573. .set_memory_clock = &radeon_atom_set_memory_clock,
  574. .get_pcie_lanes = NULL,
  575. .set_pcie_lanes = NULL,
  576. .set_clock_gating = &radeon_atom_set_clock_gating,
  577. .set_surface_reg = r600_set_surface_reg,
  578. .clear_surface_reg = r600_clear_surface_reg,
  579. .bandwidth_update = &rv515_bandwidth_update,
  580. .hpd_init = &r600_hpd_init,
  581. .hpd_fini = &r600_hpd_fini,
  582. .hpd_sense = &r600_hpd_sense,
  583. .hpd_set_polarity = &r600_hpd_set_polarity,
  584. .ioctl_wait_idle = r600_ioctl_wait_idle,
  585. };
  586. /*
  587. * evergreen
  588. */
  589. int evergreen_init(struct radeon_device *rdev);
  590. void evergreen_fini(struct radeon_device *rdev);
  591. int evergreen_suspend(struct radeon_device *rdev);
  592. int evergreen_resume(struct radeon_device *rdev);
  593. int evergreen_gpu_reset(struct radeon_device *rdev);
  594. void evergreen_bandwidth_update(struct radeon_device *rdev);
  595. void evergreen_hpd_init(struct radeon_device *rdev);
  596. void evergreen_hpd_fini(struct radeon_device *rdev);
  597. bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
  598. void evergreen_hpd_set_polarity(struct radeon_device *rdev,
  599. enum radeon_hpd_id hpd);
  600. static struct radeon_asic evergreen_asic = {
  601. .init = &evergreen_init,
  602. .fini = &evergreen_fini,
  603. .suspend = &evergreen_suspend,
  604. .resume = &evergreen_resume,
  605. .cp_commit = NULL,
  606. .gpu_reset = &evergreen_gpu_reset,
  607. .vga_set_state = &r600_vga_set_state,
  608. .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
  609. .gart_set_page = &rs600_gart_set_page,
  610. .ring_test = NULL,
  611. .ring_ib_execute = NULL,
  612. .irq_set = NULL,
  613. .irq_process = NULL,
  614. .get_vblank_counter = NULL,
  615. .fence_ring_emit = NULL,
  616. .cs_parse = NULL,
  617. .copy_blit = NULL,
  618. .copy_dma = NULL,
  619. .copy = NULL,
  620. .get_engine_clock = &radeon_atom_get_engine_clock,
  621. .set_engine_clock = &radeon_atom_set_engine_clock,
  622. .get_memory_clock = &radeon_atom_get_memory_clock,
  623. .set_memory_clock = &radeon_atom_set_memory_clock,
  624. .set_pcie_lanes = NULL,
  625. .set_clock_gating = NULL,
  626. .set_surface_reg = r600_set_surface_reg,
  627. .clear_surface_reg = r600_clear_surface_reg,
  628. .bandwidth_update = &evergreen_bandwidth_update,
  629. .hpd_init = &evergreen_hpd_init,
  630. .hpd_fini = &evergreen_hpd_fini,
  631. .hpd_sense = &evergreen_hpd_sense,
  632. .hpd_set_polarity = &evergreen_hpd_set_polarity,
  633. };
  634. #endif