nvd0_display.c

/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 */

#include <linux/dma-mapping.h>

#include "drmP.h"
#include "drm_crtc_helper.h"

#include "nouveau_drv.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_dma.h"
#include "nouveau_fb.h"
#include "nv50_display.h"

#define MEM_SYNC 0xe0000001
#define MEM_VRAM 0xe0010000

struct nvd0_display {
	struct nouveau_gpuobj *mem;
	struct {
		dma_addr_t handle;
		u32 *ptr;
	} evo[1];

	struct {
		struct dcb_entry *dis;
		struct dcb_entry *ena;
		int crtc;
		int pclk;
		u16 script;
	} irq;
};
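
/* MEM_SYNC and MEM_VRAM above are the handles of the DMA objects built in
 * nvd0_display_create().  evo[0] is the page of system memory used as the
 * push buffer for the master EVO channel; the irq struct carries modeset
 * state (outputs being disabled/enabled, head index, pixel clock, script
 * id) between the unk1/unk2/unk4 stages of the interrupt sequence below.
 */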
static struct nvd0_display *
nvd0_display(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	return dev_priv->engine.display.priv;
}

static int
evo_icmd(struct drm_device *dev, int id, u32 mthd, u32 data)
{
	int ret = 0;
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000001);
	nv_wr32(dev, 0x610704 + (id * 0x10), data);
	nv_mask(dev, 0x610704 + (id * 0x10), 0x80000ffc, 0x80000000 | mthd);
	if (!nv_wait(dev, 0x610704 + (id * 0x10), 0x80000000, 0x00000000))
		ret = -EBUSY;
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000000);
	return ret;
}

static u32 *
evo_wait(struct drm_device *dev, int id, int nr)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 put = nv_rd32(dev, 0x640000 + (id * 0x1000)) / 4;

	if (put + nr >= (PAGE_SIZE / 4)) {
		disp->evo[id].ptr[put] = 0x20000000;

		nv_wr32(dev, 0x640000 + (id * 0x1000), 0x00000000);
		if (!nv_wait(dev, 0x640004 + (id * 0x1000), ~0, 0x00000000)) {
			NV_ERROR(dev, "evo %d dma stalled\n", id);
			return NULL;
		}

		put = 0;
	}

	return disp->evo[id].ptr + put;
}

static void
evo_kick(u32 *push, struct drm_device *dev, int id)
{
	struct nvd0_display *disp = nvd0_display(dev);
	nv_wr32(dev, 0x640000 + (id * 0x1000), (push - disp->evo[id].ptr) << 2);
}

#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d)   *((p)++) = (d)
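
/* A push-buffer command is a method header followed by its data words: the
 * header packs the data-word count into the high bits ((s) << 18) and the
 * method offset into the low bits.  The pattern used throughout this file
 * to submit commands on the master channel looks like:
 *
 *	push = evo_wait(dev, 0, 2);		reserve space for 2 words
 *	if (push) {
 *		evo_mthd(push, 0x0080, 1);	0x0080 appears to be UPDATE
 *		evo_data(push, 0x00000000);
 *		evo_kick(push, dev, 0);		bump PUT past the new words
 *	}
 */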
static struct drm_crtc *
nvd0_display_crtc_get(struct drm_encoder *encoder)
{
	return nouveau_encoder(encoder)->crtc;
}

/******************************************************************************
 * CRTC
 *****************************************************************************/
static int
nvd0_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool on, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push, mode;

	mode = 0x00000000;
	if (on) {
		/* 0x11: 6bpc dynamic 2x2
		 * 0x13: 8bpc dynamic 2x2
		 * 0x19: 6bpc static 2x2
		 * 0x1b: 8bpc static 2x2
		 * 0x21: 6bpc temporal
		 * 0x23: 8bpc temporal
		 */
		mode = 0x00000011;
	}

	push = evo_wait(dev, 0, 4);
	if (push) {
		evo_mthd(push, 0x0490 + (nv_crtc->index * 0x300), 1);
		evo_data(push, mode);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}

	return 0;
}

static int
nvd0_crtc_set_scale(struct nouveau_crtc *nv_crtc, int type, bool update)
{
	struct drm_display_mode *mode = &nv_crtc->base.mode;
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push;

	/*XXX: actually handle scaling */

	push = evo_wait(dev, 0, 16);
	if (push) {
		evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}

	return 0;
}

static int
nvd0_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	u32 *push;

	push = evo_wait(fb->dev, 0, 16);
	if (push) {
		evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nvfb->nvbo->bo.offset >> 8);
		evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nvfb->r_pitch);
		evo_data(push, nvfb->r_format);
		evo_data(push, nvfb->r_dma);
		evo_kick(push, fb->dev, 0);
	}

	nv_crtc->fb.tile_flags = nvfb->r_dma;
	return 0;
}

static void
nvd0_crtc_cursor_show(struct nouveau_crtc *nv_crtc, bool show, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push = evo_wait(dev, 0, 16);

	if (push) {
		if (show) {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, MEM_VRAM);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, dev, 0);
	}
}
static void
nvd0_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}

static void
nvd0_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, 0, 6); /* three 1-data methods = 6 words */
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x03000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_kick(push, crtc->dev, 0);
	}

	nvd0_crtc_cursor_show(nv_crtc, false, false);
}
static void
nvd0_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, 0, 32);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nv_crtc->fb.tile_flags);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
		evo_data(push, 0x83000000);
		evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, MEM_VRAM);
		evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0xffffff00);
		evo_kick(push, crtc->dev, 0);
	}

	nvd0_crtc_cursor_show(nv_crtc, nv_crtc->cursor.visible, true);
}

static bool
nvd0_crtc_mode_fixup(struct drm_crtc *crtc, struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}

static int
nvd0_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
	int ret;

	ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
	if (ret)
		return ret;

	if (old_fb) {
		nvfb = nouveau_framebuffer(old_fb);
		nouveau_bo_unpin(nvfb->nvbo);
	}

	return 0;
}
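
/* The hardware takes its timings as sync width, sync-start-to-blanking-end
 * and sync-start-to-display-end distances rather than the raw DRM mode
 * fields; nvd0_crtc_mode_set() below derives them as (horizontal case,
 * vertical is analogous):
 *
 *	hsyncw = hsync_end - hsync_start - 1          sync pulse width
 *	hbackp = htotal - hsync_end                   back porch
 *	hss2be = hsyncw + hbackp                      sync start -> blank end
 *	hss2de = htotal - (hsync_start - hdisplay)    sync start -> disp end
 */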
static int
nvd0_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	u32 htotal = mode->htotal;
	u32 vtotal = mode->vtotal;
	u32 hsyncw = mode->hsync_end - mode->hsync_start - 1;
	u32 vsyncw = mode->vsync_end - mode->vsync_start - 1;
	u32 hfrntp = mode->hsync_start - mode->hdisplay;
	u32 vfrntp = mode->vsync_start - mode->vdisplay;
	u32 hbackp = mode->htotal - mode->hsync_end;
	u32 vbackp = mode->vtotal - mode->vsync_end;
	u32 hss2be = hsyncw + hbackp;
	u32 vss2be = vsyncw + vbackp;
	u32 hss2de = htotal - hfrntp;
	u32 vss2de = vtotal - vfrntp;
	u32 hstart = 0;
	u32 vstart = 0;
	u32 *push;
	int ret;

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(crtc->dev, 0, 64);
	if (push) {
		evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 5);
		evo_data(push, (vstart << 16) | hstart);
		evo_data(push, (vtotal << 16) | htotal);
		evo_data(push, (vsyncw << 16) | hsyncw);
		evo_data(push, (vss2be << 16) | hss2be);
		evo_data(push, (vss2de << 16) | hss2de);
		evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000); /* ??? */
		evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
		evo_data(push, mode->clock * 1000);
		evo_data(push, 0x00200000); /* ??? */
		evo_data(push, mode->clock * 1000);
		evo_mthd(push, 0x0408 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x31ec6000); /* ??? */
		evo_kick(push, crtc->dev, 0);
	}

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nvd0_crtc_set_dither(nv_crtc, nv_connector->use_dithering, false);
	nvd0_crtc_set_scale(nv_crtc, nv_connector->scaling_mode, false);
	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
	return 0;
}

static int
nvd0_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	int ret;

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
	return 0;
}

static int
nvd0_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nvd0_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}

static void
nvd0_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		writew(0x6000 + (nv_crtc->lut.r[i] >> 2), lut + (i * 0x20) + 0);
		writew(0x6000 + (nv_crtc->lut.g[i] >> 2), lut + (i * 0x20) + 2);
		writew(0x6000 + (nv_crtc->lut.b[i] >> 2), lut + (i * 0x20) + 4);
	}
}
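
/* Each of the 256 LUT entries occupies 0x20 bytes with 16-bit red, green
 * and blue components at offsets 0, 2 and 4.  The encoding (gamma value
 * scaled down two bits and biased by 0x6000) presumably matches the
 * hardware's fixed-point format; it is otherwise undocumented.
 */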
static int
nvd0_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem;
	struct nouveau_bo *nvbo;
	bool visible = (handle != 0);
	int i, ret = 0;

	if (visible) {
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_map(nvbo);
		if (ret == 0) {
			for (i = 0; i < 64 * 64; i++) {
				u32 v = nouveau_bo_rd32(nvbo, i);
				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
			}
			nouveau_bo_unmap(nvbo);
		}

		drm_gem_object_unreference_unlocked(gem);
	}

	if (visible != nv_crtc->cursor.visible) {
		nvd0_crtc_cursor_show(nv_crtc, visible, true);
		nv_crtc->cursor.visible = visible;
	}

	return ret;
}

static int
nvd0_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	const u32 data = (y << 16) | x;

	nv_wr32(crtc->dev, 0x64d084 + (nv_crtc->index * 0x1000), data);
	nv_wr32(crtc->dev, 0x64d080 + (nv_crtc->index * 0x1000), 0x00000000);
	return 0;
}
static void
nvd0_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t start, uint32_t size)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 end = min_t(u32, start + size, 256); /* clamp to the LUT size */
	u32 i;

	for (i = start; i < end; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	nvd0_crtc_lut_load(crtc);
}
static void
nvd0_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	drm_crtc_cleanup(crtc);
	kfree(crtc);
}

static const struct drm_crtc_helper_funcs nvd0_crtc_hfunc = {
	.dpms = nvd0_crtc_dpms,
	.prepare = nvd0_crtc_prepare,
	.commit = nvd0_crtc_commit,
	.mode_fixup = nvd0_crtc_mode_fixup,
	.mode_set = nvd0_crtc_mode_set,
	.mode_set_base = nvd0_crtc_mode_set_base,
	.mode_set_base_atomic = nvd0_crtc_mode_set_base_atomic,
	.load_lut = nvd0_crtc_lut_load,
};

static const struct drm_crtc_funcs nvd0_crtc_func = {
	.cursor_set = nvd0_crtc_cursor_set,
	.cursor_move = nvd0_crtc_cursor_move,
	.gamma_set = nvd0_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = nvd0_crtc_destroy,
};

static int
nvd0_crtc_create(struct drm_device *dev, int index)
{
	struct nouveau_crtc *nv_crtc;
	struct drm_crtc *crtc;
	int ret, i;

	nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
	if (!nv_crtc)
		return -ENOMEM;

	nv_crtc->index = index;
	nv_crtc->set_dither = nvd0_crtc_set_dither;
	nv_crtc->set_scale = nvd0_crtc_set_scale;
	for (i = 0; i < 256; i++) {
		nv_crtc->lut.r[i] = i << 8;
		nv_crtc->lut.g[i] = i << 8;
		nv_crtc->lut.b[i] = i << 8;
	}

	crtc = &nv_crtc->base;
	drm_crtc_init(dev, crtc, &nvd0_crtc_func);
	drm_crtc_helper_add(crtc, &nvd0_crtc_hfunc);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	}

	if (ret)
		goto out;

	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	}

	if (ret)
		goto out;

	nvd0_crtc_lut_load(crtc);

out:
	if (ret)
		nvd0_crtc_destroy(crtc);
	return ret;
}

/******************************************************************************
 * DAC
 *****************************************************************************/
static void
nvd0_dac_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	dpms_ctrl = 0x80000000;
	if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000001;
	if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000004;

	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61a004 + (or * 0x0800), 0xc000007f, dpms_ctrl);
	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
}

static bool
nvd0_dac_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

static void
nvd0_dac_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_dac_commit(struct drm_encoder *encoder)
{
}

static void
nvd0_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nvd0_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
		evo_data(push, 1 << nv_crtc->index);
		evo_kick(push, encoder->dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}

static void
nvd0_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
	}
}

static enum drm_connector_status
nvd0_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	return connector_status_disconnected;
}

static void
nvd0_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nvd0_dac_hfunc = {
	.dpms = nvd0_dac_dpms,
	.mode_fixup = nvd0_dac_mode_fixup,
	.prepare = nvd0_dac_prepare,
	.commit = nvd0_dac_commit,
	.mode_set = nvd0_dac_mode_set,
	.disable = nvd0_dac_disconnect,
	.get_crtc = nvd0_display_crtc_get,
	.detect = nvd0_dac_detect
};

static const struct drm_encoder_funcs nvd0_dac_func = {
	.destroy = nvd0_dac_destroy,
};

static int
nvd0_dac_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_dac_func, DRM_MODE_ENCODER_DAC);
	drm_encoder_helper_add(encoder, &nvd0_dac_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * SOR
 *****************************************************************************/
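/* Multiple TMDS encoders may share one SOR, so before touching the DPMS
 * state nvd0_sor_dpms() below checks whether a partner encoder on the same
 * OR is still on, and if so leaves the hardware alone.
 */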
static void
nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_encoder *partner;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	nv_encoder->last_dpms = mode;
	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	dpms_ctrl = (mode == DRM_MODE_DPMS_ON);
	dpms_ctrl |= 0x80000000;

	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61c004 + (or * 0x0800), 0x80000001, dpms_ctrl);
	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_wait(dev, 0x61c030 + (or * 0x0800), 0x10000000, 0x00000000);
}

static bool
nvd0_sor_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

static void
nvd0_sor_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_sor_commit(struct drm_encoder *encoder)
{
}

static void
nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 mode_ctrl = (1 << nv_crtc->index);
	u32 *push;

	if (nv_encoder->dcb->sorconf.link & 1) {
		if (adjusted_mode->clock < 165000)
			mode_ctrl |= 0x00000100;
		else
			mode_ctrl |= 0x00000500;
	} else {
		mode_ctrl |= 0x00000200;
	}

	nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
		evo_data(push, mode_ctrl);
		evo_kick(push, encoder->dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}

static void
nvd0_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
		nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	}
}

static void
nvd0_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
	.dpms = nvd0_sor_dpms,
	.mode_fixup = nvd0_sor_mode_fixup,
	.prepare = nvd0_sor_prepare,
	.commit = nvd0_sor_commit,
	.mode_set = nvd0_sor_mode_set,
	.disable = nvd0_sor_disconnect,
	.get_crtc = nvd0_display_crtc_get,
};

static const struct drm_encoder_funcs nvd0_sor_func = {
	.destroy = nvd0_sor_destroy,
};

static int
nvd0_sor_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
	drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * IRQ
 *****************************************************************************/
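/* Modesets are driven by a staged interrupt sequence: stage 1 records which
 * outputs are being disabled/enabled and the target pixel clock, stage 2
 * runs the "disable" VBIOS script, sets the head's clock and preps the OR,
 * and stage 4 runs the "enable" script.  The state captured in stage 1 is
 * carried between stages in disp->irq.  This describes observed behaviour;
 * the hardware is largely undocumented, hence the unk1/unk2/unk4 names.
 */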
static struct dcb_entry *
lookup_dcb(struct drm_device *dev, int id, u32 mc)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	int type, or, i;

	if (id < 4) {
		type = OUTPUT_ANALOG;
		or = id;
	} else {
		type = OUTPUT_TMDS;
		or = id - 4;
	}

	for (i = 0; i < dev_priv->vbios.dcb.entries; i++) {
		struct dcb_entry *dcb = &dev_priv->vbios.dcb.entry[i];
		if (dcb->type == type && (dcb->or & (1 << or)))
			return dcb;
	}

	NV_INFO(dev, "PDISP: DCB for %d/0x%08x not found\n", id, mc);
	return NULL;
}
static void
nvd0_display_unk1_handler(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	struct dcb_entry *dcb;
	u32 unkn, crtc = 0;
	int i;

	NV_INFO(dev, "PDISP: 1 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	unkn = nv_rd32(dev, 0x6101d4);
	if (!unkn) {
		unkn = nv_rd32(dev, 0x6109d4);
		crtc = 1;
	}

	disp->irq.ena = NULL;
	disp->irq.dis = NULL;
	disp->irq.crtc = crtc;
	disp->irq.pclk = nv_rd32(dev, 0x660450 + (disp->irq.crtc * 0x300));
	disp->irq.pclk /= 1000;

	for (i = 0; i < 8; i++) {
		u32 mcc = nv_rd32(dev, 0x640180 + (i * 0x20));
		u32 mcp = nv_rd32(dev, 0x660180 + (i * 0x20));

		if (mcc & (1 << crtc))
			disp->irq.dis = lookup_dcb(dev, i, mcc);

		if (mcp & (1 << crtc)) {
			disp->irq.ena = lookup_dcb(dev, i, mcp);
			if (!disp->irq.ena) /* lookup_dcb() may fail */
				continue;
			switch (disp->irq.ena->type) {
			case OUTPUT_ANALOG:
				disp->irq.script = 0x00ff;
				break;
			case OUTPUT_TMDS:
				disp->irq.script = (mcp & 0x00000f00) >> 8;
				if (disp->irq.pclk >= 165000)
					disp->irq.script |= 0x0100;
				break;
			default:
				disp->irq.script = 0xbeef;
				break;
			}
		}
	}

	dcb = disp->irq.dis;
	if (dcb)
		nouveau_bios_run_display_table(dev, 0x0000, -1, dcb, crtc);

	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}
static void
nvd0_display_unk2_handler(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	struct dcb_entry *dcb;
	int crtc = disp->irq.crtc;
	int pclk = disp->irq.pclk;
	int or;
	u32 tmp;

	NV_INFO(dev, "PDISP: 2 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	dcb = disp->irq.dis;
	disp->irq.dis = NULL;
	if (dcb)
		nouveau_bios_run_display_table(dev, 0x0000, -2, dcb, crtc);

	nv50_crtc_set_clock(dev, crtc, pclk);

	dcb = disp->irq.ena;
	if (!dcb)
		goto ack;
	or = ffs(dcb->or) - 1;

	nouveau_bios_run_display_table(dev, disp->irq.script, pclk, dcb, crtc);

	nv_wr32(dev, 0x612200 + (crtc * 0x800), 0x00000000);
	switch (dcb->type) {
	case OUTPUT_ANALOG:
		nv_wr32(dev, 0x612280 + (or * 0x800), 0x00000000);
		break;
	case OUTPUT_TMDS:
		if (disp->irq.pclk >= 165000)
			tmp = 0x00000101;
		else
			tmp = 0x00000000;

		nv_mask(dev, 0x612300 + (or * 0x800), 0x00000707, tmp);
		break;
	default:
		break;
	}

ack:
	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_unk4_handler(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	struct dcb_entry *dcb;
	int crtc = disp->irq.crtc;
	int pclk = disp->irq.pclk;

	NV_INFO(dev, "PDISP: 4 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	dcb = disp->irq.ena;
	disp->irq.ena = NULL;
	if (!dcb)
		goto ack;

	nouveau_bios_run_display_table(dev, disp->irq.script, pclk, dcb, crtc);

ack:
	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_intr(struct drm_device *dev)
{
	u32 intr = nv_rd32(dev, 0x610088);

	if (intr & 0x00000002) {
		u32 stat = nv_rd32(dev, 0x61009c);
		int chid = ffs(stat) - 1;
		if (chid >= 0) {
			u32 mthd = nv_rd32(dev, 0x6101f0 + (chid * 12));
			u32 data = nv_rd32(dev, 0x6101f4 + (chid * 12));
			u32 unkn = nv_rd32(dev, 0x6101f8 + (chid * 12));

			NV_INFO(dev, "EvoCh: chid %d mthd 0x%04x data 0x%08x "
				     "0x%08x 0x%08x\n",
				chid, (mthd & 0x00000ffc), data, mthd, unkn);
			nv_wr32(dev, 0x61009c, (1 << chid));
			nv_wr32(dev, 0x6101f0 + (chid * 12), 0x90000000);
		}

		intr &= ~0x00000002;
	}

	if (intr & 0x00100000) {
		u32 stat = nv_rd32(dev, 0x6100ac);

		if (stat & 0x00000007) {
			nv_wr32(dev, 0x6100ac, (stat & 0x00000007));

			if (stat & 0x00000001)
				nvd0_display_unk1_handler(dev);
			if (stat & 0x00000002)
				nvd0_display_unk2_handler(dev);
			if (stat & 0x00000004)
				nvd0_display_unk4_handler(dev);

			stat &= ~0x00000007;
		}

		if (stat) {
			NV_INFO(dev, "PDISP: unknown intr24 0x%08x\n", stat);
			nv_wr32(dev, 0x6100ac, stat);
		}

		intr &= ~0x00100000;
	}

	if (intr & 0x01000000) {
		u32 stat = nv_rd32(dev, 0x6100bc);
		nv_wr32(dev, 0x6100bc, stat);
		intr &= ~0x01000000;
	}

	if (intr & 0x02000000) {
		u32 stat = nv_rd32(dev, 0x6108bc);
		nv_wr32(dev, 0x6108bc, stat);
		intr &= ~0x02000000;
	}

	if (intr)
		NV_INFO(dev, "PDISP: unknown intr 0x%08x\n", intr);
}

/******************************************************************************
 * Init
 *****************************************************************************/
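/* Bringing the display engine up means pointing it at the hash table and
 * DMA objects built in nvd0_display_create(), starting the master (core)
 * EVO channel, then enabling the two cursor channels (indices 13 and 14 in
 * the 0x610490 register block); nvd0_display_fini() tears the same things
 * down in reverse order.
 */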
static void
nvd0_display_fini(struct drm_device *dev)
{
	int i;

	/* fini cursors */
	for (i = 14; i >= 13; i--) {
		if (!(nv_rd32(dev, 0x610490 + (i * 0x10)) & 0x00000001))
			continue;

		nv_mask(dev, 0x610490 + (i * 0x10), 0x00000001, 0x00000000);
		nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00000000);
		nv_mask(dev, 0x610090, 1 << i, 0x00000000);
		nv_mask(dev, 0x6100a0, 1 << i, 0x00000000);
	}

	/* fini master */
	if (nv_rd32(dev, 0x610490) & 0x00000010) {
		nv_mask(dev, 0x610490, 0x00000010, 0x00000000);
		nv_mask(dev, 0x610490, 0x00000003, 0x00000000);
		nv_wait(dev, 0x610490, 0x80000000, 0x00000000);
		nv_mask(dev, 0x610090, 0x00000001, 0x00000000);
		nv_mask(dev, 0x6100a0, 0x00000001, 0x00000000);
	}
}

int
nvd0_display_init(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 *push;
	int i;

	if (nv_rd32(dev, 0x6100ac) & 0x00000100) {
		nv_wr32(dev, 0x6100ac, 0x00000100);
		nv_mask(dev, 0x6194e8, 0x00000001, 0x00000000);
		if (!nv_wait(dev, 0x6194e8, 0x00000002, 0x00000000)) {
			NV_ERROR(dev, "PDISP: 0x6194e8 0x%08x\n",
				 nv_rd32(dev, 0x6194e8));
			return -EBUSY;
		}
	}

	/* nfi what these are exactly, i do know that SOR_MODE_CTRL won't
	 * work at all unless you do the SOR part below.
	 */
	for (i = 0; i < 3; i++) {
		u32 dac = nv_rd32(dev, 0x61a000 + (i * 0x800));
		nv_wr32(dev, 0x6101c0 + (i * 0x800), dac);
	}

	for (i = 0; i < 4; i++) {
		u32 sor = nv_rd32(dev, 0x61c000 + (i * 0x800));
		nv_wr32(dev, 0x6301c4 + (i * 0x800), sor);
	}

	for (i = 0; i < 2; i++) {
		u32 crtc0 = nv_rd32(dev, 0x616104 + (i * 0x800));
		u32 crtc1 = nv_rd32(dev, 0x616108 + (i * 0x800));
		u32 crtc2 = nv_rd32(dev, 0x61610c + (i * 0x800));
		nv_wr32(dev, 0x6101b4 + (i * 0x800), crtc0);
		nv_wr32(dev, 0x6101b8 + (i * 0x800), crtc1);
		nv_wr32(dev, 0x6101bc + (i * 0x800), crtc2);
	}

	/* point at our hash table / objects, enable interrupts */
	nv_wr32(dev, 0x610010, (disp->mem->vinst >> 8) | 9);
	nv_mask(dev, 0x6100b0, 0x00000307, 0x00000307);

	/* init master */
	nv_wr32(dev, 0x610494, (disp->evo[0].handle >> 8) | 3);
	nv_wr32(dev, 0x610498, 0x00010000);
	nv_wr32(dev, 0x61049c, 0x00000001);
	nv_mask(dev, 0x610490, 0x00000010, 0x00000010);
	nv_wr32(dev, 0x640000, 0x00000000);
	nv_wr32(dev, 0x610490, 0x01000013);
	if (!nv_wait(dev, 0x610490, 0x80000000, 0x00000000)) {
		NV_ERROR(dev, "PDISP: master 0x%08x\n",
			 nv_rd32(dev, 0x610490));
		return -EBUSY;
	}
	nv_mask(dev, 0x610090, 0x00000001, 0x00000001);
	nv_mask(dev, 0x6100a0, 0x00000001, 0x00000001);

	/* init cursors */
	for (i = 13; i <= 14; i++) {
		nv_wr32(dev, 0x610490 + (i * 0x10), 0x00000001);
		if (!nv_wait(dev, 0x610490 + (i * 0x10),
			     0x00010000, 0x00010000)) {
			NV_ERROR(dev, "PDISP: curs%d 0x%08x\n", i,
				 nv_rd32(dev, 0x610490 + (i * 0x10)));
			return -EBUSY;
		}

		nv_mask(dev, 0x610090, 1 << i, 1 << i);
		nv_mask(dev, 0x6100a0, 1 << i, 1 << i);
	}

	push = evo_wait(dev, 0, 32);
	if (!push)
		return -EBUSY;
	evo_mthd(push, 0x0088, 1);
	evo_data(push, MEM_SYNC);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x80000000);
	evo_mthd(push, 0x008c, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, dev, 0);
	return 0;
}

void
nvd0_display_destroy(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nvd0_display *disp = nvd0_display(dev);
	struct pci_dev *pdev = dev->pdev;

	nvd0_display_fini(dev);

	pci_free_consistent(pdev, PAGE_SIZE, disp->evo[0].ptr,
			    disp->evo[0].handle);
	nouveau_gpuobj_ref(NULL, &disp->mem);
	nouveau_irq_unregister(dev, 26);

	dev_priv->engine.display.priv = NULL;
	kfree(disp);
}

int
nvd0_display_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_instmem_engine *pinstmem = &dev_priv->engine.instmem;
	struct dcb_table *dcb = &dev_priv->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct pci_dev *pdev = dev->pdev;
	struct nvd0_display *disp;
	struct dcb_entry *dcbe;
	int ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;
	dev_priv->engine.display.priv = disp;

	/* create crtc objects to represent the hw heads */
	for (i = 0; i < 2; i++) {
		ret = nvd0_crtc_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location != DCB_LOC_ON_CHIP) {
			NV_WARN(dev, "skipping off-chip encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}

		switch (dcbe->type) {
		case OUTPUT_TMDS:
			nvd0_sor_create(connector, dcbe);
			break;
		case OUTPUT_ANALOG:
			nvd0_dac_create(connector, dcbe);
			break;
		default:
			NV_WARN(dev, "skipping unsupported encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp,
				 &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(dev, "%s has no encoders, removing\n",
			drm_get_connector_name(connector));
		connector->funcs->destroy(connector);
	}

	/* setup interrupt handling */
	nouveau_irq_register(dev, 26, nvd0_display_intr);

	/* hash table and dma objects for the memory areas we care about */
	ret = nouveau_gpuobj_new(dev, NULL, 0x4000, 0x10000,
				 NVOBJ_FLAG_ZERO_ALLOC, &disp->mem);
	if (ret)
		goto out;
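
	/* What's written below appears to be a hash table in the first page
	 * of disp->mem, pairing each handle (MEM_SYNC, MEM_VRAM,
	 * NvEvoVRAM_LP, NvEvoFB32) with (object_offset << 9) | valid, plus
	 * the DMA object bodies themselves at 0x1000/0x1020/0x1040/0x1060,
	 * each giving a class word and a base/limit range within VRAM.
	 */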
	nv_wo32(disp->mem, 0x1000, 0x00000049);
	nv_wo32(disp->mem, 0x1004, (disp->mem->vinst + 0x2000) >> 8);
	nv_wo32(disp->mem, 0x1008, (disp->mem->vinst + 0x2fff) >> 8);
	nv_wo32(disp->mem, 0x100c, 0x00000000);
	nv_wo32(disp->mem, 0x1010, 0x00000000);
	nv_wo32(disp->mem, 0x1014, 0x00000000);
	nv_wo32(disp->mem, 0x0000, MEM_SYNC);
	nv_wo32(disp->mem, 0x0004, (0x1000 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1020, 0x00000049);
	nv_wo32(disp->mem, 0x1024, 0x00000000);
	nv_wo32(disp->mem, 0x1028, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x102c, 0x00000000);
	nv_wo32(disp->mem, 0x1030, 0x00000000);
	nv_wo32(disp->mem, 0x1034, 0x00000000);
	nv_wo32(disp->mem, 0x0008, MEM_VRAM);
	nv_wo32(disp->mem, 0x000c, (0x1020 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1040, 0x00000009);
	nv_wo32(disp->mem, 0x1044, 0x00000000);
	nv_wo32(disp->mem, 0x1048, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x104c, 0x00000000);
	nv_wo32(disp->mem, 0x1050, 0x00000000);
	nv_wo32(disp->mem, 0x1054, 0x00000000);
	nv_wo32(disp->mem, 0x0010, NvEvoVRAM_LP);
	nv_wo32(disp->mem, 0x0014, (0x1040 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1060, 0x0fe00009);
	nv_wo32(disp->mem, 0x1064, 0x00000000);
	nv_wo32(disp->mem, 0x1068, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x106c, 0x00000000);
	nv_wo32(disp->mem, 0x1070, 0x00000000);
	nv_wo32(disp->mem, 0x1074, 0x00000000);
	nv_wo32(disp->mem, 0x0018, NvEvoFB32);
	nv_wo32(disp->mem, 0x001c, (0x1060 << 9) | 0x00000001);

	pinstmem->flush(dev);

	/* push buffers for evo channels */
	disp->evo[0].ptr =
		pci_alloc_consistent(pdev, PAGE_SIZE, &disp->evo[0].handle);
	if (!disp->evo[0].ptr) {
		ret = -ENOMEM;
		goto out;
	}

	ret = nvd0_display_init(dev);
	if (ret)
		goto out;

out:
	if (ret)
		nvd0_display_destroy(dev);
	return ret;
}