nvd0_display.c 36 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377
  1. /*
  2. * Copyright 2011 Red Hat Inc.
  3. *
  4. * Permission is hereby granted, free of charge, to any person obtaining a
  5. * copy of this software and associated documentation files (the "Software"),
  6. * to deal in the Software without restriction, including without limitation
  7. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  8. * and/or sell copies of the Software, and to permit persons to whom the
  9. * Software is furnished to do so, subject to the following conditions:
  10. *
  11. * The above copyright notice and this permission notice shall be included in
  12. * all copies or substantial portions of the Software.
  13. *
  14. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  15. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  16. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  17. * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  18. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  19. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  20. * OTHER DEALINGS IN THE SOFTWARE.
  21. *
  22. * Authors: Ben Skeggs
  23. */
  24. #include <linux/dma-mapping.h>
  25. #include "drmP.h"
  26. #include "drm_crtc_helper.h"
  27. #include "nouveau_drv.h"
  28. #include "nouveau_connector.h"
  29. #include "nouveau_encoder.h"
  30. #include "nouveau_crtc.h"
  31. #include "nouveau_fb.h"
  32. #include "nv50_display.h"
  33. #define MEM_SYNC 0xe0000001
  34. #define MEM_VRAM 0xe0010000
  35. #include "nouveau_dma.h"
/* Per-device state for the NVD9/GF119 display engine. */
struct nvd0_display {
	struct nouveau_gpuobj *mem;	/* NOTE(review): presumably the object/hash
					 * table backing the EVO channels — confirm */
	struct {
		dma_addr_t handle;	/* DMA address of the push buffer page */
		u32 *ptr;		/* CPU mapping of the push buffer */
	} evo[1];			/* single (master) EVO channel for now */
	struct {
		struct dcb_entry *dis;	/* output being disconnected */
		struct dcb_entry *ena;	/* output being enabled */
		int crtc;		/* head the supervisor interrupt refers to */
		int pclk;		/* pixel clock (kHz) read back for reclocking */
		u16 script;		/* encoder script offset to execute */
	} irq;				/* state carried between irq handler stages */
};
  50. static struct nvd0_display *
  51. nvd0_display(struct drm_device *dev)
  52. {
  53. struct drm_nouveau_private *dev_priv = dev->dev_private;
  54. return dev_priv->engine.display.priv;
  55. }
/* Submit one method/data pair to EVO channel 'id' through the
 * immediate-command registers, bypassing the push buffer.
 * Returns 0 on success, -EBUSY if the hardware never acks the method. */
static int
evo_icmd(struct drm_device *dev, int id, u32 mthd, u32 data)
{
	int ret = 0;
	/* enable immediate-command mode for this channel */
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000001);
	nv_wr32(dev, 0x610704 + (id * 0x10), data);
	/* bit31 = execute, low bits = method offset */
	nv_mask(dev, 0x610704 + (id * 0x10), 0x80000ffc, 0x80000000 | mthd);
	/* wait for bit31 to clear, signalling completion */
	if (!nv_wait(dev, 0x610704 + (id * 0x10), 0x80000000, 0x00000000))
		ret = -EBUSY;
	/* back to normal (push buffer) operation */
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000000);
	return ret;
}
/* Reserve space for 'nr' 32-bit words in EVO channel 'id's push buffer.
 * If the request would run off the end of the page, emit a jump back to
 * offset 0 and wait for the hardware to consume everything queued.
 * Returns a pointer to write methods at, or NULL if the DMA stalled. */
static u32 *
evo_wait(struct drm_device *dev, int id, int nr)
{
	struct nvd0_display *disp = nvd0_display(dev);
	/* current PUT offset, converted from bytes to words */
	u32 put = nv_rd32(dev, 0x640000 + (id * 0x1000)) / 4;
	if (put + nr >= (PAGE_SIZE / 4)) {
		disp->evo[id].ptr[put] = 0x20000000; /* jump to offset 0 */
		nv_wr32(dev, 0x640000 + (id * 0x1000), 0x00000000);
		/* wait for GET to return to 0 (buffer fully drained) */
		if (!nv_wait(dev, 0x640004 + (id * 0x1000), ~0, 0x00000000)) {
			NV_ERROR(dev, "evo %d dma stalled\n", id);
			return NULL;
		}
		put = 0;
	}
	return disp->evo[id].ptr + put;
}
/* Publish queued methods: advance the channel's PUT register to the
 * current write position so the hardware starts fetching them. */
static void
evo_kick(u32 *push, struct drm_device *dev, int id)
{
	struct nvd0_display *disp = nvd0_display(dev);
	/* pointer difference is in words; PUT register takes bytes */
	nv_wr32(dev, 0x640000 + (id * 0x1000), (push - disp->evo[id].ptr) << 2);
}
/* Append a method header: word count 's' in bits 18+, method offset 'm'
 * in the low bits.  'p' is the cursor returned by evo_wait(). */
#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
/* Append one data word for the preceding method header. */
#define evo_data(p,d) *((p)++) = (d)
  92. static struct drm_crtc *
  93. nvd0_display_crtc_get(struct drm_encoder *encoder)
  94. {
  95. return nouveau_encoder(encoder)->crtc;
  96. }
  97. /******************************************************************************
  98. * CRTC
  99. *****************************************************************************/
  100. static int
  101. nvd0_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool on, bool update)
  102. {
  103. struct drm_device *dev = nv_crtc->base.dev;
  104. u32 *push, mode;
  105. mode = 0x00000000;
  106. if (on) {
  107. /* 0x11: 6bpc dynamic 2x2
  108. * 0x13: 8bpc dynamic 2x2
  109. * 0x19: 6bpc static 2x2
  110. * 0x1b: 8bpc static 2x2
  111. * 0x21: 6bpc temporal
  112. * 0x23: 8bpc temporal
  113. */
  114. mode = 0x00000011;
  115. }
  116. push = evo_wait(dev, 0, 4);
  117. if (push) {
  118. evo_mthd(push, 0x0490 + (nv_crtc->index * 0x300), 1);
  119. evo_data(push, mode);
  120. if (update) {
  121. evo_mthd(push, 0x0080, 1);
  122. evo_data(push, 0x00000000);
  123. }
  124. evo_kick(push, dev, 0);
  125. }
  126. return 0;
  127. }
/* Program the head's scaler for the requested scaling policy.
 * outX/outY is the size the source image is scaled to on the output;
 * it defaults to the mode's own size (no scaling) unless the connector
 * has a native mode and a scaling policy that asks otherwise. */
static int
nvd0_crtc_set_scale(struct nouveau_crtc *nv_crtc, int type, bool update)
{
	struct drm_display_mode *mode = &nv_crtc->base.mode;
	struct drm_device *dev = nv_crtc->base.dev;
	struct nouveau_connector *nv_connector;
	u32 *push, outX, outY;
	outX = mode->hdisplay;
	outY = mode->vdisplay;
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (nv_connector && nv_connector->native_mode) {
		struct drm_display_mode *native = nv_connector->native_mode;
		/* 13.19 fixed-point ratios of native to requested size */
		u32 xratio = (native->hdisplay << 19) / mode->hdisplay;
		u32 yratio = (native->vdisplay << 19) / mode->vdisplay;
		switch (type) {
		case DRM_MODE_SCALE_ASPECT:
			/* scale by the smaller ratio so the whole image
			 * fits on the panel with aspect preserved */
			if (xratio > yratio) {
				outX = (mode->hdisplay * yratio) >> 19;
				outY = (mode->vdisplay * yratio) >> 19;
			} else {
				outX = (mode->hdisplay * xratio) >> 19;
				outY = (mode->vdisplay * xratio) >> 19;
			}
			break;
		case DRM_MODE_SCALE_FULLSCREEN:
			/* stretch to the panel's full native size */
			outX = native->hdisplay;
			outY = native->vdisplay;
			break;
		default:
			break;
		}
	}
	push = evo_wait(dev, 0, 16);
	if (push) {
		/* output size, written to three scaler taps */
		evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
		evo_data(push, (outY << 16) | outX);
		evo_data(push, (outY << 16) | outX);
		evo_data(push, (outY << 16) | outX);
		evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		/* input (viewport) size is the requested mode's size */
		evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}
	return 0;
}
/* Point the head's core surface at framebuffer 'fb'.
 * NOTE(review): the x/y panning offsets are accepted but never applied
 * here — confirm whether that is intentional for this hardware. */
static int
nvd0_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	u32 *push;
	push = evo_wait(fb->dev, 0, 16);
	if (push) {
		evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nvfb->nvbo->bo.offset >> 8); /* 256-byte units */
		evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nvfb->r_pitch);
		evo_data(push, nvfb->r_format);
		evo_data(push, nvfb->r_dma);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, fb->dev, 0);
	}
	/* remembered so commit() can reattach the correct ctxdma */
	nv_crtc->fb.tile_flags = nvfb->r_dma;
	return 0;
}
/* Show or hide the hardware cursor on this head. */
static void
nvd0_crtc_cursor_show(struct nouveau_crtc *nv_crtc, bool show, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push = evo_wait(dev, 0, 16);
	if (push) {
		if (show) {
			/* 0x85000000: enable bits — TODO confirm format field */
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			/* cursor ctxdma */
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, MEM_VRAM);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000); /* disable */
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}
}
/* Intentionally empty: power management is handled per-encoder
 * (nvd0_dac_dpms/nvd0_sor_dpms), nothing to do at the CRTC level. */
static void
nvd0_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}
  233. static void
  234. nvd0_crtc_prepare(struct drm_crtc *crtc)
  235. {
  236. struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
  237. u32 *push;
  238. push = evo_wait(crtc->dev, 0, 2);
  239. if (push) {
  240. evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
  241. evo_data(push, 0x00000000);
  242. evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
  243. evo_data(push, 0x03000000);
  244. evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
  245. evo_data(push, 0x00000000);
  246. evo_kick(push, crtc->dev, 0);
  247. }
  248. nvd0_crtc_cursor_show(nv_crtc, false, false);
  249. }
/* Unblank the head after a modeset: reattach the surface ctxdma,
 * enable the core surface with its gamma LUT, and show the cursor
 * again if it was visible before prepare(). */
static void
nvd0_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;
	push = evo_wait(crtc->dev, 0, 32);
	if (push) {
		/* surface ctxdma saved by nvd0_crtc_set_image() */
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nv_crtc->fb.tile_flags);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
		evo_data(push, 0x83000000); /* enable + LUT — TODO confirm bits */
		evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, MEM_VRAM);
		evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0xffffff00);
		evo_kick(push, crtc->dev, 0);
	}
	nvd0_crtc_cursor_show(nv_crtc, nv_crtc->cursor.visible, true);
}
/* No CRTC-level mode constraints to apply; accept the mode as-is. */
static bool
nvd0_crtc_mode_fixup(struct drm_crtc *crtc, struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}
  278. static int
  279. nvd0_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
  280. {
  281. struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
  282. int ret;
  283. ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
  284. if (ret)
  285. return ret;
  286. if (old_fb) {
  287. nvfb = nouveau_framebuffer(old_fb);
  288. nouveau_bo_unpin(nvfb->nvbo);
  289. }
  290. return 0;
  291. }
/* Full modeset for a head: derive the hardware's sync-relative timing
 * parameters from the DRM mode, program timings and pixel clock, then
 * re-apply dither/scale/surface state for the new configuration. */
static int
nvd0_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	u32 htotal = mode->htotal;
	u32 vtotal = mode->vtotal;
	u32 hsyncw = mode->hsync_end - mode->hsync_start - 1;
	u32 vsyncw = mode->vsync_end - mode->vsync_start - 1;
	u32 hfrntp = mode->hsync_start - mode->hdisplay;  /* front porch */
	u32 vfrntp = mode->vsync_start - mode->vdisplay;
	u32 hbackp = mode->htotal - mode->hsync_end;      /* back porch */
	u32 vbackp = mode->vtotal - mode->vsync_end;
	u32 hss2be = hsyncw + hbackp;   /* sync start to blank end */
	u32 vss2be = vsyncw + vbackp;
	u32 hss2de = htotal - hfrntp;   /* sync start to display end */
	u32 vss2de = vtotal - vfrntp;
	u32 syncs, *push;
	int ret;
	/* sync polarity flags */
	syncs = 0x00000001;
	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
		syncs |= 0x00000008;
	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
		syncs |= 0x00000010;
	/* pin the new fb (and unpin the old) before touching hardware */
	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;
	push = evo_wait(crtc->dev, 0, 64);
	if (push) {
		evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 5);
		evo_data(push, 0x00000000);
		evo_data(push, (vtotal << 16) | htotal);
		evo_data(push, (vsyncw << 16) | hsyncw);
		evo_data(push, (vss2be << 16) | hss2be);
		evo_data(push, (vss2de << 16) | hss2de);
		evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000); /* ??? */
		evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
		evo_data(push, mode->clock * 1000); /* pixel clock in Hz */
		evo_data(push, 0x00200000); /* ??? */
		evo_data(push, mode->clock * 1000);
		evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 1);
		evo_data(push, syncs);
		evo_kick(push, crtc->dev, 0);
	}
	/* connector-derived state, then point the head at the new fb */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nvd0_crtc_set_dither(nv_crtc, nv_connector->use_dithering, false);
	nvd0_crtc_set_scale(nv_crtc, nv_connector->scaling_mode, false);
	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
	return 0;
}
  345. static int
  346. nvd0_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
  347. struct drm_framebuffer *old_fb)
  348. {
  349. struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
  350. int ret;
  351. ret = nvd0_crtc_swap_fbs(crtc, old_fb);
  352. if (ret)
  353. return ret;
  354. nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
  355. return 0;
  356. }
  357. static int
  358. nvd0_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
  359. struct drm_framebuffer *fb, int x, int y,
  360. enum mode_set_atomic state)
  361. {
  362. struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
  363. nvd0_crtc_set_image(nv_crtc, fb, x, y, true);
  364. return 0;
  365. }
/* Copy the software gamma table into the hardware LUT buffer.
 * Each LUT entry occupies 0x20 bytes with 16-bit r/g/b words at
 * offsets 0/2/4.  NOTE(review): the 0x6000 bias and >>2 scaling are
 * taken as the hardware's expected format — confirm against docs. */
static void
nvd0_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;
	for (i = 0; i < 256; i++) {
		writew(0x6000 + (nv_crtc->lut.r[i] >> 2), lut + (i * 0x20) + 0);
		writew(0x6000 + (nv_crtc->lut.g[i] >> 2), lut + (i * 0x20) + 2);
		writew(0x6000 + (nv_crtc->lut.b[i] >> 2), lut + (i * 0x20) + 4);
	}
}
/* Bind a userspace-supplied 64x64 cursor image to this CRTC.
 * handle == 0 hides the cursor.  The image is copied word-by-word
 * into the per-CRTC cursor buffer allocated at crtc_create() time.
 * Returns 0 on success, -EINVAL on bad size, -ENOENT on bad handle. */
static int
nvd0_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem;
	struct nouveau_bo *nvbo;
	bool visible = (handle != 0);
	int i, ret = 0;
	if (visible) {
		/* hardware cursor is fixed at 64x64 */
		if (width != 64 || height != 64)
			return -EINVAL;
		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);
		ret = nouveau_bo_map(nvbo);
		if (ret == 0) {
			/* copy the whole 64x64x32bpp image */
			for (i = 0; i < 64 * 64; i++) {
				u32 v = nouveau_bo_rd32(nvbo, i);
				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
			}
			nouveau_bo_unmap(nvbo);
		}
		drm_gem_object_unreference_unlocked(gem);
	}
	/* only touch the hardware if visibility actually changed */
	if (visible != nv_crtc->cursor.visible) {
		nvd0_crtc_cursor_show(nv_crtc, visible, true);
		nv_crtc->cursor.visible = visible;
	}
	return ret;
}
/* Move the hardware cursor.  Written directly to the per-head cursor
 * channel registers (position, then a 0 write — presumably a latch/
 * update trigger, TODO confirm) rather than via the EVO push buffer. */
static int
nvd0_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	const u32 data = (y << 16) | x;
	nv_wr32(crtc->dev, 0x64d084 + (nv_crtc->index * 0x1000), data);
	nv_wr32(crtc->dev, 0x64d080 + (nv_crtc->index * 0x1000), 0x00000000);
	return 0;
}
  420. static void
  421. nvd0_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
  422. uint32_t start, uint32_t size)
  423. {
  424. struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
  425. u32 end = max(start + size, (u32)256);
  426. u32 i;
  427. for (i = start; i < end; i++) {
  428. nv_crtc->lut.r[i] = r[i];
  429. nv_crtc->lut.g[i] = g[i];
  430. nv_crtc->lut.b[i] = b[i];
  431. }
  432. nvd0_crtc_lut_load(crtc);
  433. }
/* Tear down a CRTC: release the cursor and LUT buffers created by
 * nvd0_crtc_create() and free the nouveau_crtc wrapper itself. */
static void
nvd0_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	drm_crtc_cleanup(crtc);
	kfree(crtc);
}
/* drm_crtc_helper callbacks for NVD0 heads. */
static const struct drm_crtc_helper_funcs nvd0_crtc_hfunc = {
	.dpms = nvd0_crtc_dpms,
	.prepare = nvd0_crtc_prepare,
	.commit = nvd0_crtc_commit,
	.mode_fixup = nvd0_crtc_mode_fixup,
	.mode_set = nvd0_crtc_mode_set,
	.mode_set_base = nvd0_crtc_mode_set_base,
	.mode_set_base_atomic = nvd0_crtc_mode_set_base_atomic,
	.load_lut = nvd0_crtc_lut_load,
};
/* Core drm_crtc callbacks for NVD0 heads. */
static const struct drm_crtc_funcs nvd0_crtc_func = {
	.cursor_set = nvd0_crtc_cursor_set,
	.cursor_move = nvd0_crtc_cursor_move,
	.gamma_set = nvd0_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = nvd0_crtc_destroy,
};
/* Allocate and register one CRTC (head 'index'), including pinned and
 * mapped VRAM buffers for the 64x64 cursor image and the gamma LUT.
 * On any failure the partially-constructed CRTC is destroyed. */
static int
nvd0_crtc_create(struct drm_device *dev, int index)
{
	struct nouveau_crtc *nv_crtc;
	struct drm_crtc *crtc;
	int ret, i;
	nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
	if (!nv_crtc)
		return -ENOMEM;
	nv_crtc->index = index;
	nv_crtc->set_dither = nvd0_crtc_set_dither;
	nv_crtc->set_scale = nvd0_crtc_set_scale;
	/* identity gamma ramp as the default */
	for (i = 0; i < 256; i++) {
		nv_crtc->lut.r[i] = i << 8;
		nv_crtc->lut.g[i] = i << 8;
		nv_crtc->lut.b[i] = i << 8;
	}
	crtc = &nv_crtc->base;
	drm_crtc_init(dev, crtc, &nvd0_crtc_func);
	drm_crtc_helper_add(crtc, &nvd0_crtc_hfunc);
	drm_mode_crtc_set_gamma_size(crtc, 256);
	/* cursor image buffer (64x64 @ 32bpp) */
	ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	}
	if (ret)
		goto out;
	/* hardware LUT buffer (256 entries * 0x20 bytes) */
	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	}
	if (ret)
		goto out;
	nvd0_crtc_lut_load(crtc);
out:
	if (ret)
		nvd0_crtc_destroy(crtc);
	return ret;
}
  511. /******************************************************************************
  512. * DAC
  513. *****************************************************************************/
/* DAC power management.
 * NOTE(review): bit0/bit2 are presumed hsync/vsync power-down
 * (STANDBY sets bit0, SUSPEND bit2, OFF both) — confirm semantics. */
static void
nvd0_dac_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	int or = nv_encoder->or;
	u32 dpms_ctrl;
	dpms_ctrl = 0x80000000;
	if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000001;
	if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000004;
	/* wait for any in-flight update to settle before and after */
	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61a004 + (or * 0x0800), 0xc000007f, dpms_ctrl);
	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
}
  530. static bool
  531. nvd0_dac_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
  532. struct drm_display_mode *adjusted_mode)
  533. {
  534. struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
  535. struct nouveau_connector *nv_connector;
  536. nv_connector = nouveau_encoder_connector_get(nv_encoder);
  537. if (nv_connector && nv_connector->native_mode) {
  538. if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
  539. int id = adjusted_mode->base.id;
  540. *adjusted_mode = *nv_connector->native_mode;
  541. adjusted_mode->base.id = id;
  542. }
  543. }
  544. return true;
  545. }
/* Nothing to do before a DAC modeset; routing happens in mode_set(). */
static void
nvd0_dac_prepare(struct drm_encoder *encoder)
{
}
/* Nothing to do after a DAC modeset. */
static void
nvd0_dac_commit(struct drm_encoder *encoder)
{
}
/* Power the DAC up and route it to the CRTC driving it. */
static void
nvd0_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;
	nvd0_dac_dpms(encoder, DRM_MODE_DPMS_ON);
	push = evo_wait(encoder->dev, 0, 2);
	if (push) {
		/* DAC owner: bitmask of heads driving this output */
		evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
		evo_data(push, 1 << nv_crtc->index);
		evo_kick(push, encoder->dev, 0);
	}
	/* remember the binding for disconnect()/crtc_get() */
	nv_encoder->crtc = encoder->crtc;
}
/* Detach the DAC from its CRTC (clear the owner mask) and blank the
 * head it was driving.  No-op if the encoder is already unbound. */
static void
nvd0_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;
	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);
		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000); /* no owner */
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000); /* commit */
			evo_kick(push, dev, 0);
		}
		nv_encoder->crtc = NULL;
	}
}
/* Analog load detection for the DAC.
 * NOTE(review): not implemented — always reports disconnected, so
 * analog outputs rely entirely on EDID-probed connectors. */
static enum drm_connector_status
nvd0_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	return connector_status_disconnected;
}
/* Free a DAC encoder created by nvd0_dac_create(). */
static void
nvd0_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
/* drm_encoder_helper callbacks for NVD0 DACs. */
static const struct drm_encoder_helper_funcs nvd0_dac_hfunc = {
	.dpms = nvd0_dac_dpms,
	.mode_fixup = nvd0_dac_mode_fixup,
	.prepare = nvd0_dac_prepare,
	.commit = nvd0_dac_commit,
	.mode_set = nvd0_dac_mode_set,
	.disable = nvd0_dac_disconnect,
	.get_crtc = nvd0_display_crtc_get,
	.detect = nvd0_dac_detect
};
/* Core drm_encoder callbacks for NVD0 DACs. */
static const struct drm_encoder_funcs nvd0_dac_func = {
	.destroy = nvd0_dac_destroy,
};
/* Create a DAC encoder for the given DCB entry and attach it to the
 * connector.  The OR index is the lowest bit set in the DCB 'or' mask. */
static int
nvd0_dac_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_dac_func, DRM_MODE_ENCODER_DAC);
	drm_encoder_helper_add(encoder, &nvd0_dac_hfunc);
	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
  632. /******************************************************************************
  633. * SOR
  634. *****************************************************************************/
/* SOR power management.  If another TMDS encoder sharing the same OR
 * is still on, leave the hardware untouched; otherwise set/clear the
 * power bit and wait for the SOR state machine to settle. */
static void
nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_encoder *partner;
	int or = nv_encoder->or;
	u32 dpms_ctrl;
	nv_encoder->last_dpms = mode;
	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);
		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;
		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->or) {
			/* shared OR still active elsewhere: don't touch it */
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}
	dpms_ctrl = (mode == DRM_MODE_DPMS_ON);
	dpms_ctrl |= 0x80000000;
	/* wait idle, apply, then wait for both status registers */
	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61c004 + (or * 0x0800), 0x80000001, dpms_ctrl);
	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_wait(dev, 0x61c030 + (or * 0x0800), 0x10000000, 0x00000000);
}
  662. static bool
  663. nvd0_sor_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
  664. struct drm_display_mode *adjusted_mode)
  665. {
  666. struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
  667. struct nouveau_connector *nv_connector;
  668. nv_connector = nouveau_encoder_connector_get(nv_encoder);
  669. if (nv_connector && nv_connector->native_mode) {
  670. if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
  671. int id = adjusted_mode->base.id;
  672. *adjusted_mode = *nv_connector->native_mode;
  673. adjusted_mode->base.id = id;
  674. }
  675. }
  676. return true;
  677. }
/* Nothing to do before a SOR modeset; routing happens in mode_set(). */
static void
nvd0_sor_prepare(struct drm_encoder *encoder)
{
}
/* Nothing to do after a SOR modeset. */
static void
nvd0_sor_commit(struct drm_encoder *encoder)
{
}
/* Route this SOR to its CRTC and select the TMDS link configuration.
 * Pixel clocks below 165MHz fit a single TMDS link; above that the
 * dual-link protocol is selected.  Link bit comments below are
 * presumed from the dcb sorconf.link encoding — TODO confirm. */
static void
nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 mode_ctrl = (1 << nv_crtc->index); /* crtc owner mask */
	u32 *push;
	if (nv_encoder->dcb->sorconf.link & 1) {
		if (adjusted_mode->clock < 165000)
			mode_ctrl |= 0x00000100; /* single TMDS, link A */
		else
			mode_ctrl |= 0x00000500; /* dual-link TMDS */
	} else {
		mode_ctrl |= 0x00000200; /* single TMDS, link B */
	}
	nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);
	push = evo_wait(encoder->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
		evo_data(push, mode_ctrl);
		evo_kick(push, encoder->dev, 0);
	}
	/* remember the binding for disconnect()/crtc_get() */
	nv_encoder->crtc = encoder->crtc;
}
/* Detach the SOR from its CRTC, blank the head it was driving, and
 * record the encoder as powered off for the shared-OR dpms logic. */
static void
nvd0_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;
	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);
		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000); /* no owner */
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000); /* commit */
			evo_kick(push, dev, 0);
		}
		nv_encoder->crtc = NULL;
		nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	}
}
/* Free a SOR encoder created by nvd0_sor_create(). */
static void
nvd0_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
/* drm_encoder_helper callbacks for NVD0 SORs (TMDS). */
static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
	.dpms = nvd0_sor_dpms,
	.mode_fixup = nvd0_sor_mode_fixup,
	.prepare = nvd0_sor_prepare,
	.commit = nvd0_sor_commit,
	.mode_set = nvd0_sor_mode_set,
	.disable = nvd0_sor_disconnect,
	.get_crtc = nvd0_display_crtc_get,
};
/* Core drm_encoder callbacks for NVD0 SORs. */
static const struct drm_encoder_funcs nvd0_sor_func = {
	.destroy = nvd0_sor_destroy,
};
/* Create a SOR (TMDS) encoder for the given DCB entry and attach it to
 * the connector.  Starts powered off for the shared-OR dpms logic. */
static int
nvd0_sor_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
	drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);
	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
  769. /******************************************************************************
  770. * IRQ
  771. *****************************************************************************/
  772. static struct dcb_entry *
  773. lookup_dcb(struct drm_device *dev, int id, u32 mc)
  774. {
  775. struct drm_nouveau_private *dev_priv = dev->dev_private;
  776. int type, or, i;
  777. if (id < 4) {
  778. type = OUTPUT_ANALOG;
  779. or = id;
  780. } else {
  781. type = OUTPUT_TMDS;
  782. or = id - 4;
  783. }
  784. for (i = 0; i < dev_priv->vbios.dcb.entries; i++) {
  785. struct dcb_entry *dcb = &dev_priv->vbios.dcb.entry[i];
  786. if (dcb->type == type && (dcb->or & (1 << or)))
  787. return dcb;
  788. }
  789. NV_INFO(dev, "PDISP: DCB for %d/0x%08x not found\n", id, mc);
  790. return NULL;
  791. }
  792. static void
  793. nvd0_display_unk1_handler(struct drm_device *dev)
  794. {
  795. struct nvd0_display *disp = nvd0_display(dev);
  796. struct dcb_entry *dcb;
  797. u32 unkn, crtc = 0;
  798. int i;
  799. NV_INFO(dev, "PDISP: 1 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
  800. nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));
  801. unkn = nv_rd32(dev, 0x6101d4);
  802. if (!unkn) {
  803. unkn = nv_rd32(dev, 0x6109d4);
  804. crtc = 1;
  805. }
  806. disp->irq.ena = NULL;
  807. disp->irq.dis = NULL;
  808. disp->irq.crtc = crtc;
  809. disp->irq.pclk = nv_rd32(dev, 0x660450 + (disp->irq.crtc * 0x300));
  810. disp->irq.pclk /= 1000;
  811. for (i = 0; i < 8; i++) {
  812. u32 mcc = nv_rd32(dev, 0x640180 + (i * 0x20));
  813. u32 mcp = nv_rd32(dev, 0x660180 + (i * 0x20));
  814. if (mcc & (1 << crtc))
  815. disp->irq.dis = lookup_dcb(dev, i, mcc);
  816. if (mcp & (1 << crtc)) {
  817. disp->irq.ena = lookup_dcb(dev, i, mcp);
  818. switch (disp->irq.ena->type) {
  819. case OUTPUT_ANALOG:
  820. disp->irq.script = 0x00ff;
  821. break;
  822. case OUTPUT_TMDS:
  823. disp->irq.script = (mcp & 0x00000f00) >> 8;
  824. if (disp->irq.pclk >= 165000)
  825. disp->irq.script |= 0x0100;
  826. break;
  827. default:
  828. disp->irq.script = 0xbeef;
  829. break;
  830. }
  831. }
  832. }
  833. dcb = disp->irq.dis;
  834. if (dcb)
  835. nouveau_bios_run_display_table(dev, 0x0000, -1, dcb, crtc);
  836. nv_wr32(dev, 0x6101d4, 0x00000000);
  837. nv_wr32(dev, 0x6109d4, 0x00000000);
  838. nv_wr32(dev, 0x6101d0, 0x80000000);
  839. }
/* Second supervisor stage: finish disabling the outgoing encoder
 * (script id -2), program the head's pixel clock, then run the enable
 * script for the incoming encoder and set up its per-OR control
 * register before acknowledging. */
static void
nvd0_display_unk2_handler(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	struct dcb_entry *dcb;
	int crtc = disp->irq.crtc;
	int pclk = disp->irq.pclk;
	int or;
	u32 tmp;

	NV_INFO(dev, "PDISP: 2 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	/* second-stage disable script; consume irq.dis so stage 4 won't
	 * see it again */
	dcb = disp->irq.dis;
	disp->irq.dis = NULL;
	if (dcb)
		nouveau_bios_run_display_table(dev, 0x0000, -2, dcb, crtc);

	/* GF119 reuses the nv50 PLL programming path */
	nv50_crtc_set_clock(dev, crtc, pclk);

	dcb = disp->irq.ena;
	if (!dcb)
		goto ack;
	or = ffs(dcb->or) - 1;

	/* script id was chosen in unk1 based on encoder type/pclk */
	nouveau_bios_run_display_table(dev, disp->irq.script, pclk, dcb, crtc);

	nv_wr32(dev, 0x612200 + (crtc * 0x800), 0x00000000);
	switch (dcb->type) {
	case OUTPUT_ANALOG:
		nv_wr32(dev, 0x612280 + (or * 0x800), 0x00000000);
		break;
	case OUTPUT_TMDS:
		/* >= 165MHz: beyond single-link TMDS — presumably enables
		 * dual-link in the SOR control register; TODO confirm */
		if (disp->irq.pclk >= 165000)
			tmp = 0x00000101;
		else
			tmp = 0x00000000;

		nv_mask(dev, 0x612300 + (or * 0x800), 0x00000707, tmp);
		break;
	default:
		break;
	}

ack:
	/* acknowledge: clear both heads' status, re-arm the supervisor */
	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}
/* Third and final supervisor stage: run the enable script once more for
 * the incoming encoder (consuming irq.ena), then acknowledge. */
static void
nvd0_display_unk4_handler(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	struct dcb_entry *dcb;
	int crtc = disp->irq.crtc;
	int pclk = disp->irq.pclk;

	NV_INFO(dev, "PDISP: 4 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	dcb = disp->irq.ena;
	disp->irq.ena = NULL;
	if (!dcb)
		goto ack;

	nouveau_bios_run_display_table(dev, disp->irq.script, pclk, dcb, crtc);

ack:
	/* acknowledge: clear both heads' status, re-arm the supervisor */
	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}
/* Top-level PDISP interrupt handler, registered on IRQ source 26 in
 * nvd0_display_create().  Decodes the master status at 0x610088 and
 * dispatches; each handled source is cleared from @intr so anything
 * left over can be reported as unknown. */
static void
nvd0_display_intr(struct drm_device *dev)
{
	u32 intr = nv_rd32(dev, 0x610088);

	/* EVO channel exception: log the offending method/data and clear
	 * the channel's error state (only the lowest pending channel is
	 * handled per interrupt) */
	if (intr & 0x00000002) {
		u32 stat = nv_rd32(dev, 0x61009c);
		int chid = ffs(stat) - 1;
		if (chid >= 0) {
			u32 mthd = nv_rd32(dev, 0x6101f0 + (chid * 12));
			u32 data = nv_rd32(dev, 0x6101f4 + (chid * 12));
			u32 unkn = nv_rd32(dev, 0x6101f8 + (chid * 12));

			NV_INFO(dev, "EvoCh: chid %d mthd 0x%04x data 0x%08x "
				     "0x%08x 0x%08x\n",
				chid, (mthd & 0x0000ffc), data, mthd, unkn);
			nv_wr32(dev, 0x61009c, (1 << chid));
			nv_wr32(dev, 0x6101f0 + (chid * 12), 0x90000000);
		}

		intr &= ~0x00000002;
	}

	/* supervisor interrupt: the three modeset stages, bits 0/1/2 */
	if (intr & 0x00100000) {
		u32 stat = nv_rd32(dev, 0x6100ac);
		if (stat & 0x00000007) {
			nv_wr32(dev, 0x6100ac, (stat & 0x00000007));

			if (stat & 0x00000001)
				nvd0_display_unk1_handler(dev);
			if (stat & 0x00000002)
				nvd0_display_unk2_handler(dev);
			if (stat & 0x00000004)
				nvd0_display_unk4_handler(dev);
			stat &= ~0x00000007;
		}

		if (stat) {
			NV_INFO(dev, "PDISP: unknown intr24 0x%08x\n", stat);
			nv_wr32(dev, 0x6100ac, stat);
		}

		intr &= ~0x00100000;
	}

	/* 0x6100bc/0x6108bc: acknowledged but otherwise ignored —
	 * NOTE(review): presumably per-head status; semantics unconfirmed */
	if (intr & 0x01000000) {
		u32 stat = nv_rd32(dev, 0x6100bc);
		nv_wr32(dev, 0x6100bc, stat);
		intr &= ~0x01000000;
	}

	if (intr & 0x02000000) {
		u32 stat = nv_rd32(dev, 0x6108bc);
		nv_wr32(dev, 0x6108bc, stat);
		intr &= ~0x02000000;
	}

	if (intr)
		NV_INFO(dev, "PDISP: unknown intr 0x%08x\n", intr);
}
  950. /******************************************************************************
  951. * Init
  952. *****************************************************************************/
/* Halt the EVO channels started by nvd0_display_init(): the two cursor
 * channels (ids 13 and 14) first, then the core (master) channel,
 * disabling each channel's interrupt bits in 0x610090/0x6100a0 as it
 * stops. */
static void
nvd0_display_fini(struct drm_device *dev)
{
	int i;

	/* fini cursors */
	for (i = 14; i >= 13; i--) {
		/* skip channels that were never brought up */
		if (!(nv_rd32(dev, 0x610490 + (i * 0x10)) & 0x00000001))
			continue;

		nv_mask(dev, 0x610490 + (i * 0x10), 0x00000001, 0x00000000);
		nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00000000);
		nv_mask(dev, 0x610090, 1 << i, 0x00000000);
		nv_mask(dev, 0x6100a0, 1 << i, 0x00000000);
	}

	/* fini master */
	if (nv_rd32(dev, 0x610490) & 0x00000010) {
		nv_mask(dev, 0x610490, 0x00000010, 0x00000000);
		nv_mask(dev, 0x610490, 0x00000003, 0x00000000);
		/* wait for the channel to go idle before cutting interrupts */
		nv_wait(dev, 0x610490, 0x80000000, 0x00000000);
		nv_mask(dev, 0x610090, 0x00000001, 0x00000000);
		nv_mask(dev, 0x6100a0, 0x00000001, 0x00000000);
	}
}
/* Bring up the display engine: reset PDISP if it asks for one, seed the
 * EVO method state from the current hardware configuration, point the
 * hash table at our object memory, then start the core channel and both
 * cursor channels and send the initial method stream.
 * Returns 0 on success, -EBUSY when the hardware fails to respond. */
int
nvd0_display_init(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 *push;
	int i;

	/* 0x6100ac bit 8 set: the engine wants a reset cycle via 0x6194e8 */
	if (nv_rd32(dev, 0x6100ac) & 0x00000100) {
		nv_wr32(dev, 0x6100ac, 0x00000100);
		nv_mask(dev, 0x6194e8, 0x00000001, 0x00000000);
		if (!nv_wait(dev, 0x6194e8, 0x00000002, 0x00000000)) {
			NV_ERROR(dev, "PDISP: 0x6194e8 0x%08x\n",
				 nv_rd32(dev, 0x6194e8));
			return -EBUSY;
		}
	}

	/* nfi what these are exactly, i do know that SOR_MODE_CTRL won't
	 * work at all unless you do the SOR part below.
	 */
	for (i = 0; i < 3; i++) {
		u32 dac = nv_rd32(dev, 0x61a000 + (i * 0x800));
		nv_wr32(dev, 0x6101c0 + (i * 0x800), dac);
	}

	for (i = 0; i < 4; i++) {
		u32 sor = nv_rd32(dev, 0x61c000 + (i * 0x800));
		nv_wr32(dev, 0x6301c4 + (i * 0x800), sor);
	}

	for (i = 0; i < 2; i++) {
		u32 crtc0 = nv_rd32(dev, 0x616104 + (i * 0x800));
		u32 crtc1 = nv_rd32(dev, 0x616108 + (i * 0x800));
		u32 crtc2 = nv_rd32(dev, 0x61610c + (i * 0x800));
		nv_wr32(dev, 0x6101b4 + (i * 0x800), crtc0);
		nv_wr32(dev, 0x6101b8 + (i * 0x800), crtc1);
		nv_wr32(dev, 0x6101bc + (i * 0x800), crtc2);
	}

	/* point at our hash table / objects, enable interrupts */
	nv_wr32(dev, 0x610010, (disp->mem->vinst >> 8) | 9);
	nv_mask(dev, 0x6100b0, 0x00000307, 0x00000307);

	/* init master: program the push buffer address, start the channel
	 * and wait for it to come out of its busy state */
	nv_wr32(dev, 0x610494, (disp->evo[0].handle >> 8) | 3);
	nv_wr32(dev, 0x610498, 0x00010000);
	nv_wr32(dev, 0x61049c, 0x00000001);
	nv_mask(dev, 0x610490, 0x00000010, 0x00000010);
	nv_wr32(dev, 0x640000, 0x00000000);
	nv_wr32(dev, 0x610490, 0x01000013);
	if (!nv_wait(dev, 0x610490, 0x80000000, 0x00000000)) {
		NV_ERROR(dev, "PDISP: master 0x%08x\n",
			 nv_rd32(dev, 0x610490));
		return -EBUSY;
	}
	nv_mask(dev, 0x610090, 0x00000001, 0x00000001);
	nv_mask(dev, 0x6100a0, 0x00000001, 0x00000001);

	/* init cursors (channels 13 and 14), enabling their interrupts
	 * once each reports ready */
	for (i = 13; i <= 14; i++) {
		nv_wr32(dev, 0x610490 + (i * 0x10), 0x00000001);
		if (!nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00010000)) {
			NV_ERROR(dev, "PDISP: curs%d 0x%08x\n", i,
				 nv_rd32(dev, 0x610490 + (i * 0x10)));
			return -EBUSY;
		}

		nv_mask(dev, 0x610090, 1 << i, 1 << i);
		nv_mask(dev, 0x6100a0, 1 << i, 1 << i);
	}

	/* initial core-channel methods: bind the sync object (0x0088) and
	 * poke 0x0084/0x008c — NOTE(review): exact semantics of these
	 * methods are not established by this file */
	push = evo_wait(dev, 0, 32);
	if (!push)
		return -EBUSY;
	evo_mthd(push, 0x0088, 1);
	evo_data(push, MEM_SYNC);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x80000000);
	evo_mthd(push, 0x008c, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, dev, 0);
	return 0;
}
/* Tear down everything nvd0_display_create() built, in reverse order:
 * stop the hardware, free the core channel's push buffer, release the
 * hash-table/object memory, unhook the interrupt handler, and free the
 * display state.  Safe to call from the create path's error unwind. */
void
nvd0_display_destroy(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nvd0_display *disp = nvd0_display(dev);
	struct pci_dev *pdev = dev->pdev;

	nvd0_display_fini(dev);

	pci_free_consistent(pdev, PAGE_SIZE, disp->evo[0].ptr, disp->evo[0].handle);
	nouveau_gpuobj_ref(NULL, &disp->mem);
	nouveau_irq_unregister(dev, 26);

	dev_priv->engine.display.priv = NULL;
	kfree(disp);
}
/* Master constructor for GF119 display.  Allocates the driver state,
 * creates the two CRTCs, builds encoders/connectors from the VBIOS DCB
 * table, sets up the EVO hash table and DMA objects, allocates the core
 * channel's push buffer and starts the hardware.
 * Returns 0 on success or a negative errno; on any failure everything
 * created so far is unwound via nvd0_display_destroy(). */
int
nvd0_display_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_instmem_engine *pinstmem = &dev_priv->engine.instmem;
	struct dcb_table *dcb = &dev_priv->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct pci_dev *pdev = dev->pdev;
	struct nvd0_display *disp;
	struct dcb_entry *dcbe;
	int ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;
	dev_priv->engine.display.priv = disp;

	/* create crtc objects to represent the hw heads */
	for (i = 0; i < 2; i++) {
		ret = nvd0_crtc_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table;
	 * unsupported entries are skipped with a warning, and failures of
	 * the individual *_create calls are deliberately not fatal */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location != DCB_LOC_ON_CHIP) {
			NV_WARN(dev, "skipping off-chip encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}

		switch (dcbe->type) {
		case OUTPUT_TMDS:
			nvd0_sor_create(connector, dcbe);
			break;
		case OUTPUT_ANALOG:
			nvd0_dac_create(connector, dcbe);
			break;
		default:
			NV_WARN(dev, "skipping unsupported encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(dev, "%s has no encoders, removing\n",
			drm_get_connector_name(connector));
		connector->funcs->destroy(connector);
	}

	/* setup interrupt handling */
	nouveau_irq_register(dev, 26, nvd0_display_intr);

	/* hash table and dma objects for the memory areas we care about */
	ret = nouveau_gpuobj_new(dev, NULL, 0x4000, 0x10000,
				 NVOBJ_FLAG_ZERO_ALLOC, &disp->mem);
	if (ret)
		goto out;

	/* Each DMA object lives at a 0x20-aligned offset (0x1000, 0x1020,
	 * 0x1040, 0x1060) and is referenced by a hash-table entry pair at
	 * the start of the object memory: (handle, (offset << 9) | 1).
	 * NOTE(review): field meanings of the object words below are from
	 * reverse engineering; limits are derived from vram_size where
	 * visible, the rest is unconfirmed. */
	nv_wo32(disp->mem, 0x1000, 0x00000049);
	nv_wo32(disp->mem, 0x1004, (disp->mem->vinst + 0x2000) >> 8);
	nv_wo32(disp->mem, 0x1008, (disp->mem->vinst + 0x2fff) >> 8);
	nv_wo32(disp->mem, 0x100c, 0x00000000);
	nv_wo32(disp->mem, 0x1010, 0x00000000);
	nv_wo32(disp->mem, 0x1014, 0x00000000);
	nv_wo32(disp->mem, 0x0000, MEM_SYNC);
	nv_wo32(disp->mem, 0x0004, (0x1000 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1020, 0x00000049);
	nv_wo32(disp->mem, 0x1024, 0x00000000);
	nv_wo32(disp->mem, 0x1028, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x102c, 0x00000000);
	nv_wo32(disp->mem, 0x1030, 0x00000000);
	nv_wo32(disp->mem, 0x1034, 0x00000000);
	nv_wo32(disp->mem, 0x0008, MEM_VRAM);
	nv_wo32(disp->mem, 0x000c, (0x1020 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1040, 0x00000009);
	nv_wo32(disp->mem, 0x1044, 0x00000000);
	nv_wo32(disp->mem, 0x1048, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x104c, 0x00000000);
	nv_wo32(disp->mem, 0x1050, 0x00000000);
	nv_wo32(disp->mem, 0x1054, 0x00000000);
	nv_wo32(disp->mem, 0x0010, NvEvoVRAM_LP);
	nv_wo32(disp->mem, 0x0014, (0x1040 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1060, 0x0fe00009);
	nv_wo32(disp->mem, 0x1064, 0x00000000);
	nv_wo32(disp->mem, 0x1068, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x106c, 0x00000000);
	nv_wo32(disp->mem, 0x1070, 0x00000000);
	nv_wo32(disp->mem, 0x1074, 0x00000000);
	nv_wo32(disp->mem, 0x0018, NvEvoFB32);
	nv_wo32(disp->mem, 0x001c, (0x1060 << 9) | 0x00000001);

	/* make the object writes visible to the hardware */
	pinstmem->flush(dev);

	/* push buffers for evo channels */
	disp->evo[0].ptr =
		pci_alloc_consistent(pdev, PAGE_SIZE, &disp->evo[0].handle);
	if (!disp->evo[0].ptr) {
		ret = -ENOMEM;
		goto out;
	}

	ret = nvd0_display_init(dev);
	if (ret)
		goto out;

out:
	if (ret)
		nvd0_display_destroy(dev);
	return ret;
}