nvd0_display.c

/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 */

#include <linux/dma-mapping.h>

#include "drmP.h"
#include "drm_crtc_helper.h"

#include "nouveau_drv.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_fb.h"
#include "nv50_display.h"

#define MEM_SYNC  0xe0000001
#define MEM_VRAM  0xe0010000
#include "nouveau_dma.h"
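
/* MEM_SYNC and MEM_VRAM above are the handles this file writes into the
 * display hash table in nvd0_display_create(): one names a DMA object
 * covering the channel's sync area, the other one covering all of VRAM.
 * The 0xe00xxxxx values appear to be driver-chosen handles rather than
 * hardware-defined constants.
 */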

struct nvd0_display {
	struct nouveau_gpuobj *mem;
	struct {
		dma_addr_t handle;
		u32 *ptr;
	} evo[1];

	struct {
		struct dcb_entry *dis;
		struct dcb_entry *ena;
		int crtc;
		int pclk;
		u16 script;
	} irq;
};

static struct nvd0_display *
nvd0_display(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	return dev_priv->engine.display.priv;
}
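
/* EVO is the command processor used to program this display engine.  The
 * helpers below drive it in two ways: evo_icmd() pokes a single method
 * through the 0x610700/0x610704 debug registers, while evo_wait() and
 * evo_kick() queue methods into a DMA push buffer whose PUT pointer sits
 * at 0x640000 + (id * 0x1000) (byte units; the code converts to 32-bit
 * words).  On wrap, evo_wait() emits what looks like a jump-to-start
 * command (0x20000000), resets PUT and waits for 0x640004 (presumably
 * GET) to return to zero.  All of these offsets and encodings come from
 * reverse engineering, not NVIDIA documentation.
 */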

static int
evo_icmd(struct drm_device *dev, int id, u32 mthd, u32 data)
{
	int ret = 0;
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000001);
	nv_wr32(dev, 0x610704 + (id * 0x10), data);
	nv_mask(dev, 0x610704 + (id * 0x10), 0x80000ffc, 0x80000000 | mthd);
	if (!nv_wait(dev, 0x610704 + (id * 0x10), 0x80000000, 0x00000000))
		ret = -EBUSY;
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000000);
	return ret;
}

static u32 *
evo_wait(struct drm_device *dev, int id, int nr)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 put = nv_rd32(dev, 0x640000 + (id * 0x1000)) / 4;

	if (put + nr >= (PAGE_SIZE / 4)) {
		disp->evo[id].ptr[put] = 0x20000000;

		nv_wr32(dev, 0x640000 + (id * 0x1000), 0x00000000);
		if (!nv_wait(dev, 0x640004 + (id * 0x1000), ~0, 0x00000000)) {
			NV_ERROR(dev, "evo %d dma stalled\n", id);
			return NULL;
		}

		put = 0;
	}

	return disp->evo[id].ptr + put;
}

static void
evo_kick(u32 *push, struct drm_device *dev, int id)
{
	struct nvd0_display *disp = nvd0_display(dev);
	nv_wr32(dev, 0x640000 + (id * 0x1000), (push - disp->evo[id].ptr) << 2);
}

#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d)   *((p)++) = (d)

static struct drm_crtc *
nvd0_display_crtc_get(struct drm_encoder *encoder)
{
	return nouveau_encoder(encoder)->crtc;
}

/******************************************************************************
 * CRTC
 *****************************************************************************/
static int
nvd0_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool on, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push, mode;

	mode = 0x00000000;
	if (on) {
		/* 0x11: 6bpc dynamic 2x2
		 * 0x13: 8bpc dynamic 2x2
		 * 0x19: 6bpc static 2x2
		 * 0x1b: 8bpc static 2x2
		 * 0x21: 6bpc temporal
		 * 0x23: 8bpc temporal
		 */
		mode = 0x00000011;
	}

	push = evo_wait(dev, 0, 4);
	if (push) {
		evo_mthd(push, 0x0490 + (nv_crtc->index * 0x300), 1);
		evo_data(push, mode);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}

	return 0;
}
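
/* Scaler ratios are computed in unsigned fixed point with 19 fractional
 * bits: ratio = (native << 19) / mode.  For DRM_MODE_SCALE_ASPECT both
 * axes are scaled by the smaller of the two ratios, so the image fills
 * the panel in one dimension without distorting or overflowing the
 * other.  The 19-bit precision is a driver choice, not a hardware
 * format.
 */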
static int
nvd0_crtc_set_scale(struct nouveau_crtc *nv_crtc, int type, bool update)
{
	struct drm_display_mode *mode = &nv_crtc->base.mode;
	struct drm_device *dev = nv_crtc->base.dev;
	struct nouveau_connector *nv_connector;
	u32 *push, outX, outY;

	outX = mode->hdisplay;
	outY = mode->vdisplay;

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (nv_connector && nv_connector->native_mode) {
		struct drm_display_mode *native = nv_connector->native_mode;
		u32 xratio = (native->hdisplay << 19) / mode->hdisplay;
		u32 yratio = (native->vdisplay << 19) / mode->vdisplay;

		switch (type) {
		case DRM_MODE_SCALE_ASPECT:
			if (xratio > yratio) {
				outX = (mode->hdisplay * yratio) >> 19;
				outY = (mode->vdisplay * yratio) >> 19;
			} else {
				outX = (mode->hdisplay * xratio) >> 19;
				outY = (mode->vdisplay * xratio) >> 19;
			}
			break;
		case DRM_MODE_SCALE_FULLSCREEN:
			outX = native->hdisplay;
			outY = native->vdisplay;
			break;
		default:
			break;
		}
	}

	push = evo_wait(dev, 0, 16);
	if (push) {
		evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
		evo_data(push, (outY << 16) | outX);
		evo_data(push, (outY << 16) | outX);
		evo_data(push, (outY << 16) | outX);
		evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}

	return 0;
}

static int
nvd0_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	u32 *push;

	push = evo_wait(fb->dev, 0, 16);
	if (push) {
		evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nvfb->nvbo->bo.offset >> 8);
		evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nvfb->r_pitch);
		evo_data(push, nvfb->r_format);
		evo_data(push, nvfb->r_dma);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, fb->dev, 0);
	}

	nv_crtc->fb.tile_flags = nvfb->r_dma;
	return 0;
}

static void
nvd0_crtc_cursor_show(struct nouveau_crtc *nv_crtc, bool show, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push = evo_wait(dev, 0, 16);

	if (push) {
		if (show) {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, MEM_VRAM);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, dev, 0);
	}
}

static void
nvd0_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}

static void
nvd0_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x03000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_kick(push, crtc->dev, 0);
	}

	nvd0_crtc_cursor_show(nv_crtc, false, false);
}

static void
nvd0_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, 0, 32);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nv_crtc->fb.tile_flags);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
		evo_data(push, 0x83000000);
		evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, MEM_VRAM);
		evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0xffffff00);
		evo_kick(push, crtc->dev, 0);
	}

	nvd0_crtc_cursor_show(nv_crtc, nv_crtc->cursor.visible, true);
}

static bool
nvd0_crtc_mode_fixup(struct drm_crtc *crtc, struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}

static int
nvd0_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
	int ret;

	ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
	if (ret)
		return ret;

	if (old_fb) {
		nvfb = nouveau_framebuffer(old_fb);
		nouveau_bo_unpin(nvfb->nvbo);
	}

	return 0;
}
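
/* Derive the timing parameters the core channel wants from the DRM mode:
 * hsyncw/vsyncw are the sync widths minus one, hfrntp/vfrntp and
 * hbackp/vbackp the front/back porches, and the *ss2be/*ss2de values
 * measure from sync start to blanking end and to display end, which
 * appears to be the form the 0x0410 method block expects.
 */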
static int
nvd0_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	u32 htotal = mode->htotal;
	u32 vtotal = mode->vtotal;
	u32 hsyncw = mode->hsync_end - mode->hsync_start - 1;
	u32 vsyncw = mode->vsync_end - mode->vsync_start - 1;
	u32 hfrntp = mode->hsync_start - mode->hdisplay;
	u32 vfrntp = mode->vsync_start - mode->vdisplay;
	u32 hbackp = mode->htotal - mode->hsync_end;
	u32 vbackp = mode->vtotal - mode->vsync_end;
	u32 hss2be = hsyncw + hbackp;
	u32 vss2be = vsyncw + vbackp;
	u32 hss2de = htotal - hfrntp;
	u32 vss2de = vtotal - vfrntp;
	u32 syncs, *push;
	int ret;

	syncs = 0x00000001;
	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
		syncs |= 0x00000008;
	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
		syncs |= 0x00000010;

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(crtc->dev, 0, 64);
	if (push) {
		evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 5);
		evo_data(push, 0x00000000);
		evo_data(push, (vtotal << 16) | htotal);
		evo_data(push, (vsyncw << 16) | hsyncw);
		evo_data(push, (vss2be << 16) | hss2be);
		evo_data(push, (vss2de << 16) | hss2de);
		evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000); /* ??? */
		evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
		evo_data(push, mode->clock * 1000);
		evo_data(push, 0x00200000); /* ??? */
		evo_data(push, mode->clock * 1000);
		evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 1);
		evo_data(push, syncs);
		evo_kick(push, crtc->dev, 0);
	}

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nvd0_crtc_set_dither(nv_crtc, nv_connector->use_dithering, false);
	nvd0_crtc_set_scale(nv_crtc, nv_connector->scaling_mode, false);
	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
	return 0;
}

static int
nvd0_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	int ret;

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
	return 0;
}

static int
nvd0_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nvd0_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}
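
/* Each gamma LUT entry occupies 0x20 bytes, with 16-bit R/G/B words at
 * offsets 0, 2 and 4.  The stored 16-bit components are shifted down to
 * 14 bits and offset by 0x6000 before being written; presumably this is
 * the hardware's fixed-point LUT format, though it isn't publicly
 * documented.
 */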
static void
nvd0_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		writew(0x6000 + (nv_crtc->lut.r[i] >> 2), lut + (i * 0x20) + 0);
		writew(0x6000 + (nv_crtc->lut.g[i] >> 2), lut + (i * 0x20) + 2);
		writew(0x6000 + (nv_crtc->lut.b[i] >> 2), lut + (i * 0x20) + 4);
	}
}

static int
nvd0_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem;
	struct nouveau_bo *nvbo;
	bool visible = (handle != 0);
	int i, ret = 0;

	if (visible) {
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_map(nvbo);
		if (ret == 0) {
			for (i = 0; i < 64 * 64; i++) {
				u32 v = nouveau_bo_rd32(nvbo, i);
				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
			}
			nouveau_bo_unmap(nvbo);
		}

		drm_gem_object_unreference_unlocked(gem);
	}

	if (visible != nv_crtc->cursor.visible) {
		nvd0_crtc_cursor_show(nv_crtc, visible, true);
		nv_crtc->cursor.visible = visible;
	}

	return ret;
}

static int
nvd0_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	const u32 data = (y << 16) | x;

	nv_wr32(crtc->dev, 0x64d084 + (nv_crtc->index * 0x1000), data);
	nv_wr32(crtc->dev, 0x64d080 + (nv_crtc->index * 0x1000), 0x00000000);
	return 0;
}

static void
nvd0_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t start, uint32_t size)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 end = min(start + size, (u32)256);
	u32 i;

	for (i = start; i < end; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	nvd0_crtc_lut_load(crtc);
}

static void
nvd0_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	drm_crtc_cleanup(crtc);
	kfree(crtc);
}

static const struct drm_crtc_helper_funcs nvd0_crtc_hfunc = {
	.dpms = nvd0_crtc_dpms,
	.prepare = nvd0_crtc_prepare,
	.commit = nvd0_crtc_commit,
	.mode_fixup = nvd0_crtc_mode_fixup,
	.mode_set = nvd0_crtc_mode_set,
	.mode_set_base = nvd0_crtc_mode_set_base,
	.mode_set_base_atomic = nvd0_crtc_mode_set_base_atomic,
	.load_lut = nvd0_crtc_lut_load,
};

static const struct drm_crtc_funcs nvd0_crtc_func = {
	.cursor_set = nvd0_crtc_cursor_set,
	.cursor_move = nvd0_crtc_cursor_move,
	.gamma_set = nvd0_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = nvd0_crtc_destroy,
};

static int
nvd0_crtc_create(struct drm_device *dev, int index)
{
	struct nouveau_crtc *nv_crtc;
	struct drm_crtc *crtc;
	int ret, i;

	nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
	if (!nv_crtc)
		return -ENOMEM;

	nv_crtc->index = index;
	nv_crtc->set_dither = nvd0_crtc_set_dither;
	nv_crtc->set_scale = nvd0_crtc_set_scale;
	for (i = 0; i < 256; i++) {
		nv_crtc->lut.r[i] = i << 8;
		nv_crtc->lut.g[i] = i << 8;
		nv_crtc->lut.b[i] = i << 8;
	}

	crtc = &nv_crtc->base;
	drm_crtc_init(dev, crtc, &nvd0_crtc_func);
	drm_crtc_helper_add(crtc, &nvd0_crtc_hfunc);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	}

	if (ret)
		goto out;

	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	}

	if (ret)
		goto out;

	nvd0_crtc_lut_load(crtc);

out:
	if (ret)
		nvd0_crtc_destroy(crtc);
	return ret;
}

/******************************************************************************
 * DAC
 *****************************************************************************/
static void
nvd0_dac_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	dpms_ctrl = 0x80000000;
	if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000001;
	if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000004;

	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61a004 + (or * 0x0800), 0xc000007f, dpms_ctrl);
	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
}

static bool
nvd0_dac_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

static void
nvd0_dac_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_dac_commit(struct drm_encoder *encoder)
{
}

static void
nvd0_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nvd0_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
		evo_data(push, 1 << nv_crtc->index);
		evo_kick(push, encoder->dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}

static void
nvd0_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
	}
}
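
/* DAC load detection: writing 0x00100000 appears to drive a test level
 * onto the DAC outputs; after ~9.5ms for the line to settle, the result
 * is latched with 0x80000000 and read back.  The 0x38000000 bits look
 * like per-channel (R/G/B) load sense flags, and all three set is taken
 * to mean an analog monitor is attached.
 */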
static enum drm_connector_status
nvd0_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	enum drm_connector_status status = connector_status_disconnected;
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	int or = nv_encoder->or;
	u32 load;

	nv_wr32(dev, 0x61a00c + (or * 0x800), 0x00100000);
	udelay(9500);
	nv_wr32(dev, 0x61a00c + (or * 0x800), 0x80000000);

	load = nv_rd32(dev, 0x61a00c + (or * 0x800));
	if ((load & 0x38000000) == 0x38000000)
		status = connector_status_connected;

	nv_wr32(dev, 0x61a00c + (or * 0x800), 0x00000000);
	return status;
}

static void
nvd0_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nvd0_dac_hfunc = {
	.dpms = nvd0_dac_dpms,
	.mode_fixup = nvd0_dac_mode_fixup,
	.prepare = nvd0_dac_prepare,
	.commit = nvd0_dac_commit,
	.mode_set = nvd0_dac_mode_set,
	.disable = nvd0_dac_disconnect,
	.get_crtc = nvd0_display_crtc_get,
	.detect = nvd0_dac_detect
};

static const struct drm_encoder_funcs nvd0_dac_func = {
	.destroy = nvd0_dac_destroy,
};

static int
nvd0_dac_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_dac_func, DRM_MODE_ENCODER_DAC);
	drm_encoder_helper_add(encoder, &nvd0_dac_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * SOR
 *****************************************************************************/
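/* An OR can be shared by more than one encoder, so before powering one
 * down nvd0_sor_dpms() checks the other TMDS encoders for a partner on
 * the same OR that is still DPMS_ON, and backs off if it finds one.
 */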
static void
nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_encoder *partner;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	nv_encoder->last_dpms = mode;

	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	dpms_ctrl  = (mode == DRM_MODE_DPMS_ON);
	dpms_ctrl |= 0x80000000;

	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61c004 + (or * 0x0800), 0x80000001, dpms_ctrl);
	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_wait(dev, 0x61c030 + (or * 0x0800), 0x10000000, 0x00000000);
}

static bool
nvd0_sor_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

static void
nvd0_sor_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_sor_commit(struct drm_encoder *encoder)
{
}

static void
nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 mode_ctrl = (1 << nv_crtc->index);
	u32 *push;

	if (nv_encoder->dcb->sorconf.link & 1) {
		if (adjusted_mode->clock < 165000)
			mode_ctrl |= 0x00000100;
		else
			mode_ctrl |= 0x00000500;
	} else {
		mode_ctrl |= 0x00000200;
	}

	nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
		evo_data(push, mode_ctrl);
		evo_kick(push, encoder->dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}

static void
nvd0_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
		nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	}
}

static void
nvd0_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
	.dpms = nvd0_sor_dpms,
	.mode_fixup = nvd0_sor_mode_fixup,
	.prepare = nvd0_sor_prepare,
	.commit = nvd0_sor_commit,
	.mode_set = nvd0_sor_mode_set,
	.disable = nvd0_sor_disconnect,
	.get_crtc = nvd0_display_crtc_get,
};

static const struct drm_encoder_funcs nvd0_sor_func = {
	.destroy = nvd0_sor_destroy,
};

static int
nvd0_sor_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
	drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * IRQ
 *****************************************************************************/
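/* Modeset "supervisor" interrupts.  During a modeset the display engine
 * raises intr24 status bits 1, 2 and 4 in turn, and the unk1/unk2/unk4
 * handlers below run the appropriate VBIOS display scripts around each
 * stage: script away the outgoing output, program the pixel clock, then
 * finish bringing up the new output.  lookup_dcb() maps an EVO output
 * index back to its DCB table entry; indexes 0-3 appear to be the DACs
 * and 4-7 the SORs, matching the 0x0180/0x0200 method bases used above.
 */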
static struct dcb_entry *
lookup_dcb(struct drm_device *dev, int id, u32 mc)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	int type, or, i;

	if (id < 4) {
		type = OUTPUT_ANALOG;
		or = id;
	} else {
		type = OUTPUT_TMDS;
		or = id - 4;
	}

	for (i = 0; i < dev_priv->vbios.dcb.entries; i++) {
		struct dcb_entry *dcb = &dev_priv->vbios.dcb.entry[i];
		if (dcb->type == type && (dcb->or & (1 << or)))
			return dcb;
	}

	NV_INFO(dev, "PDISP: DCB for %d/0x%08x not found\n", id, mc);
	return NULL;
}

static void
nvd0_display_unk1_handler(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	struct dcb_entry *dcb;
	u32 unkn, crtc = 0;
	int i;

	NV_INFO(dev, "PDISP: 1 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	unkn = nv_rd32(dev, 0x6101d4);
	if (!unkn) {
		unkn = nv_rd32(dev, 0x6109d4);
		crtc = 1;
	}

	disp->irq.ena = NULL;
	disp->irq.dis = NULL;
	disp->irq.crtc = crtc;
	disp->irq.pclk = nv_rd32(dev, 0x660450 + (disp->irq.crtc * 0x300));
	disp->irq.pclk /= 1000;

	for (i = 0; i < 8; i++) {
		u32 mcc = nv_rd32(dev, 0x640180 + (i * 0x20));
		u32 mcp = nv_rd32(dev, 0x660180 + (i * 0x20));

		if (mcc & (1 << crtc))
			disp->irq.dis = lookup_dcb(dev, i, mcc);

		if (mcp & (1 << crtc)) {
			disp->irq.ena = lookup_dcb(dev, i, mcp);
			switch (disp->irq.ena->type) {
			case OUTPUT_ANALOG:
				disp->irq.script = 0x00ff;
				break;
			case OUTPUT_TMDS:
				disp->irq.script = (mcp & 0x00000f00) >> 8;
				if (disp->irq.pclk >= 165000)
					disp->irq.script |= 0x0100;
				break;
			default:
				disp->irq.script = 0xbeef;
				break;
			}
		}
	}

	dcb = disp->irq.dis;
	if (dcb)
		nouveau_bios_run_display_table(dev, 0x0000, -1, dcb, crtc);

	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_unk2_handler(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	struct dcb_entry *dcb;
	int crtc = disp->irq.crtc;
	int pclk = disp->irq.pclk;
	int or;
	u32 tmp;

	NV_INFO(dev, "PDISP: 2 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	dcb = disp->irq.dis;
	disp->irq.dis = NULL;
	if (dcb)
		nouveau_bios_run_display_table(dev, 0x0000, -2, dcb, crtc);

	nv50_crtc_set_clock(dev, crtc, pclk);

	dcb = disp->irq.ena;
	if (!dcb)
		goto ack;
	or = ffs(dcb->or) - 1;

	nouveau_bios_run_display_table(dev, disp->irq.script, pclk, dcb, crtc);

	nv_wr32(dev, 0x612200 + (crtc * 0x800), 0x00000000);
	switch (dcb->type) {
	case OUTPUT_ANALOG:
		nv_wr32(dev, 0x612280 + (or * 0x800), 0x00000000);
		break;
	case OUTPUT_TMDS:
		if (disp->irq.pclk >= 165000)
			tmp = 0x00000101;
		else
			tmp = 0x00000000;

		nv_mask(dev, 0x612300 + (or * 0x800), 0x00000707, tmp);
		break;
	default:
		break;
	}

ack:
	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_unk4_handler(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	struct dcb_entry *dcb;
	int crtc = disp->irq.crtc;
	int pclk = disp->irq.pclk;

	NV_INFO(dev, "PDISP: 4 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	dcb = disp->irq.ena;
	disp->irq.ena = NULL;
	if (!dcb)
		goto ack;

	nouveau_bios_run_display_table(dev, disp->irq.script, pclk, dcb, crtc);

ack:
	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_intr(struct drm_device *dev)
{
	u32 intr = nv_rd32(dev, 0x610088);

	if (intr & 0x00000002) {
		u32 stat = nv_rd32(dev, 0x61009c);
		int chid = ffs(stat) - 1;
		if (chid >= 0) {
			u32 mthd = nv_rd32(dev, 0x6101f0 + (chid * 12));
			u32 data = nv_rd32(dev, 0x6101f4 + (chid * 12));
			u32 unkn = nv_rd32(dev, 0x6101f8 + (chid * 12));

			NV_INFO(dev, "EvoCh: chid %d mthd 0x%04x data 0x%08x "
				     "0x%08x 0x%08x\n",
				chid, (mthd & 0x0000ffc), data, mthd, unkn);
			nv_wr32(dev, 0x61009c, (1 << chid));
			nv_wr32(dev, 0x6101f0 + (chid * 12), 0x90000000);
		}

		intr &= ~0x00000002;
	}

	if (intr & 0x00100000) {
		u32 stat = nv_rd32(dev, 0x6100ac);

		if (stat & 0x00000007) {
			nv_wr32(dev, 0x6100ac, (stat & 0x00000007));

			if (stat & 0x00000001)
				nvd0_display_unk1_handler(dev);
			if (stat & 0x00000002)
				nvd0_display_unk2_handler(dev);
			if (stat & 0x00000004)
				nvd0_display_unk4_handler(dev);

			stat &= ~0x00000007;
		}

		if (stat) {
			NV_INFO(dev, "PDISP: unknown intr24 0x%08x\n", stat);
			nv_wr32(dev, 0x6100ac, stat);
		}

		intr &= ~0x00100000;
	}

	if (intr & 0x01000000) {
		u32 stat = nv_rd32(dev, 0x6100bc);
		nv_wr32(dev, 0x6100bc, stat);
		intr &= ~0x01000000;
	}

	if (intr & 0x02000000) {
		u32 stat = nv_rd32(dev, 0x6108bc);
		nv_wr32(dev, 0x6108bc, stat);
		intr &= ~0x02000000;
	}

	if (intr)
		NV_INFO(dev, "PDISP: unknown intr 0x%08x\n", intr);
}

/******************************************************************************
 * Init
 *****************************************************************************/
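/* Going by the code below: channel 0 is the core (master) EVO channel
 * and channels 13-14 are the two cursor channels.  Each has a control
 * register at 0x610490 + (id * 0x10) and per-channel interrupt enables
 * in 0x610090/0x6100a0.
 */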
static void
nvd0_display_fini(struct drm_device *dev)
{
	int i;

	/* fini cursors */
	for (i = 14; i >= 13; i--) {
		if (!(nv_rd32(dev, 0x610490 + (i * 0x10)) & 0x00000001))
			continue;

		nv_mask(dev, 0x610490 + (i * 0x10), 0x00000001, 0x00000000);
		nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00000000);
		nv_mask(dev, 0x610090, 1 << i, 0x00000000);
		nv_mask(dev, 0x6100a0, 1 << i, 0x00000000);
	}

	/* fini master */
	if (nv_rd32(dev, 0x610490) & 0x00000010) {
		nv_mask(dev, 0x610490, 0x00000010, 0x00000000);
		nv_mask(dev, 0x610490, 0x00000003, 0x00000000);
		nv_wait(dev, 0x610490, 0x80000000, 0x00000000);
		nv_mask(dev, 0x610090, 0x00000001, 0x00000000);
		nv_mask(dev, 0x6100a0, 0x00000001, 0x00000000);
	}
}

int
nvd0_display_init(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 *push;
	int i;

	if (nv_rd32(dev, 0x6100ac) & 0x00000100) {
		nv_wr32(dev, 0x6100ac, 0x00000100);
		nv_mask(dev, 0x6194e8, 0x00000001, 0x00000000);
		if (!nv_wait(dev, 0x6194e8, 0x00000002, 0x00000000)) {
			NV_ERROR(dev, "PDISP: 0x6194e8 0x%08x\n",
				 nv_rd32(dev, 0x6194e8));
			return -EBUSY;
		}
	}

	/* nfi what these are exactly, i do know that SOR_MODE_CTRL won't
	 * work at all unless you do the SOR part below.
	 */
	for (i = 0; i < 3; i++) {
		u32 dac = nv_rd32(dev, 0x61a000 + (i * 0x800));
		nv_wr32(dev, 0x6101c0 + (i * 0x800), dac);
	}

	for (i = 0; i < 4; i++) {
		u32 sor = nv_rd32(dev, 0x61c000 + (i * 0x800));
		nv_wr32(dev, 0x6301c4 + (i * 0x800), sor);
	}

	for (i = 0; i < 2; i++) {
		u32 crtc0 = nv_rd32(dev, 0x616104 + (i * 0x800));
		u32 crtc1 = nv_rd32(dev, 0x616108 + (i * 0x800));
		u32 crtc2 = nv_rd32(dev, 0x61610c + (i * 0x800));
		nv_wr32(dev, 0x6101b4 + (i * 0x800), crtc0);
		nv_wr32(dev, 0x6101b8 + (i * 0x800), crtc1);
		nv_wr32(dev, 0x6101bc + (i * 0x800), crtc2);
	}

	/* point at our hash table / objects, enable interrupts */
	nv_wr32(dev, 0x610010, (disp->mem->vinst >> 8) | 9);
	nv_mask(dev, 0x6100b0, 0x00000307, 0x00000307);

	/* init master */
	nv_wr32(dev, 0x610494, (disp->evo[0].handle >> 8) | 3);
	nv_wr32(dev, 0x610498, 0x00010000);
	nv_wr32(dev, 0x61049c, 0x00000001);
	nv_mask(dev, 0x610490, 0x00000010, 0x00000010);
	nv_wr32(dev, 0x640000, 0x00000000);
	nv_wr32(dev, 0x610490, 0x01000013);
	if (!nv_wait(dev, 0x610490, 0x80000000, 0x00000000)) {
		NV_ERROR(dev, "PDISP: master 0x%08x\n",
			 nv_rd32(dev, 0x610490));
		return -EBUSY;
	}
	nv_mask(dev, 0x610090, 0x00000001, 0x00000001);
	nv_mask(dev, 0x6100a0, 0x00000001, 0x00000001);

	/* init cursors */
	for (i = 13; i <= 14; i++) {
		nv_wr32(dev, 0x610490 + (i * 0x10), 0x00000001);
		if (!nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00010000)) {
			NV_ERROR(dev, "PDISP: curs%d 0x%08x\n", i,
				 nv_rd32(dev, 0x610490 + (i * 0x10)));
			return -EBUSY;
		}

		nv_mask(dev, 0x610090, 1 << i, 1 << i);
		nv_mask(dev, 0x6100a0, 1 << i, 1 << i);
	}

	push = evo_wait(dev, 0, 32);
	if (!push)
		return -EBUSY;
	evo_mthd(push, 0x0088, 1);
	evo_data(push, MEM_SYNC);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x80000000);
	evo_mthd(push, 0x008c, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, dev, 0);

	return 0;
}

void
nvd0_display_destroy(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nvd0_display *disp = nvd0_display(dev);
	struct pci_dev *pdev = dev->pdev;

	nvd0_display_fini(dev);

	pci_free_consistent(pdev, PAGE_SIZE, disp->evo[0].ptr, disp->evo[0].handle);
	nouveau_gpuobj_ref(NULL, &disp->mem);
	nouveau_irq_unregister(dev, 26);

	dev_priv->engine.display.priv = NULL;
	kfree(disp);
}

int
nvd0_display_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_instmem_engine *pinstmem = &dev_priv->engine.instmem;
	struct dcb_table *dcb = &dev_priv->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct pci_dev *pdev = dev->pdev;
	struct nvd0_display *disp;
	struct dcb_entry *dcbe;
	int ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;
	dev_priv->engine.display.priv = disp;

	/* create crtc objects to represent the hw heads */
	for (i = 0; i < 2; i++) {
		ret = nvd0_crtc_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location != DCB_LOC_ON_CHIP) {
			NV_WARN(dev, "skipping off-chip encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}

		switch (dcbe->type) {
		case OUTPUT_TMDS:
			nvd0_sor_create(connector, dcbe);
			break;
		case OUTPUT_ANALOG:
			nvd0_dac_create(connector, dcbe);
			break;
		default:
			NV_WARN(dev, "skipping unsupported encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(dev, "%s has no encoders, removing\n",
			drm_get_connector_name(connector));
		connector->funcs->destroy(connector);
	}

	/* setup interrupt handling */
	nouveau_irq_register(dev, 26, nvd0_display_intr);

	/* hash table and dma objects for the memory areas we care about */
	ret = nouveau_gpuobj_new(dev, NULL, 0x4000, 0x10000,
				 NVOBJ_FLAG_ZERO_ALLOC, &disp->mem);
	if (ret)
		goto out;
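
	/* Lay out the hash table and DMA objects inside disp->mem.  Each
	 * hash entry below is a pair of words: the handle, then
	 * ((object offset << 9) | 1).  The 0x20-byte object descriptors
	 * start at offset 0x1000: MEM_SYNC covers the page at
	 * vinst + 0x2000, while MEM_VRAM, NvEvoVRAM_LP and NvEvoFB32 all
	 * cover VRAM with (apparently) different kind/attribute bits in
	 * their first word.  The exact descriptor format here is reverse
	 * engineered.
	 */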
	nv_wo32(disp->mem, 0x1000, 0x00000049);
	nv_wo32(disp->mem, 0x1004, (disp->mem->vinst + 0x2000) >> 8);
	nv_wo32(disp->mem, 0x1008, (disp->mem->vinst + 0x2fff) >> 8);
	nv_wo32(disp->mem, 0x100c, 0x00000000);
	nv_wo32(disp->mem, 0x1010, 0x00000000);
	nv_wo32(disp->mem, 0x1014, 0x00000000);
	nv_wo32(disp->mem, 0x0000, MEM_SYNC);
	nv_wo32(disp->mem, 0x0004, (0x1000 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1020, 0x00000049);
	nv_wo32(disp->mem, 0x1024, 0x00000000);
	nv_wo32(disp->mem, 0x1028, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x102c, 0x00000000);
	nv_wo32(disp->mem, 0x1030, 0x00000000);
	nv_wo32(disp->mem, 0x1034, 0x00000000);
	nv_wo32(disp->mem, 0x0008, MEM_VRAM);
	nv_wo32(disp->mem, 0x000c, (0x1020 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1040, 0x00000009);
	nv_wo32(disp->mem, 0x1044, 0x00000000);
	nv_wo32(disp->mem, 0x1048, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x104c, 0x00000000);
	nv_wo32(disp->mem, 0x1050, 0x00000000);
	nv_wo32(disp->mem, 0x1054, 0x00000000);
	nv_wo32(disp->mem, 0x0010, NvEvoVRAM_LP);
	nv_wo32(disp->mem, 0x0014, (0x1040 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1060, 0x0fe00009);
	nv_wo32(disp->mem, 0x1064, 0x00000000);
	nv_wo32(disp->mem, 0x1068, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x106c, 0x00000000);
	nv_wo32(disp->mem, 0x1070, 0x00000000);
	nv_wo32(disp->mem, 0x1074, 0x00000000);
	nv_wo32(disp->mem, 0x0018, NvEvoFB32);
	nv_wo32(disp->mem, 0x001c, (0x1060 << 9) | 0x00000001);

	pinstmem->flush(dev);

	/* push buffers for evo channels */
	disp->evo[0].ptr =
		pci_alloc_consistent(pdev, PAGE_SIZE, &disp->evo[0].handle);
	if (!disp->evo[0].ptr) {
		ret = -ENOMEM;
		goto out;
	}

	ret = nvd0_display_init(dev);
	if (ret)
		goto out;

out:
	if (ret)
		nvd0_display_destroy(dev);
	return ret;
}