/* drivers/gpu/drm/nouveau/nvd0_display.c */
  1. /*
  2. * Copyright 2011 Red Hat Inc.
  3. *
  4. * Permission is hereby granted, free of charge, to any person obtaining a
  5. * copy of this software and associated documentation files (the "Software"),
  6. * to deal in the Software without restriction, including without limitation
  7. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  8. * and/or sell copies of the Software, and to permit persons to whom the
  9. * Software is furnished to do so, subject to the following conditions:
  10. *
  11. * The above copyright notice and this permission notice shall be included in
  12. * all copies or substantial portions of the Software.
  13. *
  14. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  15. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  16. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  17. * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  18. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  19. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  20. * OTHER DEALINGS IN THE SOFTWARE.
  21. *
  22. * Authors: Ben Skeggs
  23. */
  24. #include <linux/dma-mapping.h>
  25. #include "drmP.h"
  26. #include "drm_crtc_helper.h"
  27. #include "nouveau_drv.h"
  28. #include "nouveau_connector.h"
  29. #include "nouveau_encoder.h"
  30. #include "nouveau_crtc.h"
  31. #include "nouveau_dma.h"
  32. #include "nouveau_fb.h"
  33. #include "nv50_display.h"
/* Per-device state for the NVD0 (Fermi display class) KMS implementation. */
struct nvd0_display {
	struct nouveau_gpuobj *mem;	/* display memory object (EVO state) */
	struct {
		dma_addr_t handle;	/* bus address of the push buffer */
		u32 *ptr;		/* CPU mapping of the push buffer */
	} evo[1];			/* EVO display channel(s) */
	struct tasklet_struct tasklet;	/* deferred interrupt work */
	u32 modeset;			/* pending modeset flags */
};
  43. static struct nvd0_display *
  44. nvd0_display(struct drm_device *dev)
  45. {
  46. struct drm_nouveau_private *dev_priv = dev->dev_private;
  47. return dev_priv->engine.display.priv;
  48. }
/*
 * Submit one immediate method to EVO channel @id through the 0x6107xx
 * interface and wait for it to complete.
 * Returns 0 on success, -EBUSY if the method never finishes.
 */
static inline int
evo_icmd(struct drm_device *dev, int id, u32 mthd, u32 data)
{
	int ret = 0;
	/* enable immediate-method mode for this channel */
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000001);
	nv_wr32(dev, 0x610704 + (id * 0x10), data);
	/* trigger: bit 31 = busy, low bits carry the method offset */
	nv_mask(dev, 0x610704 + (id * 0x10), 0x80000ffc, 0x80000000 | mthd);
	if (!nv_wait(dev, 0x610704 + (id * 0x10), 0x80000000, 0x00000000))
		ret = -EBUSY;
	/* back to normal operation */
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000000);
	return ret;
}
/*
 * Reserve space for @nr dwords in EVO channel @id's push buffer.
 * If the request would run off the end of the page, emit a jump back to
 * offset 0 and wait for the hardware to consume the buffer.
 * Returns a pointer to write methods at, or NULL if the DMA stalled.
 */
static u32 *
evo_wait(struct drm_device *dev, int id, int nr)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 put = nv_rd32(dev, 0x640000 + (id * 0x1000)) / 4;	/* PUT, in dwords */

	if (put + nr >= (PAGE_SIZE / 4)) {
		disp->evo[id].ptr[put] = 0x20000000;	/* jump to offset 0 */
		nv_wr32(dev, 0x640000 + (id * 0x1000), 0x00000000);
		if (!nv_wait(dev, 0x640004 + (id * 0x1000), ~0, 0x00000000)) {
			NV_ERROR(dev, "evo %d dma stalled\n", id);
			return NULL;
		}
		put = 0;
	}

	if (nouveau_reg_debug & NOUVEAU_REG_DEBUG_EVO)
		NV_INFO(dev, "Evo%d: %p START\n", id, disp->evo[id].ptr + put);

	return disp->evo[id].ptr + put;
}
/*
 * Submit the methods written since the matching evo_wait() by advancing
 * the channel's PUT register to the new end of the command stream.
 */
static void
evo_kick(u32 *push, struct drm_device *dev, int id)
{
	struct nvd0_display *disp = nvd0_display(dev);

	if (nouveau_reg_debug & NOUVEAU_REG_DEBUG_EVO) {
		/* dump everything between the current PUT and the new end */
		u32 curp = nv_rd32(dev, 0x640000 + (id * 0x1000)) >> 2;
		u32 *cur = disp->evo[id].ptr + curp;

		while (cur < push)
			NV_INFO(dev, "Evo%d: 0x%08x\n", id, *cur++);
		NV_INFO(dev, "Evo%d: %p KICK!\n", id, push);
	}

	nv_wr32(dev, 0x640000 + (id * 0x1000), (push - disp->evo[id].ptr) << 2);
}
/* Emit an EVO method header (@s = data word count) / one data word. */
#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d) *((p)++) = (d)
  94. static struct drm_crtc *
  95. nvd0_display_crtc_get(struct drm_encoder *encoder)
  96. {
  97. return nouveau_encoder(encoder)->crtc;
  98. }
  99. /******************************************************************************
  100. * CRTC
  101. *****************************************************************************/
/*
 * Program the head's dithering mode from the connector's dithering
 * properties (or pick one automatically), optionally latching it with
 * a core-channel UPDATE (method 0x0080).
 */
static int
nvd0_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	struct nouveau_connector *nv_connector;
	struct drm_connector *connector;
	u32 *push, mode = 0x00;

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	connector = &nv_connector->base;

	/* AUTO: dither only when the fb has more depth than the sink */
	if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
		if (nv_crtc->base.fb->depth > connector->display_info.bpc * 3)
			mode = DITHERING_MODE_DYNAMIC2X2;
	} else {
		mode = nv_connector->dithering_mode;
	}

	/* AUTO depth: match the sink's reported bits-per-colour */
	if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
		if (connector->display_info.bpc >= 8)
			mode |= DITHERING_DEPTH_8BPC;
	} else {
		mode |= nv_connector->dithering_depth;
	}

	push = evo_wait(dev, 0, 4);
	if (push) {
		evo_mthd(push, 0x0490 + (nv_crtc->index * 0x300), 1);
		evo_data(push, mode);
		if (update) {
			evo_mthd(push, 0x0080, 1);	/* UPDATE */
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}

	return 0;
}
/*
 * Program the head's scaler: derive the viewport output size from the
 * connector's scaling-mode property against its native mode, then set
 * the scaler output size and the input (surface) size.
 */
static int
nvd0_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
{
	struct drm_display_mode *mode = &nv_crtc->base.mode;
	struct drm_device *dev = nv_crtc->base.dev;
	struct nouveau_connector *nv_connector;
	u32 *push, outX, outY;

	/* default: no scaling, output == requested mode size */
	outX = mode->hdisplay;
	outY = mode->vdisplay;

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (nv_connector && nv_connector->native_mode) {
		struct drm_display_mode *native = nv_connector->native_mode;
		/* scale ratios in .19 fixed point */
		u32 xratio = (native->hdisplay << 19) / mode->hdisplay;
		u32 yratio = (native->vdisplay << 19) / mode->vdisplay;

		switch (nv_connector->scaling_mode) {
		case DRM_MODE_SCALE_ASPECT:
			/* use the smaller ratio to preserve aspect */
			if (xratio > yratio) {
				outX = (mode->hdisplay * yratio) >> 19;
				outY = (mode->vdisplay * yratio) >> 19;
			} else {
				outX = (mode->hdisplay * xratio) >> 19;
				outY = (mode->vdisplay * xratio) >> 19;
			}
			break;
		case DRM_MODE_SCALE_FULLSCREEN:
			outX = native->hdisplay;
			outY = native->vdisplay;
			break;
		default:
			break;
		}
	}

	push = evo_wait(dev, 0, 16);
	if (push) {
		evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
		evo_data(push, (outY << 16) | outX);
		evo_data(push, (outY << 16) | outX);
		evo_data(push, (outY << 16) | outX);
		evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		if (update) {
			evo_mthd(push, 0x0080, 1);	/* UPDATE */
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}

	return 0;
}
/*
 * Point the head at a new scanout surface: base offset, size, pitch,
 * format and ctxdma, plus the (x,y) panning offset within the surface.
 */
static int
nvd0_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	u32 *push;

	push = evo_wait(fb->dev, 0, 16);
	if (push) {
		evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nvfb->nvbo->bo.offset >> 8);	/* 256B units */
		evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nvfb->r_pitch);
		evo_data(push, nvfb->r_format);
		evo_data(push, nvfb->r_dma);
		evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
		evo_data(push, (y << 16) | x);
		if (update) {
			evo_mthd(push, 0x0080, 1);	/* UPDATE */
			evo_data(push, 0x00000000);
		}
		evo_kick(push, fb->dev, 0);
	}

	/* remembered so nvd0_crtc_commit() can re-attach the ctxdma */
	nv_crtc->fb.tile_flags = nvfb->r_dma;
	return 0;
}
/*
 * Show or hide the hardware cursor.  Showing programs the cursor image
 * offset and the VRAM ctxdma; hiding clears the ctxdma again.
 */
static void
nvd0_crtc_cursor_show(struct nouveau_crtc *nv_crtc, bool show, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push = evo_wait(dev, 0, 16);

	if (push) {
		if (show) {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);	/* enable, 64x64 */
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, NvEvoVRAM);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);	/* disable */
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
		if (update) {
			evo_mthd(push, 0x0080, 1);	/* UPDATE */
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}
}
/* Helper .dpms hook: intentionally a no-op on this hardware. */
static void
nvd0_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}
/*
 * Helper .prepare hook: blank the head before a modeset by detaching
 * the framebuffer and LUT ctxdmas, and hide the cursor.
 */
static void
nvd0_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);		/* fb ctxdma off */
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x03000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);		/* lut ctxdma off */
		evo_kick(push, crtc->dev, 0);
	}

	nvd0_crtc_cursor_show(nv_crtc, false, false);
}
/*
 * Helper .commit hook: re-attach the framebuffer and LUT ctxdmas after
 * a modeset, point the head at the LUT buffer, and restore the cursor's
 * previous visibility (with an UPDATE to latch everything).
 */
static void
nvd0_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, 0, 32);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nv_crtc->fb.tile_flags);	/* saved by set_image */
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
		evo_data(push, 0x83000000);
		evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, NvEvoVRAM);
		evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0xffffff00);
		evo_kick(push, crtc->dev, 0);
	}

	nvd0_crtc_cursor_show(nv_crtc, nv_crtc->cursor.visible, true);
}
/* Helper .mode_fixup hook: no CRTC-level adjustments are needed. */
static bool
nvd0_crtc_mode_fixup(struct drm_crtc *crtc, struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}
  285. static int
  286. nvd0_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
  287. {
  288. struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
  289. int ret;
  290. ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
  291. if (ret)
  292. return ret;
  293. if (old_fb) {
  294. nvfb = nouveau_framebuffer(old_fb);
  295. nouveau_bo_unpin(nvfb->nvbo);
  296. }
  297. return 0;
  298. }
/*
 * Full modeset for a head: pin the new framebuffer, program the CRTC
 * timings, pixel clock and sync polarities, then refresh the dither,
 * scaler and scanout state (all deferred — no UPDATE pushed here; the
 * caller's commit latches everything).
 */
static int
nvd0_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	u32 htotal = mode->htotal;
	u32 vtotal = mode->vtotal;
	u32 hsyncw = mode->hsync_end - mode->hsync_start - 1;
	u32 vsyncw = mode->vsync_end - mode->vsync_start - 1;
	u32 hfrntp = mode->hsync_start - mode->hdisplay;	/* front porch */
	u32 vfrntp = mode->vsync_start - mode->vdisplay;
	u32 hbackp = mode->htotal - mode->hsync_end;		/* back porch */
	u32 vbackp = mode->vtotal - mode->vsync_end;
	u32 hss2be = hsyncw + hbackp;	/* sync start -> blanking end */
	u32 vss2be = vsyncw + vbackp;
	u32 hss2de = htotal - hfrntp;	/* sync start -> display end */
	u32 vss2de = vtotal - vfrntp;
	u32 syncs, *push;
	int ret;

	/* bit 3 / bit 4 select negative h/v sync polarity */
	syncs = 0x00000001;
	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
		syncs |= 0x00000008;
	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
		syncs |= 0x00000010;

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(crtc->dev, 0, 64);
	if (push) {
		evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 5);
		evo_data(push, 0x00000000);
		evo_data(push, (vtotal << 16) | htotal);
		evo_data(push, (vsyncw << 16) | hsyncw);
		evo_data(push, (vss2be << 16) | hss2be);
		evo_data(push, (vss2de << 16) | hss2de);
		evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000); /* ??? */
		evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
		evo_data(push, mode->clock * 1000);	/* pixel clock, Hz */
		evo_data(push, 0x00200000); /* ??? */
		evo_data(push, mode->clock * 1000);
		evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 1);
		evo_data(push, syncs);
		evo_kick(push, crtc->dev, 0);
	}

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nvd0_crtc_set_dither(nv_crtc, false);
	nvd0_crtc_set_scale(nv_crtc, false);
	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
	return 0;
}
  352. static int
  353. nvd0_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
  354. struct drm_framebuffer *old_fb)
  355. {
  356. struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
  357. int ret;
  358. if (!crtc->fb) {
  359. NV_DEBUG_KMS(crtc->dev, "No FB bound\n");
  360. return 0;
  361. }
  362. ret = nvd0_crtc_swap_fbs(crtc, old_fb);
  363. if (ret)
  364. return ret;
  365. nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
  366. return 0;
  367. }
/*
 * Atomic framebuffer switch (kernel debugger entry/exit path).
 * NOTE(review): @state is ignored here; the image update is always
 * pushed immediately — confirm this is intended.
 */
static int
nvd0_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nvd0_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}
/*
 * Upload the software gamma table into the mapped LUT buffer.  Each of
 * the 256 entries occupies 0x20 bytes with R/G/B as 16-bit words; the
 * 16-bit inputs are reduced (>> 2) and offset by 0x6000 — presumably
 * the hardware LUT entry format, TODO confirm against hw docs.
 */
static void
nvd0_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		writew(0x6000 + (nv_crtc->lut.r[i] >> 2), lut + (i * 0x20) + 0);
		writew(0x6000 + (nv_crtc->lut.g[i] >> 2), lut + (i * 0x20) + 2);
		writew(0x6000 + (nv_crtc->lut.b[i] >> 2), lut + (i * 0x20) + 4);
	}
}
/*
 * Set the cursor image from a userspace GEM object: copy the 64x64
 * image word-by-word into the per-CRTC cursor buffer, then toggle
 * hardware cursor visibility if it changed.  @handle == 0 hides the
 * cursor.
 */
static int
nvd0_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem;
	struct nouveau_bo *nvbo;
	bool visible = (handle != 0);
	int i, ret = 0;

	if (visible) {
		/* hardware cursor is fixed at 64x64 */
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_map(nvbo);
		if (ret == 0) {
			for (i = 0; i < 64 * 64; i++) {
				u32 v = nouveau_bo_rd32(nvbo, i);
				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
			}
			nouveau_bo_unmap(nvbo);
		}

		drm_gem_object_unreference_unlocked(gem);
	}

	if (visible != nv_crtc->cursor.visible) {
		nvd0_crtc_cursor_show(nv_crtc, visible, true);
		nv_crtc->cursor.visible = visible;
	}

	return ret;
}
/*
 * Move the hardware cursor.  The position is written directly to the
 * per-head cursor registers (not via the EVO channel); the second
 * write appears to latch the new position.
 */
static int
nvd0_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	const u32 data = (y << 16) | x;

	nv_wr32(crtc->dev, 0x64d084 + (nv_crtc->index * 0x1000), data);
	nv_wr32(crtc->dev, 0x64d080 + (nv_crtc->index * 0x1000), 0x00000000);
	return 0;
}
  431. static void
  432. nvd0_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
  433. uint32_t start, uint32_t size)
  434. {
  435. struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
  436. u32 end = max(start + size, (u32)256);
  437. u32 i;
  438. for (i = start; i < end; i++) {
  439. nv_crtc->lut.r[i] = r[i];
  440. nv_crtc->lut.g[i] = g[i];
  441. nv_crtc->lut.b[i] = b[i];
  442. }
  443. nvd0_crtc_lut_load(crtc);
  444. }
/*
 * Tear down a CRTC: unmap and release the cursor and LUT buffers,
 * unregister from DRM, and free the wrapper allocation.
 */
static void
nvd0_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	drm_crtc_cleanup(crtc);
	kfree(crtc);
}
/* drm_crtc helper vtable for NVD0 heads. */
static const struct drm_crtc_helper_funcs nvd0_crtc_hfunc = {
	.dpms = nvd0_crtc_dpms,
	.prepare = nvd0_crtc_prepare,
	.commit = nvd0_crtc_commit,
	.mode_fixup = nvd0_crtc_mode_fixup,
	.mode_set = nvd0_crtc_mode_set,
	.mode_set_base = nvd0_crtc_mode_set_base,
	.mode_set_base_atomic = nvd0_crtc_mode_set_base_atomic,
	.load_lut = nvd0_crtc_lut_load,
};
/* Core CRTC vtable; configuration changes go through the helper layer. */
static const struct drm_crtc_funcs nvd0_crtc_func = {
	.cursor_set = nvd0_crtc_cursor_set,
	.cursor_move = nvd0_crtc_cursor_move,
	.gamma_set = nvd0_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = nvd0_crtc_destroy,
};
/*
 * No-op hook: on NVD0 the cursor position is programmed directly by
 * nvd0_crtc_cursor_move(), so this callback has nothing to do.
 */
static void
nvd0_cursor_set_pos(struct nouveau_crtc *nv_crtc, int x, int y)
{
}
/*
 * No-op hook: the cursor buffer offset is programmed when the cursor
 * is shown (nvd0_crtc_cursor_show()), not through this callback.
 */
static void
nvd0_cursor_set_offset(struct nouveau_crtc *nv_crtc, uint32_t offset)
{
}
/*
 * Allocate and initialise the CRTC for head @index, including its
 * 64x64 cursor buffer and gamma LUT buffer in VRAM.  On any failure
 * the partially-constructed CRTC is torn down via nvd0_crtc_destroy().
 */
static int
nvd0_crtc_create(struct drm_device *dev, int index)
{
	struct nouveau_crtc *nv_crtc;
	struct drm_crtc *crtc;
	int ret, i;

	nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
	if (!nv_crtc)
		return -ENOMEM;

	nv_crtc->index = index;
	nv_crtc->set_dither = nvd0_crtc_set_dither;
	nv_crtc->set_scale = nvd0_crtc_set_scale;
	nv_crtc->cursor.set_offset = nvd0_cursor_set_offset;
	nv_crtc->cursor.set_pos = nvd0_cursor_set_pos;
	/* start with an identity gamma ramp */
	for (i = 0; i < 256; i++) {
		nv_crtc->lut.r[i] = i << 8;
		nv_crtc->lut.g[i] = i << 8;
		nv_crtc->lut.b[i] = i << 8;
	}

	crtc = &nv_crtc->base;
	drm_crtc_init(dev, crtc, &nvd0_crtc_func);
	drm_crtc_helper_add(crtc, &nvd0_crtc_hfunc);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	/* cursor image buffer: 64x64 x 4 bytes, pinned and CPU-mapped */
	ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	}

	if (ret)
		goto out;

	/* hardware gamma LUT backing store */
	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	}

	if (ret)
		goto out;

	nvd0_crtc_lut_load(crtc);

out:
	if (ret)
		nvd0_crtc_destroy(crtc);
	return ret;
}
  532. /******************************************************************************
  533. * DAC
  534. *****************************************************************************/
/*
 * DAC power management: STANDBY/OFF set bit 0, SUSPEND/OFF set bit 2
 * of the OR control word (presumably per-sync power-down states —
 * confirm against hardware documentation).  The register is only
 * touched while the OR is idle (bit 31 clear), before and after.
 */
static void
nvd0_dac_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	dpms_ctrl = 0x80000000;
	if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000001;
	if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000004;

	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61a004 + (or * 0x0800), 0xc000007f, dpms_ctrl);
	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
}
  551. static bool
  552. nvd0_dac_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
  553. struct drm_display_mode *adjusted_mode)
  554. {
  555. struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
  556. struct nouveau_connector *nv_connector;
  557. nv_connector = nouveau_encoder_connector_get(nv_encoder);
  558. if (nv_connector && nv_connector->native_mode) {
  559. if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
  560. int id = adjusted_mode->base.id;
  561. *adjusted_mode = *nv_connector->native_mode;
  562. adjusted_mode->base.id = id;
  563. }
  564. }
  565. return true;
  566. }
/* No-op: nothing to do before nvd0_dac_mode_set(). */
static void
nvd0_dac_prepare(struct drm_encoder *encoder)
{
}
/* No-op: nvd0_dac_mode_set() leaves the encoder fully configured. */
static void
nvd0_dac_commit(struct drm_encoder *encoder)
{
}
/*
 * Attach the DAC to the head driving this encoder: power the OR up and
 * point it at the CRTC via its head bitmask in the core channel.
 */
static void
nvd0_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nvd0_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 4);
	if (push) {
		evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 2);
		evo_data(push, 1 << nv_crtc->index);	/* head mask */
		evo_data(push, 0x00ff);
		evo_kick(push, encoder->dev, 0);
	}

	/* remember the attachment for nvd0_dac_disconnect() */
	nv_encoder->crtc = encoder->crtc;
}
/*
 * Detach the DAC from its head: blank the head, clear the OR's head
 * mask and latch with an UPDATE.  No-op if already disconnected.
 */
static void
nvd0_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);	/* UPDATE */
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
	}
}
/*
 * DAC load detection: drive a test level onto the outputs, wait for
 * the sense lines to settle, then sample the load bits to decide
 * whether an analogue monitor is attached.
 */
static enum drm_connector_status
nvd0_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	enum drm_connector_status status = connector_status_disconnected;
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	int or = nv_encoder->or;
	u32 load;

	nv_wr32(dev, 0x61a00c + (or * 0x800), 0x00100000);
	udelay(9500);	/* settle time before sampling */
	nv_wr32(dev, 0x61a00c + (or * 0x800), 0x80000000);

	load = nv_rd32(dev, 0x61a00c + (or * 0x800));
	/* all three sense bits must report a load */
	if ((load & 0x38000000) == 0x38000000)
		status = connector_status_connected;

	nv_wr32(dev, 0x61a00c + (or * 0x800), 0x00000000);
	return status;
}
/* Free the encoder allocated in nvd0_dac_create(). */
static void
nvd0_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
/* drm_encoder helper vtable for NVD0 DACs. */
static const struct drm_encoder_helper_funcs nvd0_dac_hfunc = {
	.dpms = nvd0_dac_dpms,
	.mode_fixup = nvd0_dac_mode_fixup,
	.prepare = nvd0_dac_prepare,
	.commit = nvd0_dac_commit,
	.mode_set = nvd0_dac_mode_set,
	.disable = nvd0_dac_disconnect,
	.get_crtc = nvd0_display_crtc_get,
	.detect = nvd0_dac_detect
};
/* Core encoder vtable for NVD0 DACs. */
static const struct drm_encoder_funcs nvd0_dac_func = {
	.destroy = nvd0_dac_destroy,
};
/*
 * Create a DAC encoder from a DCB table entry and attach it to the
 * given connector.  The OR index is derived from the DCB "or" bitmask.
 */
static int
nvd0_dac_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;	/* lowest set bit -> OR index */

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_dac_func, DRM_MODE_ENCODER_DAC);
	drm_encoder_helper_add(encoder, &nvd0_dac_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
  666. /******************************************************************************
  667. * Audio
  668. *****************************************************************************/
/*
 * Enable audio on the OR and upload the connector's ELD (codec/speaker
 * capability data derived from the EDID) to the audio registers,
 * zero-padding the remainder of the 0x60-byte window.  No-op if the
 * monitor reports no audio support.
 */
static void
nvd0_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;
	struct drm_device *dev = encoder->dev;
	int i, or = nv_encoder->or * 0x30;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	nv_mask(dev, 0x10ec10 + or, 0x80000003, 0x80000001);

	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
	if (nv_connector->base.eld[0]) {
		u8 *eld = nv_connector->base.eld;
		/* eld[2] holds the ELD payload length in 4-byte units */
		for (i = 0; i < eld[2] * 4; i++)
			nv_wr32(dev, 0x10ec00 + or, (i << 8) | eld[i]);
		for (i = eld[2] * 4; i < 0x60; i++)
			nv_wr32(dev, 0x10ec00 + or, (i << 8) | 0x00);

		nv_mask(dev, 0x10ec10 + or, 0x80000002, 0x80000002);
	}
}
/* Disable audio output on this encoder's OR. */
static void
nvd0_audio_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	int or = nv_encoder->or * 0x30;

	nv_mask(dev, 0x10ec10 + or, 0x80000003, 0x80000000);
}
  698. /******************************************************************************
  699. * HDMI
  700. *****************************************************************************/
/*
 * Enable HDMI output for the head driving this encoder: program the
 * AVI (and a second, unidentified) infoframe, then the HDMI control
 * register with the rekey value and the maximum audio packet count
 * derived from the mode's horizontal blanking.  Finally bring up
 * audio.  No-op for non-HDMI sinks.
 */
static void
nvd0_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct drm_device *dev = encoder->dev;
	int head = nv_crtc->index * 0x800;
	u32 rekey = 56; /* binary driver, and tegra constant */
	u32 max_ac_packet;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	/* packets that fit in hblank after rekey and overhead, per 32 */
	max_ac_packet = mode->htotal - mode->hdisplay;
	max_ac_packet -= rekey;
	max_ac_packet -= 18; /* constant from tegra */
	max_ac_packet /= 32;

	/* AVI InfoFrame */
	nv_mask(dev, 0x616714 + head, 0x00000001, 0x00000000);
	nv_wr32(dev, 0x61671c + head, 0x000d0282);
	nv_wr32(dev, 0x616720 + head, 0x0000006f);
	nv_wr32(dev, 0x616724 + head, 0x00000000);
	nv_wr32(dev, 0x616728 + head, 0x00000000);
	nv_wr32(dev, 0x61672c + head, 0x00000000);
	nv_mask(dev, 0x616714 + head, 0x00000001, 0x00000001);

	/* ??? InfoFrame? */
	nv_mask(dev, 0x6167a4 + head, 0x00000001, 0x00000000);
	nv_wr32(dev, 0x6167ac + head, 0x00000010);
	nv_mask(dev, 0x6167a4 + head, 0x00000001, 0x00000001);

	/* HDMI_CTRL */
	nv_mask(dev, 0x616798 + head, 0x401f007f, 0x40000000 | rekey |
					  max_ac_packet << 16);

	nvd0_audio_mode_set(encoder, mode);
}
  735. static void
  736. nvd0_hdmi_disconnect(struct drm_encoder *encoder)
  737. {
  738. struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
  739. struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
  740. struct drm_device *dev = encoder->dev;
  741. int head = nv_crtc->index * 0x800;
  742. nvd0_audio_disconnect(encoder);
  743. nv_mask(dev, 0x616798 + head, 0x40000000, 0x00000000);
  744. nv_mask(dev, 0x6167a4 + head, 0x00000001, 0x00000000);
  745. nv_mask(dev, 0x616714 + head, 0x00000001, 0x00000000);
  746. }
  747. /******************************************************************************
  748. * SOR
  749. *****************************************************************************/
static void
nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
{
	/* Set the SOR's power state.  If another TMDS encoder shares the
	 * same OR and is still on, leave the hardware untouched.
	 */
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_encoder *partner;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	nv_encoder->last_dpms = mode;
	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->dcb->or) {
			/* a lit partner keeps the OR powered */
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	/* bit 0 = on/off request, bit 31 = trigger/pending */
	dpms_ctrl = (mode == DRM_MODE_DPMS_ON);
	dpms_ctrl |= 0x80000000;

	/* wait until idle, apply, then wait for the request (and the
	 * secondary status register) to settle
	 */
	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61c004 + (or * 0x0800), 0x80000001, dpms_ctrl);
	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_wait(dev, 0x61c030 + (or * 0x0800), 0x10000000, 0x00000000);
}
  777. static bool
  778. nvd0_sor_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
  779. struct drm_display_mode *adjusted_mode)
  780. {
  781. struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
  782. struct nouveau_connector *nv_connector;
  783. nv_connector = nouveau_encoder_connector_get(nv_encoder);
  784. if (nv_connector && nv_connector->native_mode) {
  785. if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
  786. int id = adjusted_mode->base.id;
  787. *adjusted_mode = *nv_connector->native_mode;
  788. adjusted_mode->base.id = id;
  789. }
  790. }
  791. return true;
  792. }
static void
nvd0_sor_prepare(struct drm_encoder *encoder)
{
	/* intentionally empty: no per-SOR work needed before mode_set */
}
static void
nvd0_sor_commit(struct drm_encoder *encoder)
{
	/* intentionally empty: mode_set programs and lights the SOR */
}
static void
nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
		  struct drm_display_mode *mode)
{
	/* Build SOR_MODE_CTRL (mode_ctrl) and the OR configuration word
	 * (or_config) for TMDS/LVDS, power the SOR on, and push the
	 * methods through the master evo channel.
	 */
	struct drm_device *dev = encoder->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &dev_priv->vbios;
	u32 mode_ctrl = (1 << nv_crtc->index);	/* crtc assignment bit */
	u32 *push, or_config;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	switch (nv_encoder->dcb->type) {
	case OUTPUT_TMDS:
		/* link selection in bits 8-11: 0x100 = link A only,
		 * 0x200 = link B only, 0x500 = both links (clock >= 165MHz)
		 */
		if (nv_encoder->dcb->sorconf.link & 1) {
			if (mode->clock < 165000)
				mode_ctrl |= 0x00000100;
			else
				mode_ctrl |= 0x00000500;
		} else {
			mode_ctrl |= 0x00000200;
		}

		or_config = (mode_ctrl & 0x00000f00) >> 8;
		if (mode->clock >= 165000)
			or_config |= 0x0100;	/* dual-link */

		nvd0_hdmi_mode_set(encoder, mode);
		break;
	case OUTPUT_LVDS:
		or_config = (mode_ctrl & 0x00000f00) >> 8;
		if (bios->fp_no_ddc) {
			/* panel properties come solely from the VBIOS */
			if (bios->fp.dual_link)
				or_config |= 0x0100;
			if (bios->fp.if_is_24bit)
				or_config |= 0x0200;
		} else {
			/* SPWG panels encode dual-link in EDID byte 121;
			 * other panels use the VBIOS transition clock
			 */
			if (nv_connector->dcb->type == DCB_CONNECTOR_LVDS_SPWG) {
				if (((u8 *)nv_connector->edid)[121] == 2)
					or_config |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				or_config |= 0x0100;
			}

			/* 24-bit panel strap: bit 1 applies when dual-link,
			 * bit 0 when single-link
			 */
			if (or_config & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					or_config |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					or_config |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				or_config |= 0x0200;
		}
		break;
	default:
		/* only TMDS/LVDS encoders are ever created for SORs */
		BUG_ON(1);
		break;
	}

	nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(dev, 0, 4);
	if (push) {
		evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 2);
		evo_data(push, mode_ctrl);
		evo_data(push, or_config);
		evo_kick(push, dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}
  869. static void
  870. nvd0_sor_disconnect(struct drm_encoder *encoder)
  871. {
  872. struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
  873. struct drm_device *dev = encoder->dev;
  874. u32 *push;
  875. if (nv_encoder->crtc) {
  876. nvd0_crtc_prepare(nv_encoder->crtc);
  877. push = evo_wait(dev, 0, 4);
  878. if (push) {
  879. evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
  880. evo_data(push, 0x00000000);
  881. evo_mthd(push, 0x0080, 1);
  882. evo_data(push, 0x00000000);
  883. evo_kick(push, dev, 0);
  884. }
  885. nvd0_hdmi_disconnect(encoder);
  886. nv_encoder->crtc = NULL;
  887. nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
  888. }
  889. }
static void
nvd0_sor_destroy(struct drm_encoder *encoder)
{
	/* release drm core state, then free the nouveau_encoder that
	 * embeds this drm_encoder (allocated in nvd0_sor_create)
	 */
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
/* modesetting helper vtable for nvd0 SOR (TMDS/LVDS) encoders */
static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
	.dpms = nvd0_sor_dpms,
	.mode_fixup = nvd0_sor_mode_fixup,
	.prepare = nvd0_sor_prepare,
	.commit = nvd0_sor_commit,
	.mode_set = nvd0_sor_mode_set,
	.disable = nvd0_sor_disconnect,
	.get_crtc = nvd0_display_crtc_get,
};
/* base drm_encoder ops; destroy frees the embedding nouveau_encoder */
static const struct drm_encoder_funcs nvd0_sor_func = {
	.destroy = nvd0_sor_destroy,
};
  908. static int
  909. nvd0_sor_create(struct drm_connector *connector, struct dcb_entry *dcbe)
  910. {
  911. struct drm_device *dev = connector->dev;
  912. struct nouveau_encoder *nv_encoder;
  913. struct drm_encoder *encoder;
  914. nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
  915. if (!nv_encoder)
  916. return -ENOMEM;
  917. nv_encoder->dcb = dcbe;
  918. nv_encoder->or = ffs(dcbe->or) - 1;
  919. nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
  920. encoder = to_drm_encoder(nv_encoder);
  921. encoder->possible_crtcs = dcbe->heads;
  922. encoder->possible_clones = 0;
  923. drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
  924. drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);
  925. drm_mode_connector_attach_encoder(connector, encoder);
  926. return 0;
  927. }
  928. /******************************************************************************
  929. * IRQ
  930. *****************************************************************************/
  931. static struct dcb_entry *
  932. lookup_dcb(struct drm_device *dev, int id, u32 mc)
  933. {
  934. struct drm_nouveau_private *dev_priv = dev->dev_private;
  935. int type, or, i;
  936. if (id < 4) {
  937. type = OUTPUT_ANALOG;
  938. or = id;
  939. } else {
  940. switch (mc & 0x00000f00) {
  941. case 0x00000000: type = OUTPUT_LVDS; break;
  942. case 0x00000100: type = OUTPUT_TMDS; break;
  943. case 0x00000200: type = OUTPUT_TMDS; break;
  944. case 0x00000500: type = OUTPUT_TMDS; break;
  945. default:
  946. NV_ERROR(dev, "PDISP: unknown SOR mc 0x%08x\n", mc);
  947. return NULL;
  948. }
  949. or = id - 4;
  950. }
  951. for (i = 0; i < dev_priv->vbios.dcb.entries; i++) {
  952. struct dcb_entry *dcb = &dev_priv->vbios.dcb.entry[i];
  953. if (dcb->type == type && (dcb->or & (1 << or)))
  954. return dcb;
  955. }
  956. NV_ERROR(dev, "PDISP: DCB for %d/0x%08x not found\n", id, mc);
  957. return NULL;
  958. }
static void
nvd0_display_unk1_handler(struct drm_device *dev, u32 crtc, u32 mask)
{
	/* First supervisor stage: run the VBIOS display script (id -1)
	 * for every output currently assigned to this crtc, then ack
	 * the supervisor interrupt so the hardware proceeds.
	 */
	struct dcb_entry *dcb;
	int i;

	/* 0x640180+: current assignment for the 4 DACs + 4 SORs */
	for (i = 0; mask && i < 8; i++) {
		u32 mcc = nv_rd32(dev, 0x640180 + (i * 0x20));
		if (!(mcc & (1 << crtc)))
			continue;

		dcb = lookup_dcb(dev, i, mcc);
		if (!dcb)
			continue;

		nouveau_bios_run_display_table(dev, 0x0000, -1, dcb, crtc);
	}

	/* clear both per-head status words and ack the supervisor */
	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}
static void
nvd0_display_unk2_handler(struct drm_device *dev, u32 crtc, u32 mask)
{
	/* Second supervisor stage: run script -2 for the outputs leaving
	 * this crtc, reprogram the pixel clock if requested, then run the
	 * mode script and set up watermark/control registers for the
	 * output newly assigned to the crtc.
	 */
	struct dcb_entry *dcb;
	u32 or, tmp, pclk;
	int i;

	/* 0x640180+: outputs currently driven by this crtc */
	for (i = 0; mask && i < 8; i++) {
		u32 mcc = nv_rd32(dev, 0x640180 + (i * 0x20));
		if (!(mcc & (1 << crtc)))
			continue;

		dcb = lookup_dcb(dev, i, mcc);
		if (!dcb)
			continue;

		nouveau_bios_run_display_table(dev, 0x0000, -2, dcb, crtc);
	}

	/* pixel clock for this head, in kHz */
	pclk = nv_rd32(dev, 0x660450 + (crtc * 0x300)) / 1000;
	if (mask & 0x00010000) {
		nv50_crtc_set_clock(dev, crtc, pclk);
	}

	/* 0x660180+: armed (pending) assignment and config */
	for (i = 0; mask && i < 8; i++) {
		u32 mcp = nv_rd32(dev, 0x660180 + (i * 0x20));
		u32 cfg = nv_rd32(dev, 0x660184 + (i * 0x20));
		if (!(mcp & (1 << crtc)))
			continue;

		dcb = lookup_dcb(dev, i, mcp);
		if (!dcb)
			continue;
		or = ffs(dcb->or) - 1;

		nouveau_bios_run_display_table(dev, cfg, pclk, dcb, crtc);

		nv_wr32(dev, 0x612200 + (crtc * 0x800), 0x00000000);
		switch (dcb->type) {
		case OUTPUT_ANALOG:
			nv_wr32(dev, 0x612280 + (or * 0x800), 0x00000000);
			break;
		case OUTPUT_TMDS:
		case OUTPUT_LVDS:
			/* cfg bit 8 selects the dual-link variant */
			if (cfg & 0x00000100)
				tmp = 0x00000101;
			else
				tmp = 0x00000000;

			nv_mask(dev, 0x612300 + (or * 0x800), 0x00000707, tmp);
			break;
		default:
			break;
		}

		/* only the first matching output is handled */
		break;
	}

	/* clear both per-head status words and ack the supervisor */
	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}
static void
nvd0_display_unk4_handler(struct drm_device *dev, u32 crtc, u32 mask)
{
	/* Final supervisor stage: run the display script once more for
	 * each output on this crtc, passing the pixel clock negated
	 * (post-clock-change variant), then ack the supervisor.
	 */
	struct dcb_entry *dcb;
	int pclk, i;

	pclk = nv_rd32(dev, 0x660450 + (crtc * 0x300)) / 1000;

	/* 0x660180+: armed assignment and config, as in unk2 */
	for (i = 0; mask && i < 8; i++) {
		u32 mcp = nv_rd32(dev, 0x660180 + (i * 0x20));
		u32 cfg = nv_rd32(dev, 0x660184 + (i * 0x20));
		if (!(mcp & (1 << crtc)))
			continue;

		dcb = lookup_dcb(dev, i, mcp);
		if (!dcb)
			continue;

		nouveau_bios_run_display_table(dev, cfg, -pclk, dcb, crtc);
	}

	/* clear both per-head status words and ack the supervisor */
	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}
static void
nvd0_display_bh(unsigned long data)
{
	/* Tasklet bottom half for modeset supervisor interrupts.  The
	 * irq handler stashed the requested stage(s) in disp->modeset;
	 * dispatch them to the unk1/unk2/unk4 handlers for whichever
	 * head has status bits pending.
	 */
	struct drm_device *dev = (struct drm_device *)data;
	struct nvd0_display *disp = nvd0_display(dev);
	u32 mask, crtc;
	int i;

	if (drm_debug & (DRM_UT_DRIVER | DRM_UT_KMS)) {
		NV_INFO(dev, "PDISP: modeset req %d\n", disp->modeset);
		NV_INFO(dev, " STAT: 0x%08x 0x%08x 0x%08x\n",
			 nv_rd32(dev, 0x6101d0),
			 nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));
		for (i = 0; i < 8; i++) {
			NV_INFO(dev, " %s%d: 0x%08x 0x%08x\n",
				i < 4 ? "DAC" : "SOR", i,
				nv_rd32(dev, 0x640180 + (i * 0x20)),
				nv_rd32(dev, 0x660180 + (i * 0x20)));
		}
	}

	/* head 0 status lives at 0x6101d4, head 1 at 0x6109d4; service
	 * whichever head raised the request
	 */
	mask = nv_rd32(dev, 0x6101d4);
	crtc = 0;
	if (!mask) {
		mask = nv_rd32(dev, 0x6109d4);
		crtc = 1;
	}

	if (disp->modeset & 0x00000001)
		nvd0_display_unk1_handler(dev, crtc, mask);
	if (disp->modeset & 0x00000002)
		nvd0_display_unk2_handler(dev, crtc, mask);
	if (disp->modeset & 0x00000004)
		nvd0_display_unk4_handler(dev, crtc, mask);
}
static void
nvd0_display_intr(struct drm_device *dev)
{
	/* PDISP interrupt handler (irq line 26): services evo channel
	 * exceptions, supervisor (modeset) requests, and per-head status,
	 * logging anything it doesn't recognise.
	 */
	struct nvd0_display *disp = nvd0_display(dev);
	u32 intr = nv_rd32(dev, 0x610088);

	/* evo channel exception: log the offending method and ack */
	if (intr & 0x00000002) {
		u32 stat = nv_rd32(dev, 0x61009c);
		int chid = ffs(stat) - 1;
		if (chid >= 0) {
			u32 mthd = nv_rd32(dev, 0x6101f0 + (chid * 12));
			u32 data = nv_rd32(dev, 0x6101f4 + (chid * 12));
			u32 unkn = nv_rd32(dev, 0x6101f8 + (chid * 12));

			NV_INFO(dev, "EvoCh: chid %d mthd 0x%04x data 0x%08x "
				     "0x%08x 0x%08x\n",
				chid, (mthd & 0x0000ffc), data, mthd, unkn);
			nv_wr32(dev, 0x61009c, (1 << chid));
			nv_wr32(dev, 0x6101f0 + (chid * 12), 0x90000000);
		}

		intr &= ~0x00000002;
	}

	/* supervisor request: record the stage bits (0-2) and defer the
	 * actual handling to the tasklet
	 */
	if (intr & 0x00100000) {
		u32 stat = nv_rd32(dev, 0x6100ac);

		if (stat & 0x00000007) {
			disp->modeset = stat;
			tasklet_schedule(&disp->tasklet);

			nv_wr32(dev, 0x6100ac, (stat & 0x00000007));
			stat &= ~0x00000007;
		}

		if (stat) {
			NV_INFO(dev, "PDISP: unknown intr24 0x%08x\n", stat);
			nv_wr32(dev, 0x6100ac, stat);
		}

		intr &= ~0x00100000;
	}

	/* per-head status (head 0 / head 1): ack without further action */
	if (intr & 0x01000000) {
		u32 stat = nv_rd32(dev, 0x6100bc);
		nv_wr32(dev, 0x6100bc, stat);
		intr &= ~0x01000000;
	}

	if (intr & 0x02000000) {
		u32 stat = nv_rd32(dev, 0x6108bc);
		nv_wr32(dev, 0x6108bc, stat);
		intr &= ~0x02000000;
	}

	if (intr)
		NV_INFO(dev, "PDISP: unknown intr 0x%08x\n", intr);
}
  1127. /******************************************************************************
  1128. * Init
  1129. *****************************************************************************/
static void
nvd0_display_fini(struct drm_device *dev)
{
	/* Shut down the evo channels brought up by nvd0_display_init:
	 * the two cursor channels (13, 14) first, then the master, and
	 * disable their interrupt sources.
	 */
	int i;

	/* fini cursors */
	for (i = 14; i >= 13; i--) {
		/* skip channels that were never activated */
		if (!(nv_rd32(dev, 0x610490 + (i * 0x10)) & 0x00000001))
			continue;

		nv_mask(dev, 0x610490 + (i * 0x10), 0x00000001, 0x00000000);
		nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00000000);
		nv_mask(dev, 0x610090, 1 << i, 0x00000000);
		nv_mask(dev, 0x6100a0, 1 << i, 0x00000000);
	}

	/* fini master */
	if (nv_rd32(dev, 0x610490) & 0x00000010) {
		nv_mask(dev, 0x610490, 0x00000010, 0x00000000);
		nv_mask(dev, 0x610490, 0x00000003, 0x00000000);
		nv_wait(dev, 0x610490, 0x80000000, 0x00000000);
		nv_mask(dev, 0x610090, 0x00000001, 0x00000000);
		nv_mask(dev, 0x6100a0, 0x00000001, 0x00000000);
	}
}
int
nvd0_display_init(struct drm_device *dev)
{
	/* Bring up the display engine: clear any pending error state,
	 * mirror OR/crtc capability registers, point the hardware at our
	 * hash table, start the master evo channel and both cursor
	 * channels, and push the initial methods.  Returns 0 on success
	 * or -EBUSY if the hardware fails to respond.
	 */
	struct nvd0_display *disp = nvd0_display(dev);
	u32 *push;
	int i;

	/* clear a pending error condition (0x6100ac bit 8) and wait for
	 * 0x6194e8 to settle before doing anything else
	 */
	if (nv_rd32(dev, 0x6100ac) & 0x00000100) {
		nv_wr32(dev, 0x6100ac, 0x00000100);
		nv_mask(dev, 0x6194e8, 0x00000001, 0x00000000);
		if (!nv_wait(dev, 0x6194e8, 0x00000002, 0x00000000)) {
			NV_ERROR(dev, "PDISP: 0x6194e8 0x%08x\n",
				 nv_rd32(dev, 0x6194e8));
			return -EBUSY;
		}
	}

	/* nfi what these are exactly, i do know that SOR_MODE_CTRL won't
	 * work at all unless you do the SOR part below.
	 */
	for (i = 0; i < 3; i++) {
		u32 dac = nv_rd32(dev, 0x61a000 + (i * 0x800));
		nv_wr32(dev, 0x6101c0 + (i * 0x800), dac);
	}

	for (i = 0; i < 4; i++) {
		u32 sor = nv_rd32(dev, 0x61c000 + (i * 0x800));
		nv_wr32(dev, 0x6301c4 + (i * 0x800), sor);
	}

	for (i = 0; i < 2; i++) {
		u32 crtc0 = nv_rd32(dev, 0x616104 + (i * 0x800));
		u32 crtc1 = nv_rd32(dev, 0x616108 + (i * 0x800));
		u32 crtc2 = nv_rd32(dev, 0x61610c + (i * 0x800));
		nv_wr32(dev, 0x6101b4 + (i * 0x800), crtc0);
		nv_wr32(dev, 0x6101b8 + (i * 0x800), crtc1);
		nv_wr32(dev, 0x6101bc + (i * 0x800), crtc2);
	}

	/* point at our hash table / objects, enable interrupts */
	nv_wr32(dev, 0x610010, (disp->mem->vinst >> 8) | 9);
	nv_mask(dev, 0x6100b0, 0x00000307, 0x00000307);

	/* init master */
	nv_wr32(dev, 0x610494, (disp->evo[0].handle >> 8) | 3);
	nv_wr32(dev, 0x610498, 0x00010000);
	nv_wr32(dev, 0x61049c, 0x00000001);
	nv_mask(dev, 0x610490, 0x00000010, 0x00000010);
	nv_wr32(dev, 0x640000, 0x00000000);
	nv_wr32(dev, 0x610490, 0x01000013);
	if (!nv_wait(dev, 0x610490, 0x80000000, 0x00000000)) {
		NV_ERROR(dev, "PDISP: master 0x%08x\n",
			 nv_rd32(dev, 0x610490));
		return -EBUSY;
	}
	/* enable the master channel's interrupt sources */
	nv_mask(dev, 0x610090, 0x00000001, 0x00000001);
	nv_mask(dev, 0x6100a0, 0x00000001, 0x00000001);

	/* init cursors */
	for (i = 13; i <= 14; i++) {
		nv_wr32(dev, 0x610490 + (i * 0x10), 0x00000001);
		if (!nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00010000)) {
			NV_ERROR(dev, "PDISP: curs%d 0x%08x\n", i,
				 nv_rd32(dev, 0x610490 + (i * 0x10)));
			return -EBUSY;
		}

		nv_mask(dev, 0x610090, 1 << i, 1 << i);
		nv_mask(dev, 0x6100a0, 1 << i, 1 << i);
	}

	/* initial methods: bind the sync object and poke 0x0084/0x008c */
	push = evo_wait(dev, 0, 32);
	if (!push)
		return -EBUSY;
	evo_mthd(push, 0x0088, 1);
	evo_data(push, NvEvoSync);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x80000000);
	evo_mthd(push, 0x008c, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, dev, 0);

	return 0;
}
void
nvd0_display_destroy(struct drm_device *dev)
{
	/* Tear down everything nvd0_display_create set up, in reverse:
	 * quiesce the hardware, free the evo push buffer and gpuobj,
	 * unhook the irq handler, then free the display struct.
	 */
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nvd0_display *disp = nvd0_display(dev);
	struct pci_dev *pdev = dev->pdev;

	nvd0_display_fini(dev);

	pci_free_consistent(pdev, PAGE_SIZE, disp->evo[0].ptr, disp->evo[0].handle);
	nouveau_gpuobj_ref(NULL, &disp->mem);
	nouveau_irq_unregister(dev, 26);

	dev_priv->engine.display.priv = NULL;
	kfree(disp);
}
int
nvd0_display_create(struct drm_device *dev)
{
	/* Top-level constructor for the nvd0 display engine: creates the
	 * crtc/encoder/connector objects from the VBIOS DCB table, sets
	 * up irq + tasklet handling, builds the hash table and DMA
	 * objects the evo channels use, allocates the push buffer, and
	 * finally initialises the hardware.  Cleans up after itself on
	 * any failure via nvd0_display_destroy.
	 */
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_instmem_engine *pinstmem = &dev_priv->engine.instmem;
	struct dcb_table *dcb = &dev_priv->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct pci_dev *pdev = dev->pdev;
	struct nvd0_display *disp;
	struct dcb_entry *dcbe;
	int ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;
	dev_priv->engine.display.priv = disp;

	/* create crtc objects to represent the hw heads */
	for (i = 0; i < 2; i++) {
		ret = nvd0_crtc_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location != DCB_LOC_ON_CHIP) {
			NV_WARN(dev, "skipping off-chip encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}

		switch (dcbe->type) {
		case OUTPUT_TMDS:
		case OUTPUT_LVDS:
			nvd0_sor_create(connector, dcbe);
			break;
		case OUTPUT_ANALOG:
			nvd0_dac_create(connector, dcbe);
			break;
		default:
			NV_WARN(dev, "skipping unsupported encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(dev, "%s has no encoders, removing\n",
			drm_get_connector_name(connector));
		connector->funcs->destroy(connector);
	}

	/* setup interrupt handling */
	tasklet_init(&disp->tasklet, nvd0_display_bh, (unsigned long)dev);
	nouveau_irq_register(dev, 26, nvd0_display_intr);

	/* hash table and dma objects for the memory areas we care about */
	ret = nouveau_gpuobj_new(dev, NULL, 0x4000, 0x10000,
				 NVOBJ_FLAG_ZERO_ALLOC, &disp->mem);
	if (ret)
		goto out;

	/* ctxdma NvEvoSync: covers 0x2000-0x2fff of disp->mem, with its
	 * hash entry at 0x0000 pointing at the object at 0x1000
	 */
	nv_wo32(disp->mem, 0x1000, 0x00000049);
	nv_wo32(disp->mem, 0x1004, (disp->mem->vinst + 0x2000) >> 8);
	nv_wo32(disp->mem, 0x1008, (disp->mem->vinst + 0x2fff) >> 8);
	nv_wo32(disp->mem, 0x100c, 0x00000000);
	nv_wo32(disp->mem, 0x1010, 0x00000000);
	nv_wo32(disp->mem, 0x1014, 0x00000000);
	nv_wo32(disp->mem, 0x0000, NvEvoSync);
	nv_wo32(disp->mem, 0x0004, (0x1000 << 9) | 0x00000001);

	/* ctxdma NvEvoVRAM: all of vram */
	nv_wo32(disp->mem, 0x1020, 0x00000049);
	nv_wo32(disp->mem, 0x1024, 0x00000000);
	nv_wo32(disp->mem, 0x1028, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x102c, 0x00000000);
	nv_wo32(disp->mem, 0x1030, 0x00000000);
	nv_wo32(disp->mem, 0x1034, 0x00000000);
	nv_wo32(disp->mem, 0x0008, NvEvoVRAM);
	nv_wo32(disp->mem, 0x000c, (0x1020 << 9) | 0x00000001);

	/* ctxdma NvEvoVRAM_LP: all of vram, different class flags */
	nv_wo32(disp->mem, 0x1040, 0x00000009);
	nv_wo32(disp->mem, 0x1044, 0x00000000);
	nv_wo32(disp->mem, 0x1048, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x104c, 0x00000000);
	nv_wo32(disp->mem, 0x1050, 0x00000000);
	nv_wo32(disp->mem, 0x1054, 0x00000000);
	nv_wo32(disp->mem, 0x0010, NvEvoVRAM_LP);
	nv_wo32(disp->mem, 0x0014, (0x1040 << 9) | 0x00000001);

	/* ctxdma NvEvoFB32: all of vram, 0x0fe00000 format flags */
	nv_wo32(disp->mem, 0x1060, 0x0fe00009);
	nv_wo32(disp->mem, 0x1064, 0x00000000);
	nv_wo32(disp->mem, 0x1068, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x106c, 0x00000000);
	nv_wo32(disp->mem, 0x1070, 0x00000000);
	nv_wo32(disp->mem, 0x1074, 0x00000000);
	nv_wo32(disp->mem, 0x0018, NvEvoFB32);
	nv_wo32(disp->mem, 0x001c, (0x1060 << 9) | 0x00000001);

	pinstmem->flush(dev);

	/* push buffers for evo channels */
	disp->evo[0].ptr =
		pci_alloc_consistent(pdev, PAGE_SIZE, &disp->evo[0].handle);
	if (!disp->evo[0].ptr) {
		ret = -ENOMEM;
		goto out;
	}

	ret = nvd0_display_init(dev);
	if (ret)
		goto out;

out:
	if (ret)
		nvd0_display_destroy(dev);
	return ret;
}