nv50_crtc.c

/*
 * Copyright (C) 2008 Maarten Maathuis.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include "drmP.h"
#include "drm_mode.h"
#include "drm_crtc_helper.h"

#define NOUVEAU_DMA_DEBUG (nouveau_reg_debug & NOUVEAU_REG_DEBUG_EVO)
#include "nouveau_reg.h"
#include "nouveau_drv.h"
#include "nouveau_hw.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_fb.h"
#include "nouveau_connector.h"
#include "nv50_display.h"

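/* Push an EVO notifier request and spin (up to ~2s of PTIMER time) until the
 * core channel signals completion.  Method 0x0080 appears to be the master
 * UPDATE/commit method and 0x0084 the notifier control; the notify word in
 * disp->ntfy is cleared here and written non-zero by the hardware.
 */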
static int
nv50_crtc_wait_complete(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_timer_engine *ptimer = &dev_priv->engine.timer;
	struct nv50_display *disp = nv50_display(dev);
	struct nouveau_channel *evo = disp->master;
	u64 start;
	int ret;

	ret = RING_SPACE(evo, 6);
	if (ret)
		return ret;
	BEGIN_RING(evo, 0, 0x0084, 1);
	OUT_RING  (evo, 0x80000000);
	BEGIN_RING(evo, 0, 0x0080, 1);
	OUT_RING  (evo, 0);
	BEGIN_RING(evo, 0, 0x0084, 1);
	OUT_RING  (evo, 0x00000000);

	nv_wo32(disp->ntfy, 0x000, 0x00000000);
	FIRE_RING (evo);

	start = ptimer->read(dev);
	do {
		if (nv_ro32(disp->ntfy, 0x000))
			return 0;
	} while (ptimer->read(dev) - start < 2000000000ULL);

	return -EBUSY;
}

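/* Upload the software gamma ramp into the LUT buffer object.  Entries are
 * stored 8 bytes apart with R/G/B at offsets 0/2/4, and the 16-bit values
 * are shifted down to the 14 bits the hardware seems to expect; a 257th
 * entry is replicated for 30-bit depth, presumably to terminate the ramp.
 */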
static void
nv50_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	NV_DEBUG_KMS(crtc->dev, "\n");

	for (i = 0; i < 256; i++) {
		writew(nv_crtc->lut.r[i] >> 2, lut + 8*i + 0);
		writew(nv_crtc->lut.g[i] >> 2, lut + 8*i + 2);
		writew(nv_crtc->lut.b[i] >> 2, lut + 8*i + 4);
	}

	if (nv_crtc->lut.depth == 30) {
		writew(nv_crtc->lut.r[i - 1] >> 2, lut + 8*i + 0);
		writew(nv_crtc->lut.g[i - 1] >> 2, lut + 8*i + 2);
		writew(nv_crtc->lut.b[i - 1] >> 2, lut + 8*i + 4);
	}
}

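/* Blank or unblank a head by pointing its CLUT and framebuffer DMA objects
 * either at nothing or back at the real buffers.  NV84+ chipsets take the
 * CLUT DMA handle through a separate method, hence the chipset checks and
 * the differing RING_SPACE sizes.  When unblanking, the framebuffer DMA
 * object is chosen from the bo's tile layout (32-bpp, 16-bpp or linear).
 */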
int
nv50_crtc_blank(struct nouveau_crtc *nv_crtc, bool blanked)
{
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *evo = nv50_display(dev)->master;
	int index = nv_crtc->index, ret;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);
	NV_DEBUG_KMS(dev, "%s\n", blanked ? "blanked" : "unblanked");

	if (blanked) {
		nv_crtc->cursor.hide(nv_crtc, false);

		ret = RING_SPACE(evo, dev_priv->chipset != 0x50 ? 7 : 5);
		if (ret) {
			NV_ERROR(dev, "no space while blanking crtc\n");
			return ret;
		}
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, CLUT_MODE), 2);
		OUT_RING(evo, NV50_EVO_CRTC_CLUT_MODE_BLANK);
		OUT_RING(evo, 0);
		if (dev_priv->chipset != 0x50) {
			BEGIN_RING(evo, 0, NV84_EVO_CRTC(index, CLUT_DMA), 1);
			OUT_RING(evo, NV84_EVO_CRTC_CLUT_DMA_HANDLE_NONE);
		}

		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, FB_DMA), 1);
		OUT_RING(evo, NV50_EVO_CRTC_FB_DMA_HANDLE_NONE);
	} else {
		if (nv_crtc->cursor.visible)
			nv_crtc->cursor.show(nv_crtc, false);
		else
			nv_crtc->cursor.hide(nv_crtc, false);

		ret = RING_SPACE(evo, dev_priv->chipset != 0x50 ? 10 : 8);
		if (ret) {
			NV_ERROR(dev, "no space while unblanking crtc\n");
			return ret;
		}
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, CLUT_MODE), 2);
		OUT_RING(evo, nv_crtc->lut.depth == 8 ?
				NV50_EVO_CRTC_CLUT_MODE_OFF :
				NV50_EVO_CRTC_CLUT_MODE_ON);
		OUT_RING(evo, nv_crtc->lut.nvbo->bo.offset >> 8);
		if (dev_priv->chipset != 0x50) {
			BEGIN_RING(evo, 0, NV84_EVO_CRTC(index, CLUT_DMA), 1);
			OUT_RING(evo, NvEvoVRAM);
		}

		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, FB_OFFSET), 2);
		OUT_RING(evo, nv_crtc->fb.offset >> 8);
		OUT_RING(evo, 0);
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, FB_DMA), 1);
		if (dev_priv->chipset != 0x50)
			if (nv_crtc->fb.tile_flags == 0x7a00 ||
			    nv_crtc->fb.tile_flags == 0xfe00)
				OUT_RING(evo, NvEvoFB32);
			else
			if (nv_crtc->fb.tile_flags == 0x7000)
				OUT_RING(evo, NvEvoFB16);
			else
				OUT_RING(evo, NvEvoVRAM_LP);
		else
			OUT_RING(evo, NvEvoVRAM_LP);
	}

	nv_crtc->fb.blanked = blanked;
	return 0;
}

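/* Toggle dithering for this head.  When 'update' is set the change is
 * committed immediately with an EVO UPDATE, otherwise it is left pending
 * for the caller (mode_set batches several such changes before one update).
 */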
static int
nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool on, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	struct nouveau_channel *evo = nv50_display(dev)->master;
	int ret;

	NV_DEBUG_KMS(dev, "\n");

	ret = RING_SPACE(evo, 2 + (update ? 2 : 0));
	if (ret) {
		NV_ERROR(dev, "no space while setting dither\n");
		return ret;
	}

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, DITHER_CTRL), 1);
	if (on)
		OUT_RING(evo, NV50_EVO_CRTC_DITHER_CTRL_ON);
	else
		OUT_RING(evo, NV50_EVO_CRTC_DITHER_CTRL_OFF);

	if (update) {
		BEGIN_RING(evo, 0, NV50_EVO_UPDATE, 1);
		OUT_RING(evo, 0);
		FIRE_RING(evo);
	}

	return 0;
}

struct nouveau_connector *
nouveau_crtc_connector_get(struct nouveau_crtc *nv_crtc)
{
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_connector *connector;
	struct drm_crtc *crtc = to_drm_crtc(nv_crtc);

	/* The safest approach is to find an encoder with the right crtc, that
	 * is also linked to a connector. */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		if (connector->encoder)
			if (connector->encoder->crtc == crtc)
				return nouveau_connector(connector);
	}

	return NULL;
}

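/* Program the scaler: pick the output size from the panel's native mode
 * (or the CRTC mode for SCALE_NONE), optionally shrink it for underscan
 * compensation on HDMI displays, then letterbox/pillarbox it for the
 * CENTER and ASPECT modes.  The aspect maths use fixed point with 19
 * fractional bits, rounding to nearest.
 */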
static int
nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, int scaling_mode, bool update)
{
	struct nouveau_connector *nv_connector;
	struct drm_crtc *crtc = &nv_crtc->base;
	struct drm_device *dev = crtc->dev;
	struct nouveau_channel *evo = nv50_display(dev)->master;
	struct drm_display_mode *mode = &crtc->mode;
	u32 ctrl = 0, oX, oY;
	int ret;

	NV_DEBUG_KMS(dev, "\n");

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (!nv_connector || !nv_connector->native_mode) {
		NV_ERROR(dev, "no native mode, forcing panel scaling\n");
		scaling_mode = DRM_MODE_SCALE_NONE;
	}

	/* start off at the resolution we programmed the crtc for, this
	 * effectively handles NONE/FULL scaling
	 */
	if (scaling_mode != DRM_MODE_SCALE_NONE) {
		oX = nv_connector->native_mode->hdisplay;
		oY = nv_connector->native_mode->vdisplay;
	} else {
		oX = mode->hdisplay;
		oY = mode->vdisplay;
	}

	/* add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if (nv_connector && (nv_connector->underscan == UNDERSCAN_ON ||
			     (nv_connector->underscan == UNDERSCAN_AUTO &&
			      nv_connector->edid &&
			      drm_detect_hdmi_monitor(nv_connector->edid)))) {
		u32 bX = nv_connector->underscan_hborder;
		u32 bY = nv_connector->underscan_vborder;
		u32 aspect = (oY << 19) / oX;

		if (bX) {
			oX -= (bX * 2);
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		} else {
			oX -= (oX >> 4) + 32;
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		}
	}

	/* handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation
	 */
	switch (scaling_mode) {
	case DRM_MODE_SCALE_CENTER:
		oX = min((u32)mode->hdisplay, oX);
		oY = min((u32)mode->vdisplay, oY);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (oY < oX) {
			u32 aspect = (mode->hdisplay << 19) / mode->vdisplay;
			oX = ((oY * aspect) + (aspect / 2)) >> 19;
		} else {
			u32 aspect = (mode->vdisplay << 19) / mode->hdisplay;
			oY = ((oX * aspect) + (aspect / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	if (mode->hdisplay != oX || mode->vdisplay != oY ||
	    mode->flags & DRM_MODE_FLAG_INTERLACE ||
	    mode->flags & DRM_MODE_FLAG_DBLSCAN)
		ctrl |= NV50_EVO_CRTC_SCALE_CTRL_ACTIVE;

	ret = RING_SPACE(evo, 5);
	if (ret)
		return ret;

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, SCALE_CTRL), 1);
	OUT_RING (evo, ctrl);
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, SCALE_RES1), 2);
	OUT_RING (evo, oY << 16 | oX);
	OUT_RING (evo, oY << 16 | oX);

	if (update) {
		nv50_display_flip_stop(crtc);
		nv50_crtc_wait_complete(crtc);
		nv50_display_flip_next(crtc, crtc->fb, NULL);
	}

	return 0;
}

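/* Program the video PLL (VPLL) for a head.  Three register layouts are
 * handled: the original nv50-style two-stage PLL when the BIOS limits
 * describe a second VCO, the nva3-style PLL with a fractional feedback
 * divider on pre-Fermi chips, and what looks like the same nva3
 * coefficients written to a slightly different layout on NV_C0 and later.
 */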
int
nv50_crtc_set_clock(struct drm_device *dev, int head, int pclk)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct pll_lims pll;
	uint32_t reg1, reg2;
	int ret, N1, M1, N2, M2, P;

	ret = get_pll_limits(dev, PLL_VPLL0 + head, &pll);
	if (ret)
		return ret;

	if (pll.vco2.maxfreq) {
		ret = nv50_calc_pll(dev, &pll, pclk, &N1, &M1, &N2, &M2, &P);
		if (ret <= 0)
			return 0;

		NV_DEBUG(dev, "pclk %d out %d NM1 %d %d NM2 %d %d P %d\n",
			 pclk, ret, N1, M1, N2, M2, P);

		reg1 = nv_rd32(dev, pll.reg + 4) & 0xff00ff00;
		reg2 = nv_rd32(dev, pll.reg + 8) & 0x8000ff00;
		nv_wr32(dev, pll.reg + 0, 0x10000611);
		nv_wr32(dev, pll.reg + 4, reg1 | (M1 << 16) | N1);
		nv_wr32(dev, pll.reg + 8, reg2 | (P << 28) | (M2 << 16) | N2);
	} else
	if (dev_priv->chipset < NV_C0) {
		ret = nva3_calc_pll(dev, &pll, pclk, &N1, &N2, &M1, &P);
		if (ret <= 0)
			return 0;

		NV_DEBUG(dev, "pclk %d out %d N %d fN 0x%04x M %d P %d\n",
			 pclk, ret, N1, N2, M1, P);

		reg1 = nv_rd32(dev, pll.reg + 4) & 0xffc00000;
		nv_wr32(dev, pll.reg + 0, 0x50000610);
		nv_wr32(dev, pll.reg + 4, reg1 | (P << 16) | (M1 << 8) | N1);
		nv_wr32(dev, pll.reg + 8, N2);
	} else {
		ret = nva3_calc_pll(dev, &pll, pclk, &N1, &N2, &M1, &P);
		if (ret <= 0)
			return 0;

		NV_DEBUG(dev, "pclk %d out %d N %d fN 0x%04x M %d P %d\n",
			 pclk, ret, N1, N2, M1, P);

		nv_mask(dev, pll.reg + 0x0c, 0x00000000, 0x00000100);
		nv_wr32(dev, pll.reg + 0x04, (P << 16) | (N1 << 8) | M1);
		nv_wr32(dev, pll.reg + 0x10, N2 << 16);
	}

	return 0;
}

static void
nv50_crtc_destroy(struct drm_crtc *crtc)
{
	struct drm_device *dev;
	struct nouveau_crtc *nv_crtc;

	if (!crtc)
		return;

	dev = crtc->dev;
	nv_crtc = nouveau_crtc(crtc);

	NV_DEBUG_KMS(dev, "\n");

	drm_crtc_cleanup(&nv_crtc->base);

	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	kfree(nv_crtc->mode);
	kfree(nv_crtc);
}

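/* Legacy cursor_set hook: a 64x64, 32-bit cursor image is required; the
 * user's bo is mapped and copied word-by-word into the pre-allocated,
 * pinned cursor bo created in nv50_crtc_create(), then shown.
 */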
int
nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t buffer_handle, uint32_t width, uint32_t height)
{
	struct drm_device *dev = crtc->dev;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_bo *cursor = NULL;
	struct drm_gem_object *gem;
	int ret = 0, i;

	if (!buffer_handle) {
		nv_crtc->cursor.hide(nv_crtc, true);
		return 0;
	}

	if (width != 64 || height != 64)
		return -EINVAL;

	gem = drm_gem_object_lookup(dev, file_priv, buffer_handle);
	if (!gem)
		return -ENOENT;
	cursor = nouveau_gem_object(gem);

	ret = nouveau_bo_map(cursor);
	if (ret)
		goto out;

	/* A simple word-by-word copy will do for now. */
	for (i = 0; i < 64 * 64; i++)
		nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, nouveau_bo_rd32(cursor, i));

	nouveau_bo_unmap(cursor);

	nv_crtc->cursor.set_offset(nv_crtc, nv_crtc->cursor.nvbo->bo.offset);
	nv_crtc->cursor.show(nv_crtc, true);

out:
	drm_gem_object_unreference_unlocked(gem);
	return ret;
}

int
nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

	nv_crtc->cursor.set_pos(nv_crtc, x, y);
	return 0;
}

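/* Store the gamma ramp handed to us by the DRM core; the actual upload is
 * deferred until a framebuffer (and therefore a depth) is known.
 */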
static void
nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t start, uint32_t size)
{
	int end = (start + size > 256) ? 256 : start + size, i;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

	for (i = start; i < end; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	/* We need to know the depth before we upload, but it's possible to
	 * get called before a framebuffer is bound.  If this is the case,
	 * mark the lut values as dirty by setting depth==0, and it'll be
	 * uploaded on the first mode_set_base()
	 */
	if (!nv_crtc->base.fb) {
		nv_crtc->lut.depth = 0;
		return;
	}

	nv50_crtc_lut_load(crtc);
}

static void
nv50_crtc_save(struct drm_crtc *crtc)
{
	NV_ERROR(crtc->dev, "!!\n");
}

static void
nv50_crtc_restore(struct drm_crtc *crtc)
{
	NV_ERROR(crtc->dev, "!!\n");
}

static const struct drm_crtc_funcs nv50_crtc_funcs = {
	.save = nv50_crtc_save,
	.restore = nv50_crtc_restore,
	.cursor_set = nv50_crtc_cursor_set,
	.cursor_move = nv50_crtc_cursor_move,
	.gamma_set = nv50_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.page_flip = nouveau_crtc_page_flip,
	.destroy = nv50_crtc_destroy,
};

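/* DPMS is a no-op at the CRTC level here; the prepare/commit helpers below
 * blank and unblank the head around a mode set, and output power is
 * presumably handled by the encoder dpms hooks instead.
 */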
static void
nv50_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}

static void
nv50_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

	nv50_display_flip_stop(crtc);
	drm_vblank_pre_modeset(dev, nv_crtc->index);
	nv50_crtc_blank(nv_crtc, true);
}

static void
nv50_crtc_commit(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

	nv50_crtc_blank(nv_crtc, false);
	drm_vblank_post_modeset(dev, nv_crtc->index);
	nv50_crtc_wait_complete(crtc);
	nv50_display_flip_next(crtc, crtc->fb, NULL);
}

static bool
nv50_crtc_mode_fixup(struct drm_crtc *crtc, struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}

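/* Point the head at a framebuffer.  In the atomic path (used for things
 * like kernel debug/panic output) the bo is assumed to be pinned already
 * and only the base address is updated; otherwise the new fb is pinned and
 * the previous one unpinned.  The FB offset, size, pitch, format and pan
 * position are then pushed to the EVO channel, and the LUT is re-uploaded
 * if the framebuffer depth changed.
 */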
static int
nv50_crtc_do_mode_set_base(struct drm_crtc *crtc,
			   struct drm_framebuffer *passed_fb,
			   int x, int y, bool atomic)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *evo = nv50_display(dev)->master;
	struct drm_framebuffer *drm_fb;
	struct nouveau_framebuffer *fb;
	int ret;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

	/* no fb bound */
	if (!atomic && !crtc->fb) {
		NV_DEBUG_KMS(dev, "No FB bound\n");
		return 0;
	}

	/* If atomic, we want to switch to the fb we were passed, so
	 * now we update pointers to do that.  (We don't pin; just
	 * assume we're already pinned and update the base address.)
	 */
	if (atomic) {
		drm_fb = passed_fb;
		fb = nouveau_framebuffer(passed_fb);
	} else {
		drm_fb = crtc->fb;
		fb = nouveau_framebuffer(crtc->fb);
		/* If not atomic, we can go ahead and pin, and unpin the
		 * old fb we were passed.
		 */
		ret = nouveau_bo_pin(fb->nvbo, TTM_PL_FLAG_VRAM);
		if (ret)
			return ret;

		if (passed_fb) {
			struct nouveau_framebuffer *ofb = nouveau_framebuffer(passed_fb);
			nouveau_bo_unpin(ofb->nvbo);
		}
	}

	nv_crtc->fb.offset = fb->nvbo->bo.offset;
	nv_crtc->fb.tile_flags = nouveau_bo_tile_layout(fb->nvbo);
	nv_crtc->fb.cpp = drm_fb->bits_per_pixel / 8;
	if (!nv_crtc->fb.blanked && dev_priv->chipset != 0x50) {
		ret = RING_SPACE(evo, 2);
		if (ret)
			return ret;

		BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_DMA), 1);
		OUT_RING  (evo, fb->r_dma);
	}

	ret = RING_SPACE(evo, 12);
	if (ret)
		return ret;

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_OFFSET), 5);
	OUT_RING  (evo, nv_crtc->fb.offset >> 8);
	OUT_RING  (evo, 0);
	OUT_RING  (evo, (drm_fb->height << 16) | drm_fb->width);
	OUT_RING  (evo, fb->r_pitch);
	OUT_RING  (evo, fb->r_format);

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, CLUT_MODE), 1);
	OUT_RING  (evo, fb->base.depth == 8 ?
		   NV50_EVO_CRTC_CLUT_MODE_OFF : NV50_EVO_CRTC_CLUT_MODE_ON);

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, COLOR_CTRL), 1);
	OUT_RING  (evo, NV50_EVO_CRTC_COLOR_CTRL_COLOR);
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_POS), 1);
	OUT_RING  (evo, (y << 16) | x);

	if (nv_crtc->lut.depth != fb->base.depth) {
		nv_crtc->lut.depth = fb->base.depth;
		nv50_crtc_lut_load(crtc);
	}

	return 0;
}

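/* Full mode set for a head.  Raw timing values are derived from the
 * adjusted mode; several of them (the "hunk1"/"vunk" values) have no known
 * name and were apparently found by reverse engineering, with extra
 * halving and adjustment for interlaced and doublescan modes.  Dither,
 * scale and framebuffer state are queued afterwards without an UPDATE;
 * the commit hook later unblanks and waits for completion, which is what
 * actually fires the update.
 */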
static int
nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *mode,
		   struct drm_display_mode *adjusted_mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct drm_device *dev = crtc->dev;
	struct nouveau_channel *evo = nv50_display(dev)->master;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector = NULL;
	uint32_t hsync_dur, vsync_dur, hsync_start_to_end, vsync_start_to_end;
	uint32_t hunk1, vunk1, vunk2a, vunk2b;
	int ret;

	/* Find the connector attached to this CRTC */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);

	*nv_crtc->mode = *adjusted_mode;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

	hsync_dur = adjusted_mode->hsync_end - adjusted_mode->hsync_start;
	vsync_dur = adjusted_mode->vsync_end - adjusted_mode->vsync_start;
	hsync_start_to_end = adjusted_mode->htotal - adjusted_mode->hsync_start;
	vsync_start_to_end = adjusted_mode->vtotal - adjusted_mode->vsync_start;
	/* I can't give this a proper name, anyone else can? */
	hunk1 = adjusted_mode->htotal -
		adjusted_mode->hsync_start + adjusted_mode->hdisplay;
	vunk1 = adjusted_mode->vtotal -
		adjusted_mode->vsync_start + adjusted_mode->vdisplay;
	/* Another strange value, this time only for interlaced adjusted_modes. */
	vunk2a = 2 * adjusted_mode->vtotal -
		 adjusted_mode->vsync_start + adjusted_mode->vdisplay;
	vunk2b = adjusted_mode->vtotal -
		 adjusted_mode->vsync_start + adjusted_mode->vtotal;

	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
		vsync_dur /= 2;
		vsync_start_to_end /= 2;
		vunk1 /= 2;
		vunk2a /= 2;
		vunk2b /= 2;
		/* magic */
		if (adjusted_mode->flags & DRM_MODE_FLAG_DBLSCAN) {
			vsync_start_to_end -= 1;
			vunk1 -= 1;
			vunk2a -= 1;
			vunk2b -= 1;
		}
	}

	ret = RING_SPACE(evo, 17);
	if (ret)
		return ret;

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, CLOCK), 2);
	OUT_RING(evo, adjusted_mode->clock | 0x800000);
	OUT_RING(evo, (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 0);

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, DISPLAY_START), 5);
	OUT_RING(evo, 0);
	OUT_RING(evo, (adjusted_mode->vtotal << 16) | adjusted_mode->htotal);
	OUT_RING(evo, (vsync_dur - 1) << 16 | (hsync_dur - 1));
	OUT_RING(evo, (vsync_start_to_end - 1) << 16 |
			(hsync_start_to_end - 1));
	OUT_RING(evo, (vunk1 - 1) << 16 | (hunk1 - 1));

	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, UNK0824), 1);
		OUT_RING(evo, (vunk2b - 1) << 16 | (vunk2a - 1));
	} else {
		OUT_RING(evo, 0);
		OUT_RING(evo, 0);
	}

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, UNK082C), 1);
	OUT_RING(evo, 0);

	/* This is the actual resolution of the mode. */
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, REAL_RES), 1);
	OUT_RING(evo, (mode->vdisplay << 16) | mode->hdisplay);
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, SCALE_CENTER_OFFSET), 1);
	OUT_RING(evo, NV50_EVO_CRTC_SCALE_CENTER_OFFSET_VAL(0, 0));

	nv_crtc->set_dither(nv_crtc, nv_connector->use_dithering, false);
	nv_crtc->set_scale(nv_crtc, nv_connector->scaling_mode, false);

	return nv50_crtc_do_mode_set_base(crtc, old_fb, x, y, false);
}

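/* Non-atomic framebuffer switch (e.g. panning or fbcon takeover): stop any
 * pending page flip, program the new base, wait for the EVO update to
 * complete and re-arm the flip state.
 */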
static int
nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	int ret;

	nv50_display_flip_stop(crtc);
	ret = nv50_crtc_do_mode_set_base(crtc, old_fb, x, y, false);
	if (ret)
		return ret;

	ret = nv50_crtc_wait_complete(crtc);
	if (ret)
		return ret;

	return nv50_display_flip_next(crtc, crtc->fb, NULL);
}

static int
nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb,
			       int x, int y, enum mode_set_atomic state)
{
	int ret;

	nv50_display_flip_stop(crtc);
	ret = nv50_crtc_do_mode_set_base(crtc, fb, x, y, true);
	if (ret)
		return ret;

	return nv50_crtc_wait_complete(crtc);
}

static const struct drm_crtc_helper_funcs nv50_crtc_helper_funcs = {
	.dpms = nv50_crtc_dpms,
	.prepare = nv50_crtc_prepare,
	.commit = nv50_crtc_commit,
	.mode_fixup = nv50_crtc_mode_fixup,
	.mode_set = nv50_crtc_mode_set,
	.mode_set_base = nv50_crtc_mode_set_base,
	.mode_set_base_atomic = nv50_crtc_mode_set_base_atomic,
	.load_lut = nv50_crtc_lut_load,
};

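/* Create and register one CRTC: allocate the nouveau_crtc wrapper, seed a
 * linear default gamma ramp, and allocate/pin/map the VRAM buffer objects
 * used for the hardware LUT (4 KiB) and the 64x64x32-bit cursor image,
 * before hooking up the DRM core/helper vtables and the cursor channel.
 */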
int
nv50_crtc_create(struct drm_device *dev, int index)
{
	struct nouveau_crtc *nv_crtc = NULL;
	int ret, i;

	NV_DEBUG_KMS(dev, "\n");

	nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
	if (!nv_crtc)
		return -ENOMEM;

	nv_crtc->mode = kzalloc(sizeof(*nv_crtc->mode), GFP_KERNEL);
	if (!nv_crtc->mode) {
		kfree(nv_crtc);
		return -ENOMEM;
	}

	/* Default CLUT parameters, will be activated on the hw upon
	 * first mode set.
	 */
	for (i = 0; i < 256; i++) {
		nv_crtc->lut.r[i] = i << 8;
		nv_crtc->lut.g[i] = i << 8;
		nv_crtc->lut.b[i] = i << 8;
	}
	nv_crtc->lut.depth = 0;

	ret = nouveau_bo_new(dev, 4096, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	}

	if (ret) {
		kfree(nv_crtc->mode);
		kfree(nv_crtc);
		return ret;
	}

	nv_crtc->index = index;

	/* set function pointers */
	nv_crtc->set_dither = nv50_crtc_set_dither;
	nv_crtc->set_scale = nv50_crtc_set_scale;

	drm_crtc_init(dev, &nv_crtc->base, &nv50_crtc_funcs);
	drm_crtc_helper_add(&nv_crtc->base, &nv50_crtc_helper_funcs);
	drm_mode_crtc_set_gamma_size(&nv_crtc->base, 256);

	ret = nouveau_bo_new(dev, 64*64*4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	}

	nv50_cursor_init(nv_crtc);
	return 0;
}