nv50_crtc.c

/*
 * Copyright (C) 2008 Maarten Maathuis.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include "drmP.h"
#include "drm_mode.h"
#include "drm_crtc_helper.h"

#define NOUVEAU_DMA_DEBUG (nouveau_reg_debug & NOUVEAU_REG_DEBUG_EVO)
#include "nouveau_reg.h"
#include "nouveau_drv.h"
#include "nouveau_hw.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_fb.h"
#include "nouveau_connector.h"
#include "nv50_display.h"

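/* Upload the per-CRTC software gamma ramp into the buffer object the
 * display hardware reads its LUT from.  Each of the 256 entries is 8 bytes
 * wide, with the 16-bit R/G/B values written at +0/+2/+4 shifted down by
 * two bits; 30-bit depth modes repeat the final entry once more.
 */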
static void
nv50_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	NV_DEBUG_KMS(crtc->dev, "\n");

	for (i = 0; i < 256; i++) {
		writew(nv_crtc->lut.r[i] >> 2, lut + 8*i + 0);
		writew(nv_crtc->lut.g[i] >> 2, lut + 8*i + 2);
		writew(nv_crtc->lut.b[i] >> 2, lut + 8*i + 4);
	}

	if (nv_crtc->lut.depth == 30) {
		writew(nv_crtc->lut.r[i - 1] >> 2, lut + 8*i + 0);
		writew(nv_crtc->lut.g[i - 1] >> 2, lut + 8*i + 2);
		writew(nv_crtc->lut.b[i - 1] >> 2, lut + 8*i + 4);
	}
}

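/* Blank or unblank a head through the EVO channel: blanking points the
 * CLUT and framebuffer DMA objects away from memory and hides the cursor,
 * unblanking restores them (and the cursor, if it was visible).
 */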
int
nv50_crtc_blank(struct nouveau_crtc *nv_crtc, bool blanked)
{
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *evo = nv50_display(dev)->master;
	int index = nv_crtc->index, ret;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);
	NV_DEBUG_KMS(dev, "%s\n", blanked ? "blanked" : "unblanked");

	if (blanked) {
		nv_crtc->cursor.hide(nv_crtc, false);

		ret = RING_SPACE(evo, dev_priv->chipset != 0x50 ? 7 : 5);
		if (ret) {
			NV_ERROR(dev, "no space while blanking crtc\n");
			return ret;
		}
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, CLUT_MODE), 2);
		OUT_RING(evo, NV50_EVO_CRTC_CLUT_MODE_BLANK);
		OUT_RING(evo, 0);
		if (dev_priv->chipset != 0x50) {
			BEGIN_RING(evo, 0, NV84_EVO_CRTC(index, CLUT_DMA), 1);
			OUT_RING(evo, NV84_EVO_CRTC_CLUT_DMA_HANDLE_NONE);
		}

		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, FB_DMA), 1);
		OUT_RING(evo, NV50_EVO_CRTC_FB_DMA_HANDLE_NONE);
	} else {
		if (nv_crtc->cursor.visible)
			nv_crtc->cursor.show(nv_crtc, false);
		else
			nv_crtc->cursor.hide(nv_crtc, false);

		ret = RING_SPACE(evo, dev_priv->chipset != 0x50 ? 10 : 8);
		if (ret) {
			NV_ERROR(dev, "no space while unblanking crtc\n");
			return ret;
		}
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, CLUT_MODE), 2);
		OUT_RING(evo, nv_crtc->lut.depth == 8 ?
			 NV50_EVO_CRTC_CLUT_MODE_OFF :
			 NV50_EVO_CRTC_CLUT_MODE_ON);
		OUT_RING(evo, nv_crtc->lut.nvbo->bo.offset >> 8);
		if (dev_priv->chipset != 0x50) {
			BEGIN_RING(evo, 0, NV84_EVO_CRTC(index, CLUT_DMA), 1);
			OUT_RING(evo, NvEvoVRAM);
		}

		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, FB_OFFSET), 2);
		OUT_RING(evo, nv_crtc->fb.offset >> 8);
		OUT_RING(evo, 0);
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, FB_DMA), 1);
		if (dev_priv->chipset != 0x50)
			if (nv_crtc->fb.tile_flags == 0x7a00 ||
			    nv_crtc->fb.tile_flags == 0xfe00)
				OUT_RING(evo, NvEvoFB32);
			else
			if (nv_crtc->fb.tile_flags == 0x7000)
				OUT_RING(evo, NvEvoFB16);
			else
				OUT_RING(evo, NvEvoVRAM_LP);
		else
			OUT_RING(evo, NvEvoVRAM_LP);
	}

	nv_crtc->fb.blanked = blanked;
	return 0;
}

static int
nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nouveau_channel *evo = nv50_display(nv_crtc->base.dev)->master;
	struct nouveau_connector *nv_connector;
	struct drm_connector *connector;
	int head = nv_crtc->index, ret;
	u32 mode = 0x00;

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	connector = &nv_connector->base;
	if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
		if (nv_crtc->base.fb->depth > connector->display_info.bpc * 3)
			mode = DITHERING_MODE_DYNAMIC2X2;
	} else {
		mode = nv_connector->dithering_mode;
	}

	if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
		if (connector->display_info.bpc >= 8)
			mode |= DITHERING_DEPTH_8BPC;
	} else {
		mode |= nv_connector->dithering_depth;
	}

	ret = RING_SPACE(evo, 2 + (update ? 2 : 0));
	if (ret == 0) {
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(head, DITHER_CTRL), 1);
		OUT_RING  (evo, mode);
		if (update) {
			BEGIN_RING(evo, 0, NV50_EVO_UPDATE, 1);
			OUT_RING  (evo, 0);
			FIRE_RING (evo);
		}
	}

	return ret;
}

struct nouveau_connector *
nouveau_crtc_connector_get(struct nouveau_crtc *nv_crtc)
{
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_connector *connector;
	struct drm_crtc *crtc = to_drm_crtc(nv_crtc);

	/* The safest approach is to find an encoder with the right crtc, that
	 * is also linked to a connector. */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		if (connector->encoder)
			if (connector->encoder->crtc == crtc)
				return nouveau_connector(connector);
	}

	return NULL;
}

static int
nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nouveau_connector *nv_connector;
	struct drm_crtc *crtc = &nv_crtc->base;
	struct drm_device *dev = crtc->dev;
	struct nouveau_channel *evo = nv50_display(dev)->master;
	struct drm_display_mode *umode = &crtc->mode;
	struct drm_display_mode *omode;
	int scaling_mode, ret;
	u32 ctrl = 0, oX, oY;

	NV_DEBUG_KMS(dev, "\n");

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (!nv_connector || !nv_connector->native_mode) {
		NV_ERROR(dev, "no native mode, forcing panel scaling\n");
		scaling_mode = DRM_MODE_SCALE_NONE;
	} else {
		scaling_mode = nv_connector->scaling_mode;
	}

	/* start off at the resolution we programmed the crtc for, this
	 * effectively handles NONE/FULL scaling
	 */
	if (scaling_mode != DRM_MODE_SCALE_NONE)
		omode = nv_connector->native_mode;
	else
		omode = umode;

	oX = omode->hdisplay;
	oY = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		oY *= 2;

	/* add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if (nv_connector && (nv_connector->underscan == UNDERSCAN_ON ||
			     (nv_connector->underscan == UNDERSCAN_AUTO &&
			      nv_connector->edid &&
			      drm_detect_hdmi_monitor(nv_connector->edid)))) {
		u32 bX = nv_connector->underscan_hborder;
		u32 bY = nv_connector->underscan_vborder;
		u32 aspect = (oY << 19) / oX;

		if (bX) {
			oX -= (bX * 2);
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		} else {
			oX -= (oX >> 4) + 32;
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		}
	}

	/* handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation
	 */
	switch (scaling_mode) {
	case DRM_MODE_SCALE_CENTER:
		oX = min((u32)umode->hdisplay, oX);
		oY = min((u32)umode->vdisplay, oY);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (oY < oX) {
			u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
			oX = ((oY * aspect) + (aspect / 2)) >> 19;
		} else {
			u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
			oY = ((oX * aspect) + (aspect / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	if (umode->hdisplay != oX || umode->vdisplay != oY ||
	    umode->flags & DRM_MODE_FLAG_INTERLACE ||
	    umode->flags & DRM_MODE_FLAG_DBLSCAN)
		ctrl |= NV50_EVO_CRTC_SCALE_CTRL_ACTIVE;

	ret = RING_SPACE(evo, 5);
	if (ret)
		return ret;

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, SCALE_CTRL), 1);
	OUT_RING  (evo, ctrl);
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, SCALE_RES1), 2);
	OUT_RING  (evo, oY << 16 | oX);
	OUT_RING  (evo, oY << 16 | oX);

	if (update) {
		nv50_display_flip_stop(crtc);
		nv50_display_sync(dev);
		nv50_display_flip_next(crtc, crtc->fb, NULL);
	}

	return 0;
}

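/* Program the video PLL for a head.  Three register layouts are handled:
 * two-stage VPLLs (when a second VCO limit is reported), single-stage
 * PLLs on pre-NVC0 chipsets, and the NVC0-style PLL registers.
 */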
int
nv50_crtc_set_clock(struct drm_device *dev, int head, int pclk)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct pll_lims pll;
	uint32_t reg1, reg2;
	int ret, N1, M1, N2, M2, P;

	ret = get_pll_limits(dev, PLL_VPLL0 + head, &pll);
	if (ret)
		return ret;

	if (pll.vco2.maxfreq) {
		ret = nv50_calc_pll(dev, &pll, pclk, &N1, &M1, &N2, &M2, &P);
		if (ret <= 0)
			return 0;

		NV_DEBUG(dev, "pclk %d out %d NM1 %d %d NM2 %d %d P %d\n",
			 pclk, ret, N1, M1, N2, M2, P);

		reg1 = nv_rd32(dev, pll.reg + 4) & 0xff00ff00;
		reg2 = nv_rd32(dev, pll.reg + 8) & 0x8000ff00;
		nv_wr32(dev, pll.reg + 0, 0x10000611);
		nv_wr32(dev, pll.reg + 4, reg1 | (M1 << 16) | N1);
		nv_wr32(dev, pll.reg + 8, reg2 | (P << 28) | (M2 << 16) | N2);
	} else
	if (dev_priv->chipset < NV_C0) {
		ret = nva3_calc_pll(dev, &pll, pclk, &N1, &N2, &M1, &P);
		if (ret <= 0)
			return 0;

		NV_DEBUG(dev, "pclk %d out %d N %d fN 0x%04x M %d P %d\n",
			 pclk, ret, N1, N2, M1, P);

		reg1 = nv_rd32(dev, pll.reg + 4) & 0xffc00000;
		nv_wr32(dev, pll.reg + 0, 0x50000610);
		nv_wr32(dev, pll.reg + 4, reg1 | (P << 16) | (M1 << 8) | N1);
		nv_wr32(dev, pll.reg + 8, N2);
	} else {
		ret = nva3_calc_pll(dev, &pll, pclk, &N1, &N2, &M1, &P);
		if (ret <= 0)
			return 0;

		NV_DEBUG(dev, "pclk %d out %d N %d fN 0x%04x M %d P %d\n",
			 pclk, ret, N1, N2, M1, P);

		nv_mask(dev, pll.reg + 0x0c, 0x00000000, 0x00000100);
		nv_wr32(dev, pll.reg + 0x04, (P << 16) | (N1 << 8) | M1);
		nv_wr32(dev, pll.reg + 0x10, N2 << 16);
	}

	return 0;
}

static void
nv50_crtc_destroy(struct drm_crtc *crtc)
{
	struct drm_device *dev;
	struct nouveau_crtc *nv_crtc;

	if (!crtc)
		return;

	dev = crtc->dev;
	nv_crtc = nouveau_crtc(crtc);

	NV_DEBUG_KMS(dev, "\n");

	drm_crtc_cleanup(&nv_crtc->base);

	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	kfree(nv_crtc);
}

int
nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t buffer_handle, uint32_t width, uint32_t height)
{
	struct drm_device *dev = crtc->dev;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_bo *cursor = NULL;
	struct drm_gem_object *gem;
	int ret = 0, i;

	if (!buffer_handle) {
		nv_crtc->cursor.hide(nv_crtc, true);
		return 0;
	}

	if (width != 64 || height != 64)
		return -EINVAL;

	gem = drm_gem_object_lookup(dev, file_priv, buffer_handle);
	if (!gem)
		return -ENOENT;
	cursor = nouveau_gem_object(gem);

	ret = nouveau_bo_map(cursor);
	if (ret)
		goto out;

	/* The simple will do for now. */
	for (i = 0; i < 64 * 64; i++)
		nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, nouveau_bo_rd32(cursor, i));

	nouveau_bo_unmap(cursor);

	nv_crtc->cursor.set_offset(nv_crtc, nv_crtc->cursor.nvbo->bo.offset);
	nv_crtc->cursor.show(nv_crtc, true);

out:
	drm_gem_object_unreference_unlocked(gem);
	return ret;
}

int
nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

	nv_crtc->cursor.set_pos(nv_crtc, x, y);
	return 0;
}

static void
nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t start, uint32_t size)
{
	int end = (start + size > 256) ? 256 : start + size, i;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

	for (i = start; i < end; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	/* We need to know the depth before we upload, but it's possible to
	 * get called before a framebuffer is bound.  If this is the case,
	 * mark the lut values as dirty by setting depth==0, and it'll be
	 * uploaded on the first mode_set_base()
	 */
	if (!nv_crtc->base.fb) {
		nv_crtc->lut.depth = 0;
		return;
	}

	nv50_crtc_lut_load(crtc);
}

static void
nv50_crtc_save(struct drm_crtc *crtc)
{
	NV_ERROR(crtc->dev, "!!\n");
}

static void
nv50_crtc_restore(struct drm_crtc *crtc)
{
	NV_ERROR(crtc->dev, "!!\n");
}

static const struct drm_crtc_funcs nv50_crtc_funcs = {
	.save = nv50_crtc_save,
	.restore = nv50_crtc_restore,
	.cursor_set = nv50_crtc_cursor_set,
	.cursor_move = nv50_crtc_cursor_move,
	.gamma_set = nv50_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.page_flip = nouveau_crtc_page_flip,
	.destroy = nv50_crtc_destroy,
};

static void
nv50_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}

static void
nv50_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

	nv50_display_flip_stop(crtc);
	drm_vblank_pre_modeset(dev, nv_crtc->index);
	nv50_crtc_blank(nv_crtc, true);
}

static void
nv50_crtc_commit(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

	nv50_crtc_blank(nv_crtc, false);
	drm_vblank_post_modeset(dev, nv_crtc->index);
	nv50_display_sync(dev);
	nv50_display_flip_next(crtc, crtc->fb, NULL);
}

static bool
nv50_crtc_mode_fixup(struct drm_crtc *crtc, struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}

static int
nv50_crtc_do_mode_set_base(struct drm_crtc *crtc,
			   struct drm_framebuffer *passed_fb,
			   int x, int y, bool atomic)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *evo = nv50_display(dev)->master;
	struct drm_framebuffer *drm_fb;
	struct nouveau_framebuffer *fb;
	int ret;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

	/* no fb bound */
	if (!atomic && !crtc->fb) {
		NV_DEBUG_KMS(dev, "No FB bound\n");
		return 0;
	}

	/* If atomic, we want to switch to the fb we were passed, so
	 * now we update pointers to do that.  (We don't pin; just
	 * assume we're already pinned and update the base address.)
	 */
	if (atomic) {
		drm_fb = passed_fb;
		fb = nouveau_framebuffer(passed_fb);
	} else {
		drm_fb = crtc->fb;
		fb = nouveau_framebuffer(crtc->fb);
		/* If not atomic, we can go ahead and pin, and unpin the
		 * old fb we were passed.
		 */
		ret = nouveau_bo_pin(fb->nvbo, TTM_PL_FLAG_VRAM);
		if (ret)
			return ret;

		if (passed_fb) {
			struct nouveau_framebuffer *ofb = nouveau_framebuffer(passed_fb);
			nouveau_bo_unpin(ofb->nvbo);
		}
	}

	nv_crtc->fb.offset = fb->nvbo->bo.offset;
	nv_crtc->fb.tile_flags = nouveau_bo_tile_layout(fb->nvbo);
	nv_crtc->fb.cpp = drm_fb->bits_per_pixel / 8;
	if (!nv_crtc->fb.blanked && dev_priv->chipset != 0x50) {
		ret = RING_SPACE(evo, 2);
		if (ret)
			return ret;

		BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_DMA), 1);
		OUT_RING  (evo, fb->r_dma);
	}

	ret = RING_SPACE(evo, 12);
	if (ret)
		return ret;

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_OFFSET), 5);
	OUT_RING  (evo, nv_crtc->fb.offset >> 8);
	OUT_RING  (evo, 0);
	OUT_RING  (evo, (drm_fb->height << 16) | drm_fb->width);
	OUT_RING  (evo, fb->r_pitch);
	OUT_RING  (evo, fb->r_format);

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, CLUT_MODE), 1);
	OUT_RING  (evo, fb->base.depth == 8 ?
		   NV50_EVO_CRTC_CLUT_MODE_OFF : NV50_EVO_CRTC_CLUT_MODE_ON);

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, COLOR_CTRL), 1);
	OUT_RING  (evo, NV50_EVO_CRTC_COLOR_CTRL_COLOR);
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_POS), 1);
	OUT_RING  (evo, (y << 16) | x);

	if (nv_crtc->lut.depth != fb->base.depth) {
		nv_crtc->lut.depth = fb->base.depth;
		nv50_crtc_lut_load(crtc);
	}

	return 0;
}

static int
nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct drm_device *dev = crtc->dev;
	struct nouveau_channel *evo = nv50_display(dev)->master;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 head = nv_crtc->index * 0x400;
	u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
	u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
	u32 vblan2e = 0, vblan2s = 1;
	int ret;

	/* hw timing description looks like this:
	 *
	 * <sync> <back porch> <---------display---------> <front porch>
	 * ______
	 *       |____________|---------------------------|____________|
	 *
	 *       ^ synce      ^ blanke                     ^ blanks     ^ active
	 *
	 * interlaced modes also have 2 additional values pointing at the end
	 * and start of the next field's blanking period.
	 */

	hactive = mode->htotal;
	hsynce  = mode->hsync_end - mode->hsync_start - 1;
	hbackp  = mode->htotal - mode->hsync_end;
	hblanke = hsynce + hbackp;
	hfrontp = mode->hsync_start - mode->hdisplay;
	hblanks = mode->htotal - hfrontp - 1;

	vactive = mode->vtotal * vscan / ilace;
	vsynce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	vblanke = vsynce + vbackp;
	vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	vblanks = vactive - vfrontp - 1;
	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		vblan2e = vactive + vsynce + vbackp;
		vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
		vactive = (vactive * 2) + 1;
	}

	ret = RING_SPACE(evo, 18);
	if (ret == 0) {
		BEGIN_RING(evo, 0, 0x0804 + head, 2);
		OUT_RING  (evo, 0x00800000 | mode->clock);
		OUT_RING  (evo, (ilace == 2) ? 2 : 0);
		BEGIN_RING(evo, 0, 0x0810 + head, 6);
		OUT_RING  (evo, 0x00000000); /* border colour */
		OUT_RING  (evo, (vactive << 16) | hactive);
		OUT_RING  (evo, ( vsynce << 16) | hsynce);
		OUT_RING  (evo, (vblanke << 16) | hblanke);
		OUT_RING  (evo, (vblanks << 16) | hblanks);
		OUT_RING  (evo, (vblan2e << 16) | vblan2s);
		BEGIN_RING(evo, 0, 0x082c + head, 1);
		OUT_RING  (evo, 0x00000000);
		BEGIN_RING(evo, 0, 0x0900 + head, 1);
		OUT_RING  (evo, 0x00000311); /* makes sync channel work */
		BEGIN_RING(evo, 0, 0x08c8 + head, 1);
		OUT_RING  (evo, (umode->vdisplay << 16) | umode->hdisplay);
		BEGIN_RING(evo, 0, 0x08d4 + head, 1);
		OUT_RING  (evo, 0x00000000); /* screen position */
	}

	nv_crtc->set_dither(nv_crtc, false);
	nv_crtc->set_scale(nv_crtc, false);

	return nv50_crtc_do_mode_set_base(crtc, old_fb, x, y, false);
}

static int
nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	int ret;

	nv50_display_flip_stop(crtc);
	ret = nv50_crtc_do_mode_set_base(crtc, old_fb, x, y, false);
	if (ret)
		return ret;

	ret = nv50_display_sync(crtc->dev);
	if (ret)
		return ret;

	return nv50_display_flip_next(crtc, crtc->fb, NULL);
}

static int
nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb,
			       int x, int y, enum mode_set_atomic state)
{
	int ret;

	nv50_display_flip_stop(crtc);
	ret = nv50_crtc_do_mode_set_base(crtc, fb, x, y, true);
	if (ret)
		return ret;

	return nv50_display_sync(crtc->dev);
}

static const struct drm_crtc_helper_funcs nv50_crtc_helper_funcs = {
	.dpms = nv50_crtc_dpms,
	.prepare = nv50_crtc_prepare,
	.commit = nv50_crtc_commit,
	.mode_fixup = nv50_crtc_mode_fixup,
	.mode_set = nv50_crtc_mode_set,
	.mode_set_base = nv50_crtc_mode_set_base,
	.mode_set_base_atomic = nv50_crtc_mode_set_base_atomic,
	.load_lut = nv50_crtc_lut_load,
};

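/* Create one CRTC: allocate the nouveau_crtc, seed a linear gamma ramp,
 * pin and map a VRAM buffer for the 256-entry hardware LUT, register the
 * CRTC with the DRM core, then set up the 64x64 cursor buffer.
 */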
int
nv50_crtc_create(struct drm_device *dev, int index)
{
	struct nouveau_crtc *nv_crtc = NULL;
	int ret, i;

	NV_DEBUG_KMS(dev, "\n");

	nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
	if (!nv_crtc)
		return -ENOMEM;

	/* Default CLUT parameters, will be activated on the hw upon
	 * first mode set.
	 */
	for (i = 0; i < 256; i++) {
		nv_crtc->lut.r[i] = i << 8;
		nv_crtc->lut.g[i] = i << 8;
		nv_crtc->lut.b[i] = i << 8;
	}
	nv_crtc->lut.depth = 0;

	ret = nouveau_bo_new(dev, 4096, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	}

	if (ret) {
		kfree(nv_crtc);
		return ret;
	}

	nv_crtc->index = index;

	/* set function pointers */
	nv_crtc->set_dither = nv50_crtc_set_dither;
	nv_crtc->set_scale = nv50_crtc_set_scale;

	drm_crtc_init(dev, &nv_crtc->base, &nv50_crtc_funcs);
	drm_crtc_helper_add(&nv_crtc->base, &nv50_crtc_helper_funcs);
	drm_mode_crtc_set_gamma_size(&nv_crtc->base, 256);

	ret = nouveau_bo_new(dev, 64*64*4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	}

	nv50_cursor_init(nv_crtc);
	return 0;
}