nv50_crtc.c

/*
 * Copyright (C) 2008 Maarten Maathuis.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include "drmP.h"
#include "drm_mode.h"
#include "drm_crtc_helper.h"

#define NOUVEAU_DMA_DEBUG (nouveau_reg_debug & NOUVEAU_REG_DEBUG_EVO)
#include "nouveau_reg.h"
#include "nouveau_drv.h"
#include "nouveau_hw.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_fb.h"
#include "nouveau_connector.h"
#include "nv50_display.h"

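/* Upload the software copy of the gamma LUT (nv_crtc->lut.{r,g,b}) into the
 * per-CRTC LUT buffer object.  Each 16-bit component is shifted down by two
 * bits and written at an 8-byte stride; for 30-bit depth an extra copy of
 * the last entry is written after the 256 regular entries. */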
static void
nv50_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	NV_DEBUG_KMS(crtc->dev, "\n");

	for (i = 0; i < 256; i++) {
		writew(nv_crtc->lut.r[i] >> 2, lut + 8*i + 0);
		writew(nv_crtc->lut.g[i] >> 2, lut + 8*i + 2);
		writew(nv_crtc->lut.b[i] >> 2, lut + 8*i + 4);
	}

	if (nv_crtc->lut.depth == 30) {
		writew(nv_crtc->lut.r[i - 1] >> 2, lut + 8*i + 0);
		writew(nv_crtc->lut.g[i - 1] >> 2, lut + 8*i + 2);
		writew(nv_crtc->lut.b[i - 1] >> 2, lut + 8*i + 4);
	}
}

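/* Blank or unblank a CRTC: blanking points the CLUT and framebuffer DMA
 * objects at nothing, unblanking points them back at the LUT buffer and the
 * current scanout surface.  The new state is recorded in nv_crtc->fb.blanked. */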
int
nv50_crtc_blank(struct nouveau_crtc *nv_crtc, bool blanked)
{
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *evo = dev_priv->evo;
	int index = nv_crtc->index, ret;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);
	NV_DEBUG_KMS(dev, "%s\n", blanked ? "blanked" : "unblanked");

	if (blanked) {
		nv_crtc->cursor.hide(nv_crtc, false);

		ret = RING_SPACE(evo, dev_priv->chipset != 0x50 ? 7 : 5);
		if (ret) {
			NV_ERROR(dev, "no space while blanking crtc\n");
			return ret;
		}
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, CLUT_MODE), 2);
		OUT_RING(evo, NV50_EVO_CRTC_CLUT_MODE_BLANK);
		OUT_RING(evo, 0);
		if (dev_priv->chipset != 0x50) {
			BEGIN_RING(evo, 0, NV84_EVO_CRTC(index, CLUT_DMA), 1);
			OUT_RING(evo, NV84_EVO_CRTC_CLUT_DMA_HANDLE_NONE);
		}

		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, FB_DMA), 1);
		OUT_RING(evo, NV50_EVO_CRTC_FB_DMA_HANDLE_NONE);
	} else {
		if (nv_crtc->cursor.visible)
			nv_crtc->cursor.show(nv_crtc, false);
		else
			nv_crtc->cursor.hide(nv_crtc, false);

		ret = RING_SPACE(evo, dev_priv->chipset != 0x50 ? 10 : 8);
		if (ret) {
			NV_ERROR(dev, "no space while unblanking crtc\n");
			return ret;
		}
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, CLUT_MODE), 2);
		OUT_RING(evo, nv_crtc->lut.depth == 8 ?
			 NV50_EVO_CRTC_CLUT_MODE_OFF :
			 NV50_EVO_CRTC_CLUT_MODE_ON);
		OUT_RING(evo, (nv_crtc->lut.nvbo->bo.mem.start << PAGE_SHIFT) >> 8);
		if (dev_priv->chipset != 0x50) {
			BEGIN_RING(evo, 0, NV84_EVO_CRTC(index, CLUT_DMA), 1);
			OUT_RING(evo, NvEvoVRAM);
		}

		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, FB_OFFSET), 2);
		OUT_RING(evo, nv_crtc->fb.offset >> 8);
		OUT_RING(evo, 0);
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, FB_DMA), 1);
		if (dev_priv->chipset != 0x50)
			if (nv_crtc->fb.tile_flags == 0x7a00 ||
			    nv_crtc->fb.tile_flags == 0xfe00)
				OUT_RING(evo, NvEvoFB32);
			else
			if (nv_crtc->fb.tile_flags == 0x7000)
				OUT_RING(evo, NvEvoFB16);
			else
				OUT_RING(evo, NvEvoVRAM_LP);
		else
			OUT_RING(evo, NvEvoVRAM_LP);
	}

	nv_crtc->fb.blanked = blanked;
	return 0;
}

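/* Enable or disable dithering on this CRTC; when 'update' is set, the change
 * is committed to the hardware immediately with an EVO UPDATE. */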
static int
nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool on, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *evo = dev_priv->evo;
	int ret;

	NV_DEBUG_KMS(dev, "\n");

	ret = RING_SPACE(evo, 2 + (update ? 2 : 0));
	if (ret) {
		NV_ERROR(dev, "no space while setting dither\n");
		return ret;
	}

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, DITHER_CTRL), 1);
	if (on)
		OUT_RING(evo, NV50_EVO_CRTC_DITHER_CTRL_ON);
	else
		OUT_RING(evo, NV50_EVO_CRTC_DITHER_CTRL_OFF);

	if (update) {
		BEGIN_RING(evo, 0, NV50_EVO_UPDATE, 1);
		OUT_RING(evo, 0);
		FIRE_RING(evo);
	}

	return 0;
}

struct nouveau_connector *
nouveau_crtc_connector_get(struct nouveau_crtc *nv_crtc)
{
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_connector *connector;
	struct drm_crtc *crtc = to_drm_crtc(nv_crtc);

	/* The safest approach is to find an encoder with the right crtc, that
	 * is also linked to a connector. */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		if (connector->encoder)
			if (connector->encoder->crtc == crtc)
				return nouveau_connector(connector);
	}

	return NULL;
}

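/* Program the CRTC scaler.  The output size is derived from the requested
 * scaling mode and the connector's native mode, falling back to no scaling
 * when no native mode is known. */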
static int
nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, int scaling_mode, bool update)
{
	struct nouveau_connector *nv_connector =
		nouveau_crtc_connector_get(nv_crtc);
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *evo = dev_priv->evo;
	struct drm_display_mode *native_mode = NULL;
	struct drm_display_mode *mode = &nv_crtc->base.mode;
	uint32_t outX, outY, horiz, vert;
	int ret;

	NV_DEBUG_KMS(dev, "\n");

	switch (scaling_mode) {
	case DRM_MODE_SCALE_NONE:
		break;
	default:
		if (!nv_connector || !nv_connector->native_mode) {
			NV_ERROR(dev, "No native mode, forcing panel scaling\n");
			scaling_mode = DRM_MODE_SCALE_NONE;
		} else {
			native_mode = nv_connector->native_mode;
		}
		break;
	}

	switch (scaling_mode) {
	case DRM_MODE_SCALE_ASPECT:
		horiz = (native_mode->hdisplay << 19) / mode->hdisplay;
		vert = (native_mode->vdisplay << 19) / mode->vdisplay;

		if (vert > horiz) {
			outX = (mode->hdisplay * horiz) >> 19;
			outY = (mode->vdisplay * horiz) >> 19;
		} else {
			outX = (mode->hdisplay * vert) >> 19;
			outY = (mode->vdisplay * vert) >> 19;
		}
		break;
	case DRM_MODE_SCALE_FULLSCREEN:
		outX = native_mode->hdisplay;
		outY = native_mode->vdisplay;
		break;
	case DRM_MODE_SCALE_CENTER:
	case DRM_MODE_SCALE_NONE:
	default:
		outX = mode->hdisplay;
		outY = mode->vdisplay;
		break;
	}

	ret = RING_SPACE(evo, update ? 7 : 5);
	if (ret)
		return ret;

	/* Got a better name for SCALER_ACTIVE? */
	/* One day i've got to really figure out why this is needed. */
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, SCALE_CTRL), 1);
	if ((mode->flags & DRM_MODE_FLAG_DBLSCAN) ||
	    (mode->flags & DRM_MODE_FLAG_INTERLACE) ||
	    mode->hdisplay != outX || mode->vdisplay != outY) {
		OUT_RING(evo, NV50_EVO_CRTC_SCALE_CTRL_ACTIVE);
	} else {
		OUT_RING(evo, NV50_EVO_CRTC_SCALE_CTRL_INACTIVE);
	}

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, SCALE_RES1), 2);
	OUT_RING(evo, outY << 16 | outX);
	OUT_RING(evo, outY << 16 | outX);

	if (update) {
		BEGIN_RING(evo, 0, NV50_EVO_UPDATE, 1);
		OUT_RING(evo, 0);
		FIRE_RING(evo);
	}

	return 0;
}

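/* Program the video PLL for the given head to the requested pixel clock,
 * using the register layout appropriate to the chipset as reported by its
 * PLL limits. */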
int
nv50_crtc_set_clock(struct drm_device *dev, int head, int pclk)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct pll_lims pll;
	uint32_t reg1, reg2;
	int ret, N1, M1, N2, M2, P;

	ret = get_pll_limits(dev, PLL_VPLL0 + head, &pll);
	if (ret)
		return ret;

	if (pll.vco2.maxfreq) {
		ret = nv50_calc_pll(dev, &pll, pclk, &N1, &M1, &N2, &M2, &P);
		if (ret <= 0)
			return 0;

		NV_DEBUG(dev, "pclk %d out %d NM1 %d %d NM2 %d %d P %d\n",
			 pclk, ret, N1, M1, N2, M2, P);

		reg1 = nv_rd32(dev, pll.reg + 4) & 0xff00ff00;
		reg2 = nv_rd32(dev, pll.reg + 8) & 0x8000ff00;
		nv_wr32(dev, pll.reg + 0, 0x10000611);
		nv_wr32(dev, pll.reg + 4, reg1 | (M1 << 16) | N1);
		nv_wr32(dev, pll.reg + 8, reg2 | (P << 28) | (M2 << 16) | N2);
	} else
	if (dev_priv->chipset < NV_C0) {
		ret = nv50_calc_pll2(dev, &pll, pclk, &N1, &N2, &M1, &P);
		if (ret <= 0)
			return 0;

		NV_DEBUG(dev, "pclk %d out %d N %d fN 0x%04x M %d P %d\n",
			 pclk, ret, N1, N2, M1, P);

		reg1 = nv_rd32(dev, pll.reg + 4) & 0xffc00000;
		nv_wr32(dev, pll.reg + 0, 0x50000610);
		nv_wr32(dev, pll.reg + 4, reg1 | (P << 16) | (M1 << 8) | N1);
		nv_wr32(dev, pll.reg + 8, N2);
	} else {
		ret = nv50_calc_pll2(dev, &pll, pclk, &N1, &N2, &M1, &P);
		if (ret <= 0)
			return 0;

		NV_DEBUG(dev, "pclk %d out %d N %d fN 0x%04x M %d P %d\n",
			 pclk, ret, N1, N2, M1, P);

		nv_mask(dev, pll.reg + 0x0c, 0x00000000, 0x00000100);
		nv_wr32(dev, pll.reg + 0x04, (P << 16) | (N1 << 8) | M1);
		nv_wr32(dev, pll.reg + 0x10, N2 << 16);
	}

	return 0;
}

static void
nv50_crtc_destroy(struct drm_crtc *crtc)
{
	struct drm_device *dev;
	struct nouveau_crtc *nv_crtc;

	if (!crtc)
		return;

	dev = crtc->dev;
	nv_crtc = nouveau_crtc(crtc);

	NV_DEBUG_KMS(dev, "\n");

	drm_crtc_cleanup(&nv_crtc->base);

	nv50_cursor_fini(nv_crtc);

	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	kfree(nv_crtc->mode);
	kfree(nv_crtc);
}

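/* Update the 64x64 cursor image from a userspace GEM buffer, or hide the
 * cursor when no buffer handle is supplied. */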
int
nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t buffer_handle, uint32_t width, uint32_t height)
{
	struct drm_device *dev = crtc->dev;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_bo *cursor = NULL;
	struct drm_gem_object *gem;
	int ret = 0, i;

	if (width != 64 || height != 64)
		return -EINVAL;

	if (!buffer_handle) {
		nv_crtc->cursor.hide(nv_crtc, true);
		return 0;
	}

	gem = drm_gem_object_lookup(dev, file_priv, buffer_handle);
	if (!gem)
		return -ENOENT;
	cursor = nouveau_gem_object(gem);

	ret = nouveau_bo_map(cursor);
	if (ret)
		goto out;

	/* The simple will do for now. */
	for (i = 0; i < 64 * 64; i++)
		nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, nouveau_bo_rd32(cursor, i));

	nouveau_bo_unmap(cursor);

	nv_crtc->cursor.set_offset(nv_crtc, nv_crtc->cursor.nvbo->bo.mem.start << PAGE_SHIFT);
	nv_crtc->cursor.show(nv_crtc, true);

out:
	drm_gem_object_unreference_unlocked(gem);
	return ret;
}

int
nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

	nv_crtc->cursor.set_pos(nv_crtc, x, y);
	return 0;
}

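/* DRM gamma_set hook: copy the requested ramp into the software LUT and,
 * if a framebuffer is already bound, upload it to the hardware. */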
static void
nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t start, uint32_t size)
{
	int end = (start + size > 256) ? 256 : start + size, i;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

	for (i = start; i < end; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	/* We need to know the depth before we upload, but it's possible to
	 * get called before a framebuffer is bound.  If this is the case,
	 * mark the lut values as dirty by setting depth==0, and it'll be
	 * uploaded on the first mode_set_base()
	 */
	if (!nv_crtc->base.fb) {
		nv_crtc->lut.depth = 0;
		return;
	}

	nv50_crtc_lut_load(crtc);
}

static void
nv50_crtc_save(struct drm_crtc *crtc)
{
	NV_ERROR(crtc->dev, "!!\n");
}

static void
nv50_crtc_restore(struct drm_crtc *crtc)
{
	NV_ERROR(crtc->dev, "!!\n");
}

static const struct drm_crtc_funcs nv50_crtc_funcs = {
	.save = nv50_crtc_save,
	.restore = nv50_crtc_restore,
	.cursor_set = nv50_crtc_cursor_set,
	.cursor_move = nv50_crtc_cursor_move,
	.gamma_set = nv50_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.page_flip = nouveau_crtc_page_flip,
	.destroy = nv50_crtc_destroy,
};

static void
nv50_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}

static void
nv50_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

	drm_vblank_pre_modeset(dev, nv_crtc->index);
	nv50_crtc_blank(nv_crtc, true);
}

static void
nv50_crtc_commit(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *evo = dev_priv->evo;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	int ret;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

	nv50_crtc_blank(nv_crtc, false);
	drm_vblank_post_modeset(dev, nv_crtc->index);

	ret = RING_SPACE(evo, 2);
	if (ret) {
		NV_ERROR(dev, "no space while committing crtc\n");
		return;
	}
	BEGIN_RING(evo, 0, NV50_EVO_UPDATE, 1);
	OUT_RING (evo, 0);
	FIRE_RING (evo);
}

static bool
nv50_crtc_mode_fixup(struct drm_crtc *crtc, struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}

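/* Common implementation for the mode_set_base paths: pins the target
 * framebuffer (and unpins the old one) when not atomic, then programs the
 * framebuffer DMA object, offset, pitch/tiling, format, CLUT mode and
 * panning position, optionally firing an EVO UPDATE. */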
static int
nv50_crtc_do_mode_set_base(struct drm_crtc *crtc,
			   struct drm_framebuffer *passed_fb,
			   int x, int y, bool update, bool atomic)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *evo = dev_priv->evo;
	struct drm_framebuffer *drm_fb = nv_crtc->base.fb;
	struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);
	int ret, format;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

	/* If atomic, we want to switch to the fb we were passed, so
	 * now we update pointers to do that.  (We don't pin; just
	 * assume we're already pinned and update the base address.)
	 */
	if (atomic) {
		drm_fb = passed_fb;
		fb = nouveau_framebuffer(passed_fb);
	} else {
		/* If not atomic, we can go ahead and pin, and unpin the
		 * old fb we were passed.
		 */
		ret = nouveau_bo_pin(fb->nvbo, TTM_PL_FLAG_VRAM);
		if (ret)
			return ret;

		if (passed_fb) {
			struct nouveau_framebuffer *ofb = nouveau_framebuffer(passed_fb);
			nouveau_bo_unpin(ofb->nvbo);
		}
	}

	switch (drm_fb->depth) {
	case 8:
		format = NV50_EVO_CRTC_FB_DEPTH_8;
		break;
	case 15:
		format = NV50_EVO_CRTC_FB_DEPTH_15;
		break;
	case 16:
		format = NV50_EVO_CRTC_FB_DEPTH_16;
		break;
	case 24:
	case 32:
		format = NV50_EVO_CRTC_FB_DEPTH_24;
		break;
	case 30:
		format = NV50_EVO_CRTC_FB_DEPTH_30;
		break;
	default:
		NV_ERROR(dev, "unknown depth %d\n", drm_fb->depth);
		return -EINVAL;
	}

	nv_crtc->fb.offset = fb->nvbo->bo.mem.start << PAGE_SHIFT;
	nv_crtc->fb.tile_flags = nouveau_bo_tile_layout(fb->nvbo);
	nv_crtc->fb.cpp = drm_fb->bits_per_pixel / 8;
	if (!nv_crtc->fb.blanked && dev_priv->chipset != 0x50) {
		ret = RING_SPACE(evo, 2);
		if (ret)
			return ret;

		BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_DMA), 1);
		if (nv_crtc->fb.tile_flags == 0x7a00 ||
		    nv_crtc->fb.tile_flags == 0xfe00)
			OUT_RING(evo, NvEvoFB32);
		else
		if (nv_crtc->fb.tile_flags == 0x7000)
			OUT_RING(evo, NvEvoFB16);
		else
			OUT_RING(evo, NvEvoVRAM_LP);
	}

	ret = RING_SPACE(evo, 12);
	if (ret)
		return ret;

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_OFFSET), 5);
	OUT_RING(evo, nv_crtc->fb.offset >> 8);
	OUT_RING(evo, 0);
	OUT_RING(evo, (drm_fb->height << 16) | drm_fb->width);
	if (!nv_crtc->fb.tile_flags) {
		OUT_RING(evo, drm_fb->pitch | (1 << 20));
	} else {
		u32 tile_mode = fb->nvbo->tile_mode;
		if (dev_priv->card_type >= NV_C0)
			tile_mode >>= 4;
		OUT_RING(evo, ((drm_fb->pitch / 4) << 4) | tile_mode);
	}
	if (dev_priv->chipset == 0x50)
		OUT_RING(evo, (nv_crtc->fb.tile_flags << 8) | format);
	else
		OUT_RING(evo, format);

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, CLUT_MODE), 1);
	OUT_RING(evo, fb->base.depth == 8 ?
		 NV50_EVO_CRTC_CLUT_MODE_OFF : NV50_EVO_CRTC_CLUT_MODE_ON);
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, COLOR_CTRL), 1);
	OUT_RING(evo, NV50_EVO_CRTC_COLOR_CTRL_COLOR);
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_POS), 1);
	OUT_RING(evo, (y << 16) | x);

	if (nv_crtc->lut.depth != fb->base.depth) {
		nv_crtc->lut.depth = fb->base.depth;
		nv50_crtc_lut_load(crtc);
	}

	if (update) {
		ret = RING_SPACE(evo, 2);
		if (ret)
			return ret;
		BEGIN_RING(evo, 0, NV50_EVO_UPDATE, 1);
		OUT_RING(evo, 0);
		FIRE_RING(evo);
	}

	return 0;
}

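/* Program CRTC timings, dither and scaler settings for the adjusted mode,
 * then point the CRTC at the new framebuffer. */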
static int
nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *mode,
		   struct drm_display_mode *adjusted_mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct drm_device *dev = crtc->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *evo = dev_priv->evo;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector = NULL;
	uint32_t hsync_dur, vsync_dur, hsync_start_to_end, vsync_start_to_end;
	uint32_t hunk1, vunk1, vunk2a, vunk2b;
	int ret;

	/* Find the connector attached to this CRTC */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);

	*nv_crtc->mode = *adjusted_mode;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

	hsync_dur = adjusted_mode->hsync_end - adjusted_mode->hsync_start;
	vsync_dur = adjusted_mode->vsync_end - adjusted_mode->vsync_start;
	hsync_start_to_end = adjusted_mode->htotal - adjusted_mode->hsync_start;
	vsync_start_to_end = adjusted_mode->vtotal - adjusted_mode->vsync_start;
	/* I can't give this a proper name, anyone else can? */
	hunk1 = adjusted_mode->htotal -
		adjusted_mode->hsync_start + adjusted_mode->hdisplay;
	vunk1 = adjusted_mode->vtotal -
		adjusted_mode->vsync_start + adjusted_mode->vdisplay;
	/* Another strange value, this time only for interlaced adjusted_modes. */
	vunk2a = 2 * adjusted_mode->vtotal -
		 adjusted_mode->vsync_start + adjusted_mode->vdisplay;
	vunk2b = adjusted_mode->vtotal -
		 adjusted_mode->vsync_start + adjusted_mode->vtotal;

	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
		vsync_dur /= 2;
		vsync_start_to_end /= 2;
		vunk1 /= 2;
		vunk2a /= 2;
		vunk2b /= 2;
		/* magic */
		if (adjusted_mode->flags & DRM_MODE_FLAG_DBLSCAN) {
			vsync_start_to_end -= 1;
			vunk1 -= 1;
			vunk2a -= 1;
			vunk2b -= 1;
		}
	}

	ret = RING_SPACE(evo, 17);
	if (ret)
		return ret;

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, CLOCK), 2);
	OUT_RING(evo, adjusted_mode->clock | 0x800000);
	OUT_RING(evo, (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 0);

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, DISPLAY_START), 5);
	OUT_RING(evo, 0);
	OUT_RING(evo, (adjusted_mode->vtotal << 16) | adjusted_mode->htotal);
	OUT_RING(evo, (vsync_dur - 1) << 16 | (hsync_dur - 1));
	OUT_RING(evo, (vsync_start_to_end - 1) << 16 |
		 (hsync_start_to_end - 1));
	OUT_RING(evo, (vunk1 - 1) << 16 | (hunk1 - 1));

	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, UNK0824), 1);
		OUT_RING(evo, (vunk2b - 1) << 16 | (vunk2a - 1));
	} else {
		OUT_RING(evo, 0);
		OUT_RING(evo, 0);
	}

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, UNK082C), 1);
	OUT_RING(evo, 0);

	/* This is the actual resolution of the mode. */
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, REAL_RES), 1);
	OUT_RING(evo, (mode->vdisplay << 16) | mode->hdisplay);
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, SCALE_CENTER_OFFSET), 1);
	OUT_RING(evo, NV50_EVO_CRTC_SCALE_CENTER_OFFSET_VAL(0, 0));

	nv_crtc->set_dither(nv_crtc, nv_connector->use_dithering, false);
	nv_crtc->set_scale(nv_crtc, nv_connector->scaling_mode, false);

	return nv50_crtc_do_mode_set_base(crtc, old_fb, x, y, false, false);
}

static int
nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	return nv50_crtc_do_mode_set_base(crtc, old_fb, x, y, true, false);
}

static int
nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb,
			       int x, int y, enum mode_set_atomic state)
{
	return nv50_crtc_do_mode_set_base(crtc, fb, x, y, true, true);
}

static const struct drm_crtc_helper_funcs nv50_crtc_helper_funcs = {
	.dpms = nv50_crtc_dpms,
	.prepare = nv50_crtc_prepare,
	.commit = nv50_crtc_commit,
	.mode_fixup = nv50_crtc_mode_fixup,
	.mode_set = nv50_crtc_mode_set,
	.mode_set_base = nv50_crtc_mode_set_base,
	.mode_set_base_atomic = nv50_crtc_mode_set_base_atomic,
	.load_lut = nv50_crtc_lut_load,
};

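/* Allocate and initialise a nouveau_crtc for the given head: default LUT
 * contents, LUT and cursor buffer objects, dither/scale hooks, and DRM CRTC
 * registration. */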
int
nv50_crtc_create(struct drm_device *dev, int index)
{
	struct nouveau_crtc *nv_crtc = NULL;
	int ret, i;

	NV_DEBUG_KMS(dev, "\n");

	nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
	if (!nv_crtc)
		return -ENOMEM;

	nv_crtc->mode = kzalloc(sizeof(*nv_crtc->mode), GFP_KERNEL);
	if (!nv_crtc->mode) {
		kfree(nv_crtc);
		return -ENOMEM;
	}

	/* Default CLUT parameters, will be activated on the hw upon
	 * first mode set.
	 */
	for (i = 0; i < 256; i++) {
		nv_crtc->lut.r[i] = i << 8;
		nv_crtc->lut.g[i] = i << 8;
		nv_crtc->lut.b[i] = i << 8;
	}
	nv_crtc->lut.depth = 0;

	ret = nouveau_bo_new(dev, NULL, 4096, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, false, true, &nv_crtc->lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	}

	if (ret) {
		kfree(nv_crtc->mode);
		kfree(nv_crtc);
		return ret;
	}

	nv_crtc->index = index;

	/* set function pointers */
	nv_crtc->set_dither = nv50_crtc_set_dither;
	nv_crtc->set_scale = nv50_crtc_set_scale;

	drm_crtc_init(dev, &nv_crtc->base, &nv50_crtc_funcs);
	drm_crtc_helper_add(&nv_crtc->base, &nv50_crtc_helper_funcs);
	drm_mode_crtc_set_gamma_size(&nv_crtc->base, 256);

	ret = nouveau_bo_new(dev, NULL, 64*64*4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, false, true, &nv_crtc->cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	}

	nv50_cursor_init(nv_crtc);
	return 0;
}