nvd0_display.c 35 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346
  1. /*
  2. * Copyright 2011 Red Hat Inc.
  3. *
  4. * Permission is hereby granted, free of charge, to any person obtaining a
  5. * copy of this software and associated documentation files (the "Software"),
  6. * to deal in the Software without restriction, including without limitation
  7. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  8. * and/or sell copies of the Software, and to permit persons to whom the
  9. * Software is furnished to do so, subject to the following conditions:
  10. *
  11. * The above copyright notice and this permission notice shall be included in
  12. * all copies or substantial portions of the Software.
  13. *
  14. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  15. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  16. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  17. * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  18. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  19. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  20. * OTHER DEALINGS IN THE SOFTWARE.
  21. *
  22. * Authors: Ben Skeggs
  23. */
  24. #include <linux/dma-mapping.h>
  25. #include "drmP.h"
  26. #include "drm_crtc_helper.h"
  27. #include "nouveau_drv.h"
  28. #include "nouveau_connector.h"
  29. #include "nouveau_encoder.h"
  30. #include "nouveau_crtc.h"
  31. #include "nouveau_fb.h"
  32. #include "nv50_display.h"
  33. #define MEM_SYNC 0xe0000001
  34. #define MEM_VRAM 0xe0010000
  35. #include "nouveau_dma.h"
/* Per-device state for the NVD0 display engine; stored in
 * dev_priv->engine.display.priv (see nvd0_display() below). */
struct nvd0_display {
	struct nouveau_gpuobj *mem;
	/* One EVO channel: handle is the DMA address of its push-buffer
	 * page, ptr the CPU mapping written by evo_wait()/evo_kick(). */
	struct {
		dma_addr_t handle;
		u32 *ptr;
	} evo[1];
	/* State carried between stages of the display-update IRQ:
	 * DCB entries for the output being disabled (dis) and enabled
	 * (ena), the head involved, its pixel clock (register value
	 * divided by 1000 — presumably kHz, verify), and the bios
	 * script id selected for the new output. */
	struct {
		struct dcb_entry *dis;
		struct dcb_entry *ena;
		int crtc;
		int pclk;
		u16 script;
	} irq;
};
  50. static struct nvd0_display *
  51. nvd0_display(struct drm_device *dev)
  52. {
  53. struct drm_nouveau_private *dev_priv = dev->dev_private;
  54. return dev_priv->engine.display.priv;
  55. }
/* Submit one command to EVO channel @id through what appears to be an
 * immediate-mode register interface (0x6107xx), bypassing the push
 * buffer.  Returns 0 on success, -EBUSY if the hardware never cleared
 * the trigger bit. */
static int
evo_icmd(struct drm_device *dev, int id, u32 mthd, u32 data)
{
	int ret = 0;

	/* enable immediate mode for this channel */
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000001);
	nv_wr32(dev, 0x610704 + (id * 0x10), data);
	/* write method offset with bit 31 as the trigger; the hardware
	 * clears bit 31 once the method has executed */
	nv_mask(dev, 0x610704 + (id * 0x10), 0x80000ffc, 0x80000000 | mthd);
	if (!nv_wait(dev, 0x610704 + (id * 0x10), 0x80000000, 0x00000000))
		ret = -EBUSY;
	/* leave immediate mode again */
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000000);
	return ret;
}
/* Reserve room for @nr words in EVO channel @id's one-page push buffer.
 * If the request would run past the end of the page, emit a jump back
 * to the start (0x20000000), reset PUT, and wait for GET to follow.
 * Returns a CPU pointer to write methods at, or NULL on a DMA stall.
 * Pair with evo_kick() to submit what was written. */
static u32 *
evo_wait(struct drm_device *dev, int id, int nr)
{
	struct nvd0_display *disp = nvd0_display(dev);
	/* 0x640000 holds the channel PUT offset in bytes (see evo_kick) */
	u32 put = nv_rd32(dev, 0x640000 + (id * 0x1000)) / 4;

	if (put + nr >= (PAGE_SIZE / 4)) {
		disp->evo[id].ptr[put] = 0x20000000;
		nv_wr32(dev, 0x640000 + (id * 0x1000), 0x00000000);
		/* wait for GET (0x640004) to return to the buffer start */
		if (!nv_wait(dev, 0x640004 + (id * 0x1000), ~0, 0x00000000)) {
			NV_ERROR(dev, "evo %d dma stalled\n", id);
			return NULL;
		}
		put = 0;
	}

	return disp->evo[id].ptr + put;
}
/* Publish the commands written since evo_wait() by advancing the
 * channel's PUT register to the byte offset of @push. */
static void
evo_kick(u32 *push, struct drm_device *dev, int id)
{
	struct nvd0_display *disp = nvd0_display(dev);

	nv_wr32(dev, 0x640000 + (id * 0x1000), (push - disp->evo[id].ptr) << 2);
}
/* Append an EVO method header (word count in bits 18+, method offset in
 * the low bits) or a data word at *p, post-incrementing the pointer. */
#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d) *((p)++) = (d)
/* .get_crtc helper hook: the crtc this encoder was last programmed for
 * (set in the *_mode_set hooks, cleared in the *_disconnect hooks). */
static struct drm_crtc *
nvd0_display_crtc_get(struct drm_encoder *encoder)
{
	return nouveau_encoder(encoder)->crtc;
}
  97. /******************************************************************************
  98. * CRTC
  99. *****************************************************************************/
/* Program the head's dithering mode.  Only "6bpc dynamic 2x2" (0x11) is
 * selected when @on; @update additionally pushes an EVO state update
 * (method 0x0080) so the change takes effect immediately. */
static int
nvd0_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool on, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push, mode;

	mode = 0x00000000;
	if (on) {
		/* 0x11: 6bpc dynamic 2x2
		 * 0x13: 8bpc dynamic 2x2
		 * 0x19: 6bpc static 2x2
		 * 0x1b: 8bpc static 2x2
		 * 0x21: 6bpc temporal
		 * 0x23: 8bpc temporal
		 */
		mode = 0x00000011;
	}

	push = evo_wait(dev, 0, 4);
	if (push) {
		evo_mthd(push, 0x0490 + (nv_crtc->index * 0x300), 1);
		evo_data(push, mode);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}

	return 0;
}
/* Program the head's scaler.  Scaling is not actually implemented yet:
 * regardless of @type, every dimension is set to the mode's native
 * hdisplay x vdisplay (i.e. 1:1). */
static int
nvd0_crtc_set_scale(struct nouveau_crtc *nv_crtc, int type, bool update)
{
	struct drm_display_mode *mode = &nv_crtc->base.mode;
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push;

	/*XXX: actually handle scaling */
	push = evo_wait(dev, 0, 16);
	if (push) {
		evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}

	return 0;
}
/* Point the head's scanout surface at framebuffer @fb (offset, size,
 * pitch, format, dma object).  @x/@y are accepted but not currently
 * applied to the surface offset.  Caches the fb's dma/tile flags so
 * nvd0_crtc_commit() can restore them. */
static int
nvd0_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	u32 *push;

	push = evo_wait(fb->dev, 0, 16);
	if (push) {
		evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nvfb->nvbo->bo.offset >> 8);
		evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nvfb->r_pitch);
		evo_data(push, nvfb->r_format);
		evo_data(push, nvfb->r_dma);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, fb->dev, 0);
	}

	nv_crtc->fb.tile_flags = nvfb->r_dma;
	return 0;
}
/* Show or hide the hardware cursor on a head.  When showing, points the
 * hardware at the crtc's pre-allocated 64x64 cursor bo in VRAM; when
 * hiding, detaches the cursor dma object.  @update pushes an immediate
 * EVO state update. */
static void
nvd0_crtc_cursor_show(struct nouveau_crtc *nv_crtc, bool show, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push = evo_wait(dev, 0, 16);

	if (push) {
		if (show) {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, MEM_VRAM);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}
}
/* .dpms crtc helper hook — intentionally empty; head enable/disable is
 * handled through nvd0_crtc_prepare()/nvd0_crtc_commit(). */
static void
nvd0_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}
/* .prepare crtc helper hook: quiesce the head before a mode set —
 * clear the surface tile flags (0x0474) and dma object (0x045c),
 * detach the LUT (0x0440, compare nvd0_crtc_commit()), and hide the
 * cursor.  No state update is pushed here. */
static void
nvd0_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x03000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_kick(push, crtc->dev, 0);
	}

	nvd0_crtc_cursor_show(nv_crtc, false, false);
}
/* .commit crtc helper hook: bring the head back up after a mode set —
 * restore the fb tile flags cached by nvd0_crtc_set_image(), attach
 * the LUT bo and VRAM dma object, and restore cursor visibility with
 * an immediate update. */
static void
nvd0_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, 0, 32);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nv_crtc->fb.tile_flags);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
		evo_data(push, 0x83000000);
		evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, MEM_VRAM);
		evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0xffffff00);
		evo_kick(push, crtc->dev, 0);
	}

	nvd0_crtc_cursor_show(nv_crtc, nv_crtc->cursor.visible, true);
}
/* .mode_fixup crtc helper hook: no per-crtc mode constraints; encoder
 * fixup (native-mode substitution) happens in the encoder hooks. */
static bool
nvd0_crtc_mode_fixup(struct drm_crtc *crtc, struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}
  253. static int
  254. nvd0_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
  255. {
  256. struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
  257. int ret;
  258. ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
  259. if (ret)
  260. return ret;
  261. if (old_fb) {
  262. nvfb = nouveau_framebuffer(old_fb);
  263. nouveau_bo_unpin(nvfb->nvbo);
  264. }
  265. return 0;
  266. }
/* .mode_set crtc helper hook: program the head's raster timings and
 * pixel clock from the adjusted @mode, then apply dither, scale and
 * scanout-surface settings from the attached connector.  @umode (the
 * original user mode) is unused.  The arithmetic below converts DRM's
 * hdisplay/sync_start/sync_end/total values into the hardware's
 * sync-relative blanking intervals. */
static int
nvd0_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	u32 htotal = mode->htotal;
	u32 vtotal = mode->vtotal;
	u32 hsyncw = mode->hsync_end - mode->hsync_start - 1;
	u32 vsyncw = mode->vsync_end - mode->vsync_start - 1;
	u32 hfrntp = mode->hsync_start - mode->hdisplay;
	u32 vfrntp = mode->vsync_start - mode->vdisplay;
	u32 hbackp = mode->htotal - mode->hsync_end;
	u32 vbackp = mode->vtotal - mode->vsync_end;
	u32 hss2be = hsyncw + hbackp;	/* sync start to blank end */
	u32 vss2be = vsyncw + vbackp;
	u32 hss2de = htotal - hfrntp;	/* sync start to display end */
	u32 vss2de = vtotal - vfrntp;
	u32 hstart = 0;
	u32 vstart = 0;
	u32 *push;
	int ret;

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(crtc->dev, 0, 64);
	if (push) {
		evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 5);
		evo_data(push, (vstart << 16) | hstart);
		evo_data(push, (vtotal << 16) | htotal);
		evo_data(push, (vsyncw << 16) | hsyncw);
		evo_data(push, (vss2be << 16) | hss2be);
		evo_data(push, (vss2de << 16) | hss2de);
		evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000); /* ??? */
		evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
		evo_data(push, mode->clock * 1000);
		evo_data(push, 0x00200000); /* ??? */
		evo_data(push, mode->clock * 1000);
		evo_mthd(push, 0x0408 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x31ec6000); /* ??? */
		evo_kick(push, crtc->dev, 0);
	}

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nvd0_crtc_set_dither(nv_crtc, nv_connector->use_dithering, false);
	nvd0_crtc_set_scale(nv_crtc, nv_connector->scaling_mode, false);
	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
	return 0;
}
/* .mode_set_base crtc helper hook: pin the new fb, unpin the old one,
 * and flip the head's scanout surface with an immediate update. */
static int
nvd0_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	int ret;

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
	return 0;
}
/* .mode_set_base_atomic crtc helper hook (kgdb/panic path): flip to
 * @fb without pin/unpin bookkeeping.  @state is ignored. */
static int
nvd0_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

	nvd0_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}
/* Upload the crtc's cached 256-entry gamma ramp into the mapped LUT bo.
 * Each entry occupies 0x20 bytes with r/g/b at offsets 0/2/4; the
 * stored 16-bit components are scaled down by 2 bits and biased by
 * 0x6000 (hardware LUT format — exact encoding not documented here). */
static void
nvd0_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		writew(0x6000 + (nv_crtc->lut.r[i] >> 2), lut + (i * 0x20) + 0);
		writew(0x6000 + (nv_crtc->lut.g[i] >> 2), lut + (i * 0x20) + 2);
		writew(0x6000 + (nv_crtc->lut.b[i] >> 2), lut + (i * 0x20) + 4);
	}
}
/* .cursor_set crtc hook: @handle == 0 hides the cursor; otherwise the
 * referenced gem bo (must be 64x64) is copied word-by-word into the
 * crtc's own cursor bo.  Visibility is only reprogrammed when it
 * actually changes. */
static int
nvd0_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem;
	struct nouveau_bo *nvbo;
	bool visible = (handle != 0);
	int i, ret = 0;

	if (visible) {
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_map(nvbo);
		if (ret == 0) {
			/* copy the user bo into our private cursor bo */
			for (i = 0; i < 64 * 64; i++) {
				u32 v = nouveau_bo_rd32(nvbo, i);
				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
			}
			nouveau_bo_unmap(nvbo);
		}

		drm_gem_object_unreference_unlocked(gem);
	}

	if (visible != nv_crtc->cursor.visible) {
		nvd0_crtc_cursor_show(nv_crtc, visible, true);
		nv_crtc->cursor.visible = visible;
	}

	return ret;
}
/* .cursor_move crtc hook: write the packed (y,x) position to the
 * per-head cursor position register and poke the update register. */
static int
nvd0_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	const u32 data = (y << 16) | x;

	nv_wr32(crtc->dev, 0x64d084 + (nv_crtc->index * 0x1000), data);
	nv_wr32(crtc->dev, 0x64d080 + (nv_crtc->index * 0x1000), 0x00000000);
	return 0;
}
  392. static void
  393. nvd0_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
  394. uint32_t start, uint32_t size)
  395. {
  396. struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
  397. u32 end = max(start + size, (u32)256);
  398. u32 i;
  399. for (i = start; i < end; i++) {
  400. nv_crtc->lut.r[i] = r[i];
  401. nv_crtc->lut.g[i] = g[i];
  402. nv_crtc->lut.b[i] = b[i];
  403. }
  404. nvd0_crtc_lut_load(crtc);
  405. }
/* .destroy crtc hook: release the cursor and LUT bos (unmap before
 * dropping the last reference), unregister the crtc and free it.
 * Also used on the error path of nvd0_crtc_create(). */
static void
nvd0_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	drm_crtc_cleanup(crtc);
	kfree(crtc);
}
/* CRTC helper vtable (mode-setting plumbing). */
static const struct drm_crtc_helper_funcs nvd0_crtc_hfunc = {
	.dpms = nvd0_crtc_dpms,
	.prepare = nvd0_crtc_prepare,
	.commit = nvd0_crtc_commit,
	.mode_fixup = nvd0_crtc_mode_fixup,
	.mode_set = nvd0_crtc_mode_set,
	.mode_set_base = nvd0_crtc_mode_set_base,
	.mode_set_base_atomic = nvd0_crtc_mode_set_base_atomic,
	.load_lut = nvd0_crtc_lut_load,
};

/* CRTC core vtable (cursor, gamma, teardown). */
static const struct drm_crtc_funcs nvd0_crtc_func = {
	.cursor_set = nvd0_crtc_cursor_set,
	.cursor_move = nvd0_crtc_cursor_move,
	.gamma_set = nvd0_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = nvd0_crtc_destroy,
};
/* Allocate and register one CRTC for head @index: initialize a linear
 * default gamma ramp, then allocate, pin and map a 64x64x4 cursor bo
 * and an 8KiB LUT bo in VRAM.  On any failure the partially-built crtc
 * is torn down via nvd0_crtc_destroy().
 *
 * NOTE(review): on the map-failure paths the bo is unreferenced while
 * still pinned — verify whether a nouveau_bo_unpin() is needed first. */
static int
nvd0_crtc_create(struct drm_device *dev, int index)
{
	struct nouveau_crtc *nv_crtc;
	struct drm_crtc *crtc;
	int ret, i;

	nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
	if (!nv_crtc)
		return -ENOMEM;

	nv_crtc->index = index;
	nv_crtc->set_dither = nvd0_crtc_set_dither;
	nv_crtc->set_scale = nvd0_crtc_set_scale;
	/* identity gamma ramp (8-bit values scaled to 16 bits) */
	for (i = 0; i < 256; i++) {
		nv_crtc->lut.r[i] = i << 8;
		nv_crtc->lut.g[i] = i << 8;
		nv_crtc->lut.b[i] = i << 8;
	}

	crtc = &nv_crtc->base;
	drm_crtc_init(dev, crtc, &nvd0_crtc_func);
	drm_crtc_helper_add(crtc, &nvd0_crtc_hfunc);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	}

	if (ret)
		goto out;

	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	}

	if (ret)
		goto out;

	nvd0_crtc_lut_load(crtc);

out:
	if (ret)
		nvd0_crtc_destroy(crtc);
	return ret;
}
  483. /******************************************************************************
  484. * DAC
  485. *****************************************************************************/
/* DAC .dpms hook: build the power-control word (bit 0 set for
 * STANDBY/OFF, bit 2 for SUSPEND/OFF — presumably hsync/vsync
 * power-down, verify) and apply it to the OR's control register,
 * waiting for the busy bit (31) to clear before and after. */
static void
nvd0_dac_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	dpms_ctrl = 0x80000000;
	if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000001;
	if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000004;

	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61a004 + (or * 0x0800), 0xc000007f, dpms_ctrl);
	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
}
  502. static bool
  503. nvd0_dac_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
  504. struct drm_display_mode *adjusted_mode)
  505. {
  506. struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
  507. struct nouveau_connector *nv_connector;
  508. nv_connector = nouveau_encoder_connector_get(nv_encoder);
  509. if (nv_connector && nv_connector->native_mode) {
  510. if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
  511. int id = adjusted_mode->base.id;
  512. *adjusted_mode = *nv_connector->native_mode;
  513. adjusted_mode->base.id = id;
  514. }
  515. }
  516. return true;
  517. }
/* DAC .prepare hook — nothing to do before a DAC mode set. */
static void
nvd0_dac_prepare(struct drm_encoder *encoder)
{
}

/* DAC .commit hook — nothing to do after a DAC mode set. */
static void
nvd0_dac_commit(struct drm_encoder *encoder)
{
}
/* DAC .mode_set hook: power the DAC up, then attach the OR to the head
 * being driven (EVO method 0x0180 + or*0x20 takes a head bitmask).
 * Records the crtc for nvd0_display_crtc_get()/disconnect. */
static void
nvd0_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nvd0_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
		evo_data(push, 1 << nv_crtc->index);
		evo_kick(push, encoder->dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}
/* DAC .disable hook: if the DAC is attached to a head, blank that head,
 * clear the OR's head mask, and flush with a state update. */
static void
nvd0_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
	}
}
/* DAC .detect hook — TODO: load detection is not implemented yet, so
 * analog outputs always report as disconnected. */
static enum drm_connector_status
nvd0_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	return connector_status_disconnected;
}
/* DAC .destroy hook: unregister and free the encoder. */
static void
nvd0_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
/* DAC encoder helper vtable. */
static const struct drm_encoder_helper_funcs nvd0_dac_hfunc = {
	.dpms = nvd0_dac_dpms,
	.mode_fixup = nvd0_dac_mode_fixup,
	.prepare = nvd0_dac_prepare,
	.commit = nvd0_dac_commit,
	.mode_set = nvd0_dac_mode_set,
	.disable = nvd0_dac_disconnect,
	.get_crtc = nvd0_display_crtc_get,
	.detect = nvd0_dac_detect
};

/* DAC encoder core vtable. */
static const struct drm_encoder_funcs nvd0_dac_func = {
	.destroy = nvd0_dac_destroy,
};
/* Create a DAC encoder for DCB entry @dcbe and attach it to @connector.
 * The OR index is the lowest bit set in dcbe->or. */
static int
nvd0_dac_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_dac_func, DRM_MODE_ENCODER_DAC);
	drm_encoder_helper_add(encoder, &nvd0_dac_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
  604. /******************************************************************************
  605. * SOR
  606. *****************************************************************************/
/* SOR .dpms hook: skip the hardware update if another TMDS encoder
 * sharing the same OR is still on (its last_dpms is DPMS_ON); otherwise
 * set/clear the OR enable (bit 0 of 0x61c004), waiting for the busy
 * bit (31) around the write and for 0x61c030 bit 28 to clear after. */
static void
nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_encoder *partner;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	nv_encoder->last_dpms = mode;
	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	dpms_ctrl = (mode == DRM_MODE_DPMS_ON);
	dpms_ctrl |= 0x80000000;

	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61c004 + (or * 0x0800), 0x80000001, dpms_ctrl);
	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_wait(dev, 0x61c030 + (or * 0x0800), 0x10000000, 0x00000000);
}
  634. static bool
  635. nvd0_sor_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
  636. struct drm_display_mode *adjusted_mode)
  637. {
  638. struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
  639. struct nouveau_connector *nv_connector;
  640. nv_connector = nouveau_encoder_connector_get(nv_encoder);
  641. if (nv_connector && nv_connector->native_mode) {
  642. if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
  643. int id = adjusted_mode->base.id;
  644. *adjusted_mode = *nv_connector->native_mode;
  645. adjusted_mode->base.id = id;
  646. }
  647. }
  648. return true;
  649. }
/* SOR .prepare hook — nothing to do before a SOR mode set. */
static void
nvd0_sor_prepare(struct drm_encoder *encoder)
{
}

/* SOR .commit hook — nothing to do after a SOR mode set. */
static void
nvd0_sor_commit(struct drm_encoder *encoder)
{
}
/* SOR .mode_set hook: build the OR control word — head bitmask plus a
 * protocol field selected from the DCB link config and pixel clock
 * (0x100/0x500 appear to be single/dual-link on link A, 0x200 link B —
 * verify) — power the SOR up, and attach it to the head. */
static void
nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 mode_ctrl = (1 << nv_crtc->index);
	u32 *push;

	if (nv_encoder->dcb->sorconf.link & 1) {
		/* dual-link needed above 165MHz single-link TMDS limit */
		if (adjusted_mode->clock < 165000)
			mode_ctrl |= 0x00000100;
		else
			mode_ctrl |= 0x00000500;
	} else {
		mode_ctrl |= 0x00000200;
	}

	nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
		evo_data(push, mode_ctrl);
		evo_kick(push, encoder->dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}
/* SOR .disable hook: if the SOR is attached to a head, blank that head,
 * clear the OR control word, flush with a state update, and reset the
 * cached dpms state so a later dpms-on reprograms the hardware. */
static void
nvd0_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
		nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	}
}
/* SOR .destroy hook: unregister and free the encoder. */
static void
nvd0_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
/* SOR encoder helper vtable (no .detect — handled via connector). */
static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
	.dpms = nvd0_sor_dpms,
	.mode_fixup = nvd0_sor_mode_fixup,
	.prepare = nvd0_sor_prepare,
	.commit = nvd0_sor_commit,
	.mode_set = nvd0_sor_mode_set,
	.disable = nvd0_sor_disconnect,
	.get_crtc = nvd0_display_crtc_get,
};

/* SOR encoder core vtable. */
static const struct drm_encoder_funcs nvd0_sor_func = {
	.destroy = nvd0_sor_destroy,
};
/* Create a TMDS (SOR) encoder for DCB entry @dcbe and attach it to
 * @connector.  last_dpms starts OFF so the first dpms-on programs the
 * hardware (see nvd0_sor_dpms's shared-OR check). */
static int
nvd0_sor_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
	drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
  741. /******************************************************************************
  742. * IRQ
  743. *****************************************************************************/
  744. static struct dcb_entry *
  745. lookup_dcb(struct drm_device *dev, int id, u32 mc)
  746. {
  747. struct drm_nouveau_private *dev_priv = dev->dev_private;
  748. int type, or, i;
  749. if (id < 4) {
  750. type = OUTPUT_ANALOG;
  751. or = id;
  752. } else {
  753. type = OUTPUT_TMDS;
  754. or = id - 4;
  755. }
  756. for (i = 0; i < dev_priv->vbios.dcb.entries; i++) {
  757. struct dcb_entry *dcb = &dev_priv->vbios.dcb.entry[i];
  758. if (dcb->type == type && (dcb->or & (1 << or)))
  759. return dcb;
  760. }
  761. NV_INFO(dev, "PDISP: DCB for %d/0x%08x not found\n", id, mc);
  762. return NULL;
  763. }
  764. static void
  765. nvd0_display_unk1_handler(struct drm_device *dev)
  766. {
  767. struct nvd0_display *disp = nvd0_display(dev);
  768. struct dcb_entry *dcb;
  769. u32 unkn, crtc = 0;
  770. int i;
  771. NV_INFO(dev, "PDISP: 1 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
  772. nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));
  773. unkn = nv_rd32(dev, 0x6101d4);
  774. if (!unkn) {
  775. unkn = nv_rd32(dev, 0x6109d4);
  776. crtc = 1;
  777. }
  778. disp->irq.ena = NULL;
  779. disp->irq.dis = NULL;
  780. disp->irq.crtc = crtc;
  781. disp->irq.pclk = nv_rd32(dev, 0x660450 + (disp->irq.crtc * 0x300));
  782. disp->irq.pclk /= 1000;
  783. for (i = 0; i < 8; i++) {
  784. u32 mcc = nv_rd32(dev, 0x640180 + (i * 0x20));
  785. u32 mcp = nv_rd32(dev, 0x660180 + (i * 0x20));
  786. if (mcc & (1 << crtc))
  787. disp->irq.dis = lookup_dcb(dev, i, mcc);
  788. if (mcp & (1 << crtc)) {
  789. disp->irq.ena = lookup_dcb(dev, i, mcp);
  790. switch (disp->irq.ena->type) {
  791. case OUTPUT_ANALOG:
  792. disp->irq.script = 0x00ff;
  793. break;
  794. case OUTPUT_TMDS:
  795. disp->irq.script = (mcp & 0x00000f00) >> 8;
  796. if (disp->irq.pclk >= 165000)
  797. disp->irq.script |= 0x0100;
  798. break;
  799. default:
  800. disp->irq.script = 0xbeef;
  801. break;
  802. }
  803. }
  804. }
  805. dcb = disp->irq.dis;
  806. if (dcb)
  807. nouveau_bios_run_display_table(dev, 0x0000, -1, dcb, crtc);
  808. nv_wr32(dev, 0x6101d4, 0x00000000);
  809. nv_wr32(dev, 0x6109d4, 0x00000000);
  810. nv_wr32(dev, 0x6101d0, 0x80000000);
  811. }
/* Second-stage display supervisor interrupt: disconnect the old output,
 * reprogram the pixel clock, then bring up the new output recorded by
 * the first-stage handler.
 */
static void
nvd0_display_unk2_handler(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	struct dcb_entry *dcb;
	int crtc = disp->irq.crtc;
	int pclk = disp->irq.pclk;
	int or;
	u32 tmp;

	NV_INFO(dev, "PDISP: 2 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	/* Run the VBIOS disconnect script for the outgoing output. */
	dcb = disp->irq.dis;
	disp->irq.dis = NULL;
	if (dcb)
		nouveau_bios_run_display_table(dev, 0x0000, -2, dcb, crtc);

	/* Program the new pixel clock for this head. */
	nv50_crtc_set_clock(dev, crtc, pclk);

	dcb = disp->irq.ena;
	if (!dcb)
		goto ack;
	or = ffs(dcb->or) - 1;

	/* Run the VBIOS script selected in stage 1 to enable the output. */
	nouveau_bios_run_display_table(dev, disp->irq.script, pclk, dcb, crtc);

	nv_wr32(dev, 0x612200 + (crtc * 0x800), 0x00000000);
	switch (dcb->type) {
	case OUTPUT_ANALOG:
		nv_wr32(dev, 0x612280 + (or * 0x800), 0x00000000);
		break;
	case OUTPUT_TMDS:
		/* NOTE(review): 0x101 presumably selects the dual-link
		 * TMDS path for clocks above the 165MHz single-link limit
		 * (matches the 0x0100 script flag in stage 1) — confirm.
		 */
		if (disp->irq.pclk >= 165000)
			tmp = 0x00000101;
		else
			tmp = 0x00000000;
		nv_mask(dev, 0x612300 + (or * 0x800), 0x00000707, tmp);
		break;
	default:
		break;
	}

ack:
	/* Ack the supervisor interrupt. */
	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}
  853. static void
  854. nvd0_display_unk4_handler(struct drm_device *dev)
  855. {
  856. struct nvd0_display *disp = nvd0_display(dev);
  857. struct dcb_entry *dcb;
  858. int crtc = disp->irq.crtc;
  859. int pclk = disp->irq.pclk;
  860. NV_INFO(dev, "PDISP: 4 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
  861. nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));
  862. dcb = disp->irq.ena;
  863. disp->irq.ena = NULL;
  864. if (!dcb)
  865. goto ack;
  866. nouveau_bios_run_display_table(dev, disp->irq.script, pclk, dcb, crtc);
  867. ack:
  868. nv_wr32(dev, 0x6101d4, 0x00000000);
  869. nv_wr32(dev, 0x6109d4, 0x00000000);
  870. nv_wr32(dev, 0x6101d0, 0x80000000);
  871. }
/* Top-level PDISP interrupt dispatcher (registered as IRQ handler 26).
 * Each recognized source is handled, acked, and cleared from the local
 * copy of the status so unexpected bits can be reported at the end.
 */
static void
nvd0_display_intr(struct drm_device *dev)
{
	u32 intr = nv_rd32(dev, 0x610088);

	/* EVO channel exception: log the offending method and ack. */
	if (intr & 0x00000002) {
		u32 stat = nv_rd32(dev, 0x61009c);
		int chid = ffs(stat) - 1;
		if (chid >= 0) {
			u32 mthd = nv_rd32(dev, 0x6101f0 + (chid * 12));
			u32 data = nv_rd32(dev, 0x6101f4 + (chid * 12));
			u32 unkn = nv_rd32(dev, 0x6101f8 + (chid * 12));

			NV_INFO(dev, "EvoCh: chid %d mthd 0x%04x data 0x%08x "
				     "0x%08x 0x%08x\n",
				chid, (mthd & 0x0000ffc), data, mthd, unkn);
			nv_wr32(dev, 0x61009c, (1 << chid));
			nv_wr32(dev, 0x6101f0 + (chid * 12), 0x90000000);
		}

		intr &= ~0x00000002;
	}

	/* Display supervisor: ack then run the modeset stages in order. */
	if (intr & 0x00100000) {
		u32 stat = nv_rd32(dev, 0x6100ac);

		if (stat & 0x00000007) {
			nv_wr32(dev, 0x6100ac, (stat & 0x00000007));

			if (stat & 0x00000001)
				nvd0_display_unk1_handler(dev);
			if (stat & 0x00000002)
				nvd0_display_unk2_handler(dev);
			if (stat & 0x00000004)
				nvd0_display_unk4_handler(dev);
			stat &= ~0x00000007;
		}

		if (stat) {
			NV_INFO(dev, "PDISP: unknown intr24 0x%08x\n", stat);
			nv_wr32(dev, 0x6100ac, stat);
		}

		intr &= ~0x00100000;
	}

	/* NOTE(review): per-head status registers, acked without further
	 * handling — presumably nothing here is needed yet; confirm.
	 */
	if (intr & 0x01000000) {
		u32 stat = nv_rd32(dev, 0x6100bc);
		nv_wr32(dev, 0x6100bc, stat);
		intr &= ~0x01000000;
	}

	if (intr & 0x02000000) {
		u32 stat = nv_rd32(dev, 0x6108bc);
		nv_wr32(dev, 0x6108bc, stat);
		intr &= ~0x02000000;
	}

	if (intr)
		NV_INFO(dev, "PDISP: unknown intr 0x%08x\n", intr);
}
  922. /******************************************************************************
  923. * Init
  924. *****************************************************************************/
/* Quiesce the display engine: halt both cursor channels (13 and 14)
 * and the core (master) channel, masking their interrupts.  Each
 * channel is only touched if it is currently active.
 */
static void
nvd0_display_fini(struct drm_device *dev)
{
	int i;

	/* fini cursors */
	for (i = 14; i >= 13; i--) {
		if (!(nv_rd32(dev, 0x610490 + (i * 0x10)) & 0x00000001))
			continue;

		nv_mask(dev, 0x610490 + (i * 0x10), 0x00000001, 0x00000000);
		nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00000000);
		nv_mask(dev, 0x610090, 1 << i, 0x00000000);
		nv_mask(dev, 0x6100a0, 1 << i, 0x00000000);
	}

	/* fini master */
	if (nv_rd32(dev, 0x610490) & 0x00000010) {
		nv_mask(dev, 0x610490, 0x00000010, 0x00000000);
		nv_mask(dev, 0x610490, 0x00000003, 0x00000000);
		nv_wait(dev, 0x610490, 0x80000000, 0x00000000);
		nv_mask(dev, 0x610090, 0x00000001, 0x00000000);
		nv_mask(dev, 0x6100a0, 0x00000001, 0x00000000);
	}
}
/* Bring up the display engine: clear any pending supervisor state, seed
 * the DAC/SOR/head control mirrors, point the hardware at our hash
 * table, start the core (master) channel and both cursor channels, and
 * submit an initial method stream on the core channel.
 *
 * Returns 0 on success, -EBUSY if the hardware fails to respond.
 */
int
nvd0_display_init(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 *push;
	int i;

	/* A supervisor interrupt left pending from before we took over?
	 * Ack it and wait for the engine to settle before reprogramming.
	 */
	if (nv_rd32(dev, 0x6100ac) & 0x00000100) {
		nv_wr32(dev, 0x6100ac, 0x00000100);
		nv_mask(dev, 0x6194e8, 0x00000001, 0x00000000);
		if (!nv_wait(dev, 0x6194e8, 0x00000002, 0x00000000)) {
			NV_ERROR(dev, "PDISP: 0x6194e8 0x%08x\n",
				 nv_rd32(dev, 0x6194e8));
			return -EBUSY;
		}
	}

	/* nfi what these are exactly, i do know that SOR_MODE_CTRL won't
	 * work at all unless you do the SOR part below.
	 */
	for (i = 0; i < 3; i++) {
		u32 dac = nv_rd32(dev, 0x61a000 + (i * 0x800));
		nv_wr32(dev, 0x6101c0 + (i * 0x800), dac);
	}

	for (i = 0; i < 4; i++) {
		u32 sor = nv_rd32(dev, 0x61c000 + (i * 0x800));
		nv_wr32(dev, 0x6301c4 + (i * 0x800), sor);
	}

	for (i = 0; i < 2; i++) {
		u32 crtc0 = nv_rd32(dev, 0x616104 + (i * 0x800));
		u32 crtc1 = nv_rd32(dev, 0x616108 + (i * 0x800));
		u32 crtc2 = nv_rd32(dev, 0x61610c + (i * 0x800));
		nv_wr32(dev, 0x6101b4 + (i * 0x800), crtc0);
		nv_wr32(dev, 0x6101b8 + (i * 0x800), crtc1);
		nv_wr32(dev, 0x6101bc + (i * 0x800), crtc2);
	}

	/* point at our hash table / objects, enable interrupts */
	nv_wr32(dev, 0x610010, (disp->mem->vinst >> 8) | 9);
	nv_mask(dev, 0x6100b0, 0x00000307, 0x00000307);

	/* init master */
	nv_wr32(dev, 0x610494, (disp->evo[0].handle >> 8) | 3);
	nv_wr32(dev, 0x610498, 0x00010000);
	nv_wr32(dev, 0x61049c, 0x00000001);
	nv_mask(dev, 0x610490, 0x00000010, 0x00000010);
	nv_wr32(dev, 0x640000, 0x00000000);
	nv_wr32(dev, 0x610490, 0x01000013);
	if (!nv_wait(dev, 0x610490, 0x80000000, 0x00000000)) {
		NV_ERROR(dev, "PDISP: master 0x%08x\n",
			 nv_rd32(dev, 0x610490));
		return -EBUSY;
	}
	nv_mask(dev, 0x610090, 0x00000001, 0x00000001);
	nv_mask(dev, 0x6100a0, 0x00000001, 0x00000001);

	/* init cursors */
	for (i = 13; i <= 14; i++) {
		nv_wr32(dev, 0x610490 + (i * 0x10), 0x00000001);
		if (!nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00010000)) {
			NV_ERROR(dev, "PDISP: curs%d 0x%08x\n", i,
				 nv_rd32(dev, 0x610490 + (i * 0x10)));
			return -EBUSY;
		}
		nv_mask(dev, 0x610090, 1 << i, 1 << i);
		nv_mask(dev, 0x6100a0, 1 << i, 1 << i);
	}

	/* Initial method stream on the core channel: bind the sync DMA
	 * object (method 0x0088) and kick.  NOTE(review): the purpose of
	 * methods 0x0084/0x008c is not documented here — confirm before
	 * relying on them.
	 */
	push = evo_wait(dev, 0, 32);
	if (!push)
		return -EBUSY;
	evo_mthd(push, 0x0088, 1);
	evo_data(push, MEM_SYNC);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x80000000);
	evo_mthd(push, 0x008c, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, dev, 0);
	return 0;
}
/* Tear down display state created by nvd0_display_create(): quiesce the
 * hardware first, then free the push buffer, the hash-table/DMA-object
 * memory, and the IRQ hook.  Also used as the error-path cleanup.
 */
void
nvd0_display_destroy(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nvd0_display *disp = nvd0_display(dev);
	struct pci_dev *pdev = dev->pdev;

	nvd0_display_fini(dev);

	pci_free_consistent(pdev, PAGE_SIZE, disp->evo[0].ptr, disp->evo[0].handle);
	nouveau_gpuobj_ref(NULL, &disp->mem);
	nouveau_irq_unregister(dev, 26);

	dev_priv->engine.display.priv = NULL;
	kfree(disp);
}
/* One-time construction of display state: CRTC objects for both heads,
 * encoder/connector objects from the VBIOS DCB table, the interrupt
 * hook, the EVO hash table and DMA objects, the core-channel push
 * buffer, and finally hardware init.
 *
 * Returns 0 on success or a negative errno; on failure everything
 * created so far is torn down via nvd0_display_destroy().
 */
int
nvd0_display_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_instmem_engine *pinstmem = &dev_priv->engine.instmem;
	struct dcb_table *dcb = &dev_priv->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct pci_dev *pdev = dev->pdev;
	struct nvd0_display *disp;
	struct dcb_entry *dcbe;
	int ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;
	dev_priv->engine.display.priv = disp;

	/* create crtc objects to represent the hw heads */
	for (i = 0; i < 2; i++) {
		ret = nvd0_crtc_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location != DCB_LOC_ON_CHIP) {
			NV_WARN(dev, "skipping off-chip encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}

		switch (dcbe->type) {
		case OUTPUT_TMDS:
			nvd0_sor_create(connector, dcbe);
			break;
		case OUTPUT_ANALOG:
			nvd0_dac_create(connector, dcbe);
			break;
		default:
			NV_WARN(dev, "skipping unsupported encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(dev, "%s has no encoders, removing\n",
			drm_get_connector_name(connector));
		connector->funcs->destroy(connector);
	}

	/* setup interrupt handling */
	nouveau_irq_register(dev, 26, nvd0_display_intr);

	/* hash table and dma objects for the memory areas we care about */
	ret = nouveau_gpuobj_new(dev, NULL, 0x4000, 0x10000,
				 NVOBJ_FLAG_ZERO_ALLOC, &disp->mem);
	if (ret)
		goto out;

	/* NOTE(review): layout appears to be (handle, context) hash-table
	 * pairs at 0x0000+ referencing DMA-object descriptors at 0x1000+,
	 * one per memory area (sync, vram, ...) — confirm against the EVO
	 * object format before modifying.
	 */
	nv_wo32(disp->mem, 0x1000, 0x00000049);
	nv_wo32(disp->mem, 0x1004, (disp->mem->vinst + 0x2000) >> 8);
	nv_wo32(disp->mem, 0x1008, (disp->mem->vinst + 0x2fff) >> 8);
	nv_wo32(disp->mem, 0x100c, 0x00000000);
	nv_wo32(disp->mem, 0x1010, 0x00000000);
	nv_wo32(disp->mem, 0x1014, 0x00000000);
	nv_wo32(disp->mem, 0x0000, MEM_SYNC);
	nv_wo32(disp->mem, 0x0004, (0x1000 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1020, 0x00000049);
	nv_wo32(disp->mem, 0x1024, 0x00000000);
	nv_wo32(disp->mem, 0x1028, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x102c, 0x00000000);
	nv_wo32(disp->mem, 0x1030, 0x00000000);
	nv_wo32(disp->mem, 0x1034, 0x00000000);
	nv_wo32(disp->mem, 0x0008, MEM_VRAM);
	nv_wo32(disp->mem, 0x000c, (0x1020 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1040, 0x00000009);
	nv_wo32(disp->mem, 0x1044, 0x00000000);
	nv_wo32(disp->mem, 0x1048, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x104c, 0x00000000);
	nv_wo32(disp->mem, 0x1050, 0x00000000);
	nv_wo32(disp->mem, 0x1054, 0x00000000);
	nv_wo32(disp->mem, 0x0010, NvEvoVRAM_LP);
	nv_wo32(disp->mem, 0x0014, (0x1040 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1060, 0x0fe00009);
	nv_wo32(disp->mem, 0x1064, 0x00000000);
	nv_wo32(disp->mem, 0x1068, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x106c, 0x00000000);
	nv_wo32(disp->mem, 0x1070, 0x00000000);
	nv_wo32(disp->mem, 0x1074, 0x00000000);
	nv_wo32(disp->mem, 0x0018, NvEvoFB32);
	nv_wo32(disp->mem, 0x001c, (0x1060 << 9) | 0x00000001);

	pinstmem->flush(dev);

	/* push buffers for evo channels */
	disp->evo[0].ptr =
		pci_alloc_consistent(pdev, PAGE_SIZE, &disp->evo[0].handle);
	if (!disp->evo[0].ptr) {
		ret = -ENOMEM;
		goto out;
	}

	ret = nvd0_display_init(dev);
	if (ret)
		goto out;

out:
	/* Single cleanup path: destroy tears down whatever was created. */
	if (ret)
		nvd0_display_destroy(dev);
	return ret;
}