nv04_graph.c 37 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340
  1. /*
  2. * Copyright 2007 Stephane Marchesin
  3. * All Rights Reserved.
  4. *
  5. * Permission is hereby granted, free of charge, to any person obtaining a
  6. * copy of this software and associated documentation files (the "Software"),
  7. * to deal in the Software without restriction, including without limitation
  8. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  9. * and/or sell copies of the Software, and to permit persons to whom the
  10. * Software is furnished to do so, subject to the following conditions:
  11. *
  12. * The above copyright notice and this permission notice (including the next
  13. * paragraph) shall be included in all copies or substantial portions of the
  14. * Software.
  15. *
  16. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  17. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  18. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  19. * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
  20. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  21. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
  22. * DEALINGS IN THE SOFTWARE.
  23. */
  24. #include "drmP.h"
  25. #include "drm.h"
  26. #include "nouveau_drm.h"
  27. #include "nouveau_drv.h"
  28. #include "nouveau_hw.h"
  29. #include "nouveau_util.h"
  30. #include "nouveau_ramht.h"
  31. static int nv04_graph_register(struct drm_device *dev);
  32. static void nv04_graph_isr(struct drm_device *dev);
/* PGRAPH registers that make up a channel's graphics context on NV04/NV05.
 * A software context switch saves/restores exactly these registers, in this
 * order, into struct graph_state::nv04[]; ctx_reg() maps a register address
 * back to its slot in that array. */
static uint32_t nv04_graph_ctx_regs[] = {
	/* surf3d clip rectangle (see nv04_graph_mthd_surf3d_clip_h/_v) */
	0x0040053c,
	0x00400544,
	0x00400540,
	0x00400548,
	NV04_PGRAPH_CTX_SWITCH1,
	NV04_PGRAPH_CTX_SWITCH2,
	NV04_PGRAPH_CTX_SWITCH3,
	NV04_PGRAPH_CTX_SWITCH4,
	NV04_PGRAPH_CTX_CACHE1,
	NV04_PGRAPH_CTX_CACHE2,
	NV04_PGRAPH_CTX_CACHE3,
	NV04_PGRAPH_CTX_CACHE4,
	0x00400184,
	0x004001a4,
	0x004001c4,
	0x004001e4,
	0x00400188,
	0x004001a8,
	0x004001c8,
	0x004001e8,
	0x0040018c,
	0x004001ac,
	0x004001cc,
	0x004001ec,
	0x00400190,
	0x004001b0,
	0x004001d0,
	0x004001f0,
	0x00400194,
	0x004001b4,
	0x004001d4,
	0x004001f4,
	0x00400198,
	0x004001b8,
	0x004001d8,
	0x004001f8,
	0x0040019c,
	0x004001bc,
	0x004001dc,
	0x004001fc,
	0x00400174,
	NV04_PGRAPH_DMA_START_0,
	NV04_PGRAPH_DMA_START_1,
	NV04_PGRAPH_DMA_LENGTH,
	NV04_PGRAPH_DMA_MISC,
	NV04_PGRAPH_DMA_PITCH,
	/* render buffer 0-5 offset/base/limit triples */
	NV04_PGRAPH_BOFFSET0,
	NV04_PGRAPH_BBASE0,
	NV04_PGRAPH_BLIMIT0,
	NV04_PGRAPH_BOFFSET1,
	NV04_PGRAPH_BBASE1,
	NV04_PGRAPH_BLIMIT1,
	NV04_PGRAPH_BOFFSET2,
	NV04_PGRAPH_BBASE2,
	NV04_PGRAPH_BLIMIT2,
	NV04_PGRAPH_BOFFSET3,
	NV04_PGRAPH_BBASE3,
	NV04_PGRAPH_BLIMIT3,
	NV04_PGRAPH_BOFFSET4,
	NV04_PGRAPH_BBASE4,
	NV04_PGRAPH_BLIMIT4,
	NV04_PGRAPH_BOFFSET5,
	NV04_PGRAPH_BBASE5,
	NV04_PGRAPH_BLIMIT5,
	NV04_PGRAPH_BPITCH0,
	NV04_PGRAPH_BPITCH1,
	NV04_PGRAPH_BPITCH2,
	NV04_PGRAPH_BPITCH3,
	NV04_PGRAPH_BPITCH4,
	NV04_PGRAPH_SURFACE,
	NV04_PGRAPH_STATE,
	NV04_PGRAPH_BSWIZZLE2,
	NV04_PGRAPH_BSWIZZLE5,
	NV04_PGRAPH_BPIXEL,
	NV04_PGRAPH_NOTIFY,
	NV04_PGRAPH_PATT_COLOR0,
	NV04_PGRAPH_PATT_COLOR1,
	/* 64-entry pattern colour RAM */
	NV04_PGRAPH_PATT_COLORRAM+0x00,
	NV04_PGRAPH_PATT_COLORRAM+0x04,
	NV04_PGRAPH_PATT_COLORRAM+0x08,
	NV04_PGRAPH_PATT_COLORRAM+0x0c,
	NV04_PGRAPH_PATT_COLORRAM+0x10,
	NV04_PGRAPH_PATT_COLORRAM+0x14,
	NV04_PGRAPH_PATT_COLORRAM+0x18,
	NV04_PGRAPH_PATT_COLORRAM+0x1c,
	NV04_PGRAPH_PATT_COLORRAM+0x20,
	NV04_PGRAPH_PATT_COLORRAM+0x24,
	NV04_PGRAPH_PATT_COLORRAM+0x28,
	NV04_PGRAPH_PATT_COLORRAM+0x2c,
	NV04_PGRAPH_PATT_COLORRAM+0x30,
	NV04_PGRAPH_PATT_COLORRAM+0x34,
	NV04_PGRAPH_PATT_COLORRAM+0x38,
	NV04_PGRAPH_PATT_COLORRAM+0x3c,
	NV04_PGRAPH_PATT_COLORRAM+0x40,
	NV04_PGRAPH_PATT_COLORRAM+0x44,
	NV04_PGRAPH_PATT_COLORRAM+0x48,
	NV04_PGRAPH_PATT_COLORRAM+0x4c,
	NV04_PGRAPH_PATT_COLORRAM+0x50,
	NV04_PGRAPH_PATT_COLORRAM+0x54,
	NV04_PGRAPH_PATT_COLORRAM+0x58,
	NV04_PGRAPH_PATT_COLORRAM+0x5c,
	NV04_PGRAPH_PATT_COLORRAM+0x60,
	NV04_PGRAPH_PATT_COLORRAM+0x64,
	NV04_PGRAPH_PATT_COLORRAM+0x68,
	NV04_PGRAPH_PATT_COLORRAM+0x6c,
	NV04_PGRAPH_PATT_COLORRAM+0x70,
	NV04_PGRAPH_PATT_COLORRAM+0x74,
	NV04_PGRAPH_PATT_COLORRAM+0x78,
	NV04_PGRAPH_PATT_COLORRAM+0x7c,
	NV04_PGRAPH_PATT_COLORRAM+0x80,
	NV04_PGRAPH_PATT_COLORRAM+0x84,
	NV04_PGRAPH_PATT_COLORRAM+0x88,
	NV04_PGRAPH_PATT_COLORRAM+0x8c,
	NV04_PGRAPH_PATT_COLORRAM+0x90,
	NV04_PGRAPH_PATT_COLORRAM+0x94,
	NV04_PGRAPH_PATT_COLORRAM+0x98,
	NV04_PGRAPH_PATT_COLORRAM+0x9c,
	NV04_PGRAPH_PATT_COLORRAM+0xa0,
	NV04_PGRAPH_PATT_COLORRAM+0xa4,
	NV04_PGRAPH_PATT_COLORRAM+0xa8,
	NV04_PGRAPH_PATT_COLORRAM+0xac,
	NV04_PGRAPH_PATT_COLORRAM+0xb0,
	NV04_PGRAPH_PATT_COLORRAM+0xb4,
	NV04_PGRAPH_PATT_COLORRAM+0xb8,
	NV04_PGRAPH_PATT_COLORRAM+0xbc,
	NV04_PGRAPH_PATT_COLORRAM+0xc0,
	NV04_PGRAPH_PATT_COLORRAM+0xc4,
	NV04_PGRAPH_PATT_COLORRAM+0xc8,
	NV04_PGRAPH_PATT_COLORRAM+0xcc,
	NV04_PGRAPH_PATT_COLORRAM+0xd0,
	NV04_PGRAPH_PATT_COLORRAM+0xd4,
	NV04_PGRAPH_PATT_COLORRAM+0xd8,
	NV04_PGRAPH_PATT_COLORRAM+0xdc,
	NV04_PGRAPH_PATT_COLORRAM+0xe0,
	NV04_PGRAPH_PATT_COLORRAM+0xe4,
	NV04_PGRAPH_PATT_COLORRAM+0xe8,
	NV04_PGRAPH_PATT_COLORRAM+0xec,
	NV04_PGRAPH_PATT_COLORRAM+0xf0,
	NV04_PGRAPH_PATT_COLORRAM+0xf4,
	NV04_PGRAPH_PATT_COLORRAM+0xf8,
	NV04_PGRAPH_PATT_COLORRAM+0xfc,
	NV04_PGRAPH_PATTERN,
	0x0040080c,
	NV04_PGRAPH_PATTERN_SHAPE,
	0x00400600,
	NV04_PGRAPH_ROP3,
	NV04_PGRAPH_CHROMA,
	NV04_PGRAPH_BETA_AND,
	NV04_PGRAPH_BETA_PREMULT,
	NV04_PGRAPH_CONTROL0,
	NV04_PGRAPH_CONTROL1,
	NV04_PGRAPH_CONTROL2,
	NV04_PGRAPH_BLEND,
	NV04_PGRAPH_STORED_FMT,
	NV04_PGRAPH_SOURCE_COLOR,
	0x00400560,
	0x00400568,
	0x00400564,
	0x0040056c,
	0x00400400,
	0x00400480,
	0x00400404,
	0x00400484,
	0x00400408,
	0x00400488,
	0x0040040c,
	0x0040048c,
	0x00400410,
	0x00400490,
	0x00400414,
	0x00400494,
	0x00400418,
	0x00400498,
	0x0040041c,
	0x0040049c,
	0x00400420,
	0x004004a0,
	0x00400424,
	0x004004a4,
	0x00400428,
	0x004004a8,
	0x0040042c,
	0x004004ac,
	0x00400430,
	0x004004b0,
	0x00400434,
	0x004004b4,
	0x00400438,
	0x004004b8,
	0x0040043c,
	0x004004bc,
	0x00400440,
	0x004004c0,
	0x00400444,
	0x004004c4,
	0x00400448,
	0x004004c8,
	0x0040044c,
	0x004004cc,
	0x00400450,
	0x004004d0,
	0x00400454,
	0x004004d4,
	0x00400458,
	0x004004d8,
	0x0040045c,
	0x004004dc,
	0x00400460,
	0x004004e0,
	0x00400464,
	0x004004e4,
	0x00400468,
	0x004004e8,
	0x0040046c,
	0x004004ec,
	0x00400470,
	0x004004f0,
	0x00400474,
	0x004004f4,
	0x00400478,
	0x004004f8,
	0x0040047c,
	0x004004fc,
	0x00400534,
	0x00400538,
	0x00400514,
	0x00400518,
	0x0040051c,
	0x00400520,
	0x00400524,
	0x00400528,
	0x0040052c,
	0x00400530,
	0x00400d00,
	0x00400d40,
	0x00400d80,
	0x00400d04,
	0x00400d44,
	0x00400d84,
	0x00400d08,
	0x00400d48,
	0x00400d88,
	0x00400d0c,
	0x00400d4c,
	0x00400d8c,
	0x00400d10,
	0x00400d50,
	0x00400d90,
	0x00400d14,
	0x00400d54,
	0x00400d94,
	0x00400d18,
	0x00400d58,
	0x00400d98,
	0x00400d1c,
	0x00400d5c,
	0x00400d9c,
	0x00400d20,
	0x00400d60,
	0x00400da0,
	0x00400d24,
	0x00400d64,
	0x00400da4,
	0x00400d28,
	0x00400d68,
	0x00400da8,
	0x00400d2c,
	0x00400d6c,
	0x00400dac,
	0x00400d30,
	0x00400d70,
	0x00400db0,
	0x00400d34,
	0x00400d74,
	0x00400db4,
	0x00400d38,
	0x00400d78,
	0x00400db8,
	0x00400d3c,
	0x00400d7c,
	0x00400dbc,
	0x00400590,
	0x00400594,
	0x00400598,
	0x0040059c,
	0x004005a8,
	0x004005ac,
	0x004005b0,
	0x004005b4,
	0x004005c0,
	0x004005c4,
	0x004005c8,
	0x004005cc,
	0x004005d0,
	0x004005d4,
	0x004005d8,
	0x004005dc,
	0x004005e0,
	NV04_PGRAPH_PASSTHRU_0,
	NV04_PGRAPH_PASSTHRU_1,
	NV04_PGRAPH_PASSTHRU_2,
	NV04_PGRAPH_DVD_COLORFMT,
	NV04_PGRAPH_SCALED_FORMAT,
	NV04_PGRAPH_MISC24_0,
	NV04_PGRAPH_MISC24_1,
	NV04_PGRAPH_MISC24_2,
	0x00400500,
	0x00400504,
	NV04_PGRAPH_VALID1,
	NV04_PGRAPH_VALID2,
	/* must stay in the table: create_context seeds its default via ctx_reg() */
	NV04_PGRAPH_DEBUG_3
};
/* Per-channel software copy of the PGRAPH context; one slot per entry of
 * nv04_graph_ctx_regs[], in the same order. */
struct graph_state {
	uint32_t nv04[ARRAY_SIZE(nv04_graph_ctx_regs)];
};
  349. struct nouveau_channel *
  350. nv04_graph_channel(struct drm_device *dev)
  351. {
  352. struct drm_nouveau_private *dev_priv = dev->dev_private;
  353. int chid = dev_priv->engine.fifo.channels;
  354. if (nv_rd32(dev, NV04_PGRAPH_CTX_CONTROL) & 0x00010000)
  355. chid = nv_rd32(dev, NV04_PGRAPH_CTX_USER) >> 24;
  356. if (chid >= dev_priv->engine.fifo.channels)
  357. return NULL;
  358. return dev_priv->channels.ptr[chid];
  359. }
  360. static void
  361. nv04_graph_context_switch(struct drm_device *dev)
  362. {
  363. struct drm_nouveau_private *dev_priv = dev->dev_private;
  364. struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
  365. struct nouveau_channel *chan = NULL;
  366. int chid;
  367. nouveau_wait_for_idle(dev);
  368. /* If previous context is valid, we need to save it */
  369. pgraph->unload_context(dev);
  370. /* Load context for next channel */
  371. chid = dev_priv->engine.fifo.channel_id(dev);
  372. chan = dev_priv->channels.ptr[chid];
  373. if (chan)
  374. nv04_graph_load_context(chan);
  375. }
  376. static uint32_t *ctx_reg(struct graph_state *ctx, uint32_t reg)
  377. {
  378. int i;
  379. for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++) {
  380. if (nv04_graph_ctx_regs[i] == reg)
  381. return &ctx->nv04[i];
  382. }
  383. return NULL;
  384. }
  385. int nv04_graph_create_context(struct nouveau_channel *chan)
  386. {
  387. struct graph_state *pgraph_ctx;
  388. NV_DEBUG(chan->dev, "nv04_graph_context_create %d\n", chan->id);
  389. chan->pgraph_ctx = pgraph_ctx = kzalloc(sizeof(*pgraph_ctx),
  390. GFP_KERNEL);
  391. if (pgraph_ctx == NULL)
  392. return -ENOMEM;
  393. *ctx_reg(pgraph_ctx, NV04_PGRAPH_DEBUG_3) = 0xfad4ff31;
  394. return 0;
  395. }
/* Tear down a channel's PGRAPH context.  Runs with the context-switch lock
 * held and PGRAPH fifo access disabled so the hardware cannot switch onto
 * the context while it is being freed. */
void nv04_graph_destroy_context(struct nouveau_channel *chan)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
	struct graph_state *pgraph_ctx = chan->pgraph_ctx;
	unsigned long flags;

	spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
	pgraph->fifo_access(dev, false);

	/* Unload the context if it's the currently active one */
	if (pgraph->channel(dev) == chan)
		pgraph->unload_context(dev);

	/* Free the context resources */
	kfree(pgraph_ctx);
	chan->pgraph_ctx = NULL;

	pgraph->fifo_access(dev, true);
	spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);
}
  414. int nv04_graph_load_context(struct nouveau_channel *chan)
  415. {
  416. struct drm_device *dev = chan->dev;
  417. struct graph_state *pgraph_ctx = chan->pgraph_ctx;
  418. uint32_t tmp;
  419. int i;
  420. for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
  421. nv_wr32(dev, nv04_graph_ctx_regs[i], pgraph_ctx->nv04[i]);
  422. nv_wr32(dev, NV04_PGRAPH_CTX_CONTROL, 0x10010100);
  423. tmp = nv_rd32(dev, NV04_PGRAPH_CTX_USER) & 0x00ffffff;
  424. nv_wr32(dev, NV04_PGRAPH_CTX_USER, tmp | chan->id << 24);
  425. tmp = nv_rd32(dev, NV04_PGRAPH_FFINTFC_ST2);
  426. nv_wr32(dev, NV04_PGRAPH_FFINTFC_ST2, tmp & 0x000fffff);
  427. return 0;
  428. }
  429. int
  430. nv04_graph_unload_context(struct drm_device *dev)
  431. {
  432. struct drm_nouveau_private *dev_priv = dev->dev_private;
  433. struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
  434. struct nouveau_channel *chan = NULL;
  435. struct graph_state *ctx;
  436. uint32_t tmp;
  437. int i;
  438. chan = pgraph->channel(dev);
  439. if (!chan)
  440. return 0;
  441. ctx = chan->pgraph_ctx;
  442. for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
  443. ctx->nv04[i] = nv_rd32(dev, nv04_graph_ctx_regs[i]);
  444. nv_wr32(dev, NV04_PGRAPH_CTX_CONTROL, 0x10000000);
  445. tmp = nv_rd32(dev, NV04_PGRAPH_CTX_USER) & 0x00ffffff;
  446. tmp |= (dev_priv->engine.fifo.channels - 1) << 24;
  447. nv_wr32(dev, NV04_PGRAPH_CTX_USER, tmp);
  448. return 0;
  449. }
/* Create a 16-byte graphics object of the given class in instance memory
 * and insert it into the channel's RAMHT under 'handle'.
 * Returns 0 on success or a negative error code. */
int
nv04_graph_object_new(struct nouveau_channel *chan, u32 handle, u16 class)
{
	struct drm_device *dev = chan->dev;
	struct nouveau_gpuobj *obj = NULL;
	int ret;

	ret = nouveau_gpuobj_new(dev, chan, 16, 16, NVOBJ_FLAG_ZERO_FREE, &obj);
	if (ret)
		return ret;
	obj->engine = 1;	/* PGRAPH */
	obj->class = class;
#ifdef __BIG_ENDIAN
	/* 0x00080000 flags the object as big-endian */
	nv_wo32(obj, 0x00, 0x00080000 | class);
#else
	nv_wo32(obj, 0x00, class);
#endif
	/* remaining grobj words start zeroed (see the format comment below) */
	nv_wo32(obj, 0x04, 0x00000000);
	nv_wo32(obj, 0x08, 0x00000000);
	nv_wo32(obj, 0x0c, 0x00000000);

	ret = nouveau_ramht_insert(chan, handle, obj);
	/* RAMHT holds its own reference (or insert failed); drop ours */
	nouveau_gpuobj_ref(NULL, &obj);
	return ret;
}
/* Bring up the PGRAPH engine: reset it via PMC_ENABLE, register the
 * object classes and the IRQ handler, then program the debug/control
 * registers.  Returns 0 on success or a negative error code.
 * NOTE: the write sequence below is order-dependent; do not reorder. */
int nv04_graph_init(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	uint32_t tmp;
	int ret;

	/* Pulse the PGRAPH enable bit low then high to reset the engine */
	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) &
			~NV_PMC_ENABLE_PGRAPH);
	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) |
			 NV_PMC_ENABLE_PGRAPH);

	ret = nv04_graph_register(dev);
	if (ret)
		return ret;

	/* Enable PGRAPH interrupts */
	nouveau_irq_register(dev, 12, nv04_graph_isr);
	nv_wr32(dev, NV03_PGRAPH_INTR, 0xFFFFFFFF);	/* ack anything pending */
	nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);

	nv_wr32(dev, NV04_PGRAPH_VALID1, 0);
	nv_wr32(dev, NV04_PGRAPH_VALID2, 0);
	/* DEBUG register values below were derived by comparing against the
	 * proprietary blob and the Haiku driver, as noted inline. */
	/*nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x000001FF);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x001FFFFF);*/
	nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x1231c000);
	/*1231C000 blob, 001 haiku*/
	/*V_WRITE(NV04_PGRAPH_DEBUG_1, 0xf2d91100);*/
	nv_wr32(dev, NV04_PGRAPH_DEBUG_1, 0x72111100);
	/*0x72111100 blob , 01 haiku*/
	/*nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x11d5f870);*/
	nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x11d5f071);
	/*haiku same*/
	/*nv_wr32(dev, NV04_PGRAPH_DEBUG_3, 0xfad4ff31);*/
	nv_wr32(dev, NV04_PGRAPH_DEBUG_3, 0xf0d4ff31);
	/*haiku and blob 10d4*/

	nv_wr32(dev, NV04_PGRAPH_STATE        , 0xFFFFFFFF);
	nv_wr32(dev, NV04_PGRAPH_CTX_CONTROL  , 0x10000100);
	/* Point CTX_USER at the highest channel id, same as unload_context */
	tmp  = nv_rd32(dev, NV04_PGRAPH_CTX_USER) & 0x00ffffff;
	tmp |= (dev_priv->engine.fifo.channels - 1) << 24;
	nv_wr32(dev, NV04_PGRAPH_CTX_USER, tmp);

	/* These don't belong here, they're part of a per-channel context */
	nv_wr32(dev, NV04_PGRAPH_PATTERN_SHAPE, 0x00000000);
	nv_wr32(dev, NV04_PGRAPH_BETA_AND     , 0xFFFFFFFF);

	return 0;
}
/* Shut down PGRAPH: mask its interrupts first, then drop the IRQ handler. */
void nv04_graph_takedown(struct drm_device *dev)
{
	nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0x00000000);
	nouveau_irq_unregister(dev, 12);
}
  519. void
  520. nv04_graph_fifo_access(struct drm_device *dev, bool enabled)
  521. {
  522. if (enabled)
  523. nv_wr32(dev, NV04_PGRAPH_FIFO,
  524. nv_rd32(dev, NV04_PGRAPH_FIFO) | 1);
  525. else
  526. nv_wr32(dev, NV04_PGRAPH_FIFO,
  527. nv_rd32(dev, NV04_PGRAPH_FIFO) & ~1);
  528. }
/* Software method: record the SET_REF value as the last fence sequence
 * seen from the IRQ path.  Always returns 0 (method handled). */
static int
nv04_graph_mthd_set_ref(struct nouveau_channel *chan,
			u32 class, u32 mthd, u32 data)
{
	atomic_set(&chan->fence.last_sequence_irq, data);
	return 0;
}
/* Software method: complete a pending page flip on this channel and,
 * on success, re-point the CRTC scanout base at the flipped buffer.
 * Always returns 0 (method handled). */
int
nv04_graph_mthd_page_flip(struct nouveau_channel *chan,
			  u32 class, u32 mthd, u32 data)
{
	struct drm_device *dev = chan->dev;
	struct nouveau_page_flip_state s;

	if (!nouveau_finish_page_flip(chan, &s))
		/* byte offset of pixel (x, y) in the new framebuffer */
		nv_set_crtc_base(dev, s.crtc,
				 s.offset + s.y * s.pitch + s.x * s.bpp / 8);

	return 0;
}
  547. /*
  548. * Software methods, why they are needed, and how they all work:
  549. *
  550. * NV04 and NV05 keep most of the state in PGRAPH context itself, but some
  551. * 2d engine settings are kept inside the grobjs themselves. The grobjs are
  552. * 3 words long on both. grobj format on NV04 is:
  553. *
  554. * word 0:
  555. * - bits 0-7: class
  556. * - bit 12: color key active
  557. * - bit 13: clip rect active
  558. * - bit 14: if set, destination surface is swizzled and taken from buffer 5
  559. * [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
  560. * from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
  561. * NV03_CONTEXT_SURFACE_DST].
  562. * - bits 15-17: 2d operation [aka patch config]
  563. * - bit 24: patch valid [enables rendering using this object]
  564. * - bit 25: surf3d valid [for tex_tri and multitex_tri only]
  565. * word 1:
  566. * - bits 0-1: mono format
  567. * - bits 8-13: color format
  568. * - bits 16-31: DMA_NOTIFY instance
  569. * word 2:
  570. * - bits 0-15: DMA_A instance
  571. * - bits 16-31: DMA_B instance
  572. *
  573. * On NV05 it's:
  574. *
  575. * word 0:
  576. * - bits 0-7: class
  577. * - bit 12: color key active
  578. * - bit 13: clip rect active
  579. * - bit 14: if set, destination surface is swizzled and taken from buffer 5
  580. * [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
  581. * from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
  582. * NV03_CONTEXT_SURFACE_DST].
  583. * - bits 15-17: 2d operation [aka patch config]
  584. * - bits 20-22: dither mode
  585. * - bit 24: patch valid [enables rendering using this object]
  586. * - bit 25: surface_dst/surface_color/surf2d/surf3d valid
  587. * - bit 26: surface_src/surface_zeta valid
  588. * - bit 27: pattern valid
  589. * - bit 28: rop valid
  590. * - bit 29: beta1 valid
  591. * - bit 30: beta4 valid
  592. * word 1:
  593. * - bits 0-1: mono format
  594. * - bits 8-13: color format
  595. * - bits 16-31: DMA_NOTIFY instance
  596. * word 2:
  597. * - bits 0-15: DMA_A instance
  598. * - bits 16-31: DMA_B instance
  599. *
  600. * NV05 will set/unset the relevant valid bits when you poke the relevant
  601. * object-binding methods with object of the proper type, or with the NULL
  602. * type. It'll only allow rendering using the grobj if all needed objects
  603. * are bound. The needed set of objects depends on selected operation: for
  604. * example rop object is needed by ROP_AND, but not by SRCCOPY_AND.
  605. *
  606. * NV04 doesn't have these methods implemented at all, and doesn't have the
  607. * relevant bits in grobj. Instead, it'll allow rendering whenever bit 24
  608. * is set. So we have to emulate them in software, internally keeping the
  609. * same bits as NV05 does. Since grobjs are aligned to 16 bytes on nv04,
  610. * but the last word isn't actually used for anything, we abuse it for this
  611. * purpose.
  612. *
  613. * Actually, NV05 can optionally check bit 24 too, but we disable this since
  614. * there's no use for it.
  615. *
  616. * For unknown reasons, NV04 implements surf3d binding in hardware as an
  617. * exception. Also for unknown reasons, NV04 doesn't implement the clipping
  618. * methods on the surf3d object, so we have to emulate them too.
  619. */
/* Read-modify-write word 0 of the currently bound grobj (located through
 * CTX_SWITCH4's instance pointer), then mirror the new value into
 * CTX_SWITCH1 and the trapped subchannel's CTX_CACHE1 slot so hardware
 * state stays consistent with instance memory. */
static void
nv04_graph_set_ctx1(struct nouveau_channel *chan, u32 mask, u32 value)
{
	struct drm_device *dev = chan->dev;
	u32 instance = (nv_rd32(dev, NV04_PGRAPH_CTX_SWITCH4) & 0xffff) << 4;
	int subc = (nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR) >> 13) & 0x7;
	u32 tmp;

	tmp  = nv_ri32(dev, instance);
	tmp &= ~mask;
	tmp |= value;

	nv_wi32(dev, instance, tmp);
	nv_wr32(dev, NV04_PGRAPH_CTX_SWITCH1, tmp);
	nv_wr32(dev, NV04_PGRAPH_CTX_CACHE1 + (subc<<2), tmp);
}
/* Update the NV05-style validity bits kept in word 3 of the current grobj
 * (the word NV04 leaves unused — see the big comment above), then recompute
 * whether the grobj is allowed to render and push that into ctx1 bit 24
 * ("patch valid").  Which objects are required depends on the grobj's 2d
 * operation (ctx1 bits 15-17). */
static void
nv04_graph_set_ctx_val(struct nouveau_channel *chan, u32 mask, u32 value)
{
	struct drm_device *dev = chan->dev;
	u32 instance = (nv_rd32(dev, NV04_PGRAPH_CTX_SWITCH4) & 0xffff) << 4;
	u32 tmp, ctx1;
	int class, op, valid = 1;

	ctx1 = nv_ri32(dev, instance);
	class = ctx1 & 0xff;
	op = (ctx1 >> 15) & 7;

	/* RMW the software validity word at grobj offset 0xc */
	tmp = nv_ri32(dev, instance + 0xc);
	tmp &= ~mask;
	tmp |= value;
	nv_wi32(dev, instance + 0xc, tmp);

	/* check for valid surf2d/surf_dst/surf_color */
	if (!(tmp & 0x02000000))
		valid = 0;
	/* check for valid surf_src/surf_zeta */
	if ((class == 0x1f || class == 0x48) && !(tmp & 0x04000000))
		valid = 0;

	switch (op) {
	/* SRCCOPY_AND, SRCCOPY: no extra objects required */
	case 0:
	case 3:
		break;
	/* ROP_AND: requires pattern and rop */
	case 1:
		if (!(tmp & 0x18000000))
			valid = 0;
		break;
	/* BLEND_AND: requires beta1 */
	case 2:
		if (!(tmp & 0x20000000))
			valid = 0;
		break;
	/* SRCCOPY_PREMULT, BLEND_PREMULT: beta4 required */
	case 4:
	case 5:
		if (!(tmp & 0x40000000))
			valid = 0;
		break;
	}

	nv04_graph_set_ctx1(chan, 0x01000000, valid << 24);
}
  678. static int
  679. nv04_graph_mthd_set_operation(struct nouveau_channel *chan,
  680. u32 class, u32 mthd, u32 data)
  681. {
  682. if (data > 5)
  683. return 1;
  684. /* Old versions of the objects only accept first three operations. */
  685. if (data > 2 && class < 0x40)
  686. return 1;
  687. nv04_graph_set_ctx1(chan, 0x00038000, data << 15);
  688. /* changing operation changes set of objects needed for validation */
  689. nv04_graph_set_ctx_val(chan, 0, 0);
  690. return 0;
  691. }
  692. static int
  693. nv04_graph_mthd_surf3d_clip_h(struct nouveau_channel *chan,
  694. u32 class, u32 mthd, u32 data)
  695. {
  696. uint32_t min = data & 0xffff, max;
  697. uint32_t w = data >> 16;
  698. if (min & 0x8000)
  699. /* too large */
  700. return 1;
  701. if (w & 0x8000)
  702. /* yes, it accepts negative for some reason. */
  703. w |= 0xffff0000;
  704. max = min + w;
  705. max &= 0x3ffff;
  706. nv_wr32(chan->dev, 0x40053c, min);
  707. nv_wr32(chan->dev, 0x400544, max);
  708. return 0;
  709. }
  710. static int
  711. nv04_graph_mthd_surf3d_clip_v(struct nouveau_channel *chan,
  712. u32 class, u32 mthd, u32 data)
  713. {
  714. uint32_t min = data & 0xffff, max;
  715. uint32_t w = data >> 16;
  716. if (min & 0x8000)
  717. /* too large */
  718. return 1;
  719. if (w & 0x8000)
  720. /* yes, it accepts negative for some reason. */
  721. w |= 0xffff0000;
  722. max = min + w;
  723. max &= 0x3ffff;
  724. nv_wr32(chan->dev, 0x400540, min);
  725. nv_wr32(chan->dev, 0x400548, max);
  726. return 0;
  727. }
  728. static int
  729. nv04_graph_mthd_bind_surf2d(struct nouveau_channel *chan,
  730. u32 class, u32 mthd, u32 data)
  731. {
  732. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  733. case 0x30:
  734. nv04_graph_set_ctx1(chan, 0x00004000, 0);
  735. nv04_graph_set_ctx_val(chan, 0x02000000, 0);
  736. return 0;
  737. case 0x42:
  738. nv04_graph_set_ctx1(chan, 0x00004000, 0);
  739. nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
  740. return 0;
  741. }
  742. return 1;
  743. }
  744. static int
  745. nv04_graph_mthd_bind_surf2d_swzsurf(struct nouveau_channel *chan,
  746. u32 class, u32 mthd, u32 data)
  747. {
  748. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  749. case 0x30:
  750. nv04_graph_set_ctx1(chan, 0x00004000, 0);
  751. nv04_graph_set_ctx_val(chan, 0x02000000, 0);
  752. return 0;
  753. case 0x42:
  754. nv04_graph_set_ctx1(chan, 0x00004000, 0);
  755. nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
  756. return 0;
  757. case 0x52:
  758. nv04_graph_set_ctx1(chan, 0x00004000, 0x00004000);
  759. nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
  760. return 0;
  761. }
  762. return 1;
  763. }
  764. static int
  765. nv04_graph_mthd_bind_nv01_patt(struct nouveau_channel *chan,
  766. u32 class, u32 mthd, u32 data)
  767. {
  768. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  769. case 0x30:
  770. nv04_graph_set_ctx_val(chan, 0x08000000, 0);
  771. return 0;
  772. case 0x18:
  773. nv04_graph_set_ctx_val(chan, 0x08000000, 0x08000000);
  774. return 0;
  775. }
  776. return 1;
  777. }
  778. static int
  779. nv04_graph_mthd_bind_nv04_patt(struct nouveau_channel *chan,
  780. u32 class, u32 mthd, u32 data)
  781. {
  782. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  783. case 0x30:
  784. nv04_graph_set_ctx_val(chan, 0x08000000, 0);
  785. return 0;
  786. case 0x44:
  787. nv04_graph_set_ctx_val(chan, 0x08000000, 0x08000000);
  788. return 0;
  789. }
  790. return 1;
  791. }
  792. static int
  793. nv04_graph_mthd_bind_rop(struct nouveau_channel *chan,
  794. u32 class, u32 mthd, u32 data)
  795. {
  796. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  797. case 0x30:
  798. nv04_graph_set_ctx_val(chan, 0x10000000, 0);
  799. return 0;
  800. case 0x43:
  801. nv04_graph_set_ctx_val(chan, 0x10000000, 0x10000000);
  802. return 0;
  803. }
  804. return 1;
  805. }
  806. static int
  807. nv04_graph_mthd_bind_beta1(struct nouveau_channel *chan,
  808. u32 class, u32 mthd, u32 data)
  809. {
  810. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  811. case 0x30:
  812. nv04_graph_set_ctx_val(chan, 0x20000000, 0);
  813. return 0;
  814. case 0x12:
  815. nv04_graph_set_ctx_val(chan, 0x20000000, 0x20000000);
  816. return 0;
  817. }
  818. return 1;
  819. }
  820. static int
  821. nv04_graph_mthd_bind_beta4(struct nouveau_channel *chan,
  822. u32 class, u32 mthd, u32 data)
  823. {
  824. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  825. case 0x30:
  826. nv04_graph_set_ctx_val(chan, 0x40000000, 0);
  827. return 0;
  828. case 0x72:
  829. nv04_graph_set_ctx_val(chan, 0x40000000, 0x40000000);
  830. return 0;
  831. }
  832. return 1;
  833. }
  834. static int
  835. nv04_graph_mthd_bind_surf_dst(struct nouveau_channel *chan,
  836. u32 class, u32 mthd, u32 data)
  837. {
  838. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  839. case 0x30:
  840. nv04_graph_set_ctx_val(chan, 0x02000000, 0);
  841. return 0;
  842. case 0x58:
  843. nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
  844. return 0;
  845. }
  846. return 1;
  847. }
  848. static int
  849. nv04_graph_mthd_bind_surf_src(struct nouveau_channel *chan,
  850. u32 class, u32 mthd, u32 data)
  851. {
  852. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  853. case 0x30:
  854. nv04_graph_set_ctx_val(chan, 0x04000000, 0);
  855. return 0;
  856. case 0x59:
  857. nv04_graph_set_ctx_val(chan, 0x04000000, 0x04000000);
  858. return 0;
  859. }
  860. return 1;
  861. }
  862. static int
  863. nv04_graph_mthd_bind_surf_color(struct nouveau_channel *chan,
  864. u32 class, u32 mthd, u32 data)
  865. {
  866. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  867. case 0x30:
  868. nv04_graph_set_ctx_val(chan, 0x02000000, 0);
  869. return 0;
  870. case 0x5a:
  871. nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
  872. return 0;
  873. }
  874. return 1;
  875. }
  876. static int
  877. nv04_graph_mthd_bind_surf_zeta(struct nouveau_channel *chan,
  878. u32 class, u32 mthd, u32 data)
  879. {
  880. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  881. case 0x30:
  882. nv04_graph_set_ctx_val(chan, 0x04000000, 0);
  883. return 0;
  884. case 0x5b:
  885. nv04_graph_set_ctx_val(chan, 0x04000000, 0x04000000);
  886. return 0;
  887. }
  888. return 1;
  889. }
  890. static int
  891. nv04_graph_mthd_bind_clip(struct nouveau_channel *chan,
  892. u32 class, u32 mthd, u32 data)
  893. {
  894. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  895. case 0x30:
  896. nv04_graph_set_ctx1(chan, 0x2000, 0);
  897. return 0;
  898. case 0x19:
  899. nv04_graph_set_ctx1(chan, 0x2000, 0x2000);
  900. return 0;
  901. }
  902. return 1;
  903. }
  904. static int
  905. nv04_graph_mthd_bind_chroma(struct nouveau_channel *chan,
  906. u32 class, u32 mthd, u32 data)
  907. {
  908. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  909. case 0x30:
  910. nv04_graph_set_ctx1(chan, 0x1000, 0);
  911. return 0;
  912. /* Yes, for some reason even the old versions of objects
  913. * accept 0x57 and not 0x17. Consistency be damned.
  914. */
  915. case 0x57:
  916. nv04_graph_set_ctx1(chan, 0x1000, 0x1000);
  917. return 0;
  918. }
  919. return 1;
  920. }
  921. static int
  922. nv04_graph_register(struct drm_device *dev)
  923. {
  924. struct drm_nouveau_private *dev_priv = dev->dev_private;
  925. if (dev_priv->engine.graph.registered)
  926. return 0;
  927. /* dvd subpicture */
  928. NVOBJ_CLASS(dev, 0x0038, GR);
  929. /* m2mf */
  930. NVOBJ_CLASS(dev, 0x0039, GR);
  931. /* nv03 gdirect */
  932. NVOBJ_CLASS(dev, 0x004b, GR);
  933. NVOBJ_MTHD (dev, 0x004b, 0x0184, nv04_graph_mthd_bind_nv01_patt);
  934. NVOBJ_MTHD (dev, 0x004b, 0x0188, nv04_graph_mthd_bind_rop);
  935. NVOBJ_MTHD (dev, 0x004b, 0x018c, nv04_graph_mthd_bind_beta1);
  936. NVOBJ_MTHD (dev, 0x004b, 0x0190, nv04_graph_mthd_bind_surf_dst);
  937. NVOBJ_MTHD (dev, 0x004b, 0x02fc, nv04_graph_mthd_set_operation);
  938. /* nv04 gdirect */
  939. NVOBJ_CLASS(dev, 0x004a, GR);
  940. NVOBJ_MTHD (dev, 0x004a, 0x0188, nv04_graph_mthd_bind_nv04_patt);
  941. NVOBJ_MTHD (dev, 0x004a, 0x018c, nv04_graph_mthd_bind_rop);
  942. NVOBJ_MTHD (dev, 0x004a, 0x0190, nv04_graph_mthd_bind_beta1);
  943. NVOBJ_MTHD (dev, 0x004a, 0x0194, nv04_graph_mthd_bind_beta4);
  944. NVOBJ_MTHD (dev, 0x004a, 0x0198, nv04_graph_mthd_bind_surf2d);
  945. NVOBJ_MTHD (dev, 0x004a, 0x02fc, nv04_graph_mthd_set_operation);
  946. /* nv01 imageblit */
  947. NVOBJ_CLASS(dev, 0x001f, GR);
  948. NVOBJ_MTHD (dev, 0x001f, 0x0184, nv04_graph_mthd_bind_chroma);
  949. NVOBJ_MTHD (dev, 0x001f, 0x0188, nv04_graph_mthd_bind_clip);
  950. NVOBJ_MTHD (dev, 0x001f, 0x018c, nv04_graph_mthd_bind_nv01_patt);
  951. NVOBJ_MTHD (dev, 0x001f, 0x0190, nv04_graph_mthd_bind_rop);
  952. NVOBJ_MTHD (dev, 0x001f, 0x0194, nv04_graph_mthd_bind_beta1);
  953. NVOBJ_MTHD (dev, 0x001f, 0x0198, nv04_graph_mthd_bind_surf_dst);
  954. NVOBJ_MTHD (dev, 0x001f, 0x019c, nv04_graph_mthd_bind_surf_src);
  955. NVOBJ_MTHD (dev, 0x001f, 0x02fc, nv04_graph_mthd_set_operation);
  956. /* nv04 imageblit */
  957. NVOBJ_CLASS(dev, 0x005f, GR);
  958. NVOBJ_MTHD (dev, 0x005f, 0x0184, nv04_graph_mthd_bind_chroma);
  959. NVOBJ_MTHD (dev, 0x005f, 0x0188, nv04_graph_mthd_bind_clip);
  960. NVOBJ_MTHD (dev, 0x005f, 0x018c, nv04_graph_mthd_bind_nv04_patt);
  961. NVOBJ_MTHD (dev, 0x005f, 0x0190, nv04_graph_mthd_bind_rop);
  962. NVOBJ_MTHD (dev, 0x005f, 0x0194, nv04_graph_mthd_bind_beta1);
  963. NVOBJ_MTHD (dev, 0x005f, 0x0198, nv04_graph_mthd_bind_beta4);
  964. NVOBJ_MTHD (dev, 0x005f, 0x019c, nv04_graph_mthd_bind_surf2d);
  965. NVOBJ_MTHD (dev, 0x005f, 0x02fc, nv04_graph_mthd_set_operation);
  966. /* nv04 iifc */
  967. NVOBJ_CLASS(dev, 0x0060, GR);
  968. NVOBJ_MTHD (dev, 0x0060, 0x0188, nv04_graph_mthd_bind_chroma);
  969. NVOBJ_MTHD (dev, 0x0060, 0x018c, nv04_graph_mthd_bind_clip);
  970. NVOBJ_MTHD (dev, 0x0060, 0x0190, nv04_graph_mthd_bind_nv04_patt);
  971. NVOBJ_MTHD (dev, 0x0060, 0x0194, nv04_graph_mthd_bind_rop);
  972. NVOBJ_MTHD (dev, 0x0060, 0x0198, nv04_graph_mthd_bind_beta1);
  973. NVOBJ_MTHD (dev, 0x0060, 0x019c, nv04_graph_mthd_bind_beta4);
  974. NVOBJ_MTHD (dev, 0x0060, 0x01a0, nv04_graph_mthd_bind_surf2d_swzsurf);
  975. NVOBJ_MTHD (dev, 0x0060, 0x03e4, nv04_graph_mthd_set_operation);
  976. /* nv05 iifc */
  977. NVOBJ_CLASS(dev, 0x0064, GR);
  978. /* nv01 ifc */
  979. NVOBJ_CLASS(dev, 0x0021, GR);
  980. NVOBJ_MTHD (dev, 0x0021, 0x0184, nv04_graph_mthd_bind_chroma);
  981. NVOBJ_MTHD (dev, 0x0021, 0x0188, nv04_graph_mthd_bind_clip);
  982. NVOBJ_MTHD (dev, 0x0021, 0x018c, nv04_graph_mthd_bind_nv01_patt);
  983. NVOBJ_MTHD (dev, 0x0021, 0x0190, nv04_graph_mthd_bind_rop);
  984. NVOBJ_MTHD (dev, 0x0021, 0x0194, nv04_graph_mthd_bind_beta1);
  985. NVOBJ_MTHD (dev, 0x0021, 0x0198, nv04_graph_mthd_bind_surf_dst);
  986. NVOBJ_MTHD (dev, 0x0021, 0x02fc, nv04_graph_mthd_set_operation);
  987. /* nv04 ifc */
  988. NVOBJ_CLASS(dev, 0x0061, GR);
  989. NVOBJ_MTHD (dev, 0x0061, 0x0184, nv04_graph_mthd_bind_chroma);
  990. NVOBJ_MTHD (dev, 0x0061, 0x0188, nv04_graph_mthd_bind_clip);
  991. NVOBJ_MTHD (dev, 0x0061, 0x018c, nv04_graph_mthd_bind_nv04_patt);
  992. NVOBJ_MTHD (dev, 0x0061, 0x0190, nv04_graph_mthd_bind_rop);
  993. NVOBJ_MTHD (dev, 0x0061, 0x0194, nv04_graph_mthd_bind_beta1);
  994. NVOBJ_MTHD (dev, 0x0061, 0x0198, nv04_graph_mthd_bind_beta4);
  995. NVOBJ_MTHD (dev, 0x0061, 0x019c, nv04_graph_mthd_bind_surf2d);
  996. NVOBJ_MTHD (dev, 0x0061, 0x02fc, nv04_graph_mthd_set_operation);
  997. /* nv05 ifc */
  998. NVOBJ_CLASS(dev, 0x0065, GR);
  999. /* nv03 sifc */
  1000. NVOBJ_CLASS(dev, 0x0036, GR);
  1001. NVOBJ_MTHD (dev, 0x0036, 0x0184, nv04_graph_mthd_bind_chroma);
  1002. NVOBJ_MTHD (dev, 0x0036, 0x0188, nv04_graph_mthd_bind_nv01_patt);
  1003. NVOBJ_MTHD (dev, 0x0036, 0x018c, nv04_graph_mthd_bind_rop);
  1004. NVOBJ_MTHD (dev, 0x0036, 0x0190, nv04_graph_mthd_bind_beta1);
  1005. NVOBJ_MTHD (dev, 0x0036, 0x0194, nv04_graph_mthd_bind_surf_dst);
  1006. NVOBJ_MTHD (dev, 0x0036, 0x02fc, nv04_graph_mthd_set_operation);
  1007. /* nv04 sifc */
  1008. NVOBJ_CLASS(dev, 0x0076, GR);
  1009. NVOBJ_MTHD (dev, 0x0076, 0x0184, nv04_graph_mthd_bind_chroma);
  1010. NVOBJ_MTHD (dev, 0x0076, 0x0188, nv04_graph_mthd_bind_nv04_patt);
  1011. NVOBJ_MTHD (dev, 0x0076, 0x018c, nv04_graph_mthd_bind_rop);
  1012. NVOBJ_MTHD (dev, 0x0076, 0x0190, nv04_graph_mthd_bind_beta1);
  1013. NVOBJ_MTHD (dev, 0x0076, 0x0194, nv04_graph_mthd_bind_beta4);
  1014. NVOBJ_MTHD (dev, 0x0076, 0x0198, nv04_graph_mthd_bind_surf2d);
  1015. NVOBJ_MTHD (dev, 0x0076, 0x02fc, nv04_graph_mthd_set_operation);
  1016. /* nv05 sifc */
  1017. NVOBJ_CLASS(dev, 0x0066, GR);
  1018. /* nv03 sifm */
  1019. NVOBJ_CLASS(dev, 0x0037, GR);
  1020. NVOBJ_MTHD (dev, 0x0037, 0x0188, nv04_graph_mthd_bind_nv01_patt);
  1021. NVOBJ_MTHD (dev, 0x0037, 0x018c, nv04_graph_mthd_bind_rop);
  1022. NVOBJ_MTHD (dev, 0x0037, 0x0190, nv04_graph_mthd_bind_beta1);
  1023. NVOBJ_MTHD (dev, 0x0037, 0x0194, nv04_graph_mthd_bind_surf_dst);
  1024. NVOBJ_MTHD (dev, 0x0037, 0x0304, nv04_graph_mthd_set_operation);
  1025. /* nv04 sifm */
  1026. NVOBJ_CLASS(dev, 0x0077, GR);
  1027. NVOBJ_MTHD (dev, 0x0077, 0x0188, nv04_graph_mthd_bind_nv04_patt);
  1028. NVOBJ_MTHD (dev, 0x0077, 0x018c, nv04_graph_mthd_bind_rop);
  1029. NVOBJ_MTHD (dev, 0x0077, 0x0190, nv04_graph_mthd_bind_beta1);
  1030. NVOBJ_MTHD (dev, 0x0077, 0x0194, nv04_graph_mthd_bind_beta4);
  1031. NVOBJ_MTHD (dev, 0x0077, 0x0198, nv04_graph_mthd_bind_surf2d_swzsurf);
  1032. NVOBJ_MTHD (dev, 0x0077, 0x0304, nv04_graph_mthd_set_operation);
  1033. /* null */
  1034. NVOBJ_CLASS(dev, 0x0030, GR);
  1035. /* surf2d */
  1036. NVOBJ_CLASS(dev, 0x0042, GR);
  1037. /* rop */
  1038. NVOBJ_CLASS(dev, 0x0043, GR);
  1039. /* beta1 */
  1040. NVOBJ_CLASS(dev, 0x0012, GR);
  1041. /* beta4 */
  1042. NVOBJ_CLASS(dev, 0x0072, GR);
  1043. /* cliprect */
  1044. NVOBJ_CLASS(dev, 0x0019, GR);
  1045. /* nv01 pattern */
  1046. NVOBJ_CLASS(dev, 0x0018, GR);
  1047. /* nv04 pattern */
  1048. NVOBJ_CLASS(dev, 0x0044, GR);
  1049. /* swzsurf */
  1050. NVOBJ_CLASS(dev, 0x0052, GR);
  1051. /* surf3d */
  1052. NVOBJ_CLASS(dev, 0x0053, GR);
  1053. NVOBJ_MTHD (dev, 0x0053, 0x02f8, nv04_graph_mthd_surf3d_clip_h);
  1054. NVOBJ_MTHD (dev, 0x0053, 0x02fc, nv04_graph_mthd_surf3d_clip_v);
  1055. /* nv03 tex_tri */
  1056. NVOBJ_CLASS(dev, 0x0048, GR);
  1057. NVOBJ_MTHD (dev, 0x0048, 0x0188, nv04_graph_mthd_bind_clip);
  1058. NVOBJ_MTHD (dev, 0x0048, 0x018c, nv04_graph_mthd_bind_surf_color);
  1059. NVOBJ_MTHD (dev, 0x0048, 0x0190, nv04_graph_mthd_bind_surf_zeta);
  1060. /* tex_tri */
  1061. NVOBJ_CLASS(dev, 0x0054, GR);
  1062. /* multitex_tri */
  1063. NVOBJ_CLASS(dev, 0x0055, GR);
  1064. /* nv01 chroma */
  1065. NVOBJ_CLASS(dev, 0x0017, GR);
  1066. /* nv04 chroma */
  1067. NVOBJ_CLASS(dev, 0x0057, GR);
  1068. /* surf_dst */
  1069. NVOBJ_CLASS(dev, 0x0058, GR);
  1070. /* surf_src */
  1071. NVOBJ_CLASS(dev, 0x0059, GR);
  1072. /* surf_color */
  1073. NVOBJ_CLASS(dev, 0x005a, GR);
  1074. /* surf_zeta */
  1075. NVOBJ_CLASS(dev, 0x005b, GR);
  1076. /* nv01 line */
  1077. NVOBJ_CLASS(dev, 0x001c, GR);
  1078. NVOBJ_MTHD (dev, 0x001c, 0x0184, nv04_graph_mthd_bind_clip);
  1079. NVOBJ_MTHD (dev, 0x001c, 0x0188, nv04_graph_mthd_bind_nv01_patt);
  1080. NVOBJ_MTHD (dev, 0x001c, 0x018c, nv04_graph_mthd_bind_rop);
  1081. NVOBJ_MTHD (dev, 0x001c, 0x0190, nv04_graph_mthd_bind_beta1);
  1082. NVOBJ_MTHD (dev, 0x001c, 0x0194, nv04_graph_mthd_bind_surf_dst);
  1083. NVOBJ_MTHD (dev, 0x001c, 0x02fc, nv04_graph_mthd_set_operation);
  1084. /* nv04 line */
  1085. NVOBJ_CLASS(dev, 0x005c, GR);
  1086. NVOBJ_MTHD (dev, 0x005c, 0x0184, nv04_graph_mthd_bind_clip);
  1087. NVOBJ_MTHD (dev, 0x005c, 0x0188, nv04_graph_mthd_bind_nv04_patt);
  1088. NVOBJ_MTHD (dev, 0x005c, 0x018c, nv04_graph_mthd_bind_rop);
  1089. NVOBJ_MTHD (dev, 0x005c, 0x0190, nv04_graph_mthd_bind_beta1);
  1090. NVOBJ_MTHD (dev, 0x005c, 0x0194, nv04_graph_mthd_bind_beta4);
  1091. NVOBJ_MTHD (dev, 0x005c, 0x0198, nv04_graph_mthd_bind_surf2d);
  1092. NVOBJ_MTHD (dev, 0x005c, 0x02fc, nv04_graph_mthd_set_operation);
  1093. /* nv01 tri */
  1094. NVOBJ_CLASS(dev, 0x001d, GR);
  1095. NVOBJ_MTHD (dev, 0x001d, 0x0184, nv04_graph_mthd_bind_clip);
  1096. NVOBJ_MTHD (dev, 0x001d, 0x0188, nv04_graph_mthd_bind_nv01_patt);
  1097. NVOBJ_MTHD (dev, 0x001d, 0x018c, nv04_graph_mthd_bind_rop);
  1098. NVOBJ_MTHD (dev, 0x001d, 0x0190, nv04_graph_mthd_bind_beta1);
  1099. NVOBJ_MTHD (dev, 0x001d, 0x0194, nv04_graph_mthd_bind_surf_dst);
  1100. NVOBJ_MTHD (dev, 0x001d, 0x02fc, nv04_graph_mthd_set_operation);
  1101. /* nv04 tri */
  1102. NVOBJ_CLASS(dev, 0x005d, GR);
  1103. NVOBJ_MTHD (dev, 0x005d, 0x0184, nv04_graph_mthd_bind_clip);
  1104. NVOBJ_MTHD (dev, 0x005d, 0x0188, nv04_graph_mthd_bind_nv04_patt);
  1105. NVOBJ_MTHD (dev, 0x005d, 0x018c, nv04_graph_mthd_bind_rop);
  1106. NVOBJ_MTHD (dev, 0x005d, 0x0190, nv04_graph_mthd_bind_beta1);
  1107. NVOBJ_MTHD (dev, 0x005d, 0x0194, nv04_graph_mthd_bind_beta4);
  1108. NVOBJ_MTHD (dev, 0x005d, 0x0198, nv04_graph_mthd_bind_surf2d);
  1109. NVOBJ_MTHD (dev, 0x005d, 0x02fc, nv04_graph_mthd_set_operation);
  1110. /* nv01 rect */
  1111. NVOBJ_CLASS(dev, 0x001e, GR);
  1112. NVOBJ_MTHD (dev, 0x001e, 0x0184, nv04_graph_mthd_bind_clip);
  1113. NVOBJ_MTHD (dev, 0x001e, 0x0188, nv04_graph_mthd_bind_nv01_patt);
  1114. NVOBJ_MTHD (dev, 0x001e, 0x018c, nv04_graph_mthd_bind_rop);
  1115. NVOBJ_MTHD (dev, 0x001e, 0x0190, nv04_graph_mthd_bind_beta1);
  1116. NVOBJ_MTHD (dev, 0x001e, 0x0194, nv04_graph_mthd_bind_surf_dst);
  1117. NVOBJ_MTHD (dev, 0x001e, 0x02fc, nv04_graph_mthd_set_operation);
  1118. /* nv04 rect */
  1119. NVOBJ_CLASS(dev, 0x005e, GR);
  1120. NVOBJ_MTHD (dev, 0x005e, 0x0184, nv04_graph_mthd_bind_clip);
  1121. NVOBJ_MTHD (dev, 0x005e, 0x0188, nv04_graph_mthd_bind_nv04_patt);
  1122. NVOBJ_MTHD (dev, 0x005e, 0x018c, nv04_graph_mthd_bind_rop);
  1123. NVOBJ_MTHD (dev, 0x005e, 0x0190, nv04_graph_mthd_bind_beta1);
  1124. NVOBJ_MTHD (dev, 0x005e, 0x0194, nv04_graph_mthd_bind_beta4);
  1125. NVOBJ_MTHD (dev, 0x005e, 0x0198, nv04_graph_mthd_bind_surf2d);
  1126. NVOBJ_MTHD (dev, 0x005e, 0x02fc, nv04_graph_mthd_set_operation);
  1127. /* nvsw */
  1128. NVOBJ_CLASS(dev, 0x506e, SW);
  1129. NVOBJ_MTHD (dev, 0x506e, 0x0150, nv04_graph_mthd_set_ref);
  1130. NVOBJ_MTHD (dev, 0x506e, 0x0500, nv04_graph_mthd_page_flip);
  1131. dev_priv->engine.graph.registered = true;
  1132. return 0;
  1133. };
/* Bit names for NV03_PGRAPH_INTR, used by the ISR's ratelimited log. */
static struct nouveau_bitfield nv04_graph_intr[] = {
	{ NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
	{}	/* sentinel */
};
/* Bit names for NV03_PGRAPH_NSTATUS, used when decoding trap logs. */
static struct nouveau_bitfield nv04_graph_nstatus[] = {
	{ NV04_PGRAPH_NSTATUS_STATE_IN_USE,     "STATE_IN_USE" },
	{ NV04_PGRAPH_NSTATUS_INVALID_STATE,    "INVALID_STATE" },
	{ NV04_PGRAPH_NSTATUS_BAD_ARGUMENT,     "BAD_ARGUMENT" },
	{ NV04_PGRAPH_NSTATUS_PROTECTION_FAULT, "PROTECTION_FAULT" },
	{}	/* sentinel */
};
/* Bit names for NV03_PGRAPH_NSOURCE (error-source register).
 * Non-static: presumably shared with other nv0x graph files — confirm. */
struct nouveau_bitfield nv04_graph_nsource[] = {
	{ NV03_PGRAPH_NSOURCE_NOTIFICATION,       "NOTIFICATION" },
	{ NV03_PGRAPH_NSOURCE_DATA_ERROR,         "DATA_ERROR" },
	{ NV03_PGRAPH_NSOURCE_PROTECTION_ERROR,   "PROTECTION_ERROR" },
	{ NV03_PGRAPH_NSOURCE_RANGE_EXCEPTION,    "RANGE_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_LIMIT_COLOR,        "LIMIT_COLOR" },
	{ NV03_PGRAPH_NSOURCE_LIMIT_ZETA,         "LIMIT_ZETA" },
	{ NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD,       "ILLEGAL_MTHD" },
	{ NV03_PGRAPH_NSOURCE_DMA_R_PROTECTION,   "DMA_R_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_DMA_W_PROTECTION,   "DMA_W_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_FORMAT_EXCEPTION,   "FORMAT_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_PATCH_EXCEPTION,    "PATCH_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_STATE_INVALID,      "STATE_INVALID" },
	{ NV03_PGRAPH_NSOURCE_DOUBLE_NOTIFY,      "DOUBLE_NOTIFY" },
	{ NV03_PGRAPH_NSOURCE_NOTIFY_IN_USE,      "NOTIFY_IN_USE" },
	{ NV03_PGRAPH_NSOURCE_METHOD_CNT,         "METHOD_CNT" },
	{ NV03_PGRAPH_NSOURCE_BFR_NOTIFICATION,   "BFR_NOTIFICATION" },
	{ NV03_PGRAPH_NSOURCE_DMA_VTX_PROTECTION, "DMA_VTX_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_DMA_WIDTH_A,        "DMA_WIDTH_A" },
	{ NV03_PGRAPH_NSOURCE_DMA_WIDTH_B,        "DMA_WIDTH_B" },
	{}	/* sentinel */
};
/*
 * nv04_graph_isr - PGRAPH interrupt service routine.
 *
 * Loops until NV03_PGRAPH_INTR reads back zero, so interrupts that
 * arrive while one is being serviced are handled in the same pass.
 * For each pending set of bits it snapshots the trap state (nsource,
 * nstatus, trapped address/data and the active class of the trapping
 * subchannel), dispatches software methods, acks the interrupt, and
 * optionally logs what remains unhandled.
 */
static void
nv04_graph_isr(struct drm_device *dev)
{
	u32 stat;

	while ((stat = nv_rd32(dev, NV03_PGRAPH_INTR))) {
		u32 nsource = nv_rd32(dev, NV03_PGRAPH_NSOURCE);
		u32 nstatus = nv_rd32(dev, NV03_PGRAPH_NSTATUS);
		u32 addr = nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR);
		/* TRAPPED_ADDR packs channel id (bits 24-27), subchannel
		 * (bits 13-15) and method offset (bits 2-12, word-aligned). */
		u32 chid = (addr & 0x0f000000) >> 24;
		u32 subc = (addr & 0x0000e000) >> 13;
		u32 mthd = (addr & 0x00001ffc);
		u32 data = nv_rd32(dev, NV04_PGRAPH_TRAPPED_DATA);
		u32 class = nv_rd32(dev, 0x400180 + subc * 4) & 0xff;
		u32 show = stat;	/* bits still unexplained -> logged */

		if (stat & NV_PGRAPH_INTR_NOTIFY) {
			if (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD) {
				/* Methods the hardware rejects may be
				 * software methods; if the handler accepts
				 * it (returns 0), suppress the log. */
				if (!nouveau_gpuobj_mthd_call2(dev, chid, class, mthd, data))
					show &= ~NV_PGRAPH_INTR_NOTIFY;
			}
		}

		if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
			/* Ack this bit first, then perform the switch. */
			nv_wr32(dev, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
			stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
			show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
			nv04_graph_context_switch(dev);
		}

		/* Ack everything else, then poke PGRAPH_FIFO — presumably
		 * re-enables fifo access after the trap; confirm against
		 * hw docs. */
		nv_wr32(dev, NV03_PGRAPH_INTR, stat);
		nv_wr32(dev, NV04_PGRAPH_FIFO, 0x00000001);

		if (show && nouveau_ratelimit()) {
			NV_INFO(dev, "PGRAPH -");
			nouveau_bitfield_print(nv04_graph_intr, show);
			printk(" nsource:");
			nouveau_bitfield_print(nv04_graph_nsource, nsource);
			printk(" nstatus:");
			nouveau_bitfield_print(nv04_graph_nstatus, nstatus);
			printk("\n");
			NV_INFO(dev, "PGRAPH - ch %d/%d class 0x%04x "
				     "mthd 0x%04x data 0x%08x\n",
				chid, subc, class, mthd, data);
		}
	}
}