nv04_graph.c 37 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348
  1. /*
  2. * Copyright 2007 Stephane Marchesin
  3. * All Rights Reserved.
  4. *
  5. * Permission is hereby granted, free of charge, to any person obtaining a
  6. * copy of this software and associated documentation files (the "Software"),
  7. * to deal in the Software without restriction, including without limitation
  8. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  9. * and/or sell copies of the Software, and to permit persons to whom the
  10. * Software is furnished to do so, subject to the following conditions:
  11. *
  12. * The above copyright notice and this permission notice (including the next
  13. * paragraph) shall be included in all copies or substantial portions of the
  14. * Software.
  15. *
  16. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  17. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  18. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  19. * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
  20. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  21. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
  22. * DEALINGS IN THE SOFTWARE.
  23. */
  24. #include "drmP.h"
  25. #include "drm.h"
  26. #include "nouveau_drm.h"
  27. #include "nouveau_drv.h"
  28. #include "nouveau_hw.h"
  29. #include "nouveau_util.h"
  30. #include "nouveau_ramht.h"
/* Binds the NV04 GR implementation to the common exec-engine interface. */
struct nv04_graph_engine {
	struct nouveau_exec_engine base;
};
/*
 * The PGRAPH registers that make up a channel's graphics context on
 * NV04/NV05.  nv04_graph_load_context()/nv04_graph_unload_context()
 * copy exactly this list to/from hardware, so the order here defines
 * the layout of graph_state::nv04[] — do not reorder.
 */
static uint32_t nv04_graph_ctx_regs[] = {
	/* surf3d clip window (also written by the clip_h/clip_v mthds) */
	0x0040053c,
	0x00400544,
	0x00400540,
	0x00400548,
	NV04_PGRAPH_CTX_SWITCH1,
	NV04_PGRAPH_CTX_SWITCH2,
	NV04_PGRAPH_CTX_SWITCH3,
	NV04_PGRAPH_CTX_SWITCH4,
	NV04_PGRAPH_CTX_CACHE1,
	NV04_PGRAPH_CTX_CACHE2,
	NV04_PGRAPH_CTX_CACHE3,
	NV04_PGRAPH_CTX_CACHE4,
	0x00400184,
	0x004001a4,
	0x004001c4,
	0x004001e4,
	0x00400188,
	0x004001a8,
	0x004001c8,
	0x004001e8,
	0x0040018c,
	0x004001ac,
	0x004001cc,
	0x004001ec,
	0x00400190,
	0x004001b0,
	0x004001d0,
	0x004001f0,
	0x00400194,
	0x004001b4,
	0x004001d4,
	0x004001f4,
	0x00400198,
	0x004001b8,
	0x004001d8,
	0x004001f8,
	0x0040019c,
	0x004001bc,
	0x004001dc,
	0x004001fc,
	0x00400174,
	NV04_PGRAPH_DMA_START_0,
	NV04_PGRAPH_DMA_START_1,
	NV04_PGRAPH_DMA_LENGTH,
	NV04_PGRAPH_DMA_MISC,
	NV04_PGRAPH_DMA_PITCH,
	/* render buffer offset/base/limit, buffers 0-5 */
	NV04_PGRAPH_BOFFSET0,
	NV04_PGRAPH_BBASE0,
	NV04_PGRAPH_BLIMIT0,
	NV04_PGRAPH_BOFFSET1,
	NV04_PGRAPH_BBASE1,
	NV04_PGRAPH_BLIMIT1,
	NV04_PGRAPH_BOFFSET2,
	NV04_PGRAPH_BBASE2,
	NV04_PGRAPH_BLIMIT2,
	NV04_PGRAPH_BOFFSET3,
	NV04_PGRAPH_BBASE3,
	NV04_PGRAPH_BLIMIT3,
	NV04_PGRAPH_BOFFSET4,
	NV04_PGRAPH_BBASE4,
	NV04_PGRAPH_BLIMIT4,
	NV04_PGRAPH_BOFFSET5,
	NV04_PGRAPH_BBASE5,
	NV04_PGRAPH_BLIMIT5,
	NV04_PGRAPH_BPITCH0,
	NV04_PGRAPH_BPITCH1,
	NV04_PGRAPH_BPITCH2,
	NV04_PGRAPH_BPITCH3,
	NV04_PGRAPH_BPITCH4,
	NV04_PGRAPH_SURFACE,
	NV04_PGRAPH_STATE,
	NV04_PGRAPH_BSWIZZLE2,
	NV04_PGRAPH_BSWIZZLE5,
	NV04_PGRAPH_BPIXEL,
	NV04_PGRAPH_NOTIFY,
	NV04_PGRAPH_PATT_COLOR0,
	NV04_PGRAPH_PATT_COLOR1,
	/* 64-entry image pattern RAM */
	NV04_PGRAPH_PATT_COLORRAM+0x00,
	NV04_PGRAPH_PATT_COLORRAM+0x04,
	NV04_PGRAPH_PATT_COLORRAM+0x08,
	NV04_PGRAPH_PATT_COLORRAM+0x0c,
	NV04_PGRAPH_PATT_COLORRAM+0x10,
	NV04_PGRAPH_PATT_COLORRAM+0x14,
	NV04_PGRAPH_PATT_COLORRAM+0x18,
	NV04_PGRAPH_PATT_COLORRAM+0x1c,
	NV04_PGRAPH_PATT_COLORRAM+0x20,
	NV04_PGRAPH_PATT_COLORRAM+0x24,
	NV04_PGRAPH_PATT_COLORRAM+0x28,
	NV04_PGRAPH_PATT_COLORRAM+0x2c,
	NV04_PGRAPH_PATT_COLORRAM+0x30,
	NV04_PGRAPH_PATT_COLORRAM+0x34,
	NV04_PGRAPH_PATT_COLORRAM+0x38,
	NV04_PGRAPH_PATT_COLORRAM+0x3c,
	NV04_PGRAPH_PATT_COLORRAM+0x40,
	NV04_PGRAPH_PATT_COLORRAM+0x44,
	NV04_PGRAPH_PATT_COLORRAM+0x48,
	NV04_PGRAPH_PATT_COLORRAM+0x4c,
	NV04_PGRAPH_PATT_COLORRAM+0x50,
	NV04_PGRAPH_PATT_COLORRAM+0x54,
	NV04_PGRAPH_PATT_COLORRAM+0x58,
	NV04_PGRAPH_PATT_COLORRAM+0x5c,
	NV04_PGRAPH_PATT_COLORRAM+0x60,
	NV04_PGRAPH_PATT_COLORRAM+0x64,
	NV04_PGRAPH_PATT_COLORRAM+0x68,
	NV04_PGRAPH_PATT_COLORRAM+0x6c,
	NV04_PGRAPH_PATT_COLORRAM+0x70,
	NV04_PGRAPH_PATT_COLORRAM+0x74,
	NV04_PGRAPH_PATT_COLORRAM+0x78,
	NV04_PGRAPH_PATT_COLORRAM+0x7c,
	NV04_PGRAPH_PATT_COLORRAM+0x80,
	NV04_PGRAPH_PATT_COLORRAM+0x84,
	NV04_PGRAPH_PATT_COLORRAM+0x88,
	NV04_PGRAPH_PATT_COLORRAM+0x8c,
	NV04_PGRAPH_PATT_COLORRAM+0x90,
	NV04_PGRAPH_PATT_COLORRAM+0x94,
	NV04_PGRAPH_PATT_COLORRAM+0x98,
	NV04_PGRAPH_PATT_COLORRAM+0x9c,
	NV04_PGRAPH_PATT_COLORRAM+0xa0,
	NV04_PGRAPH_PATT_COLORRAM+0xa4,
	NV04_PGRAPH_PATT_COLORRAM+0xa8,
	NV04_PGRAPH_PATT_COLORRAM+0xac,
	NV04_PGRAPH_PATT_COLORRAM+0xb0,
	NV04_PGRAPH_PATT_COLORRAM+0xb4,
	NV04_PGRAPH_PATT_COLORRAM+0xb8,
	NV04_PGRAPH_PATT_COLORRAM+0xbc,
	NV04_PGRAPH_PATT_COLORRAM+0xc0,
	NV04_PGRAPH_PATT_COLORRAM+0xc4,
	NV04_PGRAPH_PATT_COLORRAM+0xc8,
	NV04_PGRAPH_PATT_COLORRAM+0xcc,
	NV04_PGRAPH_PATT_COLORRAM+0xd0,
	NV04_PGRAPH_PATT_COLORRAM+0xd4,
	NV04_PGRAPH_PATT_COLORRAM+0xd8,
	NV04_PGRAPH_PATT_COLORRAM+0xdc,
	NV04_PGRAPH_PATT_COLORRAM+0xe0,
	NV04_PGRAPH_PATT_COLORRAM+0xe4,
	NV04_PGRAPH_PATT_COLORRAM+0xe8,
	NV04_PGRAPH_PATT_COLORRAM+0xec,
	NV04_PGRAPH_PATT_COLORRAM+0xf0,
	NV04_PGRAPH_PATT_COLORRAM+0xf4,
	NV04_PGRAPH_PATT_COLORRAM+0xf8,
	NV04_PGRAPH_PATT_COLORRAM+0xfc,
	NV04_PGRAPH_PATTERN,
	0x0040080c,
	NV04_PGRAPH_PATTERN_SHAPE,
	0x00400600,
	NV04_PGRAPH_ROP3,
	NV04_PGRAPH_CHROMA,
	NV04_PGRAPH_BETA_AND,
	NV04_PGRAPH_BETA_PREMULT,
	NV04_PGRAPH_CONTROL0,
	NV04_PGRAPH_CONTROL1,
	NV04_PGRAPH_CONTROL2,
	NV04_PGRAPH_BLEND,
	NV04_PGRAPH_STORED_FMT,
	NV04_PGRAPH_SOURCE_COLOR,
	0x00400560,
	0x00400568,
	0x00400564,
	0x0040056c,
	0x00400400,
	0x00400480,
	0x00400404,
	0x00400484,
	0x00400408,
	0x00400488,
	0x0040040c,
	0x0040048c,
	0x00400410,
	0x00400490,
	0x00400414,
	0x00400494,
	0x00400418,
	0x00400498,
	0x0040041c,
	0x0040049c,
	0x00400420,
	0x004004a0,
	0x00400424,
	0x004004a4,
	0x00400428,
	0x004004a8,
	0x0040042c,
	0x004004ac,
	0x00400430,
	0x004004b0,
	0x00400434,
	0x004004b4,
	0x00400438,
	0x004004b8,
	0x0040043c,
	0x004004bc,
	0x00400440,
	0x004004c0,
	0x00400444,
	0x004004c4,
	0x00400448,
	0x004004c8,
	0x0040044c,
	0x004004cc,
	0x00400450,
	0x004004d0,
	0x00400454,
	0x004004d4,
	0x00400458,
	0x004004d8,
	0x0040045c,
	0x004004dc,
	0x00400460,
	0x004004e0,
	0x00400464,
	0x004004e4,
	0x00400468,
	0x004004e8,
	0x0040046c,
	0x004004ec,
	0x00400470,
	0x004004f0,
	0x00400474,
	0x004004f4,
	0x00400478,
	0x004004f8,
	0x0040047c,
	0x004004fc,
	0x00400534,
	0x00400538,
	0x00400514,
	0x00400518,
	0x0040051c,
	0x00400520,
	0x00400524,
	0x00400528,
	0x0040052c,
	0x00400530,
	0x00400d00,
	0x00400d40,
	0x00400d80,
	0x00400d04,
	0x00400d44,
	0x00400d84,
	0x00400d08,
	0x00400d48,
	0x00400d88,
	0x00400d0c,
	0x00400d4c,
	0x00400d8c,
	0x00400d10,
	0x00400d50,
	0x00400d90,
	0x00400d14,
	0x00400d54,
	0x00400d94,
	0x00400d18,
	0x00400d58,
	0x00400d98,
	0x00400d1c,
	0x00400d5c,
	0x00400d9c,
	0x00400d20,
	0x00400d60,
	0x00400da0,
	0x00400d24,
	0x00400d64,
	0x00400da4,
	0x00400d28,
	0x00400d68,
	0x00400da8,
	0x00400d2c,
	0x00400d6c,
	0x00400dac,
	0x00400d30,
	0x00400d70,
	0x00400db0,
	0x00400d34,
	0x00400d74,
	0x00400db4,
	0x00400d38,
	0x00400d78,
	0x00400db8,
	0x00400d3c,
	0x00400d7c,
	0x00400dbc,
	0x00400590,
	0x00400594,
	0x00400598,
	0x0040059c,
	0x004005a8,
	0x004005ac,
	0x004005b0,
	0x004005b4,
	0x004005c0,
	0x004005c4,
	0x004005c8,
	0x004005cc,
	0x004005d0,
	0x004005d4,
	0x004005d8,
	0x004005dc,
	0x004005e0,
	NV04_PGRAPH_PASSTHRU_0,
	NV04_PGRAPH_PASSTHRU_1,
	NV04_PGRAPH_PASSTHRU_2,
	NV04_PGRAPH_DVD_COLORFMT,
	NV04_PGRAPH_SCALED_FORMAT,
	NV04_PGRAPH_MISC24_0,
	NV04_PGRAPH_MISC24_1,
	NV04_PGRAPH_MISC24_2,
	0x00400500,
	0x00400504,
	NV04_PGRAPH_VALID1,
	NV04_PGRAPH_VALID2,
	NV04_PGRAPH_DEBUG_3
};
/* Software copy of a channel's PGRAPH context: one saved word per
 * register in nv04_graph_ctx_regs[], in the same order. */
struct graph_state {
	uint32_t nv04[ARRAY_SIZE(nv04_graph_ctx_regs)];
};
  350. static struct nouveau_channel *
  351. nv04_graph_channel(struct drm_device *dev)
  352. {
  353. struct drm_nouveau_private *dev_priv = dev->dev_private;
  354. int chid = dev_priv->engine.fifo.channels;
  355. if (nv_rd32(dev, NV04_PGRAPH_CTX_CONTROL) & 0x00010000)
  356. chid = nv_rd32(dev, NV04_PGRAPH_CTX_USER) >> 24;
  357. if (chid >= dev_priv->engine.fifo.channels)
  358. return NULL;
  359. return dev_priv->channels.ptr[chid];
  360. }
  361. static uint32_t *ctx_reg(struct graph_state *ctx, uint32_t reg)
  362. {
  363. int i;
  364. for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++) {
  365. if (nv04_graph_ctx_regs[i] == reg)
  366. return &ctx->nv04[i];
  367. }
  368. return NULL;
  369. }
  370. static int
  371. nv04_graph_load_context(struct nouveau_channel *chan)
  372. {
  373. struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
  374. struct drm_device *dev = chan->dev;
  375. uint32_t tmp;
  376. int i;
  377. for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
  378. nv_wr32(dev, nv04_graph_ctx_regs[i], pgraph_ctx->nv04[i]);
  379. nv_wr32(dev, NV04_PGRAPH_CTX_CONTROL, 0x10010100);
  380. tmp = nv_rd32(dev, NV04_PGRAPH_CTX_USER) & 0x00ffffff;
  381. nv_wr32(dev, NV04_PGRAPH_CTX_USER, tmp | chan->id << 24);
  382. tmp = nv_rd32(dev, NV04_PGRAPH_FFINTFC_ST2);
  383. nv_wr32(dev, NV04_PGRAPH_FFINTFC_ST2, tmp & 0x000fffff);
  384. return 0;
  385. }
  386. static int
  387. nv04_graph_unload_context(struct drm_device *dev)
  388. {
  389. struct drm_nouveau_private *dev_priv = dev->dev_private;
  390. struct nouveau_channel *chan = NULL;
  391. struct graph_state *ctx;
  392. uint32_t tmp;
  393. int i;
  394. chan = nv04_graph_channel(dev);
  395. if (!chan)
  396. return 0;
  397. ctx = chan->engctx[NVOBJ_ENGINE_GR];
  398. for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
  399. ctx->nv04[i] = nv_rd32(dev, nv04_graph_ctx_regs[i]);
  400. nv_wr32(dev, NV04_PGRAPH_CTX_CONTROL, 0x10000000);
  401. tmp = nv_rd32(dev, NV04_PGRAPH_CTX_USER) & 0x00ffffff;
  402. tmp |= (dev_priv->engine.fifo.channels - 1) << 24;
  403. nv_wr32(dev, NV04_PGRAPH_CTX_USER, tmp);
  404. return 0;
  405. }
  406. static int
  407. nv04_graph_context_new(struct nouveau_channel *chan, int engine)
  408. {
  409. struct graph_state *pgraph_ctx;
  410. NV_DEBUG(chan->dev, "nv04_graph_context_create %d\n", chan->id);
  411. pgraph_ctx = kzalloc(sizeof(*pgraph_ctx), GFP_KERNEL);
  412. if (pgraph_ctx == NULL)
  413. return -ENOMEM;
  414. *ctx_reg(pgraph_ctx, NV04_PGRAPH_DEBUG_3) = 0xfad4ff31;
  415. chan->engctx[engine] = pgraph_ctx;
  416. return 0;
  417. }
/* Tear down a channel's software PGRAPH context, first evicting it from
 * the hardware if it happens to be the resident one. */
static void
nv04_graph_context_del(struct nouveau_channel *chan, int engine)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct graph_state *pgraph_ctx = chan->engctx[engine];
	unsigned long flags;

	/* Block context switches, and halt PGRAPH fetching (FIFO bit 0),
	 * while we inspect/unload the active context. */
	spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
	nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);

	/* Unload the context if it's the currently active one */
	if (nv04_graph_channel(dev) == chan)
		nv04_graph_unload_context(dev);

	nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
	spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);

	/* Free the context resources */
	kfree(pgraph_ctx);
	chan->engctx[engine] = NULL;
}
  436. int
  437. nv04_graph_object_new(struct nouveau_channel *chan, int engine,
  438. u32 handle, u16 class)
  439. {
  440. struct drm_device *dev = chan->dev;
  441. struct nouveau_gpuobj *obj = NULL;
  442. int ret;
  443. ret = nouveau_gpuobj_new(dev, chan, 16, 16, NVOBJ_FLAG_ZERO_FREE, &obj);
  444. if (ret)
  445. return ret;
  446. obj->engine = 1;
  447. obj->class = class;
  448. #ifdef __BIG_ENDIAN
  449. nv_wo32(obj, 0x00, 0x00080000 | class);
  450. #else
  451. nv_wo32(obj, 0x00, class);
  452. #endif
  453. nv_wo32(obj, 0x04, 0x00000000);
  454. nv_wo32(obj, 0x08, 0x00000000);
  455. nv_wo32(obj, 0x0c, 0x00000000);
  456. ret = nouveau_ramht_insert(chan, handle, obj);
  457. nouveau_gpuobj_ref(NULL, &obj);
  458. return ret;
  459. }
/* One-time PGRAPH bring-up: reset the engine, unmask interrupts and
 * program the DEBUG/CTX registers to known-good values.  Write order
 * matters; values annotated against the proprietary blob and Haiku. */
static int
nv04_graph_init(struct drm_device *dev, int engine)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	uint32_t tmp;

	/* pulse the PGRAPH enable bit in PMC to reset the engine */
	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) &
			~NV_PMC_ENABLE_PGRAPH);
	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) |
			 NV_PMC_ENABLE_PGRAPH);

	/* Enable PGRAPH interrupts */
	nv_wr32(dev, NV03_PGRAPH_INTR, 0xFFFFFFFF);
	nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);

	nv_wr32(dev, NV04_PGRAPH_VALID1, 0);
	nv_wr32(dev, NV04_PGRAPH_VALID2, 0);
	/*nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x000001FF);
	  nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x001FFFFF);*/
	nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x1231c000);
	/* 1231C000 blob, 001 haiku */
	/*V_WRITE(NV04_PGRAPH_DEBUG_1, 0xf2d91100);*/
	nv_wr32(dev, NV04_PGRAPH_DEBUG_1, 0x72111100);
	/* 0x72111100 blob, 01 haiku */
	/*nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x11d5f870);*/
	nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x11d5f071);
	/* haiku same */
	/*nv_wr32(dev, NV04_PGRAPH_DEBUG_3, 0xfad4ff31);*/
	nv_wr32(dev, NV04_PGRAPH_DEBUG_3, 0xf0d4ff31);
	/* haiku and blob 10d4 */

	nv_wr32(dev, NV04_PGRAPH_STATE, 0xFFFFFFFF);
	/* no channel resident yet: park CTX_USER on the last channel id */
	nv_wr32(dev, NV04_PGRAPH_CTX_CONTROL, 0x10000100);
	tmp = nv_rd32(dev, NV04_PGRAPH_CTX_USER) & 0x00ffffff;
	tmp |= (dev_priv->engine.fifo.channels - 1) << 24;
	nv_wr32(dev, NV04_PGRAPH_CTX_USER, tmp);

	/* These don't belong here, they're part of a per-channel context */
	nv_wr32(dev, NV04_PGRAPH_PATTERN_SHAPE, 0x00000000);
	nv_wr32(dev, NV04_PGRAPH_BETA_AND, 0xFFFFFFFF);
	return 0;
}
/* Engine shutdown: save the resident channel's state, then mask all
 * PGRAPH interrupts. */
static int
nv04_graph_fini(struct drm_device *dev, int engine)
{
	nv04_graph_unload_context(dev);
	nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0x00000000);
	return 0;
}
/* Software method: record the method's data word as the most recently
 * reached fence sequence number for this channel. */
static int
nv04_graph_mthd_set_ref(struct nouveau_channel *chan,
			u32 class, u32 mthd, u32 data)
{
	atomic_set(&chan->fence.last_sequence_irq, data);
	return 0;
}
/* Software method: complete a pending page flip and, on success,
 * reprogram the CRTC scanout base to the new framebuffer position. */
int
nv04_graph_mthd_page_flip(struct nouveau_channel *chan,
			  u32 class, u32 mthd, u32 data)
{
	struct drm_device *dev = chan->dev;
	struct nouveau_page_flip_state s;

	if (!nouveau_finish_page_flip(chan, &s))
		nv_set_crtc_base(dev, s.crtc,
				 s.offset + s.y * s.pitch + s.x * s.bpp / 8);
	return 0;
}
  522. /*
  523. * Software methods, why they are needed, and how they all work:
  524. *
  525. * NV04 and NV05 keep most of the state in PGRAPH context itself, but some
  526. * 2d engine settings are kept inside the grobjs themselves. The grobjs are
  527. * 3 words long on both. grobj format on NV04 is:
  528. *
  529. * word 0:
  530. * - bits 0-7: class
  531. * - bit 12: color key active
  532. * - bit 13: clip rect active
  533. * - bit 14: if set, destination surface is swizzled and taken from buffer 5
  534. * [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
  535. * from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
  536. * NV03_CONTEXT_SURFACE_DST].
  537. * - bits 15-17: 2d operation [aka patch config]
  538. * - bit 24: patch valid [enables rendering using this object]
  539. * - bit 25: surf3d valid [for tex_tri and multitex_tri only]
  540. * word 1:
  541. * - bits 0-1: mono format
  542. * - bits 8-13: color format
  543. * - bits 16-31: DMA_NOTIFY instance
  544. * word 2:
  545. * - bits 0-15: DMA_A instance
  546. * - bits 16-31: DMA_B instance
  547. *
  548. * On NV05 it's:
  549. *
  550. * word 0:
  551. * - bits 0-7: class
  552. * - bit 12: color key active
  553. * - bit 13: clip rect active
  554. * - bit 14: if set, destination surface is swizzled and taken from buffer 5
  555. * [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
  556. * from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
  557. * NV03_CONTEXT_SURFACE_DST].
  558. * - bits 15-17: 2d operation [aka patch config]
  559. * - bits 20-22: dither mode
  560. * - bit 24: patch valid [enables rendering using this object]
  561. * - bit 25: surface_dst/surface_color/surf2d/surf3d valid
  562. * - bit 26: surface_src/surface_zeta valid
  563. * - bit 27: pattern valid
  564. * - bit 28: rop valid
  565. * - bit 29: beta1 valid
  566. * - bit 30: beta4 valid
  567. * word 1:
  568. * - bits 0-1: mono format
  569. * - bits 8-13: color format
  570. * - bits 16-31: DMA_NOTIFY instance
  571. * word 2:
  572. * - bits 0-15: DMA_A instance
  573. * - bits 16-31: DMA_B instance
  574. *
  575. * NV05 will set/unset the relevant valid bits when you poke the relevant
  576. * object-binding methods with object of the proper type, or with the NULL
  577. * type. It'll only allow rendering using the grobj if all needed objects
  578. * are bound. The needed set of objects depends on selected operation: for
  579. * example rop object is needed by ROP_AND, but not by SRCCOPY_AND.
  580. *
  581. * NV04 doesn't have these methods implemented at all, and doesn't have the
  582. * relevant bits in grobj. Instead, it'll allow rendering whenever bit 24
  583. * is set. So we have to emulate them in software, internally keeping the
  584. * same bits as NV05 does. Since grobjs are aligned to 16 bytes on nv04,
  585. * but the last word isn't actually used for anything, we abuse it for this
  586. * purpose.
  587. *
  588. * Actually, NV05 can optionally check bit 24 too, but we disable this since
  589. * there's no use for it.
  590. *
  591. * For unknown reasons, NV04 implements surf3d binding in hardware as an
  592. * exception. Also for unknown reasons, NV04 doesn't implement the clipping
  593. * methods on the surf3d object, so we have to emulate them too.
  594. */
  595. static void
  596. nv04_graph_set_ctx1(struct nouveau_channel *chan, u32 mask, u32 value)
  597. {
  598. struct drm_device *dev = chan->dev;
  599. u32 instance = (nv_rd32(dev, NV04_PGRAPH_CTX_SWITCH4) & 0xffff) << 4;
  600. int subc = (nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR) >> 13) & 0x7;
  601. u32 tmp;
  602. tmp = nv_ri32(dev, instance);
  603. tmp &= ~mask;
  604. tmp |= value;
  605. nv_wi32(dev, instance, tmp);
  606. nv_wr32(dev, NV04_PGRAPH_CTX_SWITCH1, tmp);
  607. nv_wr32(dev, NV04_PGRAPH_CTX_CACHE1 + (subc<<2), tmp);
  608. }
  609. static void
  610. nv04_graph_set_ctx_val(struct nouveau_channel *chan, u32 mask, u32 value)
  611. {
  612. struct drm_device *dev = chan->dev;
  613. u32 instance = (nv_rd32(dev, NV04_PGRAPH_CTX_SWITCH4) & 0xffff) << 4;
  614. u32 tmp, ctx1;
  615. int class, op, valid = 1;
  616. ctx1 = nv_ri32(dev, instance);
  617. class = ctx1 & 0xff;
  618. op = (ctx1 >> 15) & 7;
  619. tmp = nv_ri32(dev, instance + 0xc);
  620. tmp &= ~mask;
  621. tmp |= value;
  622. nv_wi32(dev, instance + 0xc, tmp);
  623. /* check for valid surf2d/surf_dst/surf_color */
  624. if (!(tmp & 0x02000000))
  625. valid = 0;
  626. /* check for valid surf_src/surf_zeta */
  627. if ((class == 0x1f || class == 0x48) && !(tmp & 0x04000000))
  628. valid = 0;
  629. switch (op) {
  630. /* SRCCOPY_AND, SRCCOPY: no extra objects required */
  631. case 0:
  632. case 3:
  633. break;
  634. /* ROP_AND: requires pattern and rop */
  635. case 1:
  636. if (!(tmp & 0x18000000))
  637. valid = 0;
  638. break;
  639. /* BLEND_AND: requires beta1 */
  640. case 2:
  641. if (!(tmp & 0x20000000))
  642. valid = 0;
  643. break;
  644. /* SRCCOPY_PREMULT, BLEND_PREMULT: beta4 required */
  645. case 4:
  646. case 5:
  647. if (!(tmp & 0x40000000))
  648. valid = 0;
  649. break;
  650. }
  651. nv04_graph_set_ctx1(chan, 0x01000000, valid << 24);
  652. }
  653. static int
  654. nv04_graph_mthd_set_operation(struct nouveau_channel *chan,
  655. u32 class, u32 mthd, u32 data)
  656. {
  657. if (data > 5)
  658. return 1;
  659. /* Old versions of the objects only accept first three operations. */
  660. if (data > 2 && class < 0x40)
  661. return 1;
  662. nv04_graph_set_ctx1(chan, 0x00038000, data << 15);
  663. /* changing operation changes set of objects needed for validation */
  664. nv04_graph_set_ctx_val(chan, 0, 0);
  665. return 0;
  666. }
  667. static int
  668. nv04_graph_mthd_surf3d_clip_h(struct nouveau_channel *chan,
  669. u32 class, u32 mthd, u32 data)
  670. {
  671. uint32_t min = data & 0xffff, max;
  672. uint32_t w = data >> 16;
  673. if (min & 0x8000)
  674. /* too large */
  675. return 1;
  676. if (w & 0x8000)
  677. /* yes, it accepts negative for some reason. */
  678. w |= 0xffff0000;
  679. max = min + w;
  680. max &= 0x3ffff;
  681. nv_wr32(chan->dev, 0x40053c, min);
  682. nv_wr32(chan->dev, 0x400544, max);
  683. return 0;
  684. }
  685. static int
  686. nv04_graph_mthd_surf3d_clip_v(struct nouveau_channel *chan,
  687. u32 class, u32 mthd, u32 data)
  688. {
  689. uint32_t min = data & 0xffff, max;
  690. uint32_t w = data >> 16;
  691. if (min & 0x8000)
  692. /* too large */
  693. return 1;
  694. if (w & 0x8000)
  695. /* yes, it accepts negative for some reason. */
  696. w |= 0xffff0000;
  697. max = min + w;
  698. max &= 0x3ffff;
  699. nv_wr32(chan->dev, 0x400540, min);
  700. nv_wr32(chan->dev, 0x400548, max);
  701. return 0;
  702. }
  703. static int
  704. nv04_graph_mthd_bind_surf2d(struct nouveau_channel *chan,
  705. u32 class, u32 mthd, u32 data)
  706. {
  707. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  708. case 0x30:
  709. nv04_graph_set_ctx1(chan, 0x00004000, 0);
  710. nv04_graph_set_ctx_val(chan, 0x02000000, 0);
  711. return 0;
  712. case 0x42:
  713. nv04_graph_set_ctx1(chan, 0x00004000, 0);
  714. nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
  715. return 0;
  716. }
  717. return 1;
  718. }
  719. static int
  720. nv04_graph_mthd_bind_surf2d_swzsurf(struct nouveau_channel *chan,
  721. u32 class, u32 mthd, u32 data)
  722. {
  723. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  724. case 0x30:
  725. nv04_graph_set_ctx1(chan, 0x00004000, 0);
  726. nv04_graph_set_ctx_val(chan, 0x02000000, 0);
  727. return 0;
  728. case 0x42:
  729. nv04_graph_set_ctx1(chan, 0x00004000, 0);
  730. nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
  731. return 0;
  732. case 0x52:
  733. nv04_graph_set_ctx1(chan, 0x00004000, 0x00004000);
  734. nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
  735. return 0;
  736. }
  737. return 1;
  738. }
  739. static int
  740. nv04_graph_mthd_bind_nv01_patt(struct nouveau_channel *chan,
  741. u32 class, u32 mthd, u32 data)
  742. {
  743. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  744. case 0x30:
  745. nv04_graph_set_ctx_val(chan, 0x08000000, 0);
  746. return 0;
  747. case 0x18:
  748. nv04_graph_set_ctx_val(chan, 0x08000000, 0x08000000);
  749. return 0;
  750. }
  751. return 1;
  752. }
  753. static int
  754. nv04_graph_mthd_bind_nv04_patt(struct nouveau_channel *chan,
  755. u32 class, u32 mthd, u32 data)
  756. {
  757. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  758. case 0x30:
  759. nv04_graph_set_ctx_val(chan, 0x08000000, 0);
  760. return 0;
  761. case 0x44:
  762. nv04_graph_set_ctx_val(chan, 0x08000000, 0x08000000);
  763. return 0;
  764. }
  765. return 1;
  766. }
  767. static int
  768. nv04_graph_mthd_bind_rop(struct nouveau_channel *chan,
  769. u32 class, u32 mthd, u32 data)
  770. {
  771. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  772. case 0x30:
  773. nv04_graph_set_ctx_val(chan, 0x10000000, 0);
  774. return 0;
  775. case 0x43:
  776. nv04_graph_set_ctx_val(chan, 0x10000000, 0x10000000);
  777. return 0;
  778. }
  779. return 1;
  780. }
  781. static int
  782. nv04_graph_mthd_bind_beta1(struct nouveau_channel *chan,
  783. u32 class, u32 mthd, u32 data)
  784. {
  785. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  786. case 0x30:
  787. nv04_graph_set_ctx_val(chan, 0x20000000, 0);
  788. return 0;
  789. case 0x12:
  790. nv04_graph_set_ctx_val(chan, 0x20000000, 0x20000000);
  791. return 0;
  792. }
  793. return 1;
  794. }
  795. static int
  796. nv04_graph_mthd_bind_beta4(struct nouveau_channel *chan,
  797. u32 class, u32 mthd, u32 data)
  798. {
  799. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  800. case 0x30:
  801. nv04_graph_set_ctx_val(chan, 0x40000000, 0);
  802. return 0;
  803. case 0x72:
  804. nv04_graph_set_ctx_val(chan, 0x40000000, 0x40000000);
  805. return 0;
  806. }
  807. return 1;
  808. }
  809. static int
  810. nv04_graph_mthd_bind_surf_dst(struct nouveau_channel *chan,
  811. u32 class, u32 mthd, u32 data)
  812. {
  813. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  814. case 0x30:
  815. nv04_graph_set_ctx_val(chan, 0x02000000, 0);
  816. return 0;
  817. case 0x58:
  818. nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
  819. return 0;
  820. }
  821. return 1;
  822. }
  823. static int
  824. nv04_graph_mthd_bind_surf_src(struct nouveau_channel *chan,
  825. u32 class, u32 mthd, u32 data)
  826. {
  827. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  828. case 0x30:
  829. nv04_graph_set_ctx_val(chan, 0x04000000, 0);
  830. return 0;
  831. case 0x59:
  832. nv04_graph_set_ctx_val(chan, 0x04000000, 0x04000000);
  833. return 0;
  834. }
  835. return 1;
  836. }
  837. static int
  838. nv04_graph_mthd_bind_surf_color(struct nouveau_channel *chan,
  839. u32 class, u32 mthd, u32 data)
  840. {
  841. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  842. case 0x30:
  843. nv04_graph_set_ctx_val(chan, 0x02000000, 0);
  844. return 0;
  845. case 0x5a:
  846. nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
  847. return 0;
  848. }
  849. return 1;
  850. }
  851. static int
  852. nv04_graph_mthd_bind_surf_zeta(struct nouveau_channel *chan,
  853. u32 class, u32 mthd, u32 data)
  854. {
  855. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  856. case 0x30:
  857. nv04_graph_set_ctx_val(chan, 0x04000000, 0);
  858. return 0;
  859. case 0x5b:
  860. nv04_graph_set_ctx_val(chan, 0x04000000, 0x04000000);
  861. return 0;
  862. }
  863. return 1;
  864. }
  865. static int
  866. nv04_graph_mthd_bind_clip(struct nouveau_channel *chan,
  867. u32 class, u32 mthd, u32 data)
  868. {
  869. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  870. case 0x30:
  871. nv04_graph_set_ctx1(chan, 0x2000, 0);
  872. return 0;
  873. case 0x19:
  874. nv04_graph_set_ctx1(chan, 0x2000, 0x2000);
  875. return 0;
  876. }
  877. return 1;
  878. }
  879. static int
  880. nv04_graph_mthd_bind_chroma(struct nouveau_channel *chan,
  881. u32 class, u32 mthd, u32 data)
  882. {
  883. switch (nv_ri32(chan->dev, data << 4) & 0xff) {
  884. case 0x30:
  885. nv04_graph_set_ctx1(chan, 0x1000, 0);
  886. return 0;
  887. /* Yes, for some reason even the old versions of objects
  888. * accept 0x57 and not 0x17. Consistency be damned.
  889. */
  890. case 0x57:
  891. nv04_graph_set_ctx1(chan, 0x1000, 0x1000);
  892. return 0;
  893. }
  894. return 1;
  895. }
/* Bit names for NV03_PGRAPH_INTR, used by the ISR when logging
 * unhandled interrupts. */
static struct nouveau_bitfield nv04_graph_intr[] = {
	{ NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
	{}
};
/* Bit names for NV03_PGRAPH_NSTATUS, used by the ISR when logging
 * unhandled interrupts. */
static struct nouveau_bitfield nv04_graph_nstatus[] = {
	{ NV04_PGRAPH_NSTATUS_STATE_IN_USE,       "STATE_IN_USE" },
	{ NV04_PGRAPH_NSTATUS_INVALID_STATE,      "INVALID_STATE" },
	{ NV04_PGRAPH_NSTATUS_BAD_ARGUMENT,       "BAD_ARGUMENT" },
	{ NV04_PGRAPH_NSTATUS_PROTECTION_FAULT,   "PROTECTION_FAULT" },
	{}
};
/* Bit names for NV03_PGRAPH_NSOURCE (error-source register).
 * Deliberately non-static: shared with other pre-NV50 PGRAPH code. */
struct nouveau_bitfield nv04_graph_nsource[] = {
	{ NV03_PGRAPH_NSOURCE_NOTIFICATION,       "NOTIFICATION" },
	{ NV03_PGRAPH_NSOURCE_DATA_ERROR,         "DATA_ERROR" },
	{ NV03_PGRAPH_NSOURCE_PROTECTION_ERROR,   "PROTECTION_ERROR" },
	{ NV03_PGRAPH_NSOURCE_RANGE_EXCEPTION,    "RANGE_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_LIMIT_COLOR,        "LIMIT_COLOR" },
	{ NV03_PGRAPH_NSOURCE_LIMIT_ZETA,         "LIMIT_ZETA" },
	{ NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD,       "ILLEGAL_MTHD" },
	{ NV03_PGRAPH_NSOURCE_DMA_R_PROTECTION,   "DMA_R_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_DMA_W_PROTECTION,   "DMA_W_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_FORMAT_EXCEPTION,   "FORMAT_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_PATCH_EXCEPTION,    "PATCH_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_STATE_INVALID,      "STATE_INVALID" },
	{ NV03_PGRAPH_NSOURCE_DOUBLE_NOTIFY,      "DOUBLE_NOTIFY" },
	{ NV03_PGRAPH_NSOURCE_NOTIFY_IN_USE,      "NOTIFY_IN_USE" },
	{ NV03_PGRAPH_NSOURCE_METHOD_CNT,         "METHOD_CNT" },
	{ NV03_PGRAPH_NSOURCE_BFR_NOTIFICATION,   "BFR_NOTIFICATION" },
	{ NV03_PGRAPH_NSOURCE_DMA_VTX_PROTECTION, "DMA_VTX_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_DMA_WIDTH_A,        "DMA_WIDTH_A" },
	{ NV03_PGRAPH_NSOURCE_DMA_WIDTH_B,        "DMA_WIDTH_B" },
	{}
};
/*
 * Switch PGRAPH to the channel currently active on PFIFO: wait for the
 * engine to idle, save the outgoing channel's context, then load the
 * incoming channel's context (if one exists for that channel id).
 * Called from the ISR on a CONTEXT_SWITCH interrupt; the ordering of
 * the steps below matters.
 */
static void
nv04_graph_context_switch(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *chan = NULL;
	int chid;

	nouveau_wait_for_idle(dev);

	/* If previous context is valid, we need to save it */
	nv04_graph_unload_context(dev);

	/* Load context for next channel */
	chid = dev_priv->engine.fifo.channel_id(dev);
	chan = dev_priv->channels.ptr[chid];
	if (chan)
		nv04_graph_load_context(chan);
}
/*
 * PGRAPH interrupt handler.  Loops until NV03_PGRAPH_INTR reads back
 * zero.  For each pending interrupt it decodes the trapped method
 * (channel, subchannel, method offset, data) from the TRAPPED_ADDR /
 * TRAPPED_DATA registers, lets the registered software-method handlers
 * service ILLEGAL_MTHD notifies, performs hardware-requested context
 * switches, acks the remaining bits and logs (rate-limited) anything
 * left unhandled.
 */
static void
nv04_graph_isr(struct drm_device *dev)
{
	u32 stat;

	while ((stat = nv_rd32(dev, NV03_PGRAPH_INTR))) {
		u32 nsource = nv_rd32(dev, NV03_PGRAPH_NSOURCE);
		u32 nstatus = nv_rd32(dev, NV03_PGRAPH_NSTATUS);
		u32 addr = nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR);
		u32 chid = (addr & 0x0f000000) >> 24;	/* channel id */
		u32 subc = (addr & 0x0000e000) >> 13;	/* subchannel */
		u32 mthd = (addr & 0x00001ffc);		/* method offset */
		u32 data = nv_rd32(dev, NV04_PGRAPH_TRAPPED_DATA);
		u32 class = nv_rd32(dev, 0x400180 + subc * 4) & 0xff;
		u32 show = stat;	/* bits still unhandled, for logging */

		if (stat & NV_PGRAPH_INTR_NOTIFY) {
			if (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD) {
				/* Try software-method handlers; if one handles
				 * the method there is nothing to report. */
				if (!nouveau_gpuobj_mthd_call2(dev, chid, class, mthd, data))
					show &= ~NV_PGRAPH_INTR_NOTIFY;
			}
		}

		if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
			/* Ack the context-switch bit before switching. */
			nv_wr32(dev, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
			stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
			show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
			nv04_graph_context_switch(dev);
		}

		/* Ack everything else and re-enable the PGRAPH FIFO. */
		nv_wr32(dev, NV03_PGRAPH_INTR, stat);
		nv_wr32(dev, NV04_PGRAPH_FIFO, 0x00000001);

		if (show && nouveau_ratelimit()) {
			NV_INFO(dev, "PGRAPH -");
			nouveau_bitfield_print(nv04_graph_intr, show);
			printk(" nsource:");
			nouveau_bitfield_print(nv04_graph_nsource, nsource);
			printk(" nstatus:");
			nouveau_bitfield_print(nv04_graph_nstatus, nstatus);
			printk("\n");
			NV_INFO(dev, "PGRAPH - ch %d/%d class 0x%04x "
				     "mthd 0x%04x data 0x%08x\n",
				chid, subc, class, mthd, data);
		}
	}
}
/*
 * Tear down the PGRAPH engine created by nv04_graph_create():
 * unregister the interrupt handler (IRQ source 12) first so no ISR
 * can run against a dead engine, then remove the engine registration
 * and free the engine structure.
 */
static void
nv04_graph_destroy(struct drm_device *dev, int engine)
{
	struct nv04_graph_engine *pgraph = nv_engine(dev, engine);

	nouveau_irq_unregister(dev, 12);
	NVOBJ_ENGINE_DEL(dev, GR);

	kfree(pgraph);
}
/*
 * Create and register the NV04 PGRAPH engine.  Allocates the engine
 * structure, wires up its lifecycle hooks, registers the PGRAPH
 * interrupt handler (IRQ source 12), and declares every graphics
 * object class this engine supports — attaching software-method
 * handlers for the bind/operation methods that require context
 * fixups on NV04-class hardware.  Returns 0 on success or -ENOMEM.
 */
int
nv04_graph_create(struct drm_device *dev)
{
	struct nv04_graph_engine *pgraph;

	pgraph = kzalloc(sizeof(*pgraph), GFP_KERNEL);
	if (!pgraph)
		return -ENOMEM;

	pgraph->base.destroy = nv04_graph_destroy;
	pgraph->base.init = nv04_graph_init;
	pgraph->base.fini = nv04_graph_fini;
	pgraph->base.context_new = nv04_graph_context_new;
	pgraph->base.context_del = nv04_graph_context_del;
	pgraph->base.object_new = nv04_graph_object_new;

	NVOBJ_ENGINE_ADD(dev, GR, &pgraph->base);
	nouveau_irq_register(dev, 12, nv04_graph_isr);

	/* dvd subpicture */
	NVOBJ_CLASS(dev, 0x0038, GR);

	/* m2mf */
	NVOBJ_CLASS(dev, 0x0039, GR);

	/* nv03 gdirect */
	NVOBJ_CLASS(dev, 0x004b, GR);
	NVOBJ_MTHD (dev, 0x004b, 0x0184, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x004b, 0x0188, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x004b, 0x018c, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x004b, 0x0190, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x004b, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv04 gdirect */
	NVOBJ_CLASS(dev, 0x004a, GR);
	NVOBJ_MTHD (dev, 0x004a, 0x0188, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x004a, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x004a, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x004a, 0x0194, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x004a, 0x0198, nv04_graph_mthd_bind_surf2d);
	NVOBJ_MTHD (dev, 0x004a, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv01 imageblit */
	NVOBJ_CLASS(dev, 0x001f, GR);
	NVOBJ_MTHD (dev, 0x001f, 0x0184, nv04_graph_mthd_bind_chroma);
	NVOBJ_MTHD (dev, 0x001f, 0x0188, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x001f, 0x018c, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x001f, 0x0190, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x001f, 0x0194, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x001f, 0x0198, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x001f, 0x019c, nv04_graph_mthd_bind_surf_src);
	NVOBJ_MTHD (dev, 0x001f, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv04 imageblit */
	NVOBJ_CLASS(dev, 0x005f, GR);
	NVOBJ_MTHD (dev, 0x005f, 0x0184, nv04_graph_mthd_bind_chroma);
	NVOBJ_MTHD (dev, 0x005f, 0x0188, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x005f, 0x018c, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x005f, 0x0190, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x005f, 0x0194, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x005f, 0x0198, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x005f, 0x019c, nv04_graph_mthd_bind_surf2d);
	NVOBJ_MTHD (dev, 0x005f, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv04 iifc */
	NVOBJ_CLASS(dev, 0x0060, GR);
	NVOBJ_MTHD (dev, 0x0060, 0x0188, nv04_graph_mthd_bind_chroma);
	NVOBJ_MTHD (dev, 0x0060, 0x018c, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x0060, 0x0190, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x0060, 0x0194, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x0060, 0x0198, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x0060, 0x019c, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x0060, 0x01a0, nv04_graph_mthd_bind_surf2d_swzsurf);
	NVOBJ_MTHD (dev, 0x0060, 0x03e4, nv04_graph_mthd_set_operation);

	/* nv05 iifc */
	NVOBJ_CLASS(dev, 0x0064, GR);

	/* nv01 ifc */
	NVOBJ_CLASS(dev, 0x0021, GR);
	NVOBJ_MTHD (dev, 0x0021, 0x0184, nv04_graph_mthd_bind_chroma);
	NVOBJ_MTHD (dev, 0x0021, 0x0188, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x0021, 0x018c, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x0021, 0x0190, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x0021, 0x0194, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x0021, 0x0198, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x0021, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv04 ifc */
	NVOBJ_CLASS(dev, 0x0061, GR);
	NVOBJ_MTHD (dev, 0x0061, 0x0184, nv04_graph_mthd_bind_chroma);
	NVOBJ_MTHD (dev, 0x0061, 0x0188, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x0061, 0x018c, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x0061, 0x0190, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x0061, 0x0194, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x0061, 0x0198, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x0061, 0x019c, nv04_graph_mthd_bind_surf2d);
	NVOBJ_MTHD (dev, 0x0061, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv05 ifc */
	NVOBJ_CLASS(dev, 0x0065, GR);

	/* nv03 sifc */
	NVOBJ_CLASS(dev, 0x0036, GR);
	NVOBJ_MTHD (dev, 0x0036, 0x0184, nv04_graph_mthd_bind_chroma);
	NVOBJ_MTHD (dev, 0x0036, 0x0188, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x0036, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x0036, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x0036, 0x0194, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x0036, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv04 sifc */
	NVOBJ_CLASS(dev, 0x0076, GR);
	NVOBJ_MTHD (dev, 0x0076, 0x0184, nv04_graph_mthd_bind_chroma);
	NVOBJ_MTHD (dev, 0x0076, 0x0188, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x0076, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x0076, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x0076, 0x0194, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x0076, 0x0198, nv04_graph_mthd_bind_surf2d);
	NVOBJ_MTHD (dev, 0x0076, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv05 sifc */
	NVOBJ_CLASS(dev, 0x0066, GR);

	/* nv03 sifm */
	NVOBJ_CLASS(dev, 0x0037, GR);
	NVOBJ_MTHD (dev, 0x0037, 0x0188, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x0037, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x0037, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x0037, 0x0194, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x0037, 0x0304, nv04_graph_mthd_set_operation);

	/* nv04 sifm */
	NVOBJ_CLASS(dev, 0x0077, GR);
	NVOBJ_MTHD (dev, 0x0077, 0x0188, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x0077, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x0077, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x0077, 0x0194, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x0077, 0x0198, nv04_graph_mthd_bind_surf2d_swzsurf);
	NVOBJ_MTHD (dev, 0x0077, 0x0304, nv04_graph_mthd_set_operation);

	/* null */
	NVOBJ_CLASS(dev, 0x0030, GR);

	/* surf2d */
	NVOBJ_CLASS(dev, 0x0042, GR);

	/* rop */
	NVOBJ_CLASS(dev, 0x0043, GR);

	/* beta1 */
	NVOBJ_CLASS(dev, 0x0012, GR);

	/* beta4 */
	NVOBJ_CLASS(dev, 0x0072, GR);

	/* cliprect */
	NVOBJ_CLASS(dev, 0x0019, GR);

	/* nv01 pattern */
	NVOBJ_CLASS(dev, 0x0018, GR);

	/* nv04 pattern */
	NVOBJ_CLASS(dev, 0x0044, GR);

	/* swzsurf */
	NVOBJ_CLASS(dev, 0x0052, GR);

	/* surf3d */
	NVOBJ_CLASS(dev, 0x0053, GR);
	NVOBJ_MTHD (dev, 0x0053, 0x02f8, nv04_graph_mthd_surf3d_clip_h);
	NVOBJ_MTHD (dev, 0x0053, 0x02fc, nv04_graph_mthd_surf3d_clip_v);

	/* nv03 tex_tri */
	NVOBJ_CLASS(dev, 0x0048, GR);
	NVOBJ_MTHD (dev, 0x0048, 0x0188, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x0048, 0x018c, nv04_graph_mthd_bind_surf_color);
	NVOBJ_MTHD (dev, 0x0048, 0x0190, nv04_graph_mthd_bind_surf_zeta);

	/* tex_tri */
	NVOBJ_CLASS(dev, 0x0054, GR);

	/* multitex_tri */
	NVOBJ_CLASS(dev, 0x0055, GR);

	/* nv01 chroma */
	NVOBJ_CLASS(dev, 0x0017, GR);

	/* nv04 chroma */
	NVOBJ_CLASS(dev, 0x0057, GR);

	/* surf_dst */
	NVOBJ_CLASS(dev, 0x0058, GR);

	/* surf_src */
	NVOBJ_CLASS(dev, 0x0059, GR);

	/* surf_color */
	NVOBJ_CLASS(dev, 0x005a, GR);

	/* surf_zeta */
	NVOBJ_CLASS(dev, 0x005b, GR);

	/* nv01 line */
	NVOBJ_CLASS(dev, 0x001c, GR);
	NVOBJ_MTHD (dev, 0x001c, 0x0184, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x001c, 0x0188, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x001c, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x001c, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x001c, 0x0194, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x001c, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv04 line */
	NVOBJ_CLASS(dev, 0x005c, GR);
	NVOBJ_MTHD (dev, 0x005c, 0x0184, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x005c, 0x0188, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x005c, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x005c, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x005c, 0x0194, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x005c, 0x0198, nv04_graph_mthd_bind_surf2d);
	NVOBJ_MTHD (dev, 0x005c, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv01 tri */
	NVOBJ_CLASS(dev, 0x001d, GR);
	NVOBJ_MTHD (dev, 0x001d, 0x0184, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x001d, 0x0188, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x001d, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x001d, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x001d, 0x0194, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x001d, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv04 tri */
	NVOBJ_CLASS(dev, 0x005d, GR);
	NVOBJ_MTHD (dev, 0x005d, 0x0184, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x005d, 0x0188, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x005d, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x005d, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x005d, 0x0194, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x005d, 0x0198, nv04_graph_mthd_bind_surf2d);
	NVOBJ_MTHD (dev, 0x005d, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv01 rect */
	NVOBJ_CLASS(dev, 0x001e, GR);
	NVOBJ_MTHD (dev, 0x001e, 0x0184, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x001e, 0x0188, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x001e, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x001e, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x001e, 0x0194, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x001e, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv04 rect */
	NVOBJ_CLASS(dev, 0x005e, GR);
	NVOBJ_MTHD (dev, 0x005e, 0x0184, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x005e, 0x0188, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x005e, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x005e, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x005e, 0x0194, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x005e, 0x0198, nv04_graph_mthd_bind_surf2d);
	NVOBJ_MTHD (dev, 0x005e, 0x02fc, nv04_graph_mthd_set_operation);

	/* nvsw */
	NVOBJ_CLASS(dev, 0x506e, SW);
	NVOBJ_MTHD (dev, 0x506e, 0x0150, nv04_graph_mthd_set_ref);
	NVOBJ_MTHD (dev, 0x506e, 0x0500, nv04_graph_mthd_page_flip);
	return 0;
}