/* drivers/gpu/drm/nouveau/nv10_graph.c — NV10-family PGRAPH engine support */
  1. /*
  2. * Copyright 2007 Matthieu CASTET <castet.matthieu@free.fr>
  3. * All Rights Reserved.
  4. *
  5. * Permission is hereby granted, free of charge, to any person obtaining a
  6. * copy of this software and associated documentation files (the "Software"),
  7. * to deal in the Software without restriction, including without limitation
  8. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  9. * and/or sell copies of the Software, and to permit persons to whom the
  10. * Software is furnished to do so, subject to the following conditions:
  11. *
  12. * The above copyright notice and this permission notice (including the next
  13. * paragraph) shall be included in all copies or substantial portions of the
  14. * Software.
  15. *
  16. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  17. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  18. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  19. * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
  20. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  21. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
  22. * DEALINGS IN THE SOFTWARE.
  23. */
  24. #include "drmP.h"
  25. #include "nouveau_drm.h"
  26. #include "nouveau_drv.h"
  27. #include "nouveau_util.h"
/* Per-device NV10 PGRAPH engine instance; wraps the generic engine vtable. */
struct nv10_graph_engine {
	struct nouveau_exec_engine base;
};
/*
 * Software snapshot of the PGRAPH 3D pipe state.  Each member mirrors a
 * region of the pipe address space (the name encodes the PIPE_ADDRESS
 * base, the array length its byte size / 4), read and written through
 * NV10_PGRAPH_PIPE_ADDRESS / NV10_PGRAPH_PIPE_DATA by PIPE_SAVE() and
 * PIPE_RESTORE().
 */
struct pipe_state {
	uint32_t pipe_0x0000[0x040/4];
	uint32_t pipe_0x0040[0x010/4];
	uint32_t pipe_0x0200[0x0c0/4];
	uint32_t pipe_0x4400[0x080/4];
	uint32_t pipe_0x6400[0x3b0/4];
	uint32_t pipe_0x6800[0x2f0/4];
	uint32_t pipe_0x6c00[0x030/4];
	uint32_t pipe_0x7000[0x130/4];
	uint32_t pipe_0x7400[0x0c0/4];
	uint32_t pipe_0x7800[0x0c0/4];
};
/*
 * PGRAPH MMIO registers that make up a channel's graphics context on
 * NV10-family chips.  nv10_graph_load_context()/nv10_graph_unload_context()
 * copy these, in this exact order, between the hardware and the per-channel
 * graph_state->nv10[] array (indices must match, see
 * nv10_graph_ctx_regs_find_offset()).  Bare hex literals fill out register
 * ranges for which only the first entry has a symbolic name; the comment on
 * the named entry gives the covered range.
 */
static int nv10_graph_ctx_regs[] = {
	NV10_PGRAPH_CTX_SWITCH(0),
	NV10_PGRAPH_CTX_SWITCH(1),
	NV10_PGRAPH_CTX_SWITCH(2),
	NV10_PGRAPH_CTX_SWITCH(3),
	NV10_PGRAPH_CTX_SWITCH(4),
	NV10_PGRAPH_CTX_CACHE(0, 0),
	NV10_PGRAPH_CTX_CACHE(0, 1),
	NV10_PGRAPH_CTX_CACHE(0, 2),
	NV10_PGRAPH_CTX_CACHE(0, 3),
	NV10_PGRAPH_CTX_CACHE(0, 4),
	NV10_PGRAPH_CTX_CACHE(1, 0),
	NV10_PGRAPH_CTX_CACHE(1, 1),
	NV10_PGRAPH_CTX_CACHE(1, 2),
	NV10_PGRAPH_CTX_CACHE(1, 3),
	NV10_PGRAPH_CTX_CACHE(1, 4),
	NV10_PGRAPH_CTX_CACHE(2, 0),
	NV10_PGRAPH_CTX_CACHE(2, 1),
	NV10_PGRAPH_CTX_CACHE(2, 2),
	NV10_PGRAPH_CTX_CACHE(2, 3),
	NV10_PGRAPH_CTX_CACHE(2, 4),
	NV10_PGRAPH_CTX_CACHE(3, 0),
	NV10_PGRAPH_CTX_CACHE(3, 1),
	NV10_PGRAPH_CTX_CACHE(3, 2),
	NV10_PGRAPH_CTX_CACHE(3, 3),
	NV10_PGRAPH_CTX_CACHE(3, 4),
	NV10_PGRAPH_CTX_CACHE(4, 0),
	NV10_PGRAPH_CTX_CACHE(4, 1),
	NV10_PGRAPH_CTX_CACHE(4, 2),
	NV10_PGRAPH_CTX_CACHE(4, 3),
	NV10_PGRAPH_CTX_CACHE(4, 4),
	NV10_PGRAPH_CTX_CACHE(5, 0),
	NV10_PGRAPH_CTX_CACHE(5, 1),
	NV10_PGRAPH_CTX_CACHE(5, 2),
	NV10_PGRAPH_CTX_CACHE(5, 3),
	NV10_PGRAPH_CTX_CACHE(5, 4),
	NV10_PGRAPH_CTX_CACHE(6, 0),
	NV10_PGRAPH_CTX_CACHE(6, 1),
	NV10_PGRAPH_CTX_CACHE(6, 2),
	NV10_PGRAPH_CTX_CACHE(6, 3),
	NV10_PGRAPH_CTX_CACHE(6, 4),
	NV10_PGRAPH_CTX_CACHE(7, 0),
	NV10_PGRAPH_CTX_CACHE(7, 1),
	NV10_PGRAPH_CTX_CACHE(7, 2),
	NV10_PGRAPH_CTX_CACHE(7, 3),
	NV10_PGRAPH_CTX_CACHE(7, 4),
	NV10_PGRAPH_CTX_USER,
	NV04_PGRAPH_DMA_START_0,
	NV04_PGRAPH_DMA_START_1,
	NV04_PGRAPH_DMA_LENGTH,
	NV04_PGRAPH_DMA_MISC,
	NV10_PGRAPH_DMA_PITCH,
	NV04_PGRAPH_BOFFSET0,
	NV04_PGRAPH_BBASE0,
	NV04_PGRAPH_BLIMIT0,
	NV04_PGRAPH_BOFFSET1,
	NV04_PGRAPH_BBASE1,
	NV04_PGRAPH_BLIMIT1,
	NV04_PGRAPH_BOFFSET2,
	NV04_PGRAPH_BBASE2,
	NV04_PGRAPH_BLIMIT2,
	NV04_PGRAPH_BOFFSET3,
	NV04_PGRAPH_BBASE3,
	NV04_PGRAPH_BLIMIT3,
	NV04_PGRAPH_BOFFSET4,
	NV04_PGRAPH_BBASE4,
	NV04_PGRAPH_BLIMIT4,
	NV04_PGRAPH_BOFFSET5,
	NV04_PGRAPH_BBASE5,
	NV04_PGRAPH_BLIMIT5,
	NV04_PGRAPH_BPITCH0,
	NV04_PGRAPH_BPITCH1,
	NV04_PGRAPH_BPITCH2,
	NV04_PGRAPH_BPITCH3,
	NV04_PGRAPH_BPITCH4,
	NV10_PGRAPH_SURFACE,
	NV10_PGRAPH_STATE,
	NV04_PGRAPH_BSWIZZLE2,
	NV04_PGRAPH_BSWIZZLE5,
	NV04_PGRAPH_BPIXEL,
	NV10_PGRAPH_NOTIFY,
	NV04_PGRAPH_PATT_COLOR0,
	NV04_PGRAPH_PATT_COLOR1,
	NV04_PGRAPH_PATT_COLORRAM, /* 64 values from 0x400900 to 0x4009fc */
	0x00400904,
	0x00400908,
	0x0040090c,
	0x00400910,
	0x00400914,
	0x00400918,
	0x0040091c,
	0x00400920,
	0x00400924,
	0x00400928,
	0x0040092c,
	0x00400930,
	0x00400934,
	0x00400938,
	0x0040093c,
	0x00400940,
	0x00400944,
	0x00400948,
	0x0040094c,
	0x00400950,
	0x00400954,
	0x00400958,
	0x0040095c,
	0x00400960,
	0x00400964,
	0x00400968,
	0x0040096c,
	0x00400970,
	0x00400974,
	0x00400978,
	0x0040097c,
	0x00400980,
	0x00400984,
	0x00400988,
	0x0040098c,
	0x00400990,
	0x00400994,
	0x00400998,
	0x0040099c,
	0x004009a0,
	0x004009a4,
	0x004009a8,
	0x004009ac,
	0x004009b0,
	0x004009b4,
	0x004009b8,
	0x004009bc,
	0x004009c0,
	0x004009c4,
	0x004009c8,
	0x004009cc,
	0x004009d0,
	0x004009d4,
	0x004009d8,
	0x004009dc,
	0x004009e0,
	0x004009e4,
	0x004009e8,
	0x004009ec,
	0x004009f0,
	0x004009f4,
	0x004009f8,
	0x004009fc,
	NV04_PGRAPH_PATTERN, /* 2 values from 0x400808 to 0x40080c */
	0x0040080c,
	NV04_PGRAPH_PATTERN_SHAPE,
	NV03_PGRAPH_MONO_COLOR0,
	NV04_PGRAPH_ROP3,
	NV04_PGRAPH_CHROMA,
	NV04_PGRAPH_BETA_AND,
	NV04_PGRAPH_BETA_PREMULT,
	0x00400e70,
	0x00400e74,
	0x00400e78,
	0x00400e7c,
	0x00400e80,
	0x00400e84,
	0x00400e88,
	0x00400e8c,
	0x00400ea0,
	0x00400ea4,
	0x00400ea8,
	0x00400e90,
	0x00400e94,
	0x00400e98,
	0x00400e9c,
	NV10_PGRAPH_WINDOWCLIP_HORIZONTAL, /* 8 values from 0x400f00-0x400f1c */
	NV10_PGRAPH_WINDOWCLIP_VERTICAL,   /* 8 values from 0x400f20-0x400f3c */
	0x00400f04,
	0x00400f24,
	0x00400f08,
	0x00400f28,
	0x00400f0c,
	0x00400f2c,
	0x00400f10,
	0x00400f30,
	0x00400f14,
	0x00400f34,
	0x00400f18,
	0x00400f38,
	0x00400f1c,
	0x00400f3c,
	NV10_PGRAPH_XFMODE0,
	NV10_PGRAPH_XFMODE1,
	NV10_PGRAPH_GLOBALSTATE0,
	NV10_PGRAPH_GLOBALSTATE1,
	NV04_PGRAPH_STORED_FMT,
	NV04_PGRAPH_SOURCE_COLOR,
	NV03_PGRAPH_ABS_X_RAM,	/* 32 values from 0x400400 to 0x40047c */
	NV03_PGRAPH_ABS_Y_RAM,	/* 32 values from 0x400480 to 0x4004fc */
	0x00400404,
	0x00400484,
	0x00400408,
	0x00400488,
	0x0040040c,
	0x0040048c,
	0x00400410,
	0x00400490,
	0x00400414,
	0x00400494,
	0x00400418,
	0x00400498,
	0x0040041c,
	0x0040049c,
	0x00400420,
	0x004004a0,
	0x00400424,
	0x004004a4,
	0x00400428,
	0x004004a8,
	0x0040042c,
	0x004004ac,
	0x00400430,
	0x004004b0,
	0x00400434,
	0x004004b4,
	0x00400438,
	0x004004b8,
	0x0040043c,
	0x004004bc,
	0x00400440,
	0x004004c0,
	0x00400444,
	0x004004c4,
	0x00400448,
	0x004004c8,
	0x0040044c,
	0x004004cc,
	0x00400450,
	0x004004d0,
	0x00400454,
	0x004004d4,
	0x00400458,
	0x004004d8,
	0x0040045c,
	0x004004dc,
	0x00400460,
	0x004004e0,
	0x00400464,
	0x004004e4,
	0x00400468,
	0x004004e8,
	0x0040046c,
	0x004004ec,
	0x00400470,
	0x004004f0,
	0x00400474,
	0x004004f4,
	0x00400478,
	0x004004f8,
	0x0040047c,
	0x004004fc,
	NV03_PGRAPH_ABS_UCLIP_XMIN,
	NV03_PGRAPH_ABS_UCLIP_XMAX,
	NV03_PGRAPH_ABS_UCLIP_YMIN,
	NV03_PGRAPH_ABS_UCLIP_YMAX,
	0x00400550,
	0x00400558,
	0x00400554,
	0x0040055c,
	NV03_PGRAPH_ABS_UCLIPA_XMIN,
	NV03_PGRAPH_ABS_UCLIPA_XMAX,
	NV03_PGRAPH_ABS_UCLIPA_YMIN,
	NV03_PGRAPH_ABS_UCLIPA_YMAX,
	NV03_PGRAPH_ABS_ICLIP_XMAX,
	NV03_PGRAPH_ABS_ICLIP_YMAX,
	NV03_PGRAPH_XY_LOGIC_MISC0,
	NV03_PGRAPH_XY_LOGIC_MISC1,
	NV03_PGRAPH_XY_LOGIC_MISC2,
	NV03_PGRAPH_XY_LOGIC_MISC3,
	NV03_PGRAPH_CLIPX_0,
	NV03_PGRAPH_CLIPX_1,
	NV03_PGRAPH_CLIPY_0,
	NV03_PGRAPH_CLIPY_1,
	NV10_PGRAPH_COMBINER0_IN_ALPHA,
	NV10_PGRAPH_COMBINER1_IN_ALPHA,
	NV10_PGRAPH_COMBINER0_IN_RGB,
	NV10_PGRAPH_COMBINER1_IN_RGB,
	NV10_PGRAPH_COMBINER_COLOR0,
	NV10_PGRAPH_COMBINER_COLOR1,
	NV10_PGRAPH_COMBINER0_OUT_ALPHA,
	NV10_PGRAPH_COMBINER1_OUT_ALPHA,
	NV10_PGRAPH_COMBINER0_OUT_RGB,
	NV10_PGRAPH_COMBINER1_OUT_RGB,
	NV10_PGRAPH_COMBINER_FINAL0,
	NV10_PGRAPH_COMBINER_FINAL1,
	0x00400e00,
	0x00400e04,
	0x00400e08,
	0x00400e0c,
	0x00400e10,
	0x00400e14,
	0x00400e18,
	0x00400e1c,
	0x00400e20,
	0x00400e24,
	0x00400e28,
	0x00400e2c,
	0x00400e30,
	0x00400e34,
	0x00400e38,
	0x00400e3c,
	NV04_PGRAPH_PASSTHRU_0,
	NV04_PGRAPH_PASSTHRU_1,
	NV04_PGRAPH_PASSTHRU_2,
	NV10_PGRAPH_DIMX_TEXTURE,
	NV10_PGRAPH_WDIMX_TEXTURE,
	NV10_PGRAPH_DVD_COLORFMT,
	NV10_PGRAPH_SCALED_FORMAT,
	NV04_PGRAPH_MISC24_0,
	NV04_PGRAPH_MISC24_1,
	NV04_PGRAPH_MISC24_2,
	NV03_PGRAPH_X_MISC,
	NV03_PGRAPH_Y_MISC,
	NV04_PGRAPH_VALID1,
	NV04_PGRAPH_VALID2,
};
/*
 * Additional context registers saved/restored only on chipset >= 0x17
 * (NV17+); stored in graph_state->nv17[] in this order.
 */
static int nv17_graph_ctx_regs[] = {
	NV10_PGRAPH_DEBUG_4,
	0x004006b0,
	0x00400eac,
	0x00400eb0,
	0x00400eb4,
	0x00400eb8,
	0x00400ebc,
	0x00400ec0,
	0x00400ec4,
	0x00400ec8,
	0x00400ecc,
	0x00400ed0,
	0x00400ed4,
	0x00400ed8,
	0x00400edc,
	0x00400ee0,
	0x00400a00,
	0x00400a04,
};
/*
 * Per-channel PGRAPH context image kept in system memory while the
 * channel is switched out.  nv10[]/nv17[] parallel the register tables
 * above; lma_window caches the four NV17 LMA window parameters.
 */
struct graph_state {
	int nv10[ARRAY_SIZE(nv10_graph_ctx_regs)];
	int nv17[ARRAY_SIZE(nv17_graph_ctx_regs)];
	struct pipe_state pipe_state;
	uint32_t lma_window[4];
};
/*
 * Read an entire pipe region into "state": set the auto-incrementing
 * PIPE_ADDRESS once, then stream ARRAY_SIZE(state) words out of PIPE_DATA.
 * "state" must be a real array (ARRAY_SIZE is taken of it).
 */
#define PIPE_SAVE(dev, state, addr) \
	do { \
		int __i; \
		nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, addr); \
		for (__i = 0; __i < ARRAY_SIZE(state); __i++) \
			state[__i] = nv_rd32(dev, NV10_PGRAPH_PIPE_DATA); \
	} while (0)

/* Inverse of PIPE_SAVE: stream "state" back into the pipe region at addr. */
#define PIPE_RESTORE(dev, state, addr) \
	do { \
		int __i; \
		nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, addr); \
		for (__i = 0; __i < ARRAY_SIZE(state); __i++) \
			nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, state[__i]); \
	} while (0)
/*
 * Snapshot all 3D pipe regions of the channel currently on the hardware
 * into its graph_state.  The region order below is deliberate — keep it
 * in sync with the restore sequence in nv10_graph_load_pipe().
 */
static void nv10_graph_save_pipe(struct nouveau_channel *chan)
{
	struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
	struct pipe_state *pipe = &pgraph_ctx->pipe_state;
	struct drm_device *dev = chan->dev;

	PIPE_SAVE(dev, pipe->pipe_0x4400, 0x4400);
	PIPE_SAVE(dev, pipe->pipe_0x0200, 0x0200);
	PIPE_SAVE(dev, pipe->pipe_0x6400, 0x6400);
	PIPE_SAVE(dev, pipe->pipe_0x6800, 0x6800);
	PIPE_SAVE(dev, pipe->pipe_0x6c00, 0x6c00);
	PIPE_SAVE(dev, pipe->pipe_0x7000, 0x7000);
	PIPE_SAVE(dev, pipe->pipe_0x7400, 0x7400);
	PIPE_SAVE(dev, pipe->pipe_0x7800, 0x7800);
	PIPE_SAVE(dev, pipe->pipe_0x0040, 0x0040);
	PIPE_SAVE(dev, pipe->pipe_0x0000, 0x0000);
}
/*
 * Restore the saved 3D pipe state for "chan" into the hardware.
 *
 * XFMODE0/1 are parked at fixed values while the 0x0200 region and a few
 * scratch pipe locations are primed, then restored before the remaining
 * regions are written back.  The exact sequence is magic inherited from
 * reverse engineering — do not reorder.
 */
static void nv10_graph_load_pipe(struct nouveau_channel *chan)
{
	struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
	struct pipe_state *pipe = &pgraph_ctx->pipe_state;
	struct drm_device *dev = chan->dev;
	uint32_t xfmode0, xfmode1;
	int i;

	nouveau_wait_for_idle(dev);
	/* XXX check haiku comments */
	xfmode0 = nv_rd32(dev, NV10_PGRAPH_XFMODE0);
	xfmode1 = nv_rd32(dev, NV10_PGRAPH_XFMODE1);
	nv_wr32(dev, NV10_PGRAPH_XFMODE0, 0x10000000);
	nv_wr32(dev, NV10_PGRAPH_XFMODE1, 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x000064c0);
	for (i = 0; i < 4; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);
	for (i = 0; i < 4; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006ab0);
	for (i = 0; i < 3; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006a80);
	for (i = 0; i < 3; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00000040);
	nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000008);

	PIPE_RESTORE(dev, pipe->pipe_0x0200, 0x0200);
	nouveau_wait_for_idle(dev);

	/* restore XFMODE */
	nv_wr32(dev, NV10_PGRAPH_XFMODE0, xfmode0);
	nv_wr32(dev, NV10_PGRAPH_XFMODE1, xfmode1);
	PIPE_RESTORE(dev, pipe->pipe_0x6400, 0x6400);
	PIPE_RESTORE(dev, pipe->pipe_0x6800, 0x6800);
	PIPE_RESTORE(dev, pipe->pipe_0x6c00, 0x6c00);
	PIPE_RESTORE(dev, pipe->pipe_0x7000, 0x7000);
	PIPE_RESTORE(dev, pipe->pipe_0x7400, 0x7400);
	PIPE_RESTORE(dev, pipe->pipe_0x7800, 0x7800);
	PIPE_RESTORE(dev, pipe->pipe_0x4400, 0x4400);
	PIPE_RESTORE(dev, pipe->pipe_0x0000, 0x0000);
	PIPE_RESTORE(dev, pipe->pipe_0x0040, 0x0040);
	nouveau_wait_for_idle(dev);
}
/*
 * Fill a fresh channel's software pipe_state with the hardware's default
 * values, so the first context load programs a sane 3D pipe.  Nothing is
 * written to the hardware here.  PIPE_INIT_END() cross-checks that each
 * region was filled with exactly as many words as its array holds.
 */
static void nv10_graph_create_pipe(struct nouveau_channel *chan)
{
	struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
	struct pipe_state *fifo_pipe_state = &pgraph_ctx->pipe_state;
	struct drm_device *dev = chan->dev;
	uint32_t *fifo_pipe_state_addr;
	int i;
#define PIPE_INIT(addr) \
	do { \
		fifo_pipe_state_addr = fifo_pipe_state->pipe_##addr; \
	} while (0)
#define PIPE_INIT_END(addr) \
	do { \
		uint32_t *__end_addr = fifo_pipe_state->pipe_##addr + \
				ARRAY_SIZE(fifo_pipe_state->pipe_##addr); \
		if (fifo_pipe_state_addr != __end_addr) \
			NV_ERROR(dev, "incomplete pipe init for 0x%x : %p/%p\n", \
				addr, fifo_pipe_state_addr, __end_addr); \
	} while (0)
#define NV_WRITE_PIPE_INIT(value) *(fifo_pipe_state_addr++) = value

	PIPE_INIT(0x0200);
	for (i = 0; i < 48; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x0200);

	PIPE_INIT(0x6400);
	for (i = 0; i < 211; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	/* Tail of the 0x6400 region: IEEE-754 constants (0x3f800000 = 1.0f,
	 * 0x40000000 = 2.0f, 0x3f000000 = 0.5f). */
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f000000);
	NV_WRITE_PIPE_INIT(0x3f000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	PIPE_INIT_END(0x6400);

	PIPE_INIT(0x6800);
	for (i = 0; i < 162; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	for (i = 0; i < 25; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x6800);

	PIPE_INIT(0x6c00);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0xbf800000); /* -1.0f */
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x6c00);

	PIPE_INIT(0x7000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	for (i = 0; i < 35; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x7000);

	PIPE_INIT(0x7400);
	for (i = 0; i < 48; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x7400);

	PIPE_INIT(0x7800);
	for (i = 0; i < 48; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x7800);

	PIPE_INIT(0x4400);
	for (i = 0; i < 32; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x4400);

	PIPE_INIT(0x0000);
	for (i = 0; i < 16; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x0000);

	PIPE_INIT(0x0040);
	for (i = 0; i < 4; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x0040);

#undef PIPE_INIT
#undef PIPE_INIT_END
#undef NV_WRITE_PIPE_INIT
}
  605. static int nv10_graph_ctx_regs_find_offset(struct drm_device *dev, int reg)
  606. {
  607. int i;
  608. for (i = 0; i < ARRAY_SIZE(nv10_graph_ctx_regs); i++) {
  609. if (nv10_graph_ctx_regs[i] == reg)
  610. return i;
  611. }
  612. NV_ERROR(dev, "unknow offset nv10_ctx_regs %d\n", reg);
  613. return -1;
  614. }
  615. static int nv17_graph_ctx_regs_find_offset(struct drm_device *dev, int reg)
  616. {
  617. int i;
  618. for (i = 0; i < ARRAY_SIZE(nv17_graph_ctx_regs); i++) {
  619. if (nv17_graph_ctx_regs[i] == reg)
  620. return i;
  621. }
  622. NV_ERROR(dev, "unknow offset nv17_ctx_regs %d\n", reg);
  623. return -1;
  624. }
/*
 * Re-bind the vertex-buffer DMA object ("inst") for a channel being
 * switched in.
 *
 * NV10TCL_DMA_VTXBUF (method 0x18c) modifies hidden state that cannot be
 * restored via MMIO, so the method is injected through the PGRAPH FIFO
 * front end instead: the current ctx/FIFO state is saved, a celsius
 * subchannel is selected, the method is pushed and executed by briefly
 * toggling NV04_PGRAPH_FIFO access, and then everything is put back.
 */
static void nv10_graph_load_dma_vtxbuf(struct nouveau_channel *chan,
				       uint32_t inst)
{
	struct drm_device *dev = chan->dev;
	uint32_t st2, st2_dl, st2_dh, fifo_ptr, fifo[0x60/4];
	uint32_t ctx_user, ctx_switch[5];
	int i, subchan = -1;

	/* NV10TCL_DMA_VTXBUF (method 0x18c) modifies hidden state
	 * that cannot be restored via MMIO. Do it through the FIFO
	 * instead.
	 */

	/* Look for a celsius object */
	for (i = 0; i < 8; i++) {
		int class = nv_rd32(dev, NV10_PGRAPH_CTX_CACHE(i, 0)) & 0xfff;

		if (class == 0x56 || class == 0x96 || class == 0x99) {
			subchan = i;
			break;
		}
	}

	/* Nothing to do without a celsius object and a DMA object instance. */
	if (subchan < 0 || !inst)
		return;

	/* Save the current ctx object */
	ctx_user = nv_rd32(dev, NV10_PGRAPH_CTX_USER);
	for (i = 0; i < 5; i++)
		ctx_switch[i] = nv_rd32(dev, NV10_PGRAPH_CTX_SWITCH(i));

	/* Save the FIFO state */
	st2 = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2);
	st2_dl = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2_DL);
	st2_dh = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2_DH);
	fifo_ptr = nv_rd32(dev, NV10_PGRAPH_FFINTFC_FIFO_PTR);

	for (i = 0; i < ARRAY_SIZE(fifo); i++)
		fifo[i] = nv_rd32(dev, 0x4007a0 + 4 * i);

	/* Switch to the celsius subchannel */
	for (i = 0; i < 5; i++)
		nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(i),
			nv_rd32(dev, NV10_PGRAPH_CTX_CACHE(subchan, i)));
	nv_mask(dev, NV10_PGRAPH_CTX_USER, 0xe000, subchan << 13);

	/* Inject NV10TCL_DMA_VTXBUF */
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_FIFO_PTR, 0);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2,
		0x2c000000 | chan->id << 20 | subchan << 16 | 0x18c);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2_DL, inst);
	nv_mask(dev, NV10_PGRAPH_CTX_CONTROL, 0, 0x10000);
	/* Pulse FIFO access to let the injected method execute. */
	nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
	nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);

	/* Restore the FIFO state */
	for (i = 0; i < ARRAY_SIZE(fifo); i++)
		nv_wr32(dev, 0x4007a0 + 4 * i, fifo[i]);

	nv_wr32(dev, NV10_PGRAPH_FFINTFC_FIFO_PTR, fifo_ptr);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2, st2);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2_DL, st2_dl);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2_DH, st2_dh);

	/* Restore the current ctx object */
	for (i = 0; i < 5; i++)
		nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(i), ctx_switch[i]);
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, ctx_user);
}
/*
 * Load "chan"'s saved PGRAPH context onto the hardware: MMIO context
 * registers (plus the NV17 extras on chipset >= 0x17), the 3D pipe
 * state, and the hidden vertex-buffer DMA binding, then mark the
 * channel as current in CTX_USER.  Always returns 0.
 */
static int
nv10_graph_load_context(struct nouveau_channel *chan)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
	uint32_t tmp;
	int i;

	for (i = 0; i < ARRAY_SIZE(nv10_graph_ctx_regs); i++)
		nv_wr32(dev, nv10_graph_ctx_regs[i], pgraph_ctx->nv10[i]);
	if (dev_priv->chipset >= 0x17) {
		for (i = 0; i < ARRAY_SIZE(nv17_graph_ctx_regs); i++)
			nv_wr32(dev, nv17_graph_ctx_regs[i],
				pgraph_ctx->nv17[i]);
	}

	nv10_graph_load_pipe(chan);
	/* The DMA_VTXBUF instance was stashed in the low bits of
	 * GLOBALSTATE1 by the context save. */
	nv10_graph_load_dma_vtxbuf(chan, (nv_rd32(dev, NV10_PGRAPH_GLOBALSTATE1)
					  & 0xffff));

	nv_wr32(dev, NV10_PGRAPH_CTX_CONTROL, 0x10010100);
	/* Stamp the channel id into the top byte of CTX_USER. */
	tmp = nv_rd32(dev, NV10_PGRAPH_CTX_USER);
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, (tmp & 0xffffff) | chan->id << 24);
	tmp = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2, tmp & 0xcfffffff);
	return 0;
}
/*
 * Save the context of the channel currently owning PGRAPH (if any) into
 * its graph_state, then mark the hardware as owned by no channel
 * (pseudo channel id 31 in CTX_USER).  Returns 0 in all cases.
 */
static int
nv10_graph_unload_context(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *chan;
	struct graph_state *ctx;
	uint32_t tmp;
	int i;

	chan = nv10_graph_channel(dev);
	if (!chan)
		return 0;
	ctx = chan->engctx[NVOBJ_ENGINE_GR];

	for (i = 0; i < ARRAY_SIZE(nv10_graph_ctx_regs); i++)
		ctx->nv10[i] = nv_rd32(dev, nv10_graph_ctx_regs[i]);

	if (dev_priv->chipset >= 0x17) {
		for (i = 0; i < ARRAY_SIZE(nv17_graph_ctx_regs); i++)
			ctx->nv17[i] = nv_rd32(dev, nv17_graph_ctx_regs[i]);
	}

	nv10_graph_save_pipe(chan);

	nv_wr32(dev, NV10_PGRAPH_CTX_CONTROL, 0x10000000);
	/* Channel id 31 == "no channel loaded". */
	tmp  = nv_rd32(dev, NV10_PGRAPH_CTX_USER) & 0x00ffffff;
	tmp |= 31 << 24;
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, tmp);
	return 0;
}
/*
 * Handle a PGRAPH context switch request: save the outgoing channel's
 * context, then load the context of the channel that trapped (channel
 * id taken from NV04_PGRAPH_TRAPPED_ADDR), if it has a GR context.
 */
static void
nv10_graph_context_switch(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *chan = NULL;
	int chid;

	nouveau_wait_for_idle(dev);

	/* If previous context is valid, we need to save it */
	nv10_graph_unload_context(dev);

	/* Load context for next channel */
	chid = (nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR) >> 20) & 0x1f;
	chan = dev_priv->channels.ptr[chid];
	if (chan && chan->engctx[NVOBJ_ENGINE_GR])
		nv10_graph_load_context(chan);
}
  747. #define NV_WRITE_CTX(reg, val) do { \
  748. int offset = nv10_graph_ctx_regs_find_offset(dev, reg); \
  749. if (offset > 0) \
  750. pgraph_ctx->nv10[offset] = val; \
  751. } while (0)
  752. #define NV17_WRITE_CTX(reg, val) do { \
  753. int offset = nv17_graph_ctx_regs_find_offset(dev, reg); \
  754. if (offset > 0) \
  755. pgraph_ctx->nv17[offset] = val; \
  756. } while (0)
  757. struct nouveau_channel *
  758. nv10_graph_channel(struct drm_device *dev)
  759. {
  760. struct drm_nouveau_private *dev_priv = dev->dev_private;
  761. int chid = 31;
  762. if (nv_rd32(dev, NV10_PGRAPH_CTX_CONTROL) & 0x00010000)
  763. chid = nv_rd32(dev, NV10_PGRAPH_CTX_USER) >> 24;
  764. if (chid >= 31)
  765. return NULL;
  766. return dev_priv->channels.ptr[chid];
  767. }
/*
 * Allocate and initialize a software PGRAPH context for a new channel.
 *
 * The zeroed graph_state is seeded with the non-zero register defaults
 * via NV_WRITE_CTX()/NV17_WRITE_CTX() and a default 3D pipe image via
 * nv10_graph_create_pipe(); nothing touches the hardware except the two
 * reads used to capture current DEBUG_4/0x4006b0 values on NV17+.
 *
 * Returns 0 on success or -ENOMEM if the context allocation fails.
 */
static int
nv10_graph_context_new(struct nouveau_channel *chan, int engine)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct graph_state *pgraph_ctx;

	NV_DEBUG(dev, "nv10_graph_context_create %d\n", chan->id);

	pgraph_ctx = kzalloc(sizeof(*pgraph_ctx), GFP_KERNEL);
	if (pgraph_ctx == NULL)
		return -ENOMEM;
	chan->engctx[engine] = pgraph_ctx;

	NV_WRITE_CTX(0x00400e88, 0x08000000);
	NV_WRITE_CTX(0x00400e9c, 0x4b7fffff);
	NV_WRITE_CTX(NV03_PGRAPH_XY_LOGIC_MISC0, 0x0001ffff);
	NV_WRITE_CTX(0x00400e10, 0x00001000);
	NV_WRITE_CTX(0x00400e14, 0x00001000);
	NV_WRITE_CTX(0x00400e30, 0x00080008);
	NV_WRITE_CTX(0x00400e34, 0x00080008);
	if (dev_priv->chipset >= 0x17) {
		/* is it really needed ??? */
		NV17_WRITE_CTX(NV10_PGRAPH_DEBUG_4,
					nv_rd32(dev, NV10_PGRAPH_DEBUG_4));
		NV17_WRITE_CTX(0x004006b0, nv_rd32(dev, 0x004006b0));
		NV17_WRITE_CTX(0x00400eac, 0x0fff0000);
		NV17_WRITE_CTX(0x00400eb0, 0x0fff0000);
		NV17_WRITE_CTX(0x00400ec0, 0x00000080);
		NV17_WRITE_CTX(0x00400ed0, 0x00000080);
	}
	/* Pre-stamp the owning channel id into the saved CTX_USER. */
	NV_WRITE_CTX(NV10_PGRAPH_CTX_USER, chan->id << 24);

	nv10_graph_create_pipe(chan);
	return 0;
}
/*
 * Destroy a channel's PGRAPH context.  PGRAPH FIFO access is disabled
 * (under the context-switch lock) while the context is unloaded from the
 * hardware if it happens to be the active one, then the software state
 * is freed.
 */
static void
nv10_graph_context_del(struct nouveau_channel *chan, int engine)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct graph_state *pgraph_ctx = chan->engctx[engine];
	unsigned long flags;

	spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
	nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);

	/* Unload the context if it's the currently active one */
	if (nv10_graph_channel(dev) == chan)
		nv10_graph_unload_context(dev);

	nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
	spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);

	/* Free the context resources */
	chan->engctx[engine] = NULL;
	kfree(pgraph_ctx);
}
/*
 * Program PGRAPH's copy of tiling region "i" (limit, pitch, address)
 * from the values cached in dev_priv->tile.reg[i].
 */
static void
nv10_graph_set_tile_region(struct drm_device *dev, int i)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_tile_reg *tile = &dev_priv->tile.reg[i];

	nv_wr32(dev, NV10_PGRAPH_TLIMIT(i), tile->limit);
	nv_wr32(dev, NV10_PGRAPH_TSIZE(i), tile->pitch);
	nv_wr32(dev, NV10_PGRAPH_TILE(i), tile->addr);
}
/*
 * Bring the PGRAPH engine out of reset and program its power-on
 * register state.  Register values are magic numbers from reverse
 * engineering; the write order below is significant, so don't reorder.
 * Always returns 0.
 */
static int
nv10_graph_init(struct drm_device *dev, int engine)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	u32 tmp;
	int i;

	/* Pulse the PGRAPH bit in PMC_ENABLE to hard-reset the engine. */
	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) &
			~NV_PMC_ENABLE_PGRAPH);
	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) |
			 NV_PMC_ENABLE_PGRAPH);

	/* Ack any stale interrupt state, then enable all interrupt sources. */
	nv_wr32(dev, NV03_PGRAPH_INTR , 0xFFFFFFFF);
	nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);

	nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0xFFFFFFFF);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x00000000);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_1, 0x00118700);
	/* nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x24E00810); */ /* 0x25f92ad9 */
	nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x25f92ad9);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_3, 0x55DE0830 |
				(1<<29) |
				(1<<31));
	if (dev_priv->chipset >= 0x17) {
		/* NV17+ only registers; values from register traces. */
		nv_wr32(dev, NV10_PGRAPH_DEBUG_4, 0x1f000000);
		nv_wr32(dev, 0x400a10, 0x3ff3fb6);
		nv_wr32(dev, 0x400838, 0x2f8684);
		nv_wr32(dev, 0x40083c, 0x115f3f);
		nv_wr32(dev, 0x004006b0, 0x40000020);
	} else
		nv_wr32(dev, NV10_PGRAPH_DEBUG_4, 0x00000000);

	/* Turn all the tiling regions off. */
	for (i = 0; i < NV10_PFB_TILE__SIZE; i++)
		nv10_graph_set_tile_region(dev, i);

	/* Clear the context-switch slots. */
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(0), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(1), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(2), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(3), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(4), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_STATE, 0xFFFFFFFF);

	/* Set the channel id field (bits 28:24) of CTX_USER to 31 —
	 * presumably an "invalid channel" marker, since the ISR decodes
	 * a 5-bit channel id; TODO confirm against hardware docs. */
	tmp = nv_rd32(dev, NV10_PGRAPH_CTX_USER) & 0x00ffffff;
	tmp |= 31 << 24;
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, tmp);

	nv_wr32(dev, NV10_PGRAPH_CTX_CONTROL, 0x10000100);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2, 0x08000000);

	return 0;
}
/*
 * Halt the PGRAPH engine prior to suspend or teardown.
 *
 * Disables FIFO access to PGRAPH, waits for the engine status to go
 * idle, then unloads the current channel context and masks all PGRAPH
 * interrupts.  If the engine refuses to idle and this is a suspend
 * attempt, FIFO access is restored and -EBUSY is returned so suspend
 * can be aborted; otherwise returns 0.
 */
static int
nv10_graph_fini(struct drm_device *dev, int engine, bool suspend)
{
	nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);
	if (!nv_wait(dev, NV04_PGRAPH_STATUS, ~0, 0) && suspend) {
		/* Engine still busy: re-enable FIFO access and bail out. */
		nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
		return -EBUSY;
	}
	nv10_graph_unload_context(dev);
	nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0x00000000);
	return 0;
}
/*
 * Software method handler for the NV17 celsius (0x0099) LMA_WINDOW
 * methods 0x1638..0x1644.
 *
 * The four method data words are buffered in ctx->lma_window[]; only
 * when the last method (0x1644) arrives is the window actually loaded
 * into the 3D pipe at offset 0x6790.  Because that upload disturbs
 * other pipe state, the surrounding state (0x0040, 0x0200, 0x4400,
 * 0x64c0, 0x6a80, 0x6ab0 and the XFMODE registers) is saved first and
 * restored afterwards.  The exact save/poke/restore order is
 * significant.  Always returns 0.
 */
static int
nv17_graph_mthd_lma_window(struct nouveau_channel *chan,
			   u32 class, u32 mthd, u32 data)
{
	struct graph_state *ctx = chan->engctx[NVOBJ_ENGINE_GR];
	struct drm_device *dev = chan->dev;
	struct pipe_state *pipe = &ctx->pipe_state;
	/* Scratch buffers for pipe ranges we must preserve across the poke. */
	uint32_t pipe_0x0040[1], pipe_0x64c0[8], pipe_0x6a80[3], pipe_0x6ab0[3];
	uint32_t xfmode0, xfmode1;
	int i;

	/* Methods 0x1638/0x163c/0x1640/0x1644 map to slots 0..3. */
	ctx->lma_window[(mthd - 0x1638) / 4] = data;

	/* Nothing touches the hardware until the final word arrives. */
	if (mthd != 0x1644)
		return 0;

	nouveau_wait_for_idle(dev);

	PIPE_SAVE(dev, pipe_0x0040, 0x0040);
	PIPE_SAVE(dev, pipe->pipe_0x0200, 0x0200);

	/* Upload the buffered LMA window words into the pipe. */
	PIPE_RESTORE(dev, ctx->lma_window, 0x6790);

	nouveau_wait_for_idle(dev);

	xfmode0 = nv_rd32(dev, NV10_PGRAPH_XFMODE0);
	xfmode1 = nv_rd32(dev, NV10_PGRAPH_XFMODE1);

	PIPE_SAVE(dev, pipe->pipe_0x4400, 0x4400);
	PIPE_SAVE(dev, pipe_0x64c0, 0x64c0);
	PIPE_SAVE(dev, pipe_0x6ab0, 0x6ab0);
	PIPE_SAVE(dev, pipe_0x6a80, 0x6a80);

	nouveau_wait_for_idle(dev);

	/* Write known-neutral transform state (0x3f800000 is 1.0f). */
	nv_wr32(dev, NV10_PGRAPH_XFMODE0, 0x10000000);
	nv_wr32(dev, NV10_PGRAPH_XFMODE1, 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x000064c0);
	for (i = 0; i < 4; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);
	for (i = 0; i < 4; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006ab0);
	for (i = 0; i < 3; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006a80);
	for (i = 0; i < 3; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00000040);
	nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000008);

	PIPE_RESTORE(dev, pipe->pipe_0x0200, 0x0200);

	nouveau_wait_for_idle(dev);

	PIPE_RESTORE(dev, pipe_0x0040, 0x0040);

	/* Put the saved state back. */
	nv_wr32(dev, NV10_PGRAPH_XFMODE0, xfmode0);
	nv_wr32(dev, NV10_PGRAPH_XFMODE1, xfmode1);

	PIPE_RESTORE(dev, pipe_0x64c0, 0x64c0);
	PIPE_RESTORE(dev, pipe_0x6ab0, 0x6ab0);
	PIPE_RESTORE(dev, pipe_0x6a80, 0x6a80);
	PIPE_RESTORE(dev, pipe->pipe_0x4400, 0x4400);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x000000c0);
	nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nouveau_wait_for_idle(dev);

	return 0;
}
  937. static int
  938. nv17_graph_mthd_lma_enable(struct nouveau_channel *chan,
  939. u32 class, u32 mthd, u32 data)
  940. {
  941. struct drm_device *dev = chan->dev;
  942. nouveau_wait_for_idle(dev);
  943. nv_wr32(dev, NV10_PGRAPH_DEBUG_4,
  944. nv_rd32(dev, NV10_PGRAPH_DEBUG_4) | 0x1 << 8);
  945. nv_wr32(dev, 0x004006b0,
  946. nv_rd32(dev, 0x004006b0) | 0x8 << 24);
  947. return 0;
  948. }
/* Bit names for NV03_PGRAPH_INTR, used when logging unhandled
 * interrupts in nv10_graph_isr().  Empty entry terminates the table. */
struct nouveau_bitfield nv10_graph_intr[] = {
	{ NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
	{ NV_PGRAPH_INTR_ERROR,  "ERROR"  },
	{}
};
/* Bit names for NV03_PGRAPH_NSTATUS, used when logging unhandled
 * interrupts in nv10_graph_isr().  Empty entry terminates the table. */
struct nouveau_bitfield nv10_graph_nstatus[] = {
	{ NV10_PGRAPH_NSTATUS_STATE_IN_USE,       "STATE_IN_USE" },
	{ NV10_PGRAPH_NSTATUS_INVALID_STATE,      "INVALID_STATE" },
	{ NV10_PGRAPH_NSTATUS_BAD_ARGUMENT,       "BAD_ARGUMENT" },
	{ NV10_PGRAPH_NSTATUS_PROTECTION_FAULT,   "PROTECTION_FAULT" },
	{}
};
/*
 * PGRAPH interrupt service routine (registered on IRQ bit 12 by
 * nv10_graph_create()).
 *
 * Loops until NV03_PGRAPH_INTR reads back zero.  For each pending
 * status: ERROR interrupts caused by an illegal method are first
 * offered to the registered software method handlers; context-switch
 * requests are acked and serviced directly.  Everything is then acked
 * and the FIFO re-enabled; whatever remains unhandled is logged
 * (rate-limited).
 */
static void
nv10_graph_isr(struct drm_device *dev)
{
	u32 stat;

	while ((stat = nv_rd32(dev, NV03_PGRAPH_INTR))) {
		u32 nsource = nv_rd32(dev, NV03_PGRAPH_NSOURCE);
		u32 nstatus = nv_rd32(dev, NV03_PGRAPH_NSTATUS);
		/* Decode the trapped method address word. */
		u32 addr = nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR);
		u32 chid = (addr & 0x01f00000) >> 20;	/* channel id */
		u32 subc = (addr & 0x00070000) >> 16;	/* subchannel */
		u32 mthd = (addr & 0x00001ffc);		/* method offset */
		u32 data = nv_rd32(dev, NV04_PGRAPH_TRAPPED_DATA);
		u32 class = nv_rd32(dev, 0x400160 + subc * 4) & 0xfff;
		u32 show = stat;	/* bits still unhandled, for logging */

		if (stat & NV_PGRAPH_INTR_ERROR) {
			if (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD) {
				/* A software method handler may consume it;
				 * if so, suppress the error report. */
				if (!nouveau_gpuobj_mthd_call2(dev, chid, class, mthd, data))
					show &= ~NV_PGRAPH_INTR_ERROR;
			}
		}

		if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
			/* Ack before switching so the request isn't re-seen. */
			nv_wr32(dev, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
			stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
			show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
			nv10_graph_context_switch(dev);
		}

		/* Ack remaining interrupts and kick the FIFO going again. */
		nv_wr32(dev, NV03_PGRAPH_INTR, stat);
		nv_wr32(dev, NV04_PGRAPH_FIFO, 0x00000001);

		if (show && nouveau_ratelimit()) {
			NV_INFO(dev, "PGRAPH -");
			nouveau_bitfield_print(nv10_graph_intr, show);
			printk(" nsource:");
			nouveau_bitfield_print(nv04_graph_nsource, nsource);
			printk(" nstatus:");
			nouveau_bitfield_print(nv10_graph_nstatus, nstatus);
			printk("\n");
			NV_INFO(dev, "PGRAPH - ch %d/%d class 0x%04x "
				     "mthd 0x%04x data 0x%08x\n",
				chid, subc, class, mthd, data);
		}
	}
}
  1003. static void
  1004. nv10_graph_destroy(struct drm_device *dev, int engine)
  1005. {
  1006. struct nv10_graph_engine *pgraph = nv_engine(dev, engine);
  1007. nouveau_irq_unregister(dev, 12);
  1008. kfree(pgraph);
  1009. }
/*
 * Allocate and register the NV10-family PGRAPH engine.
 *
 * Fills in the engine vtable, registers it as the GR engine, hooks up
 * the PGRAPH interrupt handler on IRQ bit 12, and registers the object
 * classes implemented by this hardware generation.  The 3D ("celcius")
 * class id depends on the chipset; on NV17+ the LMA methods get
 * software handlers.  Returns 0 on success, -ENOMEM if the engine
 * structure cannot be allocated.
 */
int
nv10_graph_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nv10_graph_engine *pgraph;

	pgraph = kzalloc(sizeof(*pgraph), GFP_KERNEL);
	if (!pgraph)
		return -ENOMEM;

	/* Engine operations table. */
	pgraph->base.destroy = nv10_graph_destroy;
	pgraph->base.init = nv10_graph_init;
	pgraph->base.fini = nv10_graph_fini;
	pgraph->base.context_new = nv10_graph_context_new;
	pgraph->base.context_del = nv10_graph_context_del;
	pgraph->base.object_new = nv04_graph_object_new;
	pgraph->base.set_tile_region = nv10_graph_set_tile_region;

	NVOBJ_ENGINE_ADD(dev, GR, &pgraph->base);
	nouveau_irq_register(dev, 12, nv10_graph_isr);

	/* 2D and utility classes common to the whole NV10 family. */
	NVOBJ_CLASS(dev, 0x0030, GR); /* null */
	NVOBJ_CLASS(dev, 0x0039, GR); /* m2mf */
	NVOBJ_CLASS(dev, 0x004a, GR); /* gdirect */
	NVOBJ_CLASS(dev, 0x005f, GR); /* imageblit */
	NVOBJ_CLASS(dev, 0x009f, GR); /* imageblit (nv12) */
	NVOBJ_CLASS(dev, 0x008a, GR); /* ifc */
	NVOBJ_CLASS(dev, 0x0089, GR); /* sifm */
	NVOBJ_CLASS(dev, 0x0062, GR); /* surf2d */
	NVOBJ_CLASS(dev, 0x0043, GR); /* rop */
	NVOBJ_CLASS(dev, 0x0012, GR); /* beta1 */
	NVOBJ_CLASS(dev, 0x0072, GR); /* beta4 */
	NVOBJ_CLASS(dev, 0x0019, GR); /* cliprect */
	NVOBJ_CLASS(dev, 0x0044, GR); /* pattern */
	NVOBJ_CLASS(dev, 0x0052, GR); /* swzsurf */
	NVOBJ_CLASS(dev, 0x0093, GR); /* surf3d */
	NVOBJ_CLASS(dev, 0x0094, GR); /* tex_tri */
	NVOBJ_CLASS(dev, 0x0095, GR); /* multitex_tri */

	/* celcius */
	if (dev_priv->chipset <= 0x10) {
		NVOBJ_CLASS(dev, 0x0056, GR);
	} else
	if (dev_priv->chipset < 0x17 || dev_priv->chipset == 0x1a) {
		NVOBJ_CLASS(dev, 0x0096, GR);
	} else {
		/* NV17+: 0x0099 class plus software-handled LMA methods. */
		NVOBJ_CLASS(dev, 0x0099, GR);
		NVOBJ_MTHD (dev, 0x0099, 0x1638, nv17_graph_mthd_lma_window);
		NVOBJ_MTHD (dev, 0x0099, 0x163c, nv17_graph_mthd_lma_window);
		NVOBJ_MTHD (dev, 0x0099, 0x1640, nv17_graph_mthd_lma_window);
		NVOBJ_MTHD (dev, 0x0099, 0x1644, nv17_graph_mthd_lma_window);
		NVOBJ_MTHD (dev, 0x0099, 0x1658, nv17_graph_mthd_lma_enable);
	}

	return 0;
}