  1. /* r128_state.c -- State support for r128 -*- linux-c -*-
  2. * Created: Thu Jan 27 02:53:43 2000 by gareth@valinux.com
  3. */
  4. /*
  5. * Copyright 2000 VA Linux Systems, Inc., Sunnyvale, California.
  6. * All Rights Reserved.
  7. *
  8. * Permission is hereby granted, free of charge, to any person obtaining a
  9. * copy of this software and associated documentation files (the "Software"),
  10. * to deal in the Software without restriction, including without limitation
  11. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  12. * and/or sell copies of the Software, and to permit persons to whom the
  13. * Software is furnished to do so, subject to the following conditions:
  14. *
  15. * The above copyright notice and this permission notice (including the next
  16. * paragraph) shall be included in all copies or substantial portions of the
  17. * Software.
  18. *
  19. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  20. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  21. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  22. * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
  23. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  24. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
  25. * DEALINGS IN THE SOFTWARE.
  26. *
  27. * Authors:
  28. * Gareth Hughes <gareth@valinux.com>
  29. */
  30. #include <drm/drmP.h>
  31. #include <drm/r128_drm.h>
  32. #include "r128_drv.h"
  33. /* ================================================================
  34. * CCE hardware state programming functions
  35. */
/* Load up to three clip rectangles into the AUX1..AUX3 scissor registers
 * and enable exactly the loaded ones via a final AUX_SC_CNTL write.
 * Boxes beyond the first three are ignored here; callers loop over the
 * box list and re-emit in groups (see the dispatch functions).
 *
 * NOTE(review): x2/y2 are written decremented by one — presumably the
 * hardware treats the right/bottom edges as inclusive, versus the
 * exclusive drm_clip_rect convention; confirm against the register spec.
 */
static void r128_emit_clip_rects(drm_r128_private_t *dev_priv,
				 struct drm_clip_rect *boxes, int count)
{
	u32 aux_sc_cntl = 0x00000000;
	RING_LOCALS;
	DRM_DEBUG("\n");

	/* 5 dwords per rectangle (packet header + 4 edges) plus the
	 * trailing 2-dword AUX_SC_CNTL write. */
	BEGIN_RING((count < 3 ? count : 3) * 5 + 2);

	if (count >= 1) {
		OUT_RING(CCE_PACKET0(R128_AUX1_SC_LEFT, 3));
		OUT_RING(boxes[0].x1);
		OUT_RING(boxes[0].x2 - 1);
		OUT_RING(boxes[0].y1);
		OUT_RING(boxes[0].y2 - 1);
		aux_sc_cntl |= (R128_AUX1_SC_EN | R128_AUX1_SC_MODE_OR);
	}
	if (count >= 2) {
		OUT_RING(CCE_PACKET0(R128_AUX2_SC_LEFT, 3));
		OUT_RING(boxes[1].x1);
		OUT_RING(boxes[1].x2 - 1);
		OUT_RING(boxes[1].y1);
		OUT_RING(boxes[1].y2 - 1);
		aux_sc_cntl |= (R128_AUX2_SC_EN | R128_AUX2_SC_MODE_OR);
	}
	if (count >= 3) {
		OUT_RING(CCE_PACKET0(R128_AUX3_SC_LEFT, 3));
		OUT_RING(boxes[2].x1);
		OUT_RING(boxes[2].x2 - 1);
		OUT_RING(boxes[2].y1);
		OUT_RING(boxes[2].y2 - 1);
		aux_sc_cntl |= (R128_AUX3_SC_EN | R128_AUX3_SC_MODE_OR);
	}

	OUT_RING(CCE_PACKET0(R128_AUX_SC_CNTL, 0));
	OUT_RING(aux_sc_cntl);

	ADVANCE_RING();
}
/* Emit the SCALE_3D_CNTL register from the shared-area context state
 * (the "core" portion of the R128_UPLOAD_CORE dirty bit). */
static __inline__ void r128_emit_core(drm_r128_private_t *dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_RING(2);

	OUT_RING(CCE_PACKET0(R128_SCALE_3D_CNTL, 0));
	OUT_RING(ctx->scale_3d_cntl);

	ADVANCE_RING();
}
/* Emit the main 3D context register block: a single Type-0 packet
 * writing 12 consecutive registers starting at DST_PITCH_OFFSET_C,
 * sourced from the shared-area context state.  The OUT_RING order
 * must match the hardware register layout — do not reorder. */
static __inline__ void r128_emit_context(drm_r128_private_t *dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_RING(13);

	OUT_RING(CCE_PACKET0(R128_DST_PITCH_OFFSET_C, 11));
	OUT_RING(ctx->dst_pitch_offset_c);
	OUT_RING(ctx->dp_gui_master_cntl_c);
	OUT_RING(ctx->sc_top_left_c);
	OUT_RING(ctx->sc_bottom_right_c);
	OUT_RING(ctx->z_offset_c);
	OUT_RING(ctx->z_pitch_c);
	OUT_RING(ctx->z_sten_cntl_c);
	OUT_RING(ctx->tex_cntl_c);
	OUT_RING(ctx->misc_3d_state_cntl_reg);
	OUT_RING(ctx->texture_clr_cmp_clr_c);
	OUT_RING(ctx->texture_clr_cmp_msk_c);
	OUT_RING(ctx->fog_color_c);

	ADVANCE_RING();
}
/* Emit the triangle-setup state: SETUP_CNTL and PM4_VC_FPU_SETUP via a
 * two-register Type-1 packet, sourced from the shared-area context. */
static __inline__ void r128_emit_setup(drm_r128_private_t *dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_RING(3);

	OUT_RING(CCE_PACKET1(R128_SETUP_CNTL, R128_PM4_VC_FPU_SETUP));
	OUT_RING(ctx->setup_cntl);
	OUT_RING(ctx->pm4_vc_fpu_setup);

	ADVANCE_RING();
}
/* Emit the write-mask state: DP_WRITE_MASK on its own, then
 * STEN_REF_MASK_C and PLANE_3D_MASK_C as a two-register packet. */
static __inline__ void r128_emit_masks(drm_r128_private_t *dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_RING(5);

	OUT_RING(CCE_PACKET0(R128_DP_WRITE_MASK, 0));
	OUT_RING(ctx->dp_write_mask);

	OUT_RING(CCE_PACKET0(R128_STEN_REF_MASK_C, 1));
	OUT_RING(ctx->sten_ref_mask_c);
	OUT_RING(ctx->plane_3d_mask_c);

	ADVANCE_RING();
}
/* Emit the window origin (WINDOW_XY_OFFSET) from the shared-area
 * context state. */
static __inline__ void r128_emit_window(drm_r128_private_t *dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_RING(2);

	OUT_RING(CCE_PACKET0(R128_WINDOW_XY_OFFSET, 0));
	OUT_RING(ctx->window_xy_offset);

	ADVANCE_RING();
}
/* Emit primary-texture (unit 0) state: control/combine registers,
 * the shared tex_size_pitch_c, all mipmap level offsets, then the
 * constant color and border color as a separate packet. */
static __inline__ void r128_emit_tex0(drm_r128_private_t *dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	drm_r128_texture_regs_t *tex = &sarea_priv->tex_state[0];
	int i;
	RING_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_RING(7 + R128_MAX_TEXTURE_LEVELS);

	OUT_RING(CCE_PACKET0(R128_PRIM_TEX_CNTL_C,
			     2 + R128_MAX_TEXTURE_LEVELS));
	OUT_RING(tex->tex_cntl);
	OUT_RING(tex->tex_combine_cntl);
	OUT_RING(ctx->tex_size_pitch_c);
	for (i = 0; i < R128_MAX_TEXTURE_LEVELS; i++)
		OUT_RING(tex->tex_offset[i]);

	OUT_RING(CCE_PACKET0(R128_CONSTANT_COLOR_C, 1));
	OUT_RING(ctx->constant_color_c);
	OUT_RING(tex->tex_border_color);

	ADVANCE_RING();
}
/* Emit secondary-texture (unit 1) state: control/combine registers and
 * all mipmap level offsets, then the border color separately.  Unlike
 * tex0 this unit has no size/pitch or constant-color registers. */
static __inline__ void r128_emit_tex1(drm_r128_private_t *dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_texture_regs_t *tex = &sarea_priv->tex_state[1];
	int i;
	RING_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_RING(5 + R128_MAX_TEXTURE_LEVELS);

	OUT_RING(CCE_PACKET0(R128_SEC_TEX_CNTL_C, 1 + R128_MAX_TEXTURE_LEVELS));
	OUT_RING(tex->tex_cntl);
	OUT_RING(tex->tex_combine_cntl);
	for (i = 0; i < R128_MAX_TEXTURE_LEVELS; i++)
		OUT_RING(tex->tex_offset[i]);

	OUT_RING(CCE_PACKET0(R128_SEC_TEXTURE_BORDER_COLOR_C, 0));
	OUT_RING(tex->tex_border_color);

	ADVANCE_RING();
}
/* Flush all dirty state from the shared area to the hardware, clearing
 * each dirty bit as its group is emitted.  The local 'dirty' snapshot
 * is read once; the sarea dirty field itself is cleared bit by bit. */
static void r128_emit_state(drm_r128_private_t *dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int dirty = sarea_priv->dirty;

	DRM_DEBUG("dirty=0x%08x\n", dirty);

	if (dirty & R128_UPLOAD_CORE) {
		r128_emit_core(dev_priv);
		sarea_priv->dirty &= ~R128_UPLOAD_CORE;
	}

	if (dirty & R128_UPLOAD_CONTEXT) {
		r128_emit_context(dev_priv);
		sarea_priv->dirty &= ~R128_UPLOAD_CONTEXT;
	}

	if (dirty & R128_UPLOAD_SETUP) {
		r128_emit_setup(dev_priv);
		sarea_priv->dirty &= ~R128_UPLOAD_SETUP;
	}

	if (dirty & R128_UPLOAD_MASKS) {
		r128_emit_masks(dev_priv);
		sarea_priv->dirty &= ~R128_UPLOAD_MASKS;
	}

	if (dirty & R128_UPLOAD_WINDOW) {
		r128_emit_window(dev_priv);
		sarea_priv->dirty &= ~R128_UPLOAD_WINDOW;
	}

	if (dirty & R128_UPLOAD_TEX0) {
		r128_emit_tex0(dev_priv);
		sarea_priv->dirty &= ~R128_UPLOAD_TEX0;
	}

	if (dirty & R128_UPLOAD_TEX1) {
		r128_emit_tex1(dev_priv);
		sarea_priv->dirty &= ~R128_UPLOAD_TEX1;
	}

	/* Turn off the texture cache flushing */
	sarea_priv->context_state.tex_cntl_c &= ~R128_TEX_CACHE_FLUSH;

	sarea_priv->dirty &= ~R128_REQUIRE_QUIESCENCE;
}
  216. #if R128_PERFORMANCE_BOXES
  217. /* ================================================================
  218. * Performance monitoring functions
  219. */
/* Paint a solid w x h rectangle at (x, y) in the back buffer with the
 * given 8-bit-per-channel RGB color, packing the color to match the
 * framebuffer depth (16/24/32 bpp).  Unsupported depths are a no-op.
 * Used only for the performance-monitoring boxes. */
static void r128_clear_box(drm_r128_private_t *dev_priv,
			   int x, int y, int w, int h, int r, int g, int b)
{
	u32 pitch, offset;
	u32 fb_bpp, color;
	RING_LOCALS;

	switch (dev_priv->fb_bpp) {
	case 16:
		fb_bpp = R128_GMC_DST_16BPP;
		/* RGB565 packing: 5 bits red, 6 green, 5 blue. */
		color = (((r & 0xf8) << 8) |
			 ((g & 0xfc) << 3) | ((b & 0xf8) >> 3));
		break;
	case 24:
		fb_bpp = R128_GMC_DST_24BPP;
		color = ((r << 16) | (g << 8) | b);
		break;
	case 32:
		fb_bpp = R128_GMC_DST_32BPP;
		/* ARGB8888 with fully opaque alpha. */
		color = (((0xff) << 24) | (r << 16) | (g << 8) | b);
		break;
	default:
		return;
	}

	offset = dev_priv->back_offset;
	/* Pitch/offset are encoded in hardware units, hence the shifts
	 * here and in the combined dword below. */
	pitch = dev_priv->back_pitch >> 3;

	BEGIN_RING(6);

	OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
	OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
		 R128_GMC_BRUSH_SOLID_COLOR |
		 fb_bpp |
		 R128_GMC_SRC_DATATYPE_COLOR |
		 R128_ROP3_P |
		 R128_GMC_CLR_CMP_CNTL_DIS | R128_GMC_AUX_CLIP_DIS);

	OUT_RING((pitch << 21) | (offset >> 5));
	OUT_RING(color);

	OUT_RING((x << 16) | y);
	OUT_RING((w << 16) | h);

	ADVANCE_RING();
}
  259. static void r128_cce_performance_boxes(drm_r128_private_t *dev_priv)
  260. {
  261. if (atomic_read(&dev_priv->idle_count) == 0)
  262. r128_clear_box(dev_priv, 64, 4, 8, 8, 0, 255, 0);
  263. else
  264. atomic_set(&dev_priv->idle_count, 0);
  265. }
  266. #endif
  267. /* ================================================================
  268. * CCE command dispatch functions
  269. */
/* Log a human-readable breakdown of a dirty-state bitmask, prefixed
 * with 'msg'.  Debug aid only; currently invoked behind 'if (0)' in
 * the dispatch paths. */
static void r128_print_dirty(const char *msg, unsigned int flags)
{
	DRM_INFO("%s: (0x%x) %s%s%s%s%s%s%s%s%s\n",
		 msg,
		 flags,
		 (flags & R128_UPLOAD_CORE) ? "core, " : "",
		 (flags & R128_UPLOAD_CONTEXT) ? "context, " : "",
		 (flags & R128_UPLOAD_SETUP) ? "setup, " : "",
		 (flags & R128_UPLOAD_TEX0) ? "tex0, " : "",
		 (flags & R128_UPLOAD_TEX1) ? "tex1, " : "",
		 (flags & R128_UPLOAD_MASKS) ? "masks, " : "",
		 (flags & R128_UPLOAD_WINDOW) ? "window, " : "",
		 (flags & R128_UPLOAD_CLIPRECTS) ? "cliprects, " : "",
		 (flags & R128_REQUIRE_QUIESCENCE) ? "quiescence, " : "");
}
/* Clear the front/back color buffers and/or the depth buffer within
 * each cliprect in the shared area, honoring the requested flags.
 * When page flipping has swapped front and back, the FRONT/BACK flags
 * are exchanged so the clears land on the correct physical buffers. */
static void r128_cce_dispatch_clear(struct drm_device *dev,
				    drm_r128_clear_t *clear)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int nbox = sarea_priv->nbox;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	unsigned int flags = clear->flags;
	int i;
	RING_LOCALS;
	DRM_DEBUG("\n");

	if (dev_priv->page_flipping && dev_priv->current_page == 1) {
		/* Buffers are flipped: swap the FRONT and BACK bits. */
		unsigned int tmp = flags;

		flags &= ~(R128_FRONT | R128_BACK);
		if (tmp & R128_FRONT)
			flags |= R128_BACK;
		if (tmp & R128_BACK)
			flags |= R128_FRONT;
	}

	for (i = 0; i < nbox; i++) {
		int x = pbox[i].x1;
		int y = pbox[i].y1;
		int w = pbox[i].x2 - x;
		int h = pbox[i].y2 - y;

		DRM_DEBUG("dispatch clear %d,%d-%d,%d flags 0x%x\n",
			  pbox[i].x1, pbox[i].y1, pbox[i].x2,
			  pbox[i].y2, flags);

		if (flags & (R128_FRONT | R128_BACK)) {
			/* Apply the caller's color write mask before the
			 * color-buffer paints below. */
			BEGIN_RING(2);

			OUT_RING(CCE_PACKET0(R128_DP_WRITE_MASK, 0));
			OUT_RING(clear->color_mask);

			ADVANCE_RING();
		}

		if (flags & R128_FRONT) {
			BEGIN_RING(6);

			OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
			OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
				 R128_GMC_BRUSH_SOLID_COLOR |
				 (dev_priv->color_fmt << 8) |
				 R128_GMC_SRC_DATATYPE_COLOR |
				 R128_ROP3_P |
				 R128_GMC_CLR_CMP_CNTL_DIS |
				 R128_GMC_AUX_CLIP_DIS);

			OUT_RING(dev_priv->front_pitch_offset_c);
			OUT_RING(clear->clear_color);

			OUT_RING((x << 16) | y);
			OUT_RING((w << 16) | h);

			ADVANCE_RING();
		}

		if (flags & R128_BACK) {
			BEGIN_RING(6);

			OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
			OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
				 R128_GMC_BRUSH_SOLID_COLOR |
				 (dev_priv->color_fmt << 8) |
				 R128_GMC_SRC_DATATYPE_COLOR |
				 R128_ROP3_P |
				 R128_GMC_CLR_CMP_CNTL_DIS |
				 R128_GMC_AUX_CLIP_DIS);

			OUT_RING(dev_priv->back_pitch_offset_c);
			OUT_RING(clear->clear_color);

			OUT_RING((x << 16) | y);
			OUT_RING((w << 16) | h);

			ADVANCE_RING();
		}

		if (flags & R128_DEPTH) {
			/* Depth clear uses the depth format and disables
			 * the write mask (R128_GMC_WR_MSK_DIS). */
			BEGIN_RING(6);

			OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
			OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
				 R128_GMC_BRUSH_SOLID_COLOR |
				 (dev_priv->depth_fmt << 8) |
				 R128_GMC_SRC_DATATYPE_COLOR |
				 R128_ROP3_P |
				 R128_GMC_CLR_CMP_CNTL_DIS |
				 R128_GMC_AUX_CLIP_DIS | R128_GMC_WR_MSK_DIS);

			OUT_RING(dev_priv->depth_pitch_offset_c);
			OUT_RING(clear->clear_depth);

			OUT_RING((x << 16) | y);
			OUT_RING((w << 16) | h);

			ADVANCE_RING();
		}
	}
}
/* Copy (blit) the back buffer to the front buffer for every cliprect,
 * then bump the last_frame counter in the ring so clients can throttle.
 * Source/destination are chosen per current_page so the swap works even
 * when page flipping has exchanged the buffers. */
static void r128_cce_dispatch_swap(struct drm_device *dev)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int nbox = sarea_priv->nbox;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int i;
	RING_LOCALS;
	DRM_DEBUG("\n");

#if R128_PERFORMANCE_BOXES
	/* Do some trivial performance monitoring...
	 */
	r128_cce_performance_boxes(dev_priv);
#endif

	for (i = 0; i < nbox; i++) {
		int x = pbox[i].x1;
		int y = pbox[i].y1;
		int w = pbox[i].x2 - x;
		int h = pbox[i].y2 - y;

		BEGIN_RING(7);

		OUT_RING(CCE_PACKET3(R128_CNTL_BITBLT_MULTI, 5));
		OUT_RING(R128_GMC_SRC_PITCH_OFFSET_CNTL |
			 R128_GMC_DST_PITCH_OFFSET_CNTL |
			 R128_GMC_BRUSH_NONE |
			 (dev_priv->color_fmt << 8) |
			 R128_GMC_SRC_DATATYPE_COLOR |
			 R128_ROP3_S |
			 R128_DP_SRC_SOURCE_MEMORY |
			 R128_GMC_CLR_CMP_CNTL_DIS |
			 R128_GMC_AUX_CLIP_DIS | R128_GMC_WR_MSK_DIS);

		/* Make this work even if front & back are flipped:
		 */
		if (dev_priv->current_page == 0) {
			OUT_RING(dev_priv->back_pitch_offset_c);
			OUT_RING(dev_priv->front_pitch_offset_c);
		} else {
			OUT_RING(dev_priv->front_pitch_offset_c);
			OUT_RING(dev_priv->back_pitch_offset_c);
		}

		/* Same coordinates for source and destination. */
		OUT_RING((x << 16) | y);
		OUT_RING((x << 16) | y);
		OUT_RING((w << 16) | h);

		ADVANCE_RING();
	}

	/* Increment the frame counter.  The client-side 3D driver must
	 * throttle the framerate by waiting for this value before
	 * performing the swapbuffer ioctl.
	 */
	dev_priv->sarea_priv->last_frame++;

	BEGIN_RING(2);

	OUT_RING(CCE_PACKET0(R128_LAST_FRAME_REG, 0));
	OUT_RING(dev_priv->sarea_priv->last_frame);

	ADVANCE_RING();
}
/* Page-flip by pointing CRTC_OFFSET at the other buffer: waits for the
 * previous flip, writes the new scanout offset, toggles current_page
 * (mirrored into the sarea pfCurrentPage), and bumps last_frame for
 * client throttling. */
static void r128_cce_dispatch_flip(struct drm_device *dev)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	RING_LOCALS;
	DRM_DEBUG("page=%d pfCurrentPage=%d\n",
		  dev_priv->current_page, dev_priv->sarea_priv->pfCurrentPage);

#if R128_PERFORMANCE_BOXES
	/* Do some trivial performance monitoring...
	 */
	r128_cce_performance_boxes(dev_priv);
#endif

	BEGIN_RING(4);

	R128_WAIT_UNTIL_PAGE_FLIPPED();
	OUT_RING(CCE_PACKET0(R128_CRTC_OFFSET, 0));

	/* Scan out the buffer we were NOT displaying. */
	if (dev_priv->current_page == 0)
		OUT_RING(dev_priv->back_offset);
	else
		OUT_RING(dev_priv->front_offset);

	ADVANCE_RING();

	/* Increment the frame counter.  The client-side 3D driver must
	 * throttle the framerate by waiting for this value before
	 * performing the swapbuffer ioctl.
	 */
	dev_priv->sarea_priv->last_frame++;
	dev_priv->sarea_priv->pfCurrentPage = dev_priv->current_page =
	    1 - dev_priv->current_page;

	BEGIN_RING(2);

	OUT_RING(CCE_PACKET0(R128_LAST_FRAME_REG, 0));
	OUT_RING(dev_priv->sarea_priv->last_frame);

	ADVANCE_RING();
}
/* Render a vertex buffer.  Any dirty state (other than cliprects) is
 * flushed first; the buffer is then dispatched once per group of up to
 * three cliprects.  If the buffer is marked for discard, its age is
 * written to LAST_DISPATCH_REG so completion can be detected later. */
static void r128_cce_dispatch_vertex(struct drm_device *dev, struct drm_buf *buf)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_buf_priv_t *buf_priv = buf->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int format = sarea_priv->vc_format;
	int offset = buf->bus_address;
	int size = buf->used;
	int prim = buf_priv->prim;
	int i = 0;
	RING_LOCALS;
	DRM_DEBUG("buf=%d nbox=%d\n", buf->idx, sarea_priv->nbox);

	/* Debug hook, disabled by default. */
	if (0)
		r128_print_dirty("dispatch_vertex", sarea_priv->dirty);

	if (buf->used) {
		buf_priv->dispatched = 1;

		if (sarea_priv->dirty & ~R128_UPLOAD_CLIPRECTS)
			r128_emit_state(dev_priv);

		/* The do/while runs at least once, so the buffer is
		 * dispatched even when nbox == 0 (no cliprects). */
		do {
			/* Emit the next set of up to three cliprects */
			if (i < sarea_priv->nbox) {
				r128_emit_clip_rects(dev_priv,
						     &sarea_priv->boxes[i],
						     sarea_priv->nbox - i);
			}

			/* Emit the vertex buffer rendering commands */
			BEGIN_RING(5);

			OUT_RING(CCE_PACKET3(R128_3D_RNDR_GEN_INDX_PRIM, 3));
			OUT_RING(offset);
			OUT_RING(size);
			OUT_RING(format);
			OUT_RING(prim | R128_CCE_VC_CNTL_PRIM_WALK_LIST |
				 (size << R128_CCE_VC_CNTL_NUM_SHIFT));

			ADVANCE_RING();

			i += 3;
		} while (i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		buf_priv->age = dev_priv->sarea_priv->last_dispatch;

		/* Emit the vertex buffer age */
		BEGIN_RING(2);

		OUT_RING(CCE_PACKET0(R128_LAST_DISPATCH_REG, 0));
		OUT_RING(buf_priv->age);

		ADVANCE_RING();

		buf->pending = 1;
		buf->used = 0;
		/* FIXME: Check dispatched field */
		buf_priv->dispatched = 0;
	}

	dev_priv->sarea_priv->last_dispatch++;

	sarea_priv->dirty &= ~R128_UPLOAD_CLIPRECTS;
	sarea_priv->nbox = 0;
}
/* Fire an indirect buffer at the CCE: the [start, end) byte range of
 * 'buf' is executed via PM4_IW_INDOFF.  The range is rounded up to a
 * whole number of dwords, padded to an even dword count with a Type-2
 * packet if needed, and the buffer is aged on discard as usual. */
static void r128_cce_dispatch_indirect(struct drm_device *dev,
				       struct drm_buf *buf, int start, int end)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_buf_priv_t *buf_priv = buf->dev_private;
	RING_LOCALS;
	DRM_DEBUG("indirect: buf=%d s=0x%x e=0x%x\n", buf->idx, start, end);

	if (start != end) {
		int offset = buf->bus_address + start;
		/* Round the byte length up to whole dwords. */
		int dwords = (end - start + 3) / sizeof(u32);

		/* Indirect buffer data must be an even number of
		 * dwords, so if we've been given an odd number we must
		 * pad the data with a Type-2 CCE packet.
		 */
		if (dwords & 1) {
			u32 *data = (u32 *)
			    ((char *)dev->agp_buffer_map->handle
			     + buf->offset + start);
			data[dwords++] = cpu_to_le32(R128_CCE_PACKET2);
		}

		buf_priv->dispatched = 1;

		/* Fire off the indirect buffer */
		BEGIN_RING(3);

		OUT_RING(CCE_PACKET0(R128_PM4_IW_INDOFF, 1));
		OUT_RING(offset);
		OUT_RING(dwords);

		ADVANCE_RING();
	}

	if (buf_priv->discard) {
		buf_priv->age = dev_priv->sarea_priv->last_dispatch;

		/* Emit the indirect buffer age */
		BEGIN_RING(2);

		OUT_RING(CCE_PACKET0(R128_LAST_DISPATCH_REG, 0));
		OUT_RING(buf_priv->age);

		ADVANCE_RING();

		buf->pending = 1;
		buf->used = 0;
		/* FIXME: Check dispatched field */
		buf_priv->dispatched = 0;
	}

	dev_priv->sarea_priv->last_dispatch++;
}
/* Render indexed primitives.  The packet header and draw parameters are
 * written directly into the first five dwords of the buffer's [start,
 * end) range, then the range is executed (once per group of up to three
 * cliprects) via r128_cce_dispatch_indirect.  With an odd index count,
 * the unused half of the final dword is masked off per endianness.
 *
 * NOTE(review): unlike dispatch_vertex/dispatch_indirect, the discard
 * path here does not reset buf->used to 0 — confirm whether that is
 * intentional before "fixing" it.
 */
static void r128_cce_dispatch_indices(struct drm_device *dev,
				      struct drm_buf *buf,
				      int start, int end, int count)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_buf_priv_t *buf_priv = buf->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int format = sarea_priv->vc_format;
	int offset = dev->agp_buffer_map->offset - dev_priv->cce_buffers_offset;
	int prim = buf_priv->prim;
	u32 *data;
	int dwords;
	int i = 0;
	RING_LOCALS;
	DRM_DEBUG("indices: s=%d e=%d c=%d\n", start, end, count);

	/* Debug hook, disabled by default. */
	if (0)
		r128_print_dirty("dispatch_indices", sarea_priv->dirty);

	if (start != end) {
		buf_priv->dispatched = 1;

		if (sarea_priv->dirty & ~R128_UPLOAD_CLIPRECTS)
			r128_emit_state(dev_priv);

		dwords = (end - start + 3) / sizeof(u32);

		data = (u32 *) ((char *)dev->agp_buffer_map->handle
				+ buf->offset + start);

		/* Build the draw packet in place at the head of the
		 * index data; the indices follow data[4]. */
		data[0] = cpu_to_le32(CCE_PACKET3(R128_3D_RNDR_GEN_INDX_PRIM,
						  dwords - 2));

		data[1] = cpu_to_le32(offset);
		data[2] = cpu_to_le32(R128_MAX_VB_VERTS);
		data[3] = cpu_to_le32(format);
		data[4] = cpu_to_le32((prim | R128_CCE_VC_CNTL_PRIM_WALK_IND |
				       (count << 16)));

		if (count & 0x1) {
			/* Odd index count: zero the unused 16-bit half
			 * of the last dword. */
#ifdef __LITTLE_ENDIAN
			data[dwords - 1] &= 0x0000ffff;
#else
			data[dwords - 1] &= 0xffff0000;
#endif
		}

		do {
			/* Emit the next set of up to three cliprects */
			if (i < sarea_priv->nbox) {
				r128_emit_clip_rects(dev_priv,
						     &sarea_priv->boxes[i],
						     sarea_priv->nbox - i);
			}

			r128_cce_dispatch_indirect(dev, buf, start, end);

			i += 3;
		} while (i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		buf_priv->age = dev_priv->sarea_priv->last_dispatch;

		/* Emit the vertex buffer age */
		BEGIN_RING(2);

		OUT_RING(CCE_PACKET0(R128_LAST_DISPATCH_REG, 0));
		OUT_RING(buf_priv->age);

		ADVANCE_RING();

		buf->pending = 1;
		/* FIXME: Check dispatched field */
		buf_priv->dispatched = 0;
	}

	dev_priv->sarea_priv->last_dispatch++;

	sarea_priv->dirty &= ~R128_UPLOAD_CLIPRECTS;
	sarea_priv->nbox = 0;
}
/* Upload texture data with a host-data blit: builds a HOSTDATA_BLT
 * packet in the indirect buffer named by blit->idx and dispatches it,
 * bracketed by pixel-cache flushes so texture data is not mixed with
 * in-flight pixel data.  Returns 0 on success or -EINVAL on a bad
 * format / wrong owner / pending buffer.
 *
 * NOTE(review): blit->idx indexes dma->buflist without a range check
 * here — presumably validated by the ioctl caller; confirm.
 */
static int r128_cce_dispatch_blit(struct drm_device *dev,
				  struct drm_file *file_priv,
				  drm_r128_blit_t *blit)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	struct drm_device_dma *dma = dev->dma;
	struct drm_buf *buf;
	drm_r128_buf_priv_t *buf_priv;
	u32 *data;
	int dword_shift, dwords;
	RING_LOCALS;
	DRM_DEBUG("\n");

	/* The compiler won't optimize away a division by a variable,
	 * even if the only legal values are powers of two.  Thus, we'll
	 * use a shift instead.
	 */
	switch (blit->format) {
	case R128_DATATYPE_ARGB8888:
		dword_shift = 0;	/* 32bpp: 1 texel per dword */
		break;
	case R128_DATATYPE_ARGB1555:
	case R128_DATATYPE_RGB565:
	case R128_DATATYPE_ARGB4444:
	case R128_DATATYPE_YVYU422:
	case R128_DATATYPE_VYUY422:
		dword_shift = 1;	/* 16bpp: 2 texels per dword */
		break;
	case R128_DATATYPE_CI8:
	case R128_DATATYPE_RGB8:
		dword_shift = 2;	/* 8bpp: 4 texels per dword */
		break;
	default:
		DRM_ERROR("invalid blit format %d\n", blit->format);
		return -EINVAL;
	}

	/* Flush the pixel cache, and mark the contents as Read Invalid.
	 * This ensures no pixel data gets mixed up with the texture
	 * data from the host data blit, otherwise part of the texture
	 * image may be corrupted.
	 */
	BEGIN_RING(2);

	OUT_RING(CCE_PACKET0(R128_PC_GUI_CTLSTAT, 0));
	OUT_RING(R128_PC_RI_GUI | R128_PC_FLUSH_GUI);

	ADVANCE_RING();

	/* Dispatch the indirect buffer.
	 */
	buf = dma->buflist[blit->idx];
	buf_priv = buf->dev_private;

	if (buf->file_priv != file_priv) {
		DRM_ERROR("process %d using buffer owned by %p\n",
			  DRM_CURRENTPID, buf->file_priv);
		return -EINVAL;
	}

	if (buf->pending) {
		DRM_ERROR("sending pending buffer %d\n", blit->idx);
		return -EINVAL;
	}

	buf_priv->discard = 1;

	dwords = (blit->width * blit->height) >> dword_shift;

	/* Build the blit packet in place: 8 header dwords followed by
	 * 'dwords' of texel data already placed in the buffer. */
	data = (u32 *) ((char *)dev->agp_buffer_map->handle + buf->offset);

	data[0] = cpu_to_le32(CCE_PACKET3(R128_CNTL_HOSTDATA_BLT, dwords + 6));
	data[1] = cpu_to_le32((R128_GMC_DST_PITCH_OFFSET_CNTL |
			       R128_GMC_BRUSH_NONE |
			       (blit->format << 8) |
			       R128_GMC_SRC_DATATYPE_COLOR |
			       R128_ROP3_S |
			       R128_DP_SRC_SOURCE_HOST_DATA |
			       R128_GMC_CLR_CMP_CNTL_DIS |
			       R128_GMC_AUX_CLIP_DIS | R128_GMC_WR_MSK_DIS));

	data[2] = cpu_to_le32((blit->pitch << 21) | (blit->offset >> 5));
	data[3] = cpu_to_le32(0xffffffff);	/* source mask: all bits */
	data[4] = cpu_to_le32(0xffffffff);
	data[5] = cpu_to_le32((blit->y << 16) | blit->x);
	data[6] = cpu_to_le32((blit->height << 16) | blit->width);
	data[7] = cpu_to_le32(dwords);

	buf->used = (dwords + 8) * sizeof(u32);

	r128_cce_dispatch_indirect(dev, buf, 0, buf->used);

	/* Flush the pixel cache after the blit completes.  This ensures
	 * the texture data is written out to memory before rendering
	 * continues.
	 */
	BEGIN_RING(2);

	OUT_RING(CCE_PACKET0(R128_PC_GUI_CTLSTAT, 0));
	OUT_RING(R128_PC_FLUSH_GUI);

	ADVANCE_RING();

	return 0;
}
  699. /* ================================================================
  700. * Tiled depth buffer management
  701. *
  702. * FIXME: These should all set the destination write mask for when we
  703. * have hardware stencil support.
  704. */
/* Write a horizontal span of depth values starting at a user-supplied
 * (x, y): copies the value buffer (and optional per-pixel byte mask)
 * from user space, then paints each selected pixel with a 1x1
 * PAINT_MULTI packet.  Count is capped at 4096.  Returns 0 on success,
 * -EMSGSIZE / -EFAULT / -ENOMEM on error; allocations are freed on all
 * paths. */
static int r128_cce_dispatch_write_span(struct drm_device *dev,
					drm_r128_depth_t *depth)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	int count, x, y;
	u32 *buffer;
	u8 *mask;
	int i, buffer_size, mask_size;
	RING_LOCALS;
	DRM_DEBUG("\n");

	count = depth->n;
	if (count > 4096 || count <= 0)
		return -EMSGSIZE;

	if (DRM_COPY_FROM_USER(&x, depth->x, sizeof(x)))
		return -EFAULT;
	if (DRM_COPY_FROM_USER(&y, depth->y, sizeof(y)))
		return -EFAULT;

	buffer_size = depth->n * sizeof(u32);
	buffer = kmalloc(buffer_size, GFP_KERNEL);
	if (buffer == NULL)
		return -ENOMEM;
	if (DRM_COPY_FROM_USER(buffer, depth->buffer, buffer_size)) {
		kfree(buffer);
		return -EFAULT;
	}

	mask_size = depth->n * sizeof(u8);
	if (depth->mask) {
		/* Masked path: only pixels with a non-zero mask byte
		 * are written. */
		mask = kmalloc(mask_size, GFP_KERNEL);
		if (mask == NULL) {
			kfree(buffer);
			return -ENOMEM;
		}
		if (DRM_COPY_FROM_USER(mask, depth->mask, mask_size)) {
			kfree(buffer);
			kfree(mask);
			return -EFAULT;
		}

		for (i = 0; i < count; i++, x++) {
			if (mask[i]) {
				BEGIN_RING(6);

				OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
				OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
					 R128_GMC_BRUSH_SOLID_COLOR |
					 (dev_priv->depth_fmt << 8) |
					 R128_GMC_SRC_DATATYPE_COLOR |
					 R128_ROP3_P |
					 R128_GMC_CLR_CMP_CNTL_DIS |
					 R128_GMC_WR_MSK_DIS);

				OUT_RING(dev_priv->depth_pitch_offset_c);
				OUT_RING(buffer[i]);

				/* 1x1 rectangle at the current x, y. */
				OUT_RING((x << 16) | y);
				OUT_RING((1 << 16) | 1);

				ADVANCE_RING();
			}
		}

		kfree(mask);
	} else {
		/* Unmasked path: write every pixel in the span. */
		for (i = 0; i < count; i++, x++) {
			BEGIN_RING(6);

			OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
			OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
				 R128_GMC_BRUSH_SOLID_COLOR |
				 (dev_priv->depth_fmt << 8) |
				 R128_GMC_SRC_DATATYPE_COLOR |
				 R128_ROP3_P |
				 R128_GMC_CLR_CMP_CNTL_DIS |
				 R128_GMC_WR_MSK_DIS);

			OUT_RING(dev_priv->depth_pitch_offset_c);
			OUT_RING(buffer[i]);

			OUT_RING((x << 16) | y);
			OUT_RING((1 << 16) | 1);

			ADVANCE_RING();
		}
	}

	kfree(buffer);

	return 0;
}
  782. static int r128_cce_dispatch_write_pixels(struct drm_device *dev,
  783. drm_r128_depth_t *depth)
  784. {
  785. drm_r128_private_t *dev_priv = dev->dev_private;
  786. int count, *x, *y;
  787. u32 *buffer;
  788. u8 *mask;
  789. int i, xbuf_size, ybuf_size, buffer_size, mask_size;
  790. RING_LOCALS;
  791. DRM_DEBUG("\n");
  792. count = depth->n;
  793. if (count > 4096 || count <= 0)
  794. return -EMSGSIZE;
  795. xbuf_size = count * sizeof(*x);
  796. ybuf_size = count * sizeof(*y);
  797. x = kmalloc(xbuf_size, GFP_KERNEL);
  798. if (x == NULL)
  799. return -ENOMEM;
  800. y = kmalloc(ybuf_size, GFP_KERNEL);
  801. if (y == NULL) {
  802. kfree(x);
  803. return -ENOMEM;
  804. }
  805. if (DRM_COPY_FROM_USER(x, depth->x, xbuf_size)) {
  806. kfree(x);
  807. kfree(y);
  808. return -EFAULT;
  809. }
  810. if (DRM_COPY_FROM_USER(y, depth->y, xbuf_size)) {
  811. kfree(x);
  812. kfree(y);
  813. return -EFAULT;
  814. }
  815. buffer_size = depth->n * sizeof(u32);
  816. buffer = kmalloc(buffer_size, GFP_KERNEL);
  817. if (buffer == NULL) {
  818. kfree(x);
  819. kfree(y);
  820. return -ENOMEM;
  821. }
  822. if (DRM_COPY_FROM_USER(buffer, depth->buffer, buffer_size)) {
  823. kfree(x);
  824. kfree(y);
  825. kfree(buffer);
  826. return -EFAULT;
  827. }
  828. if (depth->mask) {
  829. mask_size = depth->n * sizeof(u8);
  830. mask = kmalloc(mask_size, GFP_KERNEL);
  831. if (mask == NULL) {
  832. kfree(x);
  833. kfree(y);
  834. kfree(buffer);
  835. return -ENOMEM;
  836. }
  837. if (DRM_COPY_FROM_USER(mask, depth->mask, mask_size)) {
  838. kfree(x);
  839. kfree(y);
  840. kfree(buffer);
  841. kfree(mask);
  842. return -EFAULT;
  843. }
  844. for (i = 0; i < count; i++) {
  845. if (mask[i]) {
  846. BEGIN_RING(6);
  847. OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
  848. OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
  849. R128_GMC_BRUSH_SOLID_COLOR |
  850. (dev_priv->depth_fmt << 8) |
  851. R128_GMC_SRC_DATATYPE_COLOR |
  852. R128_ROP3_P |
  853. R128_GMC_CLR_CMP_CNTL_DIS |
  854. R128_GMC_WR_MSK_DIS);
  855. OUT_RING(dev_priv->depth_pitch_offset_c);
  856. OUT_RING(buffer[i]);
  857. OUT_RING((x[i] << 16) | y[i]);
  858. OUT_RING((1 << 16) | 1);
  859. ADVANCE_RING();
  860. }
  861. }
  862. kfree(mask);
  863. } else {
  864. for (i = 0; i < count; i++) {
  865. BEGIN_RING(6);
  866. OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
  867. OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
  868. R128_GMC_BRUSH_SOLID_COLOR |
  869. (dev_priv->depth_fmt << 8) |
  870. R128_GMC_SRC_DATATYPE_COLOR |
  871. R128_ROP3_P |
  872. R128_GMC_CLR_CMP_CNTL_DIS |
  873. R128_GMC_WR_MSK_DIS);
  874. OUT_RING(dev_priv->depth_pitch_offset_c);
  875. OUT_RING(buffer[i]);
  876. OUT_RING((x[i] << 16) | y[i]);
  877. OUT_RING((1 << 16) | 1);
  878. ADVANCE_RING();
  879. }
  880. }
  881. kfree(x);
  882. kfree(y);
  883. kfree(buffer);
  884. return 0;
  885. }
/* Read back a horizontal span of depth values: a single BITBLT_MULTI
 * packet copies a count x 1 rectangle from (x, y) in the depth buffer
 * (depth_pitch_offset_c) to (0, 0) in the span buffer
 * (span_pitch_offset_c), from where the caller can retrieve it.
 *
 * Returns 0 on success, -EMSGSIZE for an out-of-range count, or
 * -EFAULT if the x/y coordinates cannot be copied from userspace.
 */
static int r128_cce_dispatch_read_span(struct drm_device *dev,
				       drm_r128_depth_t *depth)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	int count, x, y;
	RING_LOCALS;
	DRM_DEBUG("\n");

	/* Bound the span length before touching any user pointers. */
	count = depth->n;
	if (count > 4096 || count <= 0)
		return -EMSGSIZE;

	if (DRM_COPY_FROM_USER(&x, depth->x, sizeof(x)))
		return -EFAULT;
	if (DRM_COPY_FROM_USER(&y, depth->y, sizeof(y)))
		return -EFAULT;

	BEGIN_RING(7);

	OUT_RING(CCE_PACKET3(R128_CNTL_BITBLT_MULTI, 5));
	OUT_RING(R128_GMC_SRC_PITCH_OFFSET_CNTL |
		 R128_GMC_DST_PITCH_OFFSET_CNTL |
		 R128_GMC_BRUSH_NONE |
		 (dev_priv->depth_fmt << 8) |
		 R128_GMC_SRC_DATATYPE_COLOR |
		 R128_ROP3_S |
		 R128_DP_SRC_SOURCE_MEMORY |
		 R128_GMC_CLR_CMP_CNTL_DIS | R128_GMC_WR_MSK_DIS);

	/* Source: depth buffer; destination: span read-back buffer. */
	OUT_RING(dev_priv->depth_pitch_offset_c);
	OUT_RING(dev_priv->span_pitch_offset_c);

	OUT_RING((x << 16) | y);	/* source x, y */
	OUT_RING((0 << 16) | 0);	/* destination x, y */
	OUT_RING((count << 16) | 1);	/* width x height */

	ADVANCE_RING();

	return 0;
}
  918. static int r128_cce_dispatch_read_pixels(struct drm_device *dev,
  919. drm_r128_depth_t *depth)
  920. {
  921. drm_r128_private_t *dev_priv = dev->dev_private;
  922. int count, *x, *y;
  923. int i, xbuf_size, ybuf_size;
  924. RING_LOCALS;
  925. DRM_DEBUG("\n");
  926. count = depth->n;
  927. if (count > 4096 || count <= 0)
  928. return -EMSGSIZE;
  929. if (count > dev_priv->depth_pitch)
  930. count = dev_priv->depth_pitch;
  931. xbuf_size = count * sizeof(*x);
  932. ybuf_size = count * sizeof(*y);
  933. x = kmalloc(xbuf_size, GFP_KERNEL);
  934. if (x == NULL)
  935. return -ENOMEM;
  936. y = kmalloc(ybuf_size, GFP_KERNEL);
  937. if (y == NULL) {
  938. kfree(x);
  939. return -ENOMEM;
  940. }
  941. if (DRM_COPY_FROM_USER(x, depth->x, xbuf_size)) {
  942. kfree(x);
  943. kfree(y);
  944. return -EFAULT;
  945. }
  946. if (DRM_COPY_FROM_USER(y, depth->y, ybuf_size)) {
  947. kfree(x);
  948. kfree(y);
  949. return -EFAULT;
  950. }
  951. for (i = 0; i < count; i++) {
  952. BEGIN_RING(7);
  953. OUT_RING(CCE_PACKET3(R128_CNTL_BITBLT_MULTI, 5));
  954. OUT_RING(R128_GMC_SRC_PITCH_OFFSET_CNTL |
  955. R128_GMC_DST_PITCH_OFFSET_CNTL |
  956. R128_GMC_BRUSH_NONE |
  957. (dev_priv->depth_fmt << 8) |
  958. R128_GMC_SRC_DATATYPE_COLOR |
  959. R128_ROP3_S |
  960. R128_DP_SRC_SOURCE_MEMORY |
  961. R128_GMC_CLR_CMP_CNTL_DIS | R128_GMC_WR_MSK_DIS);
  962. OUT_RING(dev_priv->depth_pitch_offset_c);
  963. OUT_RING(dev_priv->span_pitch_offset_c);
  964. OUT_RING((x[i] << 16) | y[i]);
  965. OUT_RING((i << 16) | 0);
  966. OUT_RING((1 << 16) | 1);
  967. ADVANCE_RING();
  968. }
  969. kfree(x);
  970. kfree(y);
  971. return 0;
  972. }
  973. /* ================================================================
  974. * Polygon stipple
  975. */
  976. static void r128_cce_dispatch_stipple(struct drm_device *dev, u32 *stipple)
  977. {
  978. drm_r128_private_t *dev_priv = dev->dev_private;
  979. int i;
  980. RING_LOCALS;
  981. DRM_DEBUG("\n");
  982. BEGIN_RING(33);
  983. OUT_RING(CCE_PACKET0(R128_BRUSH_DATA0, 31));
  984. for (i = 0; i < 32; i++)
  985. OUT_RING(stipple[i]);
  986. ADVANCE_RING();
  987. }
  988. /* ================================================================
  989. * IOCTL functions
  990. */
  991. static int r128_cce_clear(struct drm_device *dev, void *data, struct drm_file *file_priv)
  992. {
  993. drm_r128_private_t *dev_priv = dev->dev_private;
  994. drm_r128_sarea_t *sarea_priv;
  995. drm_r128_clear_t *clear = data;
  996. DRM_DEBUG("\n");
  997. LOCK_TEST_WITH_RETURN(dev, file_priv);
  998. DEV_INIT_TEST_WITH_RETURN(dev_priv);
  999. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1000. sarea_priv = dev_priv->sarea_priv;
  1001. if (sarea_priv->nbox > R128_NR_SAREA_CLIPRECTS)
  1002. sarea_priv->nbox = R128_NR_SAREA_CLIPRECTS;
  1003. r128_cce_dispatch_clear(dev, clear);
  1004. COMMIT_RING();
  1005. /* Make sure we restore the 3D state next time.
  1006. */
  1007. dev_priv->sarea_priv->dirty |= R128_UPLOAD_CONTEXT | R128_UPLOAD_MASKS;
  1008. return 0;
  1009. }
/* Enable page flipping: save the current CRTC scanout registers (so
 * r128_do_cleanup_pageflip() can restore them later), point the CRTC
 * at the front buffer, and record the flipping state in the driver
 * private and the SAREA.  Always returns 0.
 */
static int r128_do_init_pageflip(struct drm_device *dev)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	DRM_DEBUG("\n");

	/* Save the registers before overwriting them below. */
	dev_priv->crtc_offset = R128_READ(R128_CRTC_OFFSET);
	dev_priv->crtc_offset_cntl = R128_READ(R128_CRTC_OFFSET_CNTL);

	/* Scan out the front buffer, with the flip control bit set. */
	R128_WRITE(R128_CRTC_OFFSET, dev_priv->front_offset);
	R128_WRITE(R128_CRTC_OFFSET_CNTL,
		   dev_priv->crtc_offset_cntl | R128_CRTC_OFFSET_FLIP_CNTL);

	dev_priv->page_flipping = 1;
	dev_priv->current_page = 0;
	/* Publish the currently visible page to userspace via the SAREA. */
	dev_priv->sarea_priv->pfCurrentPage = dev_priv->current_page;

	return 0;
}
/* Disable page flipping: restore the CRTC registers saved by
 * r128_do_init_pageflip() and, if the back page is currently the
 * visible one, dispatch one more flip so page 0 is showing before
 * flipping is turned off.  Always returns 0.
 */
static int r128_do_cleanup_pageflip(struct drm_device *dev)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	DRM_DEBUG("\n");

	R128_WRITE(R128_CRTC_OFFSET, dev_priv->crtc_offset);
	R128_WRITE(R128_CRTC_OFFSET_CNTL, dev_priv->crtc_offset_cntl);

	if (dev_priv->current_page != 0) {
		r128_cce_dispatch_flip(dev);
		COMMIT_RING();
	}

	dev_priv->page_flipping = 0;
	return 0;
}
  1037. /* Swapping and flipping are different operations, need different ioctls.
  1038. * They can & should be intermixed to support multiple 3d windows.
  1039. */
/* R128_FLIP ioctl: queue a page flip.  Page flipping state is set up
 * lazily on the first flip request via r128_do_init_pageflip().
 */
static int r128_cce_flip(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	DRM_DEBUG("\n");

	LOCK_TEST_WITH_RETURN(dev, file_priv);
	DEV_INIT_TEST_WITH_RETURN(dev_priv);
	RING_SPACE_TEST_WITH_RETURN(dev_priv);

	if (!dev_priv->page_flipping)
		r128_do_init_pageflip(dev);

	r128_cce_dispatch_flip(dev);

	COMMIT_RING();
	return 0;
}
  1053. static int r128_cce_swap(struct drm_device *dev, void *data, struct drm_file *file_priv)
  1054. {
  1055. drm_r128_private_t *dev_priv = dev->dev_private;
  1056. drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
  1057. DRM_DEBUG("\n");
  1058. LOCK_TEST_WITH_RETURN(dev, file_priv);
  1059. DEV_INIT_TEST_WITH_RETURN(dev_priv);
  1060. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1061. if (sarea_priv->nbox > R128_NR_SAREA_CLIPRECTS)
  1062. sarea_priv->nbox = R128_NR_SAREA_CLIPRECTS;
  1063. r128_cce_dispatch_swap(dev);
  1064. dev_priv->sarea_priv->dirty |= (R128_UPLOAD_CONTEXT |
  1065. R128_UPLOAD_MASKS);
  1066. COMMIT_RING();
  1067. return 0;
  1068. }
  1069. static int r128_cce_vertex(struct drm_device *dev, void *data, struct drm_file *file_priv)
  1070. {
  1071. drm_r128_private_t *dev_priv = dev->dev_private;
  1072. struct drm_device_dma *dma = dev->dma;
  1073. struct drm_buf *buf;
  1074. drm_r128_buf_priv_t *buf_priv;
  1075. drm_r128_vertex_t *vertex = data;
  1076. LOCK_TEST_WITH_RETURN(dev, file_priv);
  1077. DEV_INIT_TEST_WITH_RETURN(dev_priv);
  1078. DRM_DEBUG("pid=%d index=%d count=%d discard=%d\n",
  1079. DRM_CURRENTPID, vertex->idx, vertex->count, vertex->discard);
  1080. if (vertex->idx < 0 || vertex->idx >= dma->buf_count) {
  1081. DRM_ERROR("buffer index %d (of %d max)\n",
  1082. vertex->idx, dma->buf_count - 1);
  1083. return -EINVAL;
  1084. }
  1085. if (vertex->prim < 0 ||
  1086. vertex->prim > R128_CCE_VC_CNTL_PRIM_TYPE_TRI_TYPE2) {
  1087. DRM_ERROR("buffer prim %d\n", vertex->prim);
  1088. return -EINVAL;
  1089. }
  1090. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1091. VB_AGE_TEST_WITH_RETURN(dev_priv);
  1092. buf = dma->buflist[vertex->idx];
  1093. buf_priv = buf->dev_private;
  1094. if (buf->file_priv != file_priv) {
  1095. DRM_ERROR("process %d using buffer owned by %p\n",
  1096. DRM_CURRENTPID, buf->file_priv);
  1097. return -EINVAL;
  1098. }
  1099. if (buf->pending) {
  1100. DRM_ERROR("sending pending buffer %d\n", vertex->idx);
  1101. return -EINVAL;
  1102. }
  1103. buf->used = vertex->count;
  1104. buf_priv->prim = vertex->prim;
  1105. buf_priv->discard = vertex->discard;
  1106. r128_cce_dispatch_vertex(dev, buf);
  1107. COMMIT_RING();
  1108. return 0;
  1109. }
/* R128_INDICES ioctl: validate and dispatch an indexed-primitive
 * range [start, end) of a client DMA buffer.
 */
static int r128_cce_indices(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	struct drm_device_dma *dma = dev->dma;
	struct drm_buf *buf;
	drm_r128_buf_priv_t *buf_priv;
	drm_r128_indices_t *elts = data;
	int count;

	LOCK_TEST_WITH_RETURN(dev, file_priv);
	DEV_INIT_TEST_WITH_RETURN(dev_priv);

	DRM_DEBUG("pid=%d buf=%d s=%d e=%d d=%d\n", DRM_CURRENTPID,
		  elts->idx, elts->start, elts->end, elts->discard);

	/* The index must name an existing DMA buffer and the primitive
	 * type must be one the hardware understands. */
	if (elts->idx < 0 || elts->idx >= dma->buf_count) {
		DRM_ERROR("buffer index %d (of %d max)\n",
			  elts->idx, dma->buf_count - 1);
		return -EINVAL;
	}
	if (elts->prim < 0 ||
	    elts->prim > R128_CCE_VC_CNTL_PRIM_TYPE_TRI_TYPE2) {
		DRM_ERROR("buffer prim %d\n", elts->prim);
		return -EINVAL;
	}

	RING_SPACE_TEST_WITH_RETURN(dev_priv);
	VB_AGE_TEST_WITH_RETURN(dev_priv);

	buf = dma->buflist[elts->idx];
	buf_priv = buf->dev_private;

	/* Only the owning client may submit the buffer, and not while
	 * a previous submission of it is still pending. */
	if (buf->file_priv != file_priv) {
		DRM_ERROR("process %d using buffer owned by %p\n",
			  DRM_CURRENTPID, buf->file_priv);
		return -EINVAL;
	}
	if (buf->pending) {
		DRM_ERROR("sending pending buffer %d\n", elts->idx);
		return -EINVAL;
	}

	/* Note the ordering: the u16 index count is derived from the
	 * caller's raw start/end BEFORE start is rebased below. */
	count = (elts->end - elts->start) / sizeof(u16);
	elts->start -= R128_INDEX_PRIM_OFFSET;

	/* The rebased start must be 8-byte aligned and leave room for
	 * the primitive header already written into the buffer. */
	if (elts->start & 0x7) {
		DRM_ERROR("misaligned buffer 0x%x\n", elts->start);
		return -EINVAL;
	}
	if (elts->start < buf->used) {
		DRM_ERROR("no header 0x%x - 0x%x\n", elts->start, buf->used);
		return -EINVAL;
	}

	buf->used = elts->end;
	buf_priv->prim = elts->prim;
	buf_priv->discard = elts->discard;

	r128_cce_dispatch_indices(dev, buf, elts->start, elts->end, count);

	COMMIT_RING();
	return 0;
}
  1162. static int r128_cce_blit(struct drm_device *dev, void *data, struct drm_file *file_priv)
  1163. {
  1164. struct drm_device_dma *dma = dev->dma;
  1165. drm_r128_private_t *dev_priv = dev->dev_private;
  1166. drm_r128_blit_t *blit = data;
  1167. int ret;
  1168. LOCK_TEST_WITH_RETURN(dev, file_priv);
  1169. DEV_INIT_TEST_WITH_RETURN(dev_priv);
  1170. DRM_DEBUG("pid=%d index=%d\n", DRM_CURRENTPID, blit->idx);
  1171. if (blit->idx < 0 || blit->idx >= dma->buf_count) {
  1172. DRM_ERROR("buffer index %d (of %d max)\n",
  1173. blit->idx, dma->buf_count - 1);
  1174. return -EINVAL;
  1175. }
  1176. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1177. VB_AGE_TEST_WITH_RETURN(dev_priv);
  1178. ret = r128_cce_dispatch_blit(dev, file_priv, blit);
  1179. COMMIT_RING();
  1180. return ret;
  1181. }
  1182. static int r128_cce_depth(struct drm_device *dev, void *data, struct drm_file *file_priv)
  1183. {
  1184. drm_r128_private_t *dev_priv = dev->dev_private;
  1185. drm_r128_depth_t *depth = data;
  1186. int ret;
  1187. LOCK_TEST_WITH_RETURN(dev, file_priv);
  1188. DEV_INIT_TEST_WITH_RETURN(dev_priv);
  1189. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1190. ret = -EINVAL;
  1191. switch (depth->func) {
  1192. case R128_WRITE_SPAN:
  1193. ret = r128_cce_dispatch_write_span(dev, depth);
  1194. break;
  1195. case R128_WRITE_PIXELS:
  1196. ret = r128_cce_dispatch_write_pixels(dev, depth);
  1197. break;
  1198. case R128_READ_SPAN:
  1199. ret = r128_cce_dispatch_read_span(dev, depth);
  1200. break;
  1201. case R128_READ_PIXELS:
  1202. ret = r128_cce_dispatch_read_pixels(dev, depth);
  1203. break;
  1204. }
  1205. COMMIT_RING();
  1206. return ret;
  1207. }
  1208. static int r128_cce_stipple(struct drm_device *dev, void *data, struct drm_file *file_priv)
  1209. {
  1210. drm_r128_private_t *dev_priv = dev->dev_private;
  1211. drm_r128_stipple_t *stipple = data;
  1212. u32 mask[32];
  1213. LOCK_TEST_WITH_RETURN(dev, file_priv);
  1214. DEV_INIT_TEST_WITH_RETURN(dev_priv);
  1215. if (DRM_COPY_FROM_USER(&mask, stipple->mask, 32 * sizeof(u32)))
  1216. return -EFAULT;
  1217. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1218. r128_cce_dispatch_stipple(dev, mask);
  1219. COMMIT_RING();
  1220. return 0;
  1221. }
/* R128_INDIRECT ioctl: submit a raw command buffer from the X server.
 * The buffer contents are dispatched to the CCE without verification,
 * which is why this ioctl is restricted to master/root callers (see
 * the DRM_MASTER|DRM_ROOT_ONLY flags in r128_ioctls[]).
 */
static int r128_cce_indirect(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	struct drm_device_dma *dma = dev->dma;
	struct drm_buf *buf;
	drm_r128_buf_priv_t *buf_priv;
	drm_r128_indirect_t *indirect = data;
#if 0
	RING_LOCALS;
#endif

	LOCK_TEST_WITH_RETURN(dev, file_priv);
	DEV_INIT_TEST_WITH_RETURN(dev_priv);

	DRM_DEBUG("idx=%d s=%d e=%d d=%d\n",
		  indirect->idx, indirect->start, indirect->end,
		  indirect->discard);

	/* The index must name an existing DMA buffer. */
	if (indirect->idx < 0 || indirect->idx >= dma->buf_count) {
		DRM_ERROR("buffer index %d (of %d max)\n",
			  indirect->idx, dma->buf_count - 1);
		return -EINVAL;
	}

	buf = dma->buflist[indirect->idx];
	buf_priv = buf->dev_private;

	/* Only the owning client may submit the buffer, and not while
	 * a previous submission of it is still pending. */
	if (buf->file_priv != file_priv) {
		DRM_ERROR("process %d using buffer owned by %p\n",
			  DRM_CURRENTPID, buf->file_priv);
		return -EINVAL;
	}
	if (buf->pending) {
		DRM_ERROR("sending pending buffer %d\n", indirect->idx);
		return -EINVAL;
	}

	/* Refuse ranges that rewind into data already consumed. */
	if (indirect->start < buf->used) {
		DRM_ERROR("reusing indirect: start=0x%x actual=0x%x\n",
			  indirect->start, buf->used);
		return -EINVAL;
	}

	RING_SPACE_TEST_WITH_RETURN(dev_priv);
	VB_AGE_TEST_WITH_RETURN(dev_priv);

	buf->used = indirect->end;
	buf_priv->discard = indirect->discard;

#if 0
	/* Wait for the 3D stream to idle before the indirect buffer
	 * containing 2D acceleration commands is processed.
	 */
	BEGIN_RING(2);
	RADEON_WAIT_UNTIL_3D_IDLE();
	ADVANCE_RING();
#endif

	/* Dispatch the indirect buffer full of commands from the
	 * X server. This is insecure and is thus only available to
	 * privileged clients.
	 */
	r128_cce_dispatch_indirect(dev, buf, indirect->start, indirect->end);

	COMMIT_RING();
	return 0;
}
  1278. static int r128_getparam(struct drm_device *dev, void *data, struct drm_file *file_priv)
  1279. {
  1280. drm_r128_private_t *dev_priv = dev->dev_private;
  1281. drm_r128_getparam_t *param = data;
  1282. int value;
  1283. DEV_INIT_TEST_WITH_RETURN(dev_priv);
  1284. DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);
  1285. switch (param->param) {
  1286. case R128_PARAM_IRQ_NR:
  1287. value = drm_dev_to_irq(dev);
  1288. break;
  1289. default:
  1290. return -EINVAL;
  1291. }
  1292. if (DRM_COPY_TO_USER(param->value, &value, sizeof(int))) {
  1293. DRM_ERROR("copy_to_user\n");
  1294. return -EFAULT;
  1295. }
  1296. return 0;
  1297. }
  1298. void r128_driver_preclose(struct drm_device *dev, struct drm_file *file_priv)
  1299. {
  1300. if (dev->dev_private) {
  1301. drm_r128_private_t *dev_priv = dev->dev_private;
  1302. if (dev_priv->page_flipping)
  1303. r128_do_cleanup_pageflip(dev);
  1304. }
  1305. }
/* Last-close hook: delegate all teardown to r128_do_cleanup_cce(). */
void r128_driver_lastclose(struct drm_device *dev)
{
	r128_do_cleanup_cce(dev);
}
/* Ioctl dispatch table.  Engine setup/teardown and raw indirect
 * command submission are restricted to DRM_MASTER|DRM_ROOT_ONLY;
 * ordinary rendering ioctls only require DRM_AUTH.
 */
struct drm_ioctl_desc r128_ioctls[] = {
	DRM_IOCTL_DEF_DRV(R128_INIT, r128_cce_init, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(R128_CCE_START, r128_cce_start, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(R128_CCE_STOP, r128_cce_stop, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(R128_CCE_RESET, r128_cce_reset, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(R128_CCE_IDLE, r128_cce_idle, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_RESET, r128_engine_reset, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_FULLSCREEN, r128_fullscreen, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_SWAP, r128_cce_swap, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_FLIP, r128_cce_flip, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_CLEAR, r128_cce_clear, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_VERTEX, r128_cce_vertex, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_INDICES, r128_cce_indices, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_BLIT, r128_cce_blit, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_DEPTH, r128_cce_depth, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_STIPPLE, r128_cce_stipple, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_INDIRECT, r128_cce_indirect, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(R128_GETPARAM, r128_getparam, DRM_AUTH),
};

/* Number of entries in r128_ioctls[]. */
int r128_max_ioctl = DRM_ARRAY_SIZE(r128_ioctls);