r128_state.c 42 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713
  1. /* r128_state.c -- State support for r128 -*- linux-c -*-
  2. * Created: Thu Jan 27 02:53:43 2000 by gareth@valinux.com
  3. */
  4. /*
  5. * Copyright 2000 VA Linux Systems, Inc., Sunnyvale, California.
  6. * All Rights Reserved.
  7. *
  8. * Permission is hereby granted, free of charge, to any person obtaining a
  9. * copy of this software and associated documentation files (the "Software"),
  10. * to deal in the Software without restriction, including without limitation
  11. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  12. * and/or sell copies of the Software, and to permit persons to whom the
  13. * Software is furnished to do so, subject to the following conditions:
  14. *
  15. * The above copyright notice and this permission notice (including the next
  16. * paragraph) shall be included in all copies or substantial portions of the
  17. * Software.
  18. *
  19. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  20. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  21. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  22. * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
  23. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  24. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
  25. * DEALINGS IN THE SOFTWARE.
  26. *
  27. * Authors:
  28. * Gareth Hughes <gareth@valinux.com>
  29. */
  30. #include "drmP.h"
  31. #include "drm.h"
  32. #include "r128_drm.h"
  33. #include "r128_drv.h"
  34. /* ================================================================
  35. * CCE hardware state programming functions
  36. */
/* Program up to the first three clip rectangles from 'boxes' into the
 * hardware's three auxiliary scissor register sets, then enable exactly
 * the scissors that were loaded via a final write to AUX_SC_CNTL.
 *
 * NOTE(review): the "- 1" suggests x2/y2 are exclusive edges being
 * converted to the hardware's inclusive right/bottom — presumed from
 * the arithmetic; confirm against the cliprect producer.
 */
static void r128_emit_clip_rects(drm_r128_private_t * dev_priv,
				 drm_clip_rect_t * boxes, int count)
{
	u32 aux_sc_cntl = 0x00000000;
	RING_LOCALS;
	DRM_DEBUG(" %s\n", __FUNCTION__);

	/* 5 dwords per box (capped at three boxes) plus 2 for the
	 * trailing AUX_SC_CNTL write.
	 */
	BEGIN_RING((count < 3 ? count : 3) * 5 + 2);

	if (count >= 1) {
		OUT_RING(CCE_PACKET0(R128_AUX1_SC_LEFT, 3));
		OUT_RING(boxes[0].x1);
		OUT_RING(boxes[0].x2 - 1);
		OUT_RING(boxes[0].y1);
		OUT_RING(boxes[0].y2 - 1);

		aux_sc_cntl |= (R128_AUX1_SC_EN | R128_AUX1_SC_MODE_OR);
	}
	if (count >= 2) {
		OUT_RING(CCE_PACKET0(R128_AUX2_SC_LEFT, 3));
		OUT_RING(boxes[1].x1);
		OUT_RING(boxes[1].x2 - 1);
		OUT_RING(boxes[1].y1);
		OUT_RING(boxes[1].y2 - 1);

		aux_sc_cntl |= (R128_AUX2_SC_EN | R128_AUX2_SC_MODE_OR);
	}
	if (count >= 3) {
		OUT_RING(CCE_PACKET0(R128_AUX3_SC_LEFT, 3));
		OUT_RING(boxes[2].x1);
		OUT_RING(boxes[2].x2 - 1);
		OUT_RING(boxes[2].y1);
		OUT_RING(boxes[2].y2 - 1);

		aux_sc_cntl |= (R128_AUX3_SC_EN | R128_AUX3_SC_MODE_OR);
	}

	/* Enable only the scissors that were actually programmed above. */
	OUT_RING(CCE_PACKET0(R128_AUX_SC_CNTL, 0));
	OUT_RING(aux_sc_cntl);

	ADVANCE_RING();
}
  72. static __inline__ void r128_emit_core(drm_r128_private_t * dev_priv)
  73. {
  74. drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
  75. drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
  76. RING_LOCALS;
  77. DRM_DEBUG(" %s\n", __FUNCTION__);
  78. BEGIN_RING(2);
  79. OUT_RING(CCE_PACKET0(R128_SCALE_3D_CNTL, 0));
  80. OUT_RING(ctx->scale_3d_cntl);
  81. ADVANCE_RING();
  82. }
/* Emit the main 3D context register block: one PACKET0 covering the
 * 12 consecutive registers starting at DST_PITCH_OFFSET_C, taken from
 * the context state shared with the client in the SAREA.
 */
static __inline__ void r128_emit_context(drm_r128_private_t * dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG(" %s\n", __FUNCTION__);

	/* Packet header + 12 register values = 13 dwords. */
	BEGIN_RING(13);
	OUT_RING(CCE_PACKET0(R128_DST_PITCH_OFFSET_C, 11));
	OUT_RING(ctx->dst_pitch_offset_c);
	OUT_RING(ctx->dp_gui_master_cntl_c);
	OUT_RING(ctx->sc_top_left_c);
	OUT_RING(ctx->sc_bottom_right_c);
	OUT_RING(ctx->z_offset_c);
	OUT_RING(ctx->z_pitch_c);
	OUT_RING(ctx->z_sten_cntl_c);
	OUT_RING(ctx->tex_cntl_c);
	OUT_RING(ctx->misc_3d_state_cntl_reg);
	OUT_RING(ctx->texture_clr_cmp_clr_c);
	OUT_RING(ctx->texture_clr_cmp_msk_c);
	OUT_RING(ctx->fog_color_c);
	ADVANCE_RING();
}
  105. static __inline__ void r128_emit_setup(drm_r128_private_t * dev_priv)
  106. {
  107. drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
  108. drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
  109. RING_LOCALS;
  110. DRM_DEBUG(" %s\n", __FUNCTION__);
  111. BEGIN_RING(3);
  112. OUT_RING(CCE_PACKET1(R128_SETUP_CNTL, R128_PM4_VC_FPU_SETUP));
  113. OUT_RING(ctx->setup_cntl);
  114. OUT_RING(ctx->pm4_vc_fpu_setup);
  115. ADVANCE_RING();
  116. }
  117. static __inline__ void r128_emit_masks(drm_r128_private_t * dev_priv)
  118. {
  119. drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
  120. drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
  121. RING_LOCALS;
  122. DRM_DEBUG(" %s\n", __FUNCTION__);
  123. BEGIN_RING(5);
  124. OUT_RING(CCE_PACKET0(R128_DP_WRITE_MASK, 0));
  125. OUT_RING(ctx->dp_write_mask);
  126. OUT_RING(CCE_PACKET0(R128_STEN_REF_MASK_C, 1));
  127. OUT_RING(ctx->sten_ref_mask_c);
  128. OUT_RING(ctx->plane_3d_mask_c);
  129. ADVANCE_RING();
  130. }
  131. static __inline__ void r128_emit_window(drm_r128_private_t * dev_priv)
  132. {
  133. drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
  134. drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
  135. RING_LOCALS;
  136. DRM_DEBUG(" %s\n", __FUNCTION__);
  137. BEGIN_RING(2);
  138. OUT_RING(CCE_PACKET0(R128_WINDOW_XY_OFFSET, 0));
  139. OUT_RING(ctx->window_xy_offset);
  140. ADVANCE_RING();
  141. }
/* Emit the texture-unit-0 state: control/combine registers, the shared
 * size/pitch register, all mipmap level offsets, then the constant
 * color and border color as a second packet.
 */
static __inline__ void r128_emit_tex0(drm_r128_private_t * dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	drm_r128_texture_regs_t *tex = &sarea_priv->tex_state[0];
	int i;
	RING_LOCALS;
	DRM_DEBUG(" %s\n", __FUNCTION__);

	/* First packet: header + 3 regs + one offset per mip level;
	 * second packet: header + 2 regs.  Total 7 + MAX_LEVELS dwords.
	 */
	BEGIN_RING(7 + R128_MAX_TEXTURE_LEVELS);
	OUT_RING(CCE_PACKET0(R128_PRIM_TEX_CNTL_C,
			     2 + R128_MAX_TEXTURE_LEVELS));
	OUT_RING(tex->tex_cntl);
	OUT_RING(tex->tex_combine_cntl);
	OUT_RING(ctx->tex_size_pitch_c);
	for (i = 0; i < R128_MAX_TEXTURE_LEVELS; i++) {
		OUT_RING(tex->tex_offset[i]);
	}

	OUT_RING(CCE_PACKET0(R128_CONSTANT_COLOR_C, 1));
	OUT_RING(ctx->constant_color_c);
	OUT_RING(tex->tex_border_color);
	ADVANCE_RING();
}
/* Emit the texture-unit-1 (secondary) state: control/combine registers
 * and all mipmap level offsets, then the secondary border color.
 * Unlike unit 0 there is no size/pitch or constant-color register here.
 */
static __inline__ void r128_emit_tex1(drm_r128_private_t * dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_texture_regs_t *tex = &sarea_priv->tex_state[1];
	int i;
	RING_LOCALS;
	DRM_DEBUG(" %s\n", __FUNCTION__);

	/* Header + 2 regs + one offset per mip level, plus a 2-dword
	 * border-color packet = 5 + MAX_LEVELS dwords.
	 */
	BEGIN_RING(5 + R128_MAX_TEXTURE_LEVELS);
	OUT_RING(CCE_PACKET0(R128_SEC_TEX_CNTL_C, 1 + R128_MAX_TEXTURE_LEVELS));
	OUT_RING(tex->tex_cntl);
	OUT_RING(tex->tex_combine_cntl);
	for (i = 0; i < R128_MAX_TEXTURE_LEVELS; i++) {
		OUT_RING(tex->tex_offset[i]);
	}

	OUT_RING(CCE_PACKET0(R128_SEC_TEXTURE_BORDER_COLOR_C, 0));
	OUT_RING(tex->tex_border_color);
	ADVANCE_RING();
}
  182. static void r128_emit_state(drm_r128_private_t * dev_priv)
  183. {
  184. drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
  185. unsigned int dirty = sarea_priv->dirty;
  186. DRM_DEBUG("%s: dirty=0x%08x\n", __FUNCTION__, dirty);
  187. if (dirty & R128_UPLOAD_CORE) {
  188. r128_emit_core(dev_priv);
  189. sarea_priv->dirty &= ~R128_UPLOAD_CORE;
  190. }
  191. if (dirty & R128_UPLOAD_CONTEXT) {
  192. r128_emit_context(dev_priv);
  193. sarea_priv->dirty &= ~R128_UPLOAD_CONTEXT;
  194. }
  195. if (dirty & R128_UPLOAD_SETUP) {
  196. r128_emit_setup(dev_priv);
  197. sarea_priv->dirty &= ~R128_UPLOAD_SETUP;
  198. }
  199. if (dirty & R128_UPLOAD_MASKS) {
  200. r128_emit_masks(dev_priv);
  201. sarea_priv->dirty &= ~R128_UPLOAD_MASKS;
  202. }
  203. if (dirty & R128_UPLOAD_WINDOW) {
  204. r128_emit_window(dev_priv);
  205. sarea_priv->dirty &= ~R128_UPLOAD_WINDOW;
  206. }
  207. if (dirty & R128_UPLOAD_TEX0) {
  208. r128_emit_tex0(dev_priv);
  209. sarea_priv->dirty &= ~R128_UPLOAD_TEX0;
  210. }
  211. if (dirty & R128_UPLOAD_TEX1) {
  212. r128_emit_tex1(dev_priv);
  213. sarea_priv->dirty &= ~R128_UPLOAD_TEX1;
  214. }
  215. /* Turn off the texture cache flushing */
  216. sarea_priv->context_state.tex_cntl_c &= ~R128_TEX_CACHE_FLUSH;
  217. sarea_priv->dirty &= ~R128_REQUIRE_QUIESCENCE;
  218. }
  219. #if R128_PERFORMANCE_BOXES
  220. /* ================================================================
  221. * Performance monitoring functions
  222. */
/* Fill a small w x h rectangle at (x, y) with the RGB color (r, g, b)
 * using a solid-color PAINT_MULTI blit.  Used only for the on-screen
 * performance boxes.
 *
 * NOTE(review): the destination pitch/offset come from the BACK
 * buffer (back_offset/back_pitch), so the box lands in the back
 * buffer before the swap copies it forward — presumed; confirm
 * against r128_cce_performance_boxes' call site.
 */
static void r128_clear_box(drm_r128_private_t * dev_priv,
			   int x, int y, int w, int h, int r, int g, int b)
{
	u32 pitch, offset;
	u32 fb_bpp, color;
	RING_LOCALS;

	/* Pack the color to match the framebuffer depth; unsupported
	 * depths silently skip the draw.
	 */
	switch (dev_priv->fb_bpp) {
	case 16:
		fb_bpp = R128_GMC_DST_16BPP;
		/* RGB565 packing */
		color = (((r & 0xf8) << 8) |
			 ((g & 0xfc) << 3) | ((b & 0xf8) >> 3));
		break;
	case 24:
		fb_bpp = R128_GMC_DST_24BPP;
		color = ((r << 16) | (g << 8) | b);
		break;
	case 32:
		fb_bpp = R128_GMC_DST_32BPP;
		/* ARGB8888 with alpha forced to 0xff */
		color = (((0xff) << 24) | (r << 16) | (g << 8) | b);
		break;
	default:
		return;
	}

	offset = dev_priv->back_offset;
	pitch = dev_priv->back_pitch >> 3;

	BEGIN_RING(6);
	OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
	OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
		 R128_GMC_BRUSH_SOLID_COLOR |
		 fb_bpp |
		 R128_GMC_SRC_DATATYPE_COLOR |
		 R128_ROP3_P |
		 R128_GMC_CLR_CMP_CNTL_DIS | R128_GMC_AUX_CLIP_DIS);

	OUT_RING((pitch << 21) | (offset >> 5));
	OUT_RING(color);

	OUT_RING((x << 16) | y);
	OUT_RING((w << 16) | h);

	ADVANCE_RING();
}
  262. static void r128_cce_performance_boxes(drm_r128_private_t * dev_priv)
  263. {
  264. if (atomic_read(&dev_priv->idle_count) == 0) {
  265. r128_clear_box(dev_priv, 64, 4, 8, 8, 0, 255, 0);
  266. } else {
  267. atomic_set(&dev_priv->idle_count, 0);
  268. }
  269. }
  270. #endif
  271. /* ================================================================
  272. * CCE command dispatch functions
  273. */
/* Debug helper: log which state-upload bits are set in 'flags',
 * prefixed with the caller-supplied message.
 */
static void r128_print_dirty(const char *msg, unsigned int flags)
{
	DRM_INFO("%s: (0x%x) %s%s%s%s%s%s%s%s%s\n",
		 msg,
		 flags,
		 (flags & R128_UPLOAD_CORE) ? "core, " : "",
		 (flags & R128_UPLOAD_CONTEXT) ? "context, " : "",
		 (flags & R128_UPLOAD_SETUP) ? "setup, " : "",
		 (flags & R128_UPLOAD_TEX0) ? "tex0, " : "",
		 (flags & R128_UPLOAD_TEX1) ? "tex1, " : "",
		 (flags & R128_UPLOAD_MASKS) ? "masks, " : "",
		 (flags & R128_UPLOAD_WINDOW) ? "window, " : "",
		 (flags & R128_UPLOAD_CLIPRECTS) ? "cliprects, " : "",
		 (flags & R128_REQUIRE_QUIESCENCE) ? "quiescence, " : "");
}
/* Clear the buffers selected in clear->flags (front/back color and/or
 * depth) over every cliprect in the SAREA, using solid-fill
 * PAINT_MULTI blits.
 */
static void r128_cce_dispatch_clear(drm_device_t * dev,
				    drm_r128_clear_t * clear)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int nbox = sarea_priv->nbox;
	drm_clip_rect_t *pbox = sarea_priv->boxes;
	unsigned int flags = clear->flags;
	int i;
	RING_LOCALS;
	DRM_DEBUG("%s\n", __FUNCTION__);

	/* While page-flipped onto page 1, the client's notion of front
	 * and back is reversed, so swap the two flags.
	 */
	if (dev_priv->page_flipping && dev_priv->current_page == 1) {
		unsigned int tmp = flags;

		flags &= ~(R128_FRONT | R128_BACK);
		if (tmp & R128_FRONT)
			flags |= R128_BACK;
		if (tmp & R128_BACK)
			flags |= R128_FRONT;
	}

	for (i = 0; i < nbox; i++) {
		int x = pbox[i].x1;
		int y = pbox[i].y1;
		int w = pbox[i].x2 - x;
		int h = pbox[i].y2 - y;

		DRM_DEBUG("dispatch clear %d,%d-%d,%d flags 0x%x\n",
			  pbox[i].x1, pbox[i].y1, pbox[i].x2,
			  pbox[i].y2, flags);

		/* Color clears honor the client's write mask, loaded
		 * once per box; the depth clear below disables the
		 * mask instead (R128_GMC_WR_MSK_DIS).
		 */
		if (flags & (R128_FRONT | R128_BACK)) {
			BEGIN_RING(2);

			OUT_RING(CCE_PACKET0(R128_DP_WRITE_MASK, 0));
			OUT_RING(clear->color_mask);

			ADVANCE_RING();
		}

		if (flags & R128_FRONT) {
			BEGIN_RING(6);

			OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
			OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
				 R128_GMC_BRUSH_SOLID_COLOR |
				 (dev_priv->color_fmt << 8) |
				 R128_GMC_SRC_DATATYPE_COLOR |
				 R128_ROP3_P |
				 R128_GMC_CLR_CMP_CNTL_DIS |
				 R128_GMC_AUX_CLIP_DIS);

			OUT_RING(dev_priv->front_pitch_offset_c);
			OUT_RING(clear->clear_color);

			OUT_RING((x << 16) | y);
			OUT_RING((w << 16) | h);

			ADVANCE_RING();
		}

		if (flags & R128_BACK) {
			BEGIN_RING(6);

			OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
			OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
				 R128_GMC_BRUSH_SOLID_COLOR |
				 (dev_priv->color_fmt << 8) |
				 R128_GMC_SRC_DATATYPE_COLOR |
				 R128_ROP3_P |
				 R128_GMC_CLR_CMP_CNTL_DIS |
				 R128_GMC_AUX_CLIP_DIS);

			OUT_RING(dev_priv->back_pitch_offset_c);
			OUT_RING(clear->clear_color);

			OUT_RING((x << 16) | y);
			OUT_RING((w << 16) | h);

			ADVANCE_RING();
		}

		if (flags & R128_DEPTH) {
			BEGIN_RING(6);

			OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
			OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
				 R128_GMC_BRUSH_SOLID_COLOR |
				 (dev_priv->depth_fmt << 8) |
				 R128_GMC_SRC_DATATYPE_COLOR |
				 R128_ROP3_P |
				 R128_GMC_CLR_CMP_CNTL_DIS |
				 R128_GMC_AUX_CLIP_DIS | R128_GMC_WR_MSK_DIS);

			OUT_RING(dev_priv->depth_pitch_offset_c);
			OUT_RING(clear->clear_depth);

			OUT_RING((x << 16) | y);
			OUT_RING((w << 16) | h);

			ADVANCE_RING();
		}
	}
}
/* Copy every cliprect from the back buffer to the front buffer with a
 * screen-to-screen BITBLT, then bump the shared frame counter that the
 * client polls to throttle its swap rate.
 */
static void r128_cce_dispatch_swap(drm_device_t * dev)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int nbox = sarea_priv->nbox;
	drm_clip_rect_t *pbox = sarea_priv->boxes;
	int i;
	RING_LOCALS;
	DRM_DEBUG("%s\n", __FUNCTION__);

#if R128_PERFORMANCE_BOXES
	/* Do some trivial performance monitoring...
	 */
	r128_cce_performance_boxes(dev_priv);
#endif

	for (i = 0; i < nbox; i++) {
		int x = pbox[i].x1;
		int y = pbox[i].y1;
		int w = pbox[i].x2 - x;
		int h = pbox[i].y2 - y;

		BEGIN_RING(7);

		OUT_RING(CCE_PACKET3(R128_CNTL_BITBLT_MULTI, 5));
		OUT_RING(R128_GMC_SRC_PITCH_OFFSET_CNTL |
			 R128_GMC_DST_PITCH_OFFSET_CNTL |
			 R128_GMC_BRUSH_NONE |
			 (dev_priv->color_fmt << 8) |
			 R128_GMC_SRC_DATATYPE_COLOR |
			 R128_ROP3_S |
			 R128_DP_SRC_SOURCE_MEMORY |
			 R128_GMC_CLR_CMP_CNTL_DIS |
			 R128_GMC_AUX_CLIP_DIS | R128_GMC_WR_MSK_DIS);

		/* Make this work even if front & back are flipped:
		 */
		if (dev_priv->current_page == 0) {
			OUT_RING(dev_priv->back_pitch_offset_c);
			OUT_RING(dev_priv->front_pitch_offset_c);
		} else {
			OUT_RING(dev_priv->front_pitch_offset_c);
			OUT_RING(dev_priv->back_pitch_offset_c);
		}

		/* Source and destination coordinates are identical. */
		OUT_RING((x << 16) | y);
		OUT_RING((x << 16) | y);
		OUT_RING((w << 16) | h);

		ADVANCE_RING();
	}

	/* Increment the frame counter.  The client-side 3D driver must
	 * throttle the framerate by waiting for this value before
	 * performing the swapbuffer ioctl.
	 */
	dev_priv->sarea_priv->last_frame++;

	BEGIN_RING(2);

	OUT_RING(CCE_PACKET0(R128_LAST_FRAME_REG, 0));
	OUT_RING(dev_priv->sarea_priv->last_frame);

	ADVANCE_RING();
}
/* Page flip: wait for any previous flip to complete, then point the
 * CRTC at the other color buffer, toggle current_page, and bump the
 * frame counter the client uses for throttling.
 */
static void r128_cce_dispatch_flip(drm_device_t * dev)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	RING_LOCALS;
	DRM_DEBUG("%s: page=%d pfCurrentPage=%d\n",
		  __FUNCTION__,
		  dev_priv->current_page, dev_priv->sarea_priv->pfCurrentPage);

#if R128_PERFORMANCE_BOXES
	/* Do some trivial performance monitoring...
	 */
	r128_cce_performance_boxes(dev_priv);
#endif

	BEGIN_RING(4);

	R128_WAIT_UNTIL_PAGE_FLIPPED();
	OUT_RING(CCE_PACKET0(R128_CRTC_OFFSET, 0));

	/* Scan out whichever buffer is NOT the current page. */
	if (dev_priv->current_page == 0) {
		OUT_RING(dev_priv->back_offset);
	} else {
		OUT_RING(dev_priv->front_offset);
	}

	ADVANCE_RING();

	/* Increment the frame counter.  The client-side 3D driver must
	 * throttle the framerate by waiting for this value before
	 * performing the swapbuffer ioctl.
	 */
	dev_priv->sarea_priv->last_frame++;
	dev_priv->sarea_priv->pfCurrentPage = dev_priv->current_page =
	    1 - dev_priv->current_page;

	BEGIN_RING(2);

	OUT_RING(CCE_PACKET0(R128_LAST_FRAME_REG, 0));
	OUT_RING(dev_priv->sarea_priv->last_frame);

	ADVANCE_RING();
}
/* Render a client vertex buffer: flush dirty state, then replay the
 * "render vertex buffer" packet once per group of up to three
 * cliprects (the hardware only has three scissors).  If the buffer is
 * marked for discard, stamp it with the dispatch age so it can be
 * reclaimed once the hardware passes that age.
 */
static void r128_cce_dispatch_vertex(drm_device_t * dev, drm_buf_t * buf)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_buf_priv_t *buf_priv = buf->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int format = sarea_priv->vc_format;
	int offset = buf->bus_address;
	int size = buf->used;
	int prim = buf_priv->prim;
	int i = 0;
	RING_LOCALS;
	DRM_DEBUG("buf=%d nbox=%d\n", buf->idx, sarea_priv->nbox);

	if (0)
		r128_print_dirty("dispatch_vertex", sarea_priv->dirty);

	if (buf->used) {
		buf_priv->dispatched = 1;

		if (sarea_priv->dirty & ~R128_UPLOAD_CLIPRECTS) {
			r128_emit_state(dev_priv);
		}

		/* do/while: the rendering packet is emitted at least
		 * once even when nbox == 0 (no cliprects).
		 */
		do {
			/* Emit the next set of up to three cliprects */
			if (i < sarea_priv->nbox) {
				r128_emit_clip_rects(dev_priv,
						     &sarea_priv->boxes[i],
						     sarea_priv->nbox - i);
			}

			/* Emit the vertex buffer rendering commands */
			BEGIN_RING(5);

			OUT_RING(CCE_PACKET3(R128_3D_RNDR_GEN_INDX_PRIM, 3));
			OUT_RING(offset);
			OUT_RING(size);
			OUT_RING(format);
			OUT_RING(prim | R128_CCE_VC_CNTL_PRIM_WALK_LIST |
				 (size << R128_CCE_VC_CNTL_NUM_SHIFT));

			ADVANCE_RING();

			i += 3;
		} while (i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		buf_priv->age = dev_priv->sarea_priv->last_dispatch;

		/* Emit the vertex buffer age */
		BEGIN_RING(2);

		OUT_RING(CCE_PACKET0(R128_LAST_DISPATCH_REG, 0));
		OUT_RING(buf_priv->age);

		ADVANCE_RING();

		buf->pending = 1;
		buf->used = 0;
		/* FIXME: Check dispatched field */
		buf_priv->dispatched = 0;
	}

	dev_priv->sarea_priv->last_dispatch++;

	sarea_priv->dirty &= ~R128_UPLOAD_CLIPRECTS;
	sarea_priv->nbox = 0;
}
/* Fire the [start, end) byte range of a client buffer as an indirect
 * CCE command buffer, padding odd dword counts with a Type-2 no-op
 * packet, and age-stamp the buffer if it is marked for discard.
 *
 * NOTE(review): when padding, the no-op is written at data[dwords],
 * i.e. just past 'end' — this relies on the buffer having room after
 * the submitted range; confirm against the buffer allocator.
 */
static void r128_cce_dispatch_indirect(drm_device_t * dev,
				       drm_buf_t * buf, int start, int end)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_buf_priv_t *buf_priv = buf->dev_private;
	RING_LOCALS;
	DRM_DEBUG("indirect: buf=%d s=0x%x e=0x%x\n", buf->idx, start, end);

	if (start != end) {
		int offset = buf->bus_address + start;
		/* Round the byte length up to whole dwords. */
		int dwords = (end - start + 3) / sizeof(u32);

		/* Indirect buffer data must be an even number of
		 * dwords, so if we've been given an odd number we must
		 * pad the data with a Type-2 CCE packet.
		 */
		if (dwords & 1) {
			u32 *data = (u32 *)
			    ((char *)dev->agp_buffer_map->handle
			     + buf->offset + start);
			data[dwords++] = cpu_to_le32(R128_CCE_PACKET2);
		}

		buf_priv->dispatched = 1;

		/* Fire off the indirect buffer */
		BEGIN_RING(3);

		OUT_RING(CCE_PACKET0(R128_PM4_IW_INDOFF, 1));
		OUT_RING(offset);
		OUT_RING(dwords);

		ADVANCE_RING();
	}

	if (buf_priv->discard) {
		buf_priv->age = dev_priv->sarea_priv->last_dispatch;

		/* Emit the indirect buffer age */
		BEGIN_RING(2);

		OUT_RING(CCE_PACKET0(R128_LAST_DISPATCH_REG, 0));
		OUT_RING(buf_priv->age);

		ADVANCE_RING();

		buf->pending = 1;
		buf->used = 0;
		/* FIXME: Check dispatched field */
		buf_priv->dispatched = 0;
	}

	dev_priv->sarea_priv->last_dispatch++;
}
/* Render indexed primitives: build a GEN_INDX_PRIM packet header in
 * place over the index data in the client buffer, then fire the buffer
 * as an indirect command stream once per group of up to three
 * cliprects.
 */
static void r128_cce_dispatch_indices(drm_device_t * dev,
				      drm_buf_t * buf,
				      int start, int end, int count)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_buf_priv_t *buf_priv = buf->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int format = sarea_priv->vc_format;
	/* Card-relative base address of the vertex buffers. */
	int offset = dev->agp_buffer_map->offset - dev_priv->cce_buffers_offset;
	int prim = buf_priv->prim;
	u32 *data;
	int dwords;
	int i = 0;
	RING_LOCALS;
	DRM_DEBUG("indices: s=%d e=%d c=%d\n", start, end, count);

	if (0)
		r128_print_dirty("dispatch_indices", sarea_priv->dirty);

	if (start != end) {
		buf_priv->dispatched = 1;

		if (sarea_priv->dirty & ~R128_UPLOAD_CLIPRECTS) {
			r128_emit_state(dev_priv);
		}

		dwords = (end - start + 3) / sizeof(u32);

		/* Overwrite the first five dwords of the buffer with
		 * the packet header; the indices follow.
		 */
		data = (u32 *) ((char *)dev->agp_buffer_map->handle
				+ buf->offset + start);

		data[0] = cpu_to_le32(CCE_PACKET3(R128_3D_RNDR_GEN_INDX_PRIM,
						  dwords - 2));

		data[1] = cpu_to_le32(offset);
		data[2] = cpu_to_le32(R128_MAX_VB_VERTS);
		data[3] = cpu_to_le32(format);
		data[4] = cpu_to_le32((prim | R128_CCE_VC_CNTL_PRIM_WALK_IND |
				       (count << 16)));

		/* Indices are 16-bit, packed two per dword: with an
		 * odd count, zero the unused half of the last dword.
		 */
		if (count & 0x1) {
#ifdef __LITTLE_ENDIAN
			data[dwords - 1] &= 0x0000ffff;
#else
			data[dwords - 1] &= 0xffff0000;
#endif
		}

		do {
			/* Emit the next set of up to three cliprects */
			if (i < sarea_priv->nbox) {
				r128_emit_clip_rects(dev_priv,
						     &sarea_priv->boxes[i],
						     sarea_priv->nbox - i);
			}

			r128_cce_dispatch_indirect(dev, buf, start, end);

			i += 3;
		} while (i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		buf_priv->age = dev_priv->sarea_priv->last_dispatch;

		/* Emit the vertex buffer age */
		BEGIN_RING(2);

		OUT_RING(CCE_PACKET0(R128_LAST_DISPATCH_REG, 0));
		OUT_RING(buf_priv->age);

		ADVANCE_RING();

		buf->pending = 1;
		/* FIXME: Check dispatched field */
		buf_priv->dispatched = 0;
	}

	dev_priv->sarea_priv->last_dispatch++;

	sarea_priv->dirty &= ~R128_UPLOAD_CLIPRECTS;
	sarea_priv->nbox = 0;
}
/* Upload texture data with a HOSTDATA_BLT: validate the blit request,
 * flush/invalidate the pixel cache, build the blit packet in the
 * caller-owned DMA buffer, fire it as an indirect buffer, and flush
 * the pixel cache again so the texture data is visible before
 * rendering continues.  Returns 0 on success or a DRM error code.
 */
static int r128_cce_dispatch_blit(DRMFILE filp,
				  drm_device_t * dev, drm_r128_blit_t * blit)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_device_dma_t *dma = dev->dma;
	drm_buf_t *buf;
	drm_r128_buf_priv_t *buf_priv;
	u32 *data;
	int dword_shift, dwords;
	RING_LOCALS;
	DRM_DEBUG("\n");

	/* The compiler won't optimize away a division by a variable,
	 * even if the only legal values are powers of two.  Thus, we'll
	 * use a shift instead.
	 */
	switch (blit->format) {
	case R128_DATATYPE_ARGB8888:
		dword_shift = 0;	/* 32 bpp: 1 pixel per dword */
		break;
	case R128_DATATYPE_ARGB1555:
	case R128_DATATYPE_RGB565:
	case R128_DATATYPE_ARGB4444:
	case R128_DATATYPE_YVYU422:
	case R128_DATATYPE_VYUY422:
		dword_shift = 1;	/* 16 bpp: 2 pixels per dword */
		break;
	case R128_DATATYPE_CI8:
	case R128_DATATYPE_RGB8:
		dword_shift = 2;	/* 8 bpp: 4 pixels per dword */
		break;
	default:
		DRM_ERROR("invalid blit format %d\n", blit->format);
		return DRM_ERR(EINVAL);
	}

	/* Flush the pixel cache, and mark the contents as Read Invalid.
	 * This ensures no pixel data gets mixed up with the texture
	 * data from the host data blit, otherwise part of the texture
	 * image may be corrupted.
	 */
	BEGIN_RING(2);

	OUT_RING(CCE_PACKET0(R128_PC_GUI_CTLSTAT, 0));
	OUT_RING(R128_PC_RI_GUI | R128_PC_FLUSH_GUI);

	ADVANCE_RING();

	/* Dispatch the indirect buffer.
	 */
	buf = dma->buflist[blit->idx];
	buf_priv = buf->dev_private;

	/* Only the owner of the buffer may submit it. */
	if (buf->filp != filp) {
		DRM_ERROR("process %d using buffer owned by %p\n",
			  DRM_CURRENTPID, buf->filp);
		return DRM_ERR(EINVAL);
	}
	if (buf->pending) {
		DRM_ERROR("sending pending buffer %d\n", blit->idx);
		return DRM_ERR(EINVAL);
	}

	buf_priv->discard = 1;

	dwords = (blit->width * blit->height) >> dword_shift;

	/* Eight-dword packet prologue; the pixel payload placed by the
	 * client follows it in the same buffer.
	 */
	data = (u32 *) ((char *)dev->agp_buffer_map->handle + buf->offset);

	data[0] = cpu_to_le32(CCE_PACKET3(R128_CNTL_HOSTDATA_BLT, dwords + 6));
	data[1] = cpu_to_le32((R128_GMC_DST_PITCH_OFFSET_CNTL |
			       R128_GMC_BRUSH_NONE |
			       (blit->format << 8) |
			       R128_GMC_SRC_DATATYPE_COLOR |
			       R128_ROP3_S |
			       R128_DP_SRC_SOURCE_HOST_DATA |
			       R128_GMC_CLR_CMP_CNTL_DIS |
			       R128_GMC_AUX_CLIP_DIS | R128_GMC_WR_MSK_DIS));

	data[2] = cpu_to_le32((blit->pitch << 21) | (blit->offset >> 5));
	data[3] = cpu_to_le32(0xffffffff);
	data[4] = cpu_to_le32(0xffffffff);
	data[5] = cpu_to_le32((blit->y << 16) | blit->x);
	data[6] = cpu_to_le32((blit->height << 16) | blit->width);
	data[7] = cpu_to_le32(dwords);

	buf->used = (dwords + 8) * sizeof(u32);

	r128_cce_dispatch_indirect(dev, buf, 0, buf->used);

	/* Flush the pixel cache after the blit completes.  This ensures
	 * the texture data is written out to memory before rendering
	 * continues.
	 */
	BEGIN_RING(2);

	OUT_RING(CCE_PACKET0(R128_PC_GUI_CTLSTAT, 0));
	OUT_RING(R128_PC_FLUSH_GUI);

	ADVANCE_RING();

	return 0;
}
  706. /* ================================================================
  707. * Tiled depth buffer management
  708. *
  709. * FIXME: These should all set the destination write mask for when we
  710. * have hardware stencil support.
  711. */
  712. static int r128_cce_dispatch_write_span(drm_device_t * dev,
  713. drm_r128_depth_t * depth)
  714. {
  715. drm_r128_private_t *dev_priv = dev->dev_private;
  716. int count, x, y;
  717. u32 *buffer;
  718. u8 *mask;
  719. int i, buffer_size, mask_size;
  720. RING_LOCALS;
  721. DRM_DEBUG("\n");
  722. count = depth->n;
  723. if (count > 4096 || count <= 0)
  724. return DRM_ERR(EMSGSIZE);
  725. if (DRM_COPY_FROM_USER(&x, depth->x, sizeof(x))) {
  726. return DRM_ERR(EFAULT);
  727. }
  728. if (DRM_COPY_FROM_USER(&y, depth->y, sizeof(y))) {
  729. return DRM_ERR(EFAULT);
  730. }
  731. buffer_size = depth->n * sizeof(u32);
  732. buffer = drm_alloc(buffer_size, DRM_MEM_BUFS);
  733. if (buffer == NULL)
  734. return DRM_ERR(ENOMEM);
  735. if (DRM_COPY_FROM_USER(buffer, depth->buffer, buffer_size)) {
  736. drm_free(buffer, buffer_size, DRM_MEM_BUFS);
  737. return DRM_ERR(EFAULT);
  738. }
  739. mask_size = depth->n * sizeof(u8);
  740. if (depth->mask) {
  741. mask = drm_alloc(mask_size, DRM_MEM_BUFS);
  742. if (mask == NULL) {
  743. drm_free(buffer, buffer_size, DRM_MEM_BUFS);
  744. return DRM_ERR(ENOMEM);
  745. }
  746. if (DRM_COPY_FROM_USER(mask, depth->mask, mask_size)) {
  747. drm_free(buffer, buffer_size, DRM_MEM_BUFS);
  748. drm_free(mask, mask_size, DRM_MEM_BUFS);
  749. return DRM_ERR(EFAULT);
  750. }
  751. for (i = 0; i < count; i++, x++) {
  752. if (mask[i]) {
  753. BEGIN_RING(6);
  754. OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
  755. OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
  756. R128_GMC_BRUSH_SOLID_COLOR |
  757. (dev_priv->depth_fmt << 8) |
  758. R128_GMC_SRC_DATATYPE_COLOR |
  759. R128_ROP3_P |
  760. R128_GMC_CLR_CMP_CNTL_DIS |
  761. R128_GMC_WR_MSK_DIS);
  762. OUT_RING(dev_priv->depth_pitch_offset_c);
  763. OUT_RING(buffer[i]);
  764. OUT_RING((x << 16) | y);
  765. OUT_RING((1 << 16) | 1);
  766. ADVANCE_RING();
  767. }
  768. }
  769. drm_free(mask, mask_size, DRM_MEM_BUFS);
  770. } else {
  771. for (i = 0; i < count; i++, x++) {
  772. BEGIN_RING(6);
  773. OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
  774. OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
  775. R128_GMC_BRUSH_SOLID_COLOR |
  776. (dev_priv->depth_fmt << 8) |
  777. R128_GMC_SRC_DATATYPE_COLOR |
  778. R128_ROP3_P |
  779. R128_GMC_CLR_CMP_CNTL_DIS |
  780. R128_GMC_WR_MSK_DIS);
  781. OUT_RING(dev_priv->depth_pitch_offset_c);
  782. OUT_RING(buffer[i]);
  783. OUT_RING((x << 16) | y);
  784. OUT_RING((1 << 16) | 1);
  785. ADVANCE_RING();
  786. }
  787. }
  788. drm_free(buffer, buffer_size, DRM_MEM_BUFS);
  789. return 0;
  790. }
  791. static int r128_cce_dispatch_write_pixels(drm_device_t * dev,
  792. drm_r128_depth_t * depth)
  793. {
  794. drm_r128_private_t *dev_priv = dev->dev_private;
  795. int count, *x, *y;
  796. u32 *buffer;
  797. u8 *mask;
  798. int i, xbuf_size, ybuf_size, buffer_size, mask_size;
  799. RING_LOCALS;
  800. DRM_DEBUG("\n");
  801. count = depth->n;
  802. if (count > 4096 || count <= 0)
  803. return DRM_ERR(EMSGSIZE);
  804. xbuf_size = count * sizeof(*x);
  805. ybuf_size = count * sizeof(*y);
  806. x = drm_alloc(xbuf_size, DRM_MEM_BUFS);
  807. if (x == NULL) {
  808. return DRM_ERR(ENOMEM);
  809. }
  810. y = drm_alloc(ybuf_size, DRM_MEM_BUFS);
  811. if (y == NULL) {
  812. drm_free(x, xbuf_size, DRM_MEM_BUFS);
  813. return DRM_ERR(ENOMEM);
  814. }
  815. if (DRM_COPY_FROM_USER(x, depth->x, xbuf_size)) {
  816. drm_free(x, xbuf_size, DRM_MEM_BUFS);
  817. drm_free(y, ybuf_size, DRM_MEM_BUFS);
  818. return DRM_ERR(EFAULT);
  819. }
  820. if (DRM_COPY_FROM_USER(y, depth->y, xbuf_size)) {
  821. drm_free(x, xbuf_size, DRM_MEM_BUFS);
  822. drm_free(y, ybuf_size, DRM_MEM_BUFS);
  823. return DRM_ERR(EFAULT);
  824. }
  825. buffer_size = depth->n * sizeof(u32);
  826. buffer = drm_alloc(buffer_size, DRM_MEM_BUFS);
  827. if (buffer == NULL) {
  828. drm_free(x, xbuf_size, DRM_MEM_BUFS);
  829. drm_free(y, ybuf_size, DRM_MEM_BUFS);
  830. return DRM_ERR(ENOMEM);
  831. }
  832. if (DRM_COPY_FROM_USER(buffer, depth->buffer, buffer_size)) {
  833. drm_free(x, xbuf_size, DRM_MEM_BUFS);
  834. drm_free(y, ybuf_size, DRM_MEM_BUFS);
  835. drm_free(buffer, buffer_size, DRM_MEM_BUFS);
  836. return DRM_ERR(EFAULT);
  837. }
  838. if (depth->mask) {
  839. mask_size = depth->n * sizeof(u8);
  840. mask = drm_alloc(mask_size, DRM_MEM_BUFS);
  841. if (mask == NULL) {
  842. drm_free(x, xbuf_size, DRM_MEM_BUFS);
  843. drm_free(y, ybuf_size, DRM_MEM_BUFS);
  844. drm_free(buffer, buffer_size, DRM_MEM_BUFS);
  845. return DRM_ERR(ENOMEM);
  846. }
  847. if (DRM_COPY_FROM_USER(mask, depth->mask, mask_size)) {
  848. drm_free(x, xbuf_size, DRM_MEM_BUFS);
  849. drm_free(y, ybuf_size, DRM_MEM_BUFS);
  850. drm_free(buffer, buffer_size, DRM_MEM_BUFS);
  851. drm_free(mask, mask_size, DRM_MEM_BUFS);
  852. return DRM_ERR(EFAULT);
  853. }
  854. for (i = 0; i < count; i++) {
  855. if (mask[i]) {
  856. BEGIN_RING(6);
  857. OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
  858. OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
  859. R128_GMC_BRUSH_SOLID_COLOR |
  860. (dev_priv->depth_fmt << 8) |
  861. R128_GMC_SRC_DATATYPE_COLOR |
  862. R128_ROP3_P |
  863. R128_GMC_CLR_CMP_CNTL_DIS |
  864. R128_GMC_WR_MSK_DIS);
  865. OUT_RING(dev_priv->depth_pitch_offset_c);
  866. OUT_RING(buffer[i]);
  867. OUT_RING((x[i] << 16) | y[i]);
  868. OUT_RING((1 << 16) | 1);
  869. ADVANCE_RING();
  870. }
  871. }
  872. drm_free(mask, mask_size, DRM_MEM_BUFS);
  873. } else {
  874. for (i = 0; i < count; i++) {
  875. BEGIN_RING(6);
  876. OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
  877. OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
  878. R128_GMC_BRUSH_SOLID_COLOR |
  879. (dev_priv->depth_fmt << 8) |
  880. R128_GMC_SRC_DATATYPE_COLOR |
  881. R128_ROP3_P |
  882. R128_GMC_CLR_CMP_CNTL_DIS |
  883. R128_GMC_WR_MSK_DIS);
  884. OUT_RING(dev_priv->depth_pitch_offset_c);
  885. OUT_RING(buffer[i]);
  886. OUT_RING((x[i] << 16) | y[i]);
  887. OUT_RING((1 << 16) | 1);
  888. ADVANCE_RING();
  889. }
  890. }
  891. drm_free(x, xbuf_size, DRM_MEM_BUFS);
  892. drm_free(y, ybuf_size, DRM_MEM_BUFS);
  893. drm_free(buffer, buffer_size, DRM_MEM_BUFS);
  894. return 0;
  895. }
  896. static int r128_cce_dispatch_read_span(drm_device_t * dev,
  897. drm_r128_depth_t * depth)
  898. {
  899. drm_r128_private_t *dev_priv = dev->dev_private;
  900. int count, x, y;
  901. RING_LOCALS;
  902. DRM_DEBUG("\n");
  903. count = depth->n;
  904. if (count > 4096 || count <= 0)
  905. return DRM_ERR(EMSGSIZE);
  906. if (DRM_COPY_FROM_USER(&x, depth->x, sizeof(x))) {
  907. return DRM_ERR(EFAULT);
  908. }
  909. if (DRM_COPY_FROM_USER(&y, depth->y, sizeof(y))) {
  910. return DRM_ERR(EFAULT);
  911. }
  912. BEGIN_RING(7);
  913. OUT_RING(CCE_PACKET3(R128_CNTL_BITBLT_MULTI, 5));
  914. OUT_RING(R128_GMC_SRC_PITCH_OFFSET_CNTL |
  915. R128_GMC_DST_PITCH_OFFSET_CNTL |
  916. R128_GMC_BRUSH_NONE |
  917. (dev_priv->depth_fmt << 8) |
  918. R128_GMC_SRC_DATATYPE_COLOR |
  919. R128_ROP3_S |
  920. R128_DP_SRC_SOURCE_MEMORY |
  921. R128_GMC_CLR_CMP_CNTL_DIS | R128_GMC_WR_MSK_DIS);
  922. OUT_RING(dev_priv->depth_pitch_offset_c);
  923. OUT_RING(dev_priv->span_pitch_offset_c);
  924. OUT_RING((x << 16) | y);
  925. OUT_RING((0 << 16) | 0);
  926. OUT_RING((count << 16) | 1);
  927. ADVANCE_RING();
  928. return 0;
  929. }
  930. static int r128_cce_dispatch_read_pixels(drm_device_t * dev,
  931. drm_r128_depth_t * depth)
  932. {
  933. drm_r128_private_t *dev_priv = dev->dev_private;
  934. int count, *x, *y;
  935. int i, xbuf_size, ybuf_size;
  936. RING_LOCALS;
  937. DRM_DEBUG("%s\n", __FUNCTION__);
  938. count = depth->n;
  939. if (count > 4096 || count <= 0)
  940. return DRM_ERR(EMSGSIZE);
  941. if (count > dev_priv->depth_pitch) {
  942. count = dev_priv->depth_pitch;
  943. }
  944. xbuf_size = count * sizeof(*x);
  945. ybuf_size = count * sizeof(*y);
  946. x = drm_alloc(xbuf_size, DRM_MEM_BUFS);
  947. if (x == NULL) {
  948. return DRM_ERR(ENOMEM);
  949. }
  950. y = drm_alloc(ybuf_size, DRM_MEM_BUFS);
  951. if (y == NULL) {
  952. drm_free(x, xbuf_size, DRM_MEM_BUFS);
  953. return DRM_ERR(ENOMEM);
  954. }
  955. if (DRM_COPY_FROM_USER(x, depth->x, xbuf_size)) {
  956. drm_free(x, xbuf_size, DRM_MEM_BUFS);
  957. drm_free(y, ybuf_size, DRM_MEM_BUFS);
  958. return DRM_ERR(EFAULT);
  959. }
  960. if (DRM_COPY_FROM_USER(y, depth->y, ybuf_size)) {
  961. drm_free(x, xbuf_size, DRM_MEM_BUFS);
  962. drm_free(y, ybuf_size, DRM_MEM_BUFS);
  963. return DRM_ERR(EFAULT);
  964. }
  965. for (i = 0; i < count; i++) {
  966. BEGIN_RING(7);
  967. OUT_RING(CCE_PACKET3(R128_CNTL_BITBLT_MULTI, 5));
  968. OUT_RING(R128_GMC_SRC_PITCH_OFFSET_CNTL |
  969. R128_GMC_DST_PITCH_OFFSET_CNTL |
  970. R128_GMC_BRUSH_NONE |
  971. (dev_priv->depth_fmt << 8) |
  972. R128_GMC_SRC_DATATYPE_COLOR |
  973. R128_ROP3_S |
  974. R128_DP_SRC_SOURCE_MEMORY |
  975. R128_GMC_CLR_CMP_CNTL_DIS | R128_GMC_WR_MSK_DIS);
  976. OUT_RING(dev_priv->depth_pitch_offset_c);
  977. OUT_RING(dev_priv->span_pitch_offset_c);
  978. OUT_RING((x[i] << 16) | y[i]);
  979. OUT_RING((i << 16) | 0);
  980. OUT_RING((1 << 16) | 1);
  981. ADVANCE_RING();
  982. }
  983. drm_free(x, xbuf_size, DRM_MEM_BUFS);
  984. drm_free(y, ybuf_size, DRM_MEM_BUFS);
  985. return 0;
  986. }
  987. /* ================================================================
  988. * Polygon stipple
  989. */
  990. static void r128_cce_dispatch_stipple(drm_device_t * dev, u32 * stipple)
  991. {
  992. drm_r128_private_t *dev_priv = dev->dev_private;
  993. int i;
  994. RING_LOCALS;
  995. DRM_DEBUG("%s\n", __FUNCTION__);
  996. BEGIN_RING(33);
  997. OUT_RING(CCE_PACKET0(R128_BRUSH_DATA0, 31));
  998. for (i = 0; i < 32; i++) {
  999. OUT_RING(stipple[i]);
  1000. }
  1001. ADVANCE_RING();
  1002. }
  1003. /* ================================================================
  1004. * IOCTL functions
  1005. */
  1006. static int r128_cce_clear(DRM_IOCTL_ARGS)
  1007. {
  1008. DRM_DEVICE;
  1009. drm_r128_private_t *dev_priv = dev->dev_private;
  1010. drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
  1011. drm_r128_clear_t clear;
  1012. DRM_DEBUG("\n");
  1013. LOCK_TEST_WITH_RETURN(dev, filp);
  1014. DRM_COPY_FROM_USER_IOCTL(clear, (drm_r128_clear_t __user *) data,
  1015. sizeof(clear));
  1016. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1017. if (sarea_priv->nbox > R128_NR_SAREA_CLIPRECTS)
  1018. sarea_priv->nbox = R128_NR_SAREA_CLIPRECTS;
  1019. r128_cce_dispatch_clear(dev, &clear);
  1020. COMMIT_RING();
  1021. /* Make sure we restore the 3D state next time.
  1022. */
  1023. dev_priv->sarea_priv->dirty |= R128_UPLOAD_CONTEXT | R128_UPLOAD_MASKS;
  1024. return 0;
  1025. }
/* Enable page flipping: save the current CRTC offset registers, point
 * the CRTC at the front buffer, and record flipping as active.
 */
static int r128_do_init_pageflip(drm_device_t * dev)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	DRM_DEBUG("\n");

	/* Save the CRTC state so cleanup can restore whatever the X
	 * server had programmed.
	 */
	dev_priv->crtc_offset = R128_READ(R128_CRTC_OFFSET);
	dev_priv->crtc_offset_cntl = R128_READ(R128_CRTC_OFFSET_CNTL);

	R128_WRITE(R128_CRTC_OFFSET, dev_priv->front_offset);
	R128_WRITE(R128_CRTC_OFFSET_CNTL,
		   dev_priv->crtc_offset_cntl | R128_CRTC_OFFSET_FLIP_CNTL);

	dev_priv->page_flipping = 1;
	dev_priv->current_page = 0;
	/* Publish the currently visible page to clients via the SAREA. */
	dev_priv->sarea_priv->pfCurrentPage = dev_priv->current_page;

	return 0;
}
/* Disable page flipping: restore the saved CRTC registers and, if the
 * back page is currently displayed, flip once more so the front buffer
 * is visible again.
 */
static int r128_do_cleanup_pageflip(drm_device_t * dev)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	DRM_DEBUG("\n");

	R128_WRITE(R128_CRTC_OFFSET, dev_priv->crtc_offset);
	R128_WRITE(R128_CRTC_OFFSET_CNTL, dev_priv->crtc_offset_cntl);

	if (dev_priv->current_page != 0) {
		r128_cce_dispatch_flip(dev);
		COMMIT_RING();
	}

	dev_priv->page_flipping = 0;
	return 0;
}
  1053. /* Swapping and flipping are different operations, need different ioctls.
  1054. * They can & should be intermixed to support multiple 3d windows.
  1055. */
  1056. static int r128_cce_flip(DRM_IOCTL_ARGS)
  1057. {
  1058. DRM_DEVICE;
  1059. drm_r128_private_t *dev_priv = dev->dev_private;
  1060. DRM_DEBUG("%s\n", __FUNCTION__);
  1061. LOCK_TEST_WITH_RETURN(dev, filp);
  1062. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1063. if (!dev_priv->page_flipping)
  1064. r128_do_init_pageflip(dev);
  1065. r128_cce_dispatch_flip(dev);
  1066. COMMIT_RING();
  1067. return 0;
  1068. }
  1069. static int r128_cce_swap(DRM_IOCTL_ARGS)
  1070. {
  1071. DRM_DEVICE;
  1072. drm_r128_private_t *dev_priv = dev->dev_private;
  1073. drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
  1074. DRM_DEBUG("%s\n", __FUNCTION__);
  1075. LOCK_TEST_WITH_RETURN(dev, filp);
  1076. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1077. if (sarea_priv->nbox > R128_NR_SAREA_CLIPRECTS)
  1078. sarea_priv->nbox = R128_NR_SAREA_CLIPRECTS;
  1079. r128_cce_dispatch_swap(dev);
  1080. dev_priv->sarea_priv->dirty |= (R128_UPLOAD_CONTEXT |
  1081. R128_UPLOAD_MASKS);
  1082. COMMIT_RING();
  1083. return 0;
  1084. }
  1085. static int r128_cce_vertex(DRM_IOCTL_ARGS)
  1086. {
  1087. DRM_DEVICE;
  1088. drm_r128_private_t *dev_priv = dev->dev_private;
  1089. drm_device_dma_t *dma = dev->dma;
  1090. drm_buf_t *buf;
  1091. drm_r128_buf_priv_t *buf_priv;
  1092. drm_r128_vertex_t vertex;
  1093. LOCK_TEST_WITH_RETURN(dev, filp);
  1094. if (!dev_priv) {
  1095. DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
  1096. return DRM_ERR(EINVAL);
  1097. }
  1098. DRM_COPY_FROM_USER_IOCTL(vertex, (drm_r128_vertex_t __user *) data,
  1099. sizeof(vertex));
  1100. DRM_DEBUG("pid=%d index=%d count=%d discard=%d\n",
  1101. DRM_CURRENTPID, vertex.idx, vertex.count, vertex.discard);
  1102. if (vertex.idx < 0 || vertex.idx >= dma->buf_count) {
  1103. DRM_ERROR("buffer index %d (of %d max)\n",
  1104. vertex.idx, dma->buf_count - 1);
  1105. return DRM_ERR(EINVAL);
  1106. }
  1107. if (vertex.prim < 0 ||
  1108. vertex.prim > R128_CCE_VC_CNTL_PRIM_TYPE_TRI_TYPE2) {
  1109. DRM_ERROR("buffer prim %d\n", vertex.prim);
  1110. return DRM_ERR(EINVAL);
  1111. }
  1112. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1113. VB_AGE_TEST_WITH_RETURN(dev_priv);
  1114. buf = dma->buflist[vertex.idx];
  1115. buf_priv = buf->dev_private;
  1116. if (buf->filp != filp) {
  1117. DRM_ERROR("process %d using buffer owned by %p\n",
  1118. DRM_CURRENTPID, buf->filp);
  1119. return DRM_ERR(EINVAL);
  1120. }
  1121. if (buf->pending) {
  1122. DRM_ERROR("sending pending buffer %d\n", vertex.idx);
  1123. return DRM_ERR(EINVAL);
  1124. }
  1125. buf->used = vertex.count;
  1126. buf_priv->prim = vertex.prim;
  1127. buf_priv->discard = vertex.discard;
  1128. r128_cce_dispatch_vertex(dev, buf);
  1129. COMMIT_RING();
  1130. return 0;
  1131. }
  1132. static int r128_cce_indices(DRM_IOCTL_ARGS)
  1133. {
  1134. DRM_DEVICE;
  1135. drm_r128_private_t *dev_priv = dev->dev_private;
  1136. drm_device_dma_t *dma = dev->dma;
  1137. drm_buf_t *buf;
  1138. drm_r128_buf_priv_t *buf_priv;
  1139. drm_r128_indices_t elts;
  1140. int count;
  1141. LOCK_TEST_WITH_RETURN(dev, filp);
  1142. if (!dev_priv) {
  1143. DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
  1144. return DRM_ERR(EINVAL);
  1145. }
  1146. DRM_COPY_FROM_USER_IOCTL(elts, (drm_r128_indices_t __user *) data,
  1147. sizeof(elts));
  1148. DRM_DEBUG("pid=%d buf=%d s=%d e=%d d=%d\n", DRM_CURRENTPID,
  1149. elts.idx, elts.start, elts.end, elts.discard);
  1150. if (elts.idx < 0 || elts.idx >= dma->buf_count) {
  1151. DRM_ERROR("buffer index %d (of %d max)\n",
  1152. elts.idx, dma->buf_count - 1);
  1153. return DRM_ERR(EINVAL);
  1154. }
  1155. if (elts.prim < 0 || elts.prim > R128_CCE_VC_CNTL_PRIM_TYPE_TRI_TYPE2) {
  1156. DRM_ERROR("buffer prim %d\n", elts.prim);
  1157. return DRM_ERR(EINVAL);
  1158. }
  1159. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1160. VB_AGE_TEST_WITH_RETURN(dev_priv);
  1161. buf = dma->buflist[elts.idx];
  1162. buf_priv = buf->dev_private;
  1163. if (buf->filp != filp) {
  1164. DRM_ERROR("process %d using buffer owned by %p\n",
  1165. DRM_CURRENTPID, buf->filp);
  1166. return DRM_ERR(EINVAL);
  1167. }
  1168. if (buf->pending) {
  1169. DRM_ERROR("sending pending buffer %d\n", elts.idx);
  1170. return DRM_ERR(EINVAL);
  1171. }
  1172. count = (elts.end - elts.start) / sizeof(u16);
  1173. elts.start -= R128_INDEX_PRIM_OFFSET;
  1174. if (elts.start & 0x7) {
  1175. DRM_ERROR("misaligned buffer 0x%x\n", elts.start);
  1176. return DRM_ERR(EINVAL);
  1177. }
  1178. if (elts.start < buf->used) {
  1179. DRM_ERROR("no header 0x%x - 0x%x\n", elts.start, buf->used);
  1180. return DRM_ERR(EINVAL);
  1181. }
  1182. buf->used = elts.end;
  1183. buf_priv->prim = elts.prim;
  1184. buf_priv->discard = elts.discard;
  1185. r128_cce_dispatch_indices(dev, buf, elts.start, elts.end, count);
  1186. COMMIT_RING();
  1187. return 0;
  1188. }
  1189. static int r128_cce_blit(DRM_IOCTL_ARGS)
  1190. {
  1191. DRM_DEVICE;
  1192. drm_device_dma_t *dma = dev->dma;
  1193. drm_r128_private_t *dev_priv = dev->dev_private;
  1194. drm_r128_blit_t blit;
  1195. int ret;
  1196. LOCK_TEST_WITH_RETURN(dev, filp);
  1197. DRM_COPY_FROM_USER_IOCTL(blit, (drm_r128_blit_t __user *) data,
  1198. sizeof(blit));
  1199. DRM_DEBUG("pid=%d index=%d\n", DRM_CURRENTPID, blit.idx);
  1200. if (blit.idx < 0 || blit.idx >= dma->buf_count) {
  1201. DRM_ERROR("buffer index %d (of %d max)\n",
  1202. blit.idx, dma->buf_count - 1);
  1203. return DRM_ERR(EINVAL);
  1204. }
  1205. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1206. VB_AGE_TEST_WITH_RETURN(dev_priv);
  1207. ret = r128_cce_dispatch_blit(filp, dev, &blit);
  1208. COMMIT_RING();
  1209. return ret;
  1210. }
  1211. static int r128_cce_depth(DRM_IOCTL_ARGS)
  1212. {
  1213. DRM_DEVICE;
  1214. drm_r128_private_t *dev_priv = dev->dev_private;
  1215. drm_r128_depth_t depth;
  1216. int ret;
  1217. LOCK_TEST_WITH_RETURN(dev, filp);
  1218. DRM_COPY_FROM_USER_IOCTL(depth, (drm_r128_depth_t __user *) data,
  1219. sizeof(depth));
  1220. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1221. ret = DRM_ERR(EINVAL);
  1222. switch (depth.func) {
  1223. case R128_WRITE_SPAN:
  1224. ret = r128_cce_dispatch_write_span(dev, &depth);
  1225. break;
  1226. case R128_WRITE_PIXELS:
  1227. ret = r128_cce_dispatch_write_pixels(dev, &depth);
  1228. break;
  1229. case R128_READ_SPAN:
  1230. ret = r128_cce_dispatch_read_span(dev, &depth);
  1231. break;
  1232. case R128_READ_PIXELS:
  1233. ret = r128_cce_dispatch_read_pixels(dev, &depth);
  1234. break;
  1235. }
  1236. COMMIT_RING();
  1237. return ret;
  1238. }
  1239. static int r128_cce_stipple(DRM_IOCTL_ARGS)
  1240. {
  1241. DRM_DEVICE;
  1242. drm_r128_private_t *dev_priv = dev->dev_private;
  1243. drm_r128_stipple_t stipple;
  1244. u32 mask[32];
  1245. LOCK_TEST_WITH_RETURN(dev, filp);
  1246. DRM_COPY_FROM_USER_IOCTL(stipple, (drm_r128_stipple_t __user *) data,
  1247. sizeof(stipple));
  1248. if (DRM_COPY_FROM_USER(&mask, stipple.mask, 32 * sizeof(u32)))
  1249. return DRM_ERR(EFAULT);
  1250. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1251. r128_cce_dispatch_stipple(dev, mask);
  1252. COMMIT_RING();
  1253. return 0;
  1254. }
/* DRM_R128_INDIRECT ioctl: dispatch a raw command buffer from the
 * client.  The buffer contents are not verified, so this ioctl is
 * registered as master/root-only (see r128_ioctls[]).
 */
static int r128_cce_indirect(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_device_dma_t *dma = dev->dma;
	drm_buf_t *buf;
	drm_r128_buf_priv_t *buf_priv;
	drm_r128_indirect_t indirect;
#if 0
	RING_LOCALS;
#endif

	LOCK_TEST_WITH_RETURN(dev, filp);

	if (!dev_priv) {
		DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
		return DRM_ERR(EINVAL);
	}

	DRM_COPY_FROM_USER_IOCTL(indirect, (drm_r128_indirect_t __user *) data,
				 sizeof(indirect));

	DRM_DEBUG("indirect: idx=%d s=%d e=%d d=%d\n",
		  indirect.idx, indirect.start, indirect.end, indirect.discard);

	/* Validate the buffer index before touching the DMA buffer list. */
	if (indirect.idx < 0 || indirect.idx >= dma->buf_count) {
		DRM_ERROR("buffer index %d (of %d max)\n",
			  indirect.idx, dma->buf_count - 1);
		return DRM_ERR(EINVAL);
	}

	buf = dma->buflist[indirect.idx];
	buf_priv = buf->dev_private;

	/* The buffer must belong to the caller and be idle. */
	if (buf->filp != filp) {
		DRM_ERROR("process %d using buffer owned by %p\n",
			  DRM_CURRENTPID, buf->filp);
		return DRM_ERR(EINVAL);
	}
	if (buf->pending) {
		DRM_ERROR("sending pending buffer %d\n", indirect.idx);
		return DRM_ERR(EINVAL);
	}

	/* New commands must start at or after the already-used region. */
	if (indirect.start < buf->used) {
		DRM_ERROR("reusing indirect: start=0x%x actual=0x%x\n",
			  indirect.start, buf->used);
		return DRM_ERR(EINVAL);
	}

	RING_SPACE_TEST_WITH_RETURN(dev_priv);
	VB_AGE_TEST_WITH_RETURN(dev_priv);

	buf->used = indirect.end;
	buf_priv->discard = indirect.discard;

#if 0
	/* Wait for the 3D stream to idle before the indirect buffer
	 * containing 2D acceleration commands is processed.
	 */
	BEGIN_RING(2);
	RADEON_WAIT_UNTIL_3D_IDLE();
	ADVANCE_RING();
#endif

	/* Dispatch the indirect buffer full of commands from the
	 * X server.  This is insecure and is thus only available to
	 * privileged clients.
	 */
	r128_cce_dispatch_indirect(dev, buf, indirect.start, indirect.end);

	COMMIT_RING();
	return 0;
}
  1316. static int r128_getparam(DRM_IOCTL_ARGS)
  1317. {
  1318. DRM_DEVICE;
  1319. drm_r128_private_t *dev_priv = dev->dev_private;
  1320. drm_r128_getparam_t param;
  1321. int value;
  1322. if (!dev_priv) {
  1323. DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
  1324. return DRM_ERR(EINVAL);
  1325. }
  1326. DRM_COPY_FROM_USER_IOCTL(param, (drm_r128_getparam_t __user *) data,
  1327. sizeof(param));
  1328. DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);
  1329. switch (param.param) {
  1330. case R128_PARAM_IRQ_NR:
  1331. value = dev->irq;
  1332. break;
  1333. default:
  1334. return DRM_ERR(EINVAL);
  1335. }
  1336. if (DRM_COPY_TO_USER(param.value, &value, sizeof(int))) {
  1337. DRM_ERROR("copy_to_user\n");
  1338. return DRM_ERR(EFAULT);
  1339. }
  1340. return 0;
  1341. }
  1342. void r128_driver_preclose(drm_device_t * dev, DRMFILE filp)
  1343. {
  1344. if (dev->dev_private) {
  1345. drm_r128_private_t *dev_priv = dev->dev_private;
  1346. if (dev_priv->page_flipping) {
  1347. r128_do_cleanup_pageflip(dev);
  1348. }
  1349. }
  1350. }
/* Device cleanup hook, run when the last file handle closes: shut down
 * the CCE engine and release all driver-private state.
 */
void r128_driver_lastclose(drm_device_t * dev)
{
	r128_do_cleanup_cce(dev);
}
/* Ioctl dispatch table, indexed by driver ioctl number.  Engine setup
 * and the unverified indirect-buffer path require the root-privileged
 * master; everything else only requires an authenticated client.
 */
drm_ioctl_desc_t r128_ioctls[] = {
	[DRM_IOCTL_NR(DRM_R128_INIT)] = {r128_cce_init, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY},
	[DRM_IOCTL_NR(DRM_R128_CCE_START)] = {r128_cce_start, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY},
	[DRM_IOCTL_NR(DRM_R128_CCE_STOP)] = {r128_cce_stop, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY},
	[DRM_IOCTL_NR(DRM_R128_CCE_RESET)] = {r128_cce_reset, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY},
	[DRM_IOCTL_NR(DRM_R128_CCE_IDLE)] = {r128_cce_idle, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_R128_RESET)] = {r128_engine_reset, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_R128_FULLSCREEN)] = {r128_fullscreen, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_R128_SWAP)] = {r128_cce_swap, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_R128_FLIP)] = {r128_cce_flip, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_R128_CLEAR)] = {r128_cce_clear, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_R128_VERTEX)] = {r128_cce_vertex, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_R128_INDICES)] = {r128_cce_indices, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_R128_BLIT)] = {r128_cce_blit, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_R128_DEPTH)] = {r128_cce_depth, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_R128_STIPPLE)] = {r128_cce_stipple, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_R128_INDIRECT)] = {r128_cce_indirect, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY},
	[DRM_IOCTL_NR(DRM_R128_GETPARAM)] = {r128_getparam, DRM_AUTH},
};

/* Number of entries in r128_ioctls, exported to the DRM core. */
int r128_max_ioctl = DRM_ARRAY_SIZE(r128_ioctls);