r128_state.c 42 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370137113721373137413751376137713781379138013811382138313841385138613871388138913901391139213931394139513961397139813991400140114021403140414051406140714081409141014111412141314141415141614171418141914201421142214231424142514261427142814291430143114321433143414351436143714381439144014411442144314441445144614471448144914501451145214531454145514561457145814591460146114621463146414651466146714681469147014711472147314741475147614771478147914801481148214831484148514861487148814891490149114921493149414951496149714981499150015011502150315041505150615071508150915101511151215131514151515161517151815191520152115221523152415251526152715281529153015311532153315341535153615371538153915401541154215431544154515461547154815491550155115521553155415551556155715581559156015611562156315641565156615671568156915701571157215731574157515761577157815791580158115821583158415851586158715881589159015911592159315941595159615971598159916001601160216031604160516061607160816091610161116121613161416151616161716181619162016211622162316241625162616271628162916301631163216331634163516361637163816391640164116421643164416451646164716481649165016511652165316541655165616571658165916601661166216631664166516661667166816691670167116721673167416751676167716781679168016811682168316841685168616871688168916901691169216931694169516961697169816991700170117021703170417051706170717081709171017111712
  1. /* r128_state.c -- State support for r128 -*- linux-c -*-
  2. * Created: Thu Jan 27 02:53:43 2000 by gareth@valinux.com
  3. */
  4. /* Copyright 2000 VA Linux Systems, Inc., Sunnyvale, California.
  5. * All Rights Reserved.
  6. *
  7. * Permission is hereby granted, free of charge, to any person obtaining a
  8. * copy of this software and associated documentation files (the "Software"),
  9. * to deal in the Software without restriction, including without limitation
  10. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  11. * and/or sell copies of the Software, and to permit persons to whom the
  12. * Software is furnished to do so, subject to the following conditions:
  13. *
  14. * The above copyright notice and this permission notice (including the next
  15. * paragraph) shall be included in all copies or substantial portions of the
  16. * Software.
  17. *
  18. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  19. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  20. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  21. * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
  22. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  23. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
  24. * DEALINGS IN THE SOFTWARE.
  25. *
  26. * Authors:
  27. * Gareth Hughes <gareth@valinux.com>
  28. */
  29. #include "drmP.h"
  30. #include "drm.h"
  31. #include "r128_drm.h"
  32. #include "r128_drv.h"
  33. /* ================================================================
  34. * CCE hardware state programming functions
  35. */
/* Program up to three of the hardware's auxiliary scissor registers
 * from the given clip rectangles and enable the corresponding scissors.
 * The hardware right/bottom limits are inclusive, hence the "- 1" on
 * the x2/y2 coordinates.
 */
static void r128_emit_clip_rects(drm_r128_private_t * dev_priv,
				 drm_clip_rect_t * boxes, int count)
{
	u32 aux_sc_cntl = 0x00000000;
	RING_LOCALS;
	DRM_DEBUG(" %s\n", __FUNCTION__);

	/* 5 dwords per box (packet header + 4 edges), plus 2 for the
	 * final AUX_SC_CNTL write.  At most three boxes fit per pass.
	 */
	BEGIN_RING((count < 3 ? count : 3) * 5 + 2);

	if (count >= 1) {
		OUT_RING(CCE_PACKET0(R128_AUX1_SC_LEFT, 3));
		OUT_RING(boxes[0].x1);
		OUT_RING(boxes[0].x2 - 1);
		OUT_RING(boxes[0].y1);
		OUT_RING(boxes[0].y2 - 1);
		aux_sc_cntl |= (R128_AUX1_SC_EN | R128_AUX1_SC_MODE_OR);
	}
	if (count >= 2) {
		OUT_RING(CCE_PACKET0(R128_AUX2_SC_LEFT, 3));
		OUT_RING(boxes[1].x1);
		OUT_RING(boxes[1].x2 - 1);
		OUT_RING(boxes[1].y1);
		OUT_RING(boxes[1].y2 - 1);
		aux_sc_cntl |= (R128_AUX2_SC_EN | R128_AUX2_SC_MODE_OR);
	}
	if (count >= 3) {
		OUT_RING(CCE_PACKET0(R128_AUX3_SC_LEFT, 3));
		OUT_RING(boxes[2].x1);
		OUT_RING(boxes[2].x2 - 1);
		OUT_RING(boxes[2].y1);
		OUT_RING(boxes[2].y2 - 1);
		aux_sc_cntl |= (R128_AUX3_SC_EN | R128_AUX3_SC_MODE_OR);
	}

	/* Enable exactly the scissors that were just programmed. */
	OUT_RING(CCE_PACKET0(R128_AUX_SC_CNTL, 0));
	OUT_RING(aux_sc_cntl);

	ADVANCE_RING();
}
/* Emit the SCALE_3D_CNTL register from the shared-area context state. */
static __inline__ void r128_emit_core(drm_r128_private_t * dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG(" %s\n", __FUNCTION__);

	BEGIN_RING(2);

	OUT_RING(CCE_PACKET0(R128_SCALE_3D_CNTL, 0));
	OUT_RING(ctx->scale_3d_cntl);

	ADVANCE_RING();
}
/* Emit the 12 consecutive context registers starting at
 * DST_PITCH_OFFSET_C from the shared-area context state.  The register
 * order below must match the hardware's register layout, since a single
 * Type-0 packet writes them back to back.
 */
static __inline__ void r128_emit_context(drm_r128_private_t * dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG(" %s\n", __FUNCTION__);

	BEGIN_RING(13);

	OUT_RING(CCE_PACKET0(R128_DST_PITCH_OFFSET_C, 11));
	OUT_RING(ctx->dst_pitch_offset_c);
	OUT_RING(ctx->dp_gui_master_cntl_c);
	OUT_RING(ctx->sc_top_left_c);
	OUT_RING(ctx->sc_bottom_right_c);
	OUT_RING(ctx->z_offset_c);
	OUT_RING(ctx->z_pitch_c);
	OUT_RING(ctx->z_sten_cntl_c);
	OUT_RING(ctx->tex_cntl_c);
	OUT_RING(ctx->misc_3d_state_cntl_reg);
	OUT_RING(ctx->texture_clr_cmp_clr_c);
	OUT_RING(ctx->texture_clr_cmp_msk_c);
	OUT_RING(ctx->fog_color_c);

	ADVANCE_RING();
}
/* Emit the vertex setup registers (SETUP_CNTL and PM4_VC_FPU_SETUP)
 * with a Type-1 packet, which writes two non-adjacent registers.
 */
static __inline__ void r128_emit_setup(drm_r128_private_t * dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG(" %s\n", __FUNCTION__);

	BEGIN_RING(3);

	OUT_RING(CCE_PACKET1(R128_SETUP_CNTL, R128_PM4_VC_FPU_SETUP));
	OUT_RING(ctx->setup_cntl);
	OUT_RING(ctx->pm4_vc_fpu_setup);

	ADVANCE_RING();
}
/* Emit the write mask plus the stencil-reference and 3D plane masks. */
static __inline__ void r128_emit_masks(drm_r128_private_t * dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG(" %s\n", __FUNCTION__);

	BEGIN_RING(5);

	OUT_RING(CCE_PACKET0(R128_DP_WRITE_MASK, 0));
	OUT_RING(ctx->dp_write_mask);

	/* STEN_REF_MASK_C and PLANE_3D_MASK_C are adjacent registers. */
	OUT_RING(CCE_PACKET0(R128_STEN_REF_MASK_C, 1));
	OUT_RING(ctx->sten_ref_mask_c);
	OUT_RING(ctx->plane_3d_mask_c);

	ADVANCE_RING();
}
/* Emit the window origin offset register. */
static __inline__ void r128_emit_window(drm_r128_private_t * dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG(" %s\n", __FUNCTION__);

	BEGIN_RING(2);

	OUT_RING(CCE_PACKET0(R128_WINDOW_XY_OFFSET, 0));
	OUT_RING(ctx->window_xy_offset);

	ADVANCE_RING();
}
/* Emit the primary texture unit state: control registers, size/pitch,
 * all mipmap-level offsets, and finally the constant and border colors.
 */
static __inline__ void r128_emit_tex0(drm_r128_private_t * dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	drm_r128_texture_regs_t *tex = &sarea_priv->tex_state[0];
	int i;
	RING_LOCALS;
	DRM_DEBUG(" %s\n", __FUNCTION__);

	/* header + 3 regs + one offset per mip level, then a second
	 * packet (header + 2 regs) for the colors: 7 + levels dwords.
	 */
	BEGIN_RING(7 + R128_MAX_TEXTURE_LEVELS);

	OUT_RING(CCE_PACKET0(R128_PRIM_TEX_CNTL_C,
			     2 + R128_MAX_TEXTURE_LEVELS));
	OUT_RING(tex->tex_cntl);
	OUT_RING(tex->tex_combine_cntl);
	OUT_RING(ctx->tex_size_pitch_c);
	for (i = 0; i < R128_MAX_TEXTURE_LEVELS; i++) {
		OUT_RING(tex->tex_offset[i]);
	}

	OUT_RING(CCE_PACKET0(R128_CONSTANT_COLOR_C, 1));
	OUT_RING(ctx->constant_color_c);
	OUT_RING(tex->tex_border_color);

	ADVANCE_RING();
}
/* Emit the secondary texture unit state: control registers, all
 * mipmap-level offsets, and the border color.
 */
static __inline__ void r128_emit_tex1(drm_r128_private_t * dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_texture_regs_t *tex = &sarea_priv->tex_state[1];
	int i;
	RING_LOCALS;
	DRM_DEBUG(" %s\n", __FUNCTION__);

	/* header + 2 regs + one offset per mip level, then header + 1
	 * reg for the border color: 5 + levels dwords.
	 */
	BEGIN_RING(5 + R128_MAX_TEXTURE_LEVELS);

	OUT_RING(CCE_PACKET0(R128_SEC_TEX_CNTL_C, 1 + R128_MAX_TEXTURE_LEVELS));
	OUT_RING(tex->tex_cntl);
	OUT_RING(tex->tex_combine_cntl);
	for (i = 0; i < R128_MAX_TEXTURE_LEVELS; i++) {
		OUT_RING(tex->tex_offset[i]);
	}

	OUT_RING(CCE_PACKET0(R128_SEC_TEXTURE_BORDER_COLOR_C, 0));
	OUT_RING(tex->tex_border_color);

	ADVANCE_RING();
}
/* Flush all dirty hardware state to the ring.  Each R128_UPLOAD_* bit
 * in the shared area's dirty mask selects one emit helper; the bit is
 * cleared in sarea_priv->dirty as soon as that state has been emitted.
 * R128_UPLOAD_CLIPRECTS is deliberately not handled here -- cliprects
 * are emitted per-pass by the dispatch functions.
 */
static void r128_emit_state(drm_r128_private_t * dev_priv)
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int dirty = sarea_priv->dirty;

	DRM_DEBUG("%s: dirty=0x%08x\n", __FUNCTION__, dirty);

	if (dirty & R128_UPLOAD_CORE) {
		r128_emit_core(dev_priv);
		sarea_priv->dirty &= ~R128_UPLOAD_CORE;
	}

	if (dirty & R128_UPLOAD_CONTEXT) {
		r128_emit_context(dev_priv);
		sarea_priv->dirty &= ~R128_UPLOAD_CONTEXT;
	}

	if (dirty & R128_UPLOAD_SETUP) {
		r128_emit_setup(dev_priv);
		sarea_priv->dirty &= ~R128_UPLOAD_SETUP;
	}

	if (dirty & R128_UPLOAD_MASKS) {
		r128_emit_masks(dev_priv);
		sarea_priv->dirty &= ~R128_UPLOAD_MASKS;
	}

	if (dirty & R128_UPLOAD_WINDOW) {
		r128_emit_window(dev_priv);
		sarea_priv->dirty &= ~R128_UPLOAD_WINDOW;
	}

	if (dirty & R128_UPLOAD_TEX0) {
		r128_emit_tex0(dev_priv);
		sarea_priv->dirty &= ~R128_UPLOAD_TEX0;
	}

	if (dirty & R128_UPLOAD_TEX1) {
		r128_emit_tex1(dev_priv);
		sarea_priv->dirty &= ~R128_UPLOAD_TEX1;
	}

	/* Turn off the texture cache flushing */
	sarea_priv->context_state.tex_cntl_c &= ~R128_TEX_CACHE_FLUSH;

	sarea_priv->dirty &= ~R128_REQUIRE_QUIESCENCE;
}
  218. #if R128_PERFORMANCE_BOXES
  219. /* ================================================================
  220. * Performance monitoring functions
  221. */
/* Fill a w x h rectangle at (x, y) in the back buffer with a solid
 * color given as 8-bit r/g/b components, packed to match the
 * framebuffer depth (RGB565 for 16bpp, xRGB for 24/32bpp).  Used only
 * for the performance-monitoring boxes; silently does nothing for an
 * unsupported framebuffer depth.
 */
static void r128_clear_box(drm_r128_private_t * dev_priv,
			   int x, int y, int w, int h, int r, int g, int b)
{
	u32 pitch, offset;
	u32 fb_bpp, color;
	RING_LOCALS;

	switch (dev_priv->fb_bpp) {
	case 16:
		fb_bpp = R128_GMC_DST_16BPP;
		/* Pack 8:8:8 down to 5:6:5. */
		color = (((r & 0xf8) << 8) |
			 ((g & 0xfc) << 3) | ((b & 0xf8) >> 3));
		break;
	case 24:
		fb_bpp = R128_GMC_DST_24BPP;
		color = ((r << 16) | (g << 8) | b);
		break;
	case 32:
		fb_bpp = R128_GMC_DST_32BPP;
		color = (((0xff) << 24) | (r << 16) | (g << 8) | b);
		break;
	default:
		return;
	}

	/* Always draw into the back buffer. */
	offset = dev_priv->back_offset;
	pitch = dev_priv->back_pitch >> 3;

	BEGIN_RING(6);

	OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
	OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
		 R128_GMC_BRUSH_SOLID_COLOR |
		 fb_bpp |
		 R128_GMC_SRC_DATATYPE_COLOR |
		 R128_ROP3_P |
		 R128_GMC_CLR_CMP_CNTL_DIS | R128_GMC_AUX_CLIP_DIS);

	OUT_RING((pitch << 21) | (offset >> 5));
	OUT_RING(color);

	OUT_RING((x << 16) | y);
	OUT_RING((w << 16) | h);

	ADVANCE_RING();
}
  261. static void r128_cce_performance_boxes(drm_r128_private_t * dev_priv)
  262. {
  263. if (atomic_read(&dev_priv->idle_count) == 0) {
  264. r128_clear_box(dev_priv, 64, 4, 8, 8, 0, 255, 0);
  265. } else {
  266. atomic_set(&dev_priv->idle_count, 0);
  267. }
  268. }
  269. #endif
  270. /* ================================================================
  271. * CCE command dispatch functions
  272. */
/* Debug helper: log which R128_UPLOAD_* / quiescence bits are set in a
 * dirty-flags word, one label per set bit.
 */
static void r128_print_dirty(const char *msg, unsigned int flags)
{
	DRM_INFO("%s: (0x%x) %s%s%s%s%s%s%s%s%s\n",
		 msg,
		 flags,
		 (flags & R128_UPLOAD_CORE) ? "core, " : "",
		 (flags & R128_UPLOAD_CONTEXT) ? "context, " : "",
		 (flags & R128_UPLOAD_SETUP) ? "setup, " : "",
		 (flags & R128_UPLOAD_TEX0) ? "tex0, " : "",
		 (flags & R128_UPLOAD_TEX1) ? "tex1, " : "",
		 (flags & R128_UPLOAD_MASKS) ? "masks, " : "",
		 (flags & R128_UPLOAD_WINDOW) ? "window, " : "",
		 (flags & R128_UPLOAD_CLIPRECTS) ? "cliprects, " : "",
		 (flags & R128_REQUIRE_QUIESCENCE) ? "quiescence, " : "");
}
/* Clear the requested buffers (front/back color and/or depth) within
 * every cliprect in the shared area, using solid-fill PAINT_MULTI
 * blits.  When page flipping is active and page 1 is current, the
 * FRONT/BACK flags are swapped so the clear hits the physically
 * correct buffers.
 */
static void r128_cce_dispatch_clear(drm_device_t * dev,
				    drm_r128_clear_t * clear)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int nbox = sarea_priv->nbox;
	drm_clip_rect_t *pbox = sarea_priv->boxes;
	unsigned int flags = clear->flags;
	int i;
	RING_LOCALS;
	DRM_DEBUG("%s\n", __FUNCTION__);

	if (dev_priv->page_flipping && dev_priv->current_page == 1) {
		/* Pages are flipped: front requests go to the back
		 * buffer and vice versa.
		 */
		unsigned int tmp = flags;

		flags &= ~(R128_FRONT | R128_BACK);
		if (tmp & R128_FRONT)
			flags |= R128_BACK;
		if (tmp & R128_BACK)
			flags |= R128_FRONT;
	}

	for (i = 0; i < nbox; i++) {
		int x = pbox[i].x1;
		int y = pbox[i].y1;
		int w = pbox[i].x2 - x;
		int h = pbox[i].y2 - y;

		DRM_DEBUG("dispatch clear %d,%d-%d,%d flags 0x%x\n",
			  pbox[i].x1, pbox[i].y1, pbox[i].x2,
			  pbox[i].y2, flags);

		/* Set the color write mask once before color clears. */
		if (flags & (R128_FRONT | R128_BACK)) {
			BEGIN_RING(2);

			OUT_RING(CCE_PACKET0(R128_DP_WRITE_MASK, 0));
			OUT_RING(clear->color_mask);

			ADVANCE_RING();
		}

		if (flags & R128_FRONT) {
			BEGIN_RING(6);

			OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
			OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
				 R128_GMC_BRUSH_SOLID_COLOR |
				 (dev_priv->color_fmt << 8) |
				 R128_GMC_SRC_DATATYPE_COLOR |
				 R128_ROP3_P |
				 R128_GMC_CLR_CMP_CNTL_DIS |
				 R128_GMC_AUX_CLIP_DIS);

			OUT_RING(dev_priv->front_pitch_offset_c);
			OUT_RING(clear->clear_color);

			OUT_RING((x << 16) | y);
			OUT_RING((w << 16) | h);

			ADVANCE_RING();
		}

		if (flags & R128_BACK) {
			BEGIN_RING(6);

			OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
			OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
				 R128_GMC_BRUSH_SOLID_COLOR |
				 (dev_priv->color_fmt << 8) |
				 R128_GMC_SRC_DATATYPE_COLOR |
				 R128_ROP3_P |
				 R128_GMC_CLR_CMP_CNTL_DIS |
				 R128_GMC_AUX_CLIP_DIS);

			OUT_RING(dev_priv->back_pitch_offset_c);
			OUT_RING(clear->clear_color);

			OUT_RING((x << 16) | y);
			OUT_RING((w << 16) | h);

			ADVANCE_RING();
		}

		if (flags & R128_DEPTH) {
			BEGIN_RING(6);

			OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
			OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
				 R128_GMC_BRUSH_SOLID_COLOR |
				 (dev_priv->depth_fmt << 8) |
				 R128_GMC_SRC_DATATYPE_COLOR |
				 R128_ROP3_P |
				 R128_GMC_CLR_CMP_CNTL_DIS |
				 R128_GMC_AUX_CLIP_DIS | R128_GMC_WR_MSK_DIS);

			OUT_RING(dev_priv->depth_pitch_offset_c);
			OUT_RING(clear->clear_depth);

			OUT_RING((x << 16) | y);
			OUT_RING((w << 16) | h);

			ADVANCE_RING();
		}
	}
}
/* Copy the back buffer to the front buffer (a BITBLT per cliprect),
 * honoring a flipped page assignment, then bump and emit the
 * last_frame counter that clients use for frame throttling.
 */
static void r128_cce_dispatch_swap(drm_device_t * dev)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int nbox = sarea_priv->nbox;
	drm_clip_rect_t *pbox = sarea_priv->boxes;
	int i;
	RING_LOCALS;
	DRM_DEBUG("%s\n", __FUNCTION__);

#if R128_PERFORMANCE_BOXES
	/* Do some trivial performance monitoring...
	 */
	r128_cce_performance_boxes(dev_priv);
#endif

	for (i = 0; i < nbox; i++) {
		int x = pbox[i].x1;
		int y = pbox[i].y1;
		int w = pbox[i].x2 - x;
		int h = pbox[i].y2 - y;

		BEGIN_RING(7);

		OUT_RING(CCE_PACKET3(R128_CNTL_BITBLT_MULTI, 5));
		OUT_RING(R128_GMC_SRC_PITCH_OFFSET_CNTL |
			 R128_GMC_DST_PITCH_OFFSET_CNTL |
			 R128_GMC_BRUSH_NONE |
			 (dev_priv->color_fmt << 8) |
			 R128_GMC_SRC_DATATYPE_COLOR |
			 R128_ROP3_S |
			 R128_DP_SRC_SOURCE_MEMORY |
			 R128_GMC_CLR_CMP_CNTL_DIS |
			 R128_GMC_AUX_CLIP_DIS | R128_GMC_WR_MSK_DIS);

		/* Make this work even if front & back are flipped:
		 */
		if (dev_priv->current_page == 0) {
			OUT_RING(dev_priv->back_pitch_offset_c);
			OUT_RING(dev_priv->front_pitch_offset_c);
		} else {
			OUT_RING(dev_priv->front_pitch_offset_c);
			OUT_RING(dev_priv->back_pitch_offset_c);
		}

		/* Same source and destination coordinates. */
		OUT_RING((x << 16) | y);
		OUT_RING((x << 16) | y);
		OUT_RING((w << 16) | h);

		ADVANCE_RING();
	}

	/* Increment the frame counter.  The client-side 3D driver must
	 * throttle the framerate by waiting for this value before
	 * performing the swapbuffer ioctl.
	 */
	dev_priv->sarea_priv->last_frame++;

	BEGIN_RING(2);

	OUT_RING(CCE_PACKET0(R128_LAST_FRAME_REG, 0));
	OUT_RING(dev_priv->sarea_priv->last_frame);

	ADVANCE_RING();
}
/* Perform a page flip by pointing CRTC_OFFSET at the other buffer
 * (after waiting for any pending flip to complete), then toggle the
 * current-page bookkeeping and emit the frame-throttle counter.
 */
static void r128_cce_dispatch_flip(drm_device_t * dev)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	RING_LOCALS;
	DRM_DEBUG("%s: page=%d pfCurrentPage=%d\n",
		  __FUNCTION__,
		  dev_priv->current_page, dev_priv->sarea_priv->pfCurrentPage);

#if R128_PERFORMANCE_BOXES
	/* Do some trivial performance monitoring...
	 */
	r128_cce_performance_boxes(dev_priv);
#endif

	BEGIN_RING(4);

	R128_WAIT_UNTIL_PAGE_FLIPPED();
	OUT_RING(CCE_PACKET0(R128_CRTC_OFFSET, 0));

	/* Scan out from whichever buffer is not currently displayed. */
	if (dev_priv->current_page == 0) {
		OUT_RING(dev_priv->back_offset);
	} else {
		OUT_RING(dev_priv->front_offset);
	}

	ADVANCE_RING();

	/* Increment the frame counter.  The client-side 3D driver must
	 * throttle the framerate by waiting for this value before
	 * performing the swapbuffer ioctl.
	 */
	dev_priv->sarea_priv->last_frame++;
	dev_priv->sarea_priv->pfCurrentPage = dev_priv->current_page =
	    1 - dev_priv->current_page;

	BEGIN_RING(2);

	OUT_RING(CCE_PACKET0(R128_LAST_FRAME_REG, 0));
	OUT_RING(dev_priv->sarea_priv->last_frame);

	ADVANCE_RING();
}
/* Dispatch a client vertex buffer.  Dirty state is flushed first, then
 * the same vertex-render packet is re-issued once per group of up to
 * three cliprects (the hardware has only three aux scissors).  With
 * zero cliprects the do/while still renders exactly once, unclipped.
 * If the buffer is marked for discard, its age is emitted so it can be
 * reclaimed once the hardware passes it.
 */
static void r128_cce_dispatch_vertex(drm_device_t * dev, drm_buf_t * buf)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_buf_priv_t *buf_priv = buf->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int format = sarea_priv->vc_format;
	int offset = buf->bus_address;
	int size = buf->used;
	int prim = buf_priv->prim;
	int i = 0;
	RING_LOCALS;
	DRM_DEBUG("buf=%d nbox=%d\n", buf->idx, sarea_priv->nbox);

	/* Compiled-out debug aid; flip to 1 to trace dirty bits. */
	if (0)
		r128_print_dirty("dispatch_vertex", sarea_priv->dirty);

	if (buf->used) {
		buf_priv->dispatched = 1;

		if (sarea_priv->dirty & ~R128_UPLOAD_CLIPRECTS) {
			r128_emit_state(dev_priv);
		}

		do {
			/* Emit the next set of up to three cliprects */
			if (i < sarea_priv->nbox) {
				r128_emit_clip_rects(dev_priv,
						     &sarea_priv->boxes[i],
						     sarea_priv->nbox - i);
			}

			/* Emit the vertex buffer rendering commands */
			BEGIN_RING(5);

			OUT_RING(CCE_PACKET3(R128_3D_RNDR_GEN_INDX_PRIM, 3));
			OUT_RING(offset);
			OUT_RING(size);
			OUT_RING(format);
			OUT_RING(prim | R128_CCE_VC_CNTL_PRIM_WALK_LIST |
				 (size << R128_CCE_VC_CNTL_NUM_SHIFT));

			ADVANCE_RING();

			i += 3;
		} while (i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		buf_priv->age = dev_priv->sarea_priv->last_dispatch;

		/* Emit the vertex buffer age */
		BEGIN_RING(2);

		OUT_RING(CCE_PACKET0(R128_LAST_DISPATCH_REG, 0));
		OUT_RING(buf_priv->age);

		ADVANCE_RING();

		buf->pending = 1;
		buf->used = 0;
		/* FIXME: Check dispatched field */
		buf_priv->dispatched = 0;
	}

	dev_priv->sarea_priv->last_dispatch++;

	sarea_priv->dirty &= ~R128_UPLOAD_CLIPRECTS;
	sarea_priv->nbox = 0;
}
/* Fire the [start, end) byte range of an indirect buffer at the CCE.
 * The hardware requires an even dword count, so an odd-length buffer
 * is padded in place with a Type-2 no-op packet.  A discarded buffer
 * additionally gets its age emitted for later reclamation.
 */
static void r128_cce_dispatch_indirect(drm_device_t * dev,
				       drm_buf_t * buf, int start, int end)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_buf_priv_t *buf_priv = buf->dev_private;
	RING_LOCALS;
	DRM_DEBUG("indirect: buf=%d s=0x%x e=0x%x\n", buf->idx, start, end);

	if (start != end) {
		int offset = buf->bus_address + start;
		/* Round the byte length up to whole dwords. */
		int dwords = (end - start + 3) / sizeof(u32);

		/* Indirect buffer data must be an even number of
		 * dwords, so if we've been given an odd number we must
		 * pad the data with a Type-2 CCE packet.
		 */
		if (dwords & 1) {
			u32 *data = (u32 *)
			    ((char *)dev->agp_buffer_map->handle
			     + buf->offset + start);
			data[dwords++] = cpu_to_le32(R128_CCE_PACKET2);
		}

		buf_priv->dispatched = 1;

		/* Fire off the indirect buffer */
		BEGIN_RING(3);

		OUT_RING(CCE_PACKET0(R128_PM4_IW_INDOFF, 1));
		OUT_RING(offset);
		OUT_RING(dwords);

		ADVANCE_RING();
	}

	if (buf_priv->discard) {
		buf_priv->age = dev_priv->sarea_priv->last_dispatch;

		/* Emit the indirect buffer age */
		BEGIN_RING(2);

		OUT_RING(CCE_PACKET0(R128_LAST_DISPATCH_REG, 0));
		OUT_RING(buf_priv->age);

		ADVANCE_RING();

		buf->pending = 1;
		buf->used = 0;
		/* FIXME: Check dispatched field */
		buf_priv->dispatched = 0;
	}

	dev_priv->sarea_priv->last_dispatch++;
}
/* Dispatch an indexed-primitive buffer.  The render packet header and
 * parameters are written directly into the client buffer (data[0..4]),
 * ahead of the indices; with an odd index count the unused half of the
 * final dword is masked off (which half depends on endianness).  The
 * buffer is then re-fired via r128_cce_dispatch_indirect once per
 * group of up to three cliprects.
 */
static void r128_cce_dispatch_indices(drm_device_t * dev,
				      drm_buf_t * buf,
				      int start, int end, int count)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_buf_priv_t *buf_priv = buf->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int format = sarea_priv->vc_format;
	int offset = dev->agp_buffer_map->offset - dev_priv->cce_buffers_offset;
	int prim = buf_priv->prim;
	u32 *data;
	int dwords;
	int i = 0;
	RING_LOCALS;
	DRM_DEBUG("indices: s=%d e=%d c=%d\n", start, end, count);

	/* Compiled-out debug aid; flip to 1 to trace dirty bits. */
	if (0)
		r128_print_dirty("dispatch_indices", sarea_priv->dirty);

	if (start != end) {
		buf_priv->dispatched = 1;

		if (sarea_priv->dirty & ~R128_UPLOAD_CLIPRECTS) {
			r128_emit_state(dev_priv);
		}

		dwords = (end - start + 3) / sizeof(u32);

		data = (u32 *) ((char *)dev->agp_buffer_map->handle
				+ buf->offset + start);

		data[0] = cpu_to_le32(CCE_PACKET3(R128_3D_RNDR_GEN_INDX_PRIM,
						  dwords - 2));

		data[1] = cpu_to_le32(offset);
		data[2] = cpu_to_le32(R128_MAX_VB_VERTS);
		data[3] = cpu_to_le32(format);
		data[4] = cpu_to_le32((prim | R128_CCE_VC_CNTL_PRIM_WALK_IND |
				       (count << 16)));

		if (count & 0x1) {
			/* Zero the unused 16-bit index in the last dword. */
#ifdef __LITTLE_ENDIAN
			data[dwords - 1] &= 0x0000ffff;
#else
			data[dwords - 1] &= 0xffff0000;
#endif
		}

		do {
			/* Emit the next set of up to three cliprects */
			if (i < sarea_priv->nbox) {
				r128_emit_clip_rects(dev_priv,
						     &sarea_priv->boxes[i],
						     sarea_priv->nbox - i);
			}

			r128_cce_dispatch_indirect(dev, buf, start, end);

			i += 3;
		} while (i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		buf_priv->age = dev_priv->sarea_priv->last_dispatch;

		/* Emit the vertex buffer age */
		BEGIN_RING(2);

		OUT_RING(CCE_PACKET0(R128_LAST_DISPATCH_REG, 0));
		OUT_RING(buf_priv->age);

		ADVANCE_RING();

		buf->pending = 1;
		/* FIXME: Check dispatched field */
		buf_priv->dispatched = 0;
	}

	dev_priv->sarea_priv->last_dispatch++;

	sarea_priv->dirty &= ~R128_UPLOAD_CLIPRECTS;
	sarea_priv->nbox = 0;
}
/* Upload texture data with a host-data blit.  Builds a HOSTDATA_BLT
 * packet directly in the caller's DMA buffer (which must belong to
 * filp and not be pending) and dispatches it, bracketing the blit with
 * pixel-cache flushes so texture and pixel data don't get mixed.
 * Returns 0 on success or a negative DRM error code.
 *
 * NOTE(review): dwords is derived from blit->width/height without an
 * upper-bound check against the buffer size -- presumably validated by
 * the caller; verify at the ioctl entry point.
 */
static int r128_cce_dispatch_blit(DRMFILE filp,
				  drm_device_t * dev, drm_r128_blit_t * blit)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_device_dma_t *dma = dev->dma;
	drm_buf_t *buf;
	drm_r128_buf_priv_t *buf_priv;
	u32 *data;
	int dword_shift, dwords;
	RING_LOCALS;
	DRM_DEBUG("\n");

	/* The compiler won't optimize away a division by a variable,
	 * even if the only legal values are powers of two.  Thus, we'll
	 * use a shift instead.
	 */
	switch (blit->format) {
	case R128_DATATYPE_ARGB8888:
		dword_shift = 0;	/* 4 bytes/texel: 1 texel per dword */
		break;
	case R128_DATATYPE_ARGB1555:
	case R128_DATATYPE_RGB565:
	case R128_DATATYPE_ARGB4444:
	case R128_DATATYPE_YVYU422:
	case R128_DATATYPE_VYUY422:
		dword_shift = 1;	/* 2 bytes/texel: 2 texels per dword */
		break;
	case R128_DATATYPE_CI8:
	case R128_DATATYPE_RGB8:
		dword_shift = 2;	/* 1 byte/texel: 4 texels per dword */
		break;
	default:
		DRM_ERROR("invalid blit format %d\n", blit->format);
		return DRM_ERR(EINVAL);
	}

	/* Flush the pixel cache, and mark the contents as Read Invalid.
	 * This ensures no pixel data gets mixed up with the texture
	 * data from the host data blit, otherwise part of the texture
	 * image may be corrupted.
	 */
	BEGIN_RING(2);

	OUT_RING(CCE_PACKET0(R128_PC_GUI_CTLSTAT, 0));
	OUT_RING(R128_PC_RI_GUI | R128_PC_FLUSH_GUI);

	ADVANCE_RING();

	/* Dispatch the indirect buffer.
	 */
	buf = dma->buflist[blit->idx];
	buf_priv = buf->dev_private;

	if (buf->filp != filp) {
		DRM_ERROR("process %d using buffer owned by %p\n",
			  DRM_CURRENTPID, buf->filp);
		return DRM_ERR(EINVAL);
	}

	if (buf->pending) {
		DRM_ERROR("sending pending buffer %d\n", blit->idx);
		return DRM_ERR(EINVAL);
	}

	buf_priv->discard = 1;

	dwords = (blit->width * blit->height) >> dword_shift;

	/* Write the blit packet header and parameters ahead of the
	 * texel data already placed in the buffer by the client.
	 */
	data = (u32 *) ((char *)dev->agp_buffer_map->handle + buf->offset);

	data[0] = cpu_to_le32(CCE_PACKET3(R128_CNTL_HOSTDATA_BLT, dwords + 6));
	data[1] = cpu_to_le32((R128_GMC_DST_PITCH_OFFSET_CNTL |
			       R128_GMC_BRUSH_NONE |
			       (blit->format << 8) |
			       R128_GMC_SRC_DATATYPE_COLOR |
			       R128_ROP3_S |
			       R128_DP_SRC_SOURCE_HOST_DATA |
			       R128_GMC_CLR_CMP_CNTL_DIS |
			       R128_GMC_AUX_CLIP_DIS | R128_GMC_WR_MSK_DIS));

	data[2] = cpu_to_le32((blit->pitch << 21) | (blit->offset >> 5));
	data[3] = cpu_to_le32(0xffffffff);
	data[4] = cpu_to_le32(0xffffffff);
	data[5] = cpu_to_le32((blit->y << 16) | blit->x);
	data[6] = cpu_to_le32((blit->height << 16) | blit->width);
	data[7] = cpu_to_le32(dwords);

	buf->used = (dwords + 8) * sizeof(u32);

	r128_cce_dispatch_indirect(dev, buf, 0, buf->used);

	/* Flush the pixel cache after the blit completes.  This ensures
	 * the texture data is written out to memory before rendering
	 * continues.
	 */
	BEGIN_RING(2);

	OUT_RING(CCE_PACKET0(R128_PC_GUI_CTLSTAT, 0));
	OUT_RING(R128_PC_FLUSH_GUI);

	ADVANCE_RING();

	return 0;
}
  705. /* ================================================================
  706. * Tiled depth buffer management
  707. *
  708. * FIXME: These should all set the destination write mask for when we
  709. * have hardware stencil support.
  710. */
  711. static int r128_cce_dispatch_write_span(drm_device_t * dev,
  712. drm_r128_depth_t * depth)
  713. {
  714. drm_r128_private_t *dev_priv = dev->dev_private;
  715. int count, x, y;
  716. u32 *buffer;
  717. u8 *mask;
  718. int i, buffer_size, mask_size;
  719. RING_LOCALS;
  720. DRM_DEBUG("\n");
  721. count = depth->n;
  722. if (count > 4096 || count <= 0)
  723. return DRM_ERR(EMSGSIZE);
  724. if (DRM_COPY_FROM_USER(&x, depth->x, sizeof(x))) {
  725. return DRM_ERR(EFAULT);
  726. }
  727. if (DRM_COPY_FROM_USER(&y, depth->y, sizeof(y))) {
  728. return DRM_ERR(EFAULT);
  729. }
  730. buffer_size = depth->n * sizeof(u32);
  731. buffer = drm_alloc(buffer_size, DRM_MEM_BUFS);
  732. if (buffer == NULL)
  733. return DRM_ERR(ENOMEM);
  734. if (DRM_COPY_FROM_USER(buffer, depth->buffer, buffer_size)) {
  735. drm_free(buffer, buffer_size, DRM_MEM_BUFS);
  736. return DRM_ERR(EFAULT);
  737. }
  738. mask_size = depth->n * sizeof(u8);
  739. if (depth->mask) {
  740. mask = drm_alloc(mask_size, DRM_MEM_BUFS);
  741. if (mask == NULL) {
  742. drm_free(buffer, buffer_size, DRM_MEM_BUFS);
  743. return DRM_ERR(ENOMEM);
  744. }
  745. if (DRM_COPY_FROM_USER(mask, depth->mask, mask_size)) {
  746. drm_free(buffer, buffer_size, DRM_MEM_BUFS);
  747. drm_free(mask, mask_size, DRM_MEM_BUFS);
  748. return DRM_ERR(EFAULT);
  749. }
  750. for (i = 0; i < count; i++, x++) {
  751. if (mask[i]) {
  752. BEGIN_RING(6);
  753. OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
  754. OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
  755. R128_GMC_BRUSH_SOLID_COLOR |
  756. (dev_priv->depth_fmt << 8) |
  757. R128_GMC_SRC_DATATYPE_COLOR |
  758. R128_ROP3_P |
  759. R128_GMC_CLR_CMP_CNTL_DIS |
  760. R128_GMC_WR_MSK_DIS);
  761. OUT_RING(dev_priv->depth_pitch_offset_c);
  762. OUT_RING(buffer[i]);
  763. OUT_RING((x << 16) | y);
  764. OUT_RING((1 << 16) | 1);
  765. ADVANCE_RING();
  766. }
  767. }
  768. drm_free(mask, mask_size, DRM_MEM_BUFS);
  769. } else {
  770. for (i = 0; i < count; i++, x++) {
  771. BEGIN_RING(6);
  772. OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
  773. OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
  774. R128_GMC_BRUSH_SOLID_COLOR |
  775. (dev_priv->depth_fmt << 8) |
  776. R128_GMC_SRC_DATATYPE_COLOR |
  777. R128_ROP3_P |
  778. R128_GMC_CLR_CMP_CNTL_DIS |
  779. R128_GMC_WR_MSK_DIS);
  780. OUT_RING(dev_priv->depth_pitch_offset_c);
  781. OUT_RING(buffer[i]);
  782. OUT_RING((x << 16) | y);
  783. OUT_RING((1 << 16) | 1);
  784. ADVANCE_RING();
  785. }
  786. }
  787. drm_free(buffer, buffer_size, DRM_MEM_BUFS);
  788. return 0;
  789. }
  790. static int r128_cce_dispatch_write_pixels(drm_device_t * dev,
  791. drm_r128_depth_t * depth)
  792. {
  793. drm_r128_private_t *dev_priv = dev->dev_private;
  794. int count, *x, *y;
  795. u32 *buffer;
  796. u8 *mask;
  797. int i, xbuf_size, ybuf_size, buffer_size, mask_size;
  798. RING_LOCALS;
  799. DRM_DEBUG("\n");
  800. count = depth->n;
  801. if (count > 4096 || count <= 0)
  802. return DRM_ERR(EMSGSIZE);
  803. xbuf_size = count * sizeof(*x);
  804. ybuf_size = count * sizeof(*y);
  805. x = drm_alloc(xbuf_size, DRM_MEM_BUFS);
  806. if (x == NULL) {
  807. return DRM_ERR(ENOMEM);
  808. }
  809. y = drm_alloc(ybuf_size, DRM_MEM_BUFS);
  810. if (y == NULL) {
  811. drm_free(x, xbuf_size, DRM_MEM_BUFS);
  812. return DRM_ERR(ENOMEM);
  813. }
  814. if (DRM_COPY_FROM_USER(x, depth->x, xbuf_size)) {
  815. drm_free(x, xbuf_size, DRM_MEM_BUFS);
  816. drm_free(y, ybuf_size, DRM_MEM_BUFS);
  817. return DRM_ERR(EFAULT);
  818. }
  819. if (DRM_COPY_FROM_USER(y, depth->y, xbuf_size)) {
  820. drm_free(x, xbuf_size, DRM_MEM_BUFS);
  821. drm_free(y, ybuf_size, DRM_MEM_BUFS);
  822. return DRM_ERR(EFAULT);
  823. }
  824. buffer_size = depth->n * sizeof(u32);
  825. buffer = drm_alloc(buffer_size, DRM_MEM_BUFS);
  826. if (buffer == NULL) {
  827. drm_free(x, xbuf_size, DRM_MEM_BUFS);
  828. drm_free(y, ybuf_size, DRM_MEM_BUFS);
  829. return DRM_ERR(ENOMEM);
  830. }
  831. if (DRM_COPY_FROM_USER(buffer, depth->buffer, buffer_size)) {
  832. drm_free(x, xbuf_size, DRM_MEM_BUFS);
  833. drm_free(y, ybuf_size, DRM_MEM_BUFS);
  834. drm_free(buffer, buffer_size, DRM_MEM_BUFS);
  835. return DRM_ERR(EFAULT);
  836. }
  837. if (depth->mask) {
  838. mask_size = depth->n * sizeof(u8);
  839. mask = drm_alloc(mask_size, DRM_MEM_BUFS);
  840. if (mask == NULL) {
  841. drm_free(x, xbuf_size, DRM_MEM_BUFS);
  842. drm_free(y, ybuf_size, DRM_MEM_BUFS);
  843. drm_free(buffer, buffer_size, DRM_MEM_BUFS);
  844. return DRM_ERR(ENOMEM);
  845. }
  846. if (DRM_COPY_FROM_USER(mask, depth->mask, mask_size)) {
  847. drm_free(x, xbuf_size, DRM_MEM_BUFS);
  848. drm_free(y, ybuf_size, DRM_MEM_BUFS);
  849. drm_free(buffer, buffer_size, DRM_MEM_BUFS);
  850. drm_free(mask, mask_size, DRM_MEM_BUFS);
  851. return DRM_ERR(EFAULT);
  852. }
  853. for (i = 0; i < count; i++) {
  854. if (mask[i]) {
  855. BEGIN_RING(6);
  856. OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
  857. OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
  858. R128_GMC_BRUSH_SOLID_COLOR |
  859. (dev_priv->depth_fmt << 8) |
  860. R128_GMC_SRC_DATATYPE_COLOR |
  861. R128_ROP3_P |
  862. R128_GMC_CLR_CMP_CNTL_DIS |
  863. R128_GMC_WR_MSK_DIS);
  864. OUT_RING(dev_priv->depth_pitch_offset_c);
  865. OUT_RING(buffer[i]);
  866. OUT_RING((x[i] << 16) | y[i]);
  867. OUT_RING((1 << 16) | 1);
  868. ADVANCE_RING();
  869. }
  870. }
  871. drm_free(mask, mask_size, DRM_MEM_BUFS);
  872. } else {
  873. for (i = 0; i < count; i++) {
  874. BEGIN_RING(6);
  875. OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
  876. OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
  877. R128_GMC_BRUSH_SOLID_COLOR |
  878. (dev_priv->depth_fmt << 8) |
  879. R128_GMC_SRC_DATATYPE_COLOR |
  880. R128_ROP3_P |
  881. R128_GMC_CLR_CMP_CNTL_DIS |
  882. R128_GMC_WR_MSK_DIS);
  883. OUT_RING(dev_priv->depth_pitch_offset_c);
  884. OUT_RING(buffer[i]);
  885. OUT_RING((x[i] << 16) | y[i]);
  886. OUT_RING((1 << 16) | 1);
  887. ADVANCE_RING();
  888. }
  889. }
  890. drm_free(x, xbuf_size, DRM_MEM_BUFS);
  891. drm_free(y, ybuf_size, DRM_MEM_BUFS);
  892. drm_free(buffer, buffer_size, DRM_MEM_BUFS);
  893. return 0;
  894. }
/* Blit a horizontal span of "count" depth values from the depth buffer
 * into the scratch span region with a single count x 1 BITBLT starting
 * at (x, y).  Nothing is copied to user space here; user space reads
 * the results back from the span region.
 *
 * Returns 0 on success or a negative DRM error code.
 */
static int r128_cce_dispatch_read_span(drm_device_t * dev,
				       drm_r128_depth_t * depth)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	int count, x, y;
	RING_LOCALS;
	DRM_DEBUG("\n");

	/* Bound the request before touching any user pointers. */
	count = depth->n;
	if (count > 4096 || count <= 0)
		return DRM_ERR(EMSGSIZE);

	if (DRM_COPY_FROM_USER(&x, depth->x, sizeof(x))) {
		return DRM_ERR(EFAULT);
	}
	if (DRM_COPY_FROM_USER(&y, depth->y, sizeof(y))) {
		return DRM_ERR(EFAULT);
	}

	BEGIN_RING(7);
	OUT_RING(CCE_PACKET3(R128_CNTL_BITBLT_MULTI, 5));
	OUT_RING(R128_GMC_SRC_PITCH_OFFSET_CNTL |
		 R128_GMC_DST_PITCH_OFFSET_CNTL |
		 R128_GMC_BRUSH_NONE |
		 (dev_priv->depth_fmt << 8) |
		 R128_GMC_SRC_DATATYPE_COLOR |
		 R128_ROP3_S |
		 R128_DP_SRC_SOURCE_MEMORY |
		 R128_GMC_CLR_CMP_CNTL_DIS | R128_GMC_WR_MSK_DIS);
	OUT_RING(dev_priv->depth_pitch_offset_c);	/* source: depth buffer */
	OUT_RING(dev_priv->span_pitch_offset_c);	/* dest: span region */
	OUT_RING((x << 16) | y);	/* source origin */
	OUT_RING((0 << 16) | 0);	/* dest origin */
	OUT_RING((count << 16) | 1);	/* width x height */
	ADVANCE_RING();

	return 0;
}
/* Blit individual depth pixels from arbitrary (x[i], y[i]) coordinates
 * in the depth buffer into consecutive columns of the scratch span
 * region, one 1x1 BITBLT each.  User space reads the results back from
 * the span region afterwards.
 *
 * Returns 0 on success or a negative DRM error code.
 */
static int r128_cce_dispatch_read_pixels(drm_device_t * dev,
					 drm_r128_depth_t * depth)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	int count, *x, *y;
	int i, xbuf_size, ybuf_size;
	RING_LOCALS;
	DRM_DEBUG("%s\n", __FUNCTION__);

	count = depth->n;
	if (count > 4096 || count <= 0)
		return DRM_ERR(EMSGSIZE);

	/* Pixel i lands at column i of the span region, so at most one
	 * row's worth of pixels can be read per call.
	 */
	if (count > dev_priv->depth_pitch) {
		count = dev_priv->depth_pitch;
	}

	xbuf_size = count * sizeof(*x);
	ybuf_size = count * sizeof(*y);
	x = drm_alloc(xbuf_size, DRM_MEM_BUFS);
	if (x == NULL) {
		return DRM_ERR(ENOMEM);
	}
	y = drm_alloc(ybuf_size, DRM_MEM_BUFS);
	if (y == NULL) {
		drm_free(x, xbuf_size, DRM_MEM_BUFS);
		return DRM_ERR(ENOMEM);
	}
	if (DRM_COPY_FROM_USER(x, depth->x, xbuf_size)) {
		drm_free(x, xbuf_size, DRM_MEM_BUFS);
		drm_free(y, ybuf_size, DRM_MEM_BUFS);
		return DRM_ERR(EFAULT);
	}
	if (DRM_COPY_FROM_USER(y, depth->y, ybuf_size)) {
		drm_free(x, xbuf_size, DRM_MEM_BUFS);
		drm_free(y, ybuf_size, DRM_MEM_BUFS);
		return DRM_ERR(EFAULT);
	}

	for (i = 0; i < count; i++) {
		BEGIN_RING(7);
		OUT_RING(CCE_PACKET3(R128_CNTL_BITBLT_MULTI, 5));
		OUT_RING(R128_GMC_SRC_PITCH_OFFSET_CNTL |
			 R128_GMC_DST_PITCH_OFFSET_CNTL |
			 R128_GMC_BRUSH_NONE |
			 (dev_priv->depth_fmt << 8) |
			 R128_GMC_SRC_DATATYPE_COLOR |
			 R128_ROP3_S |
			 R128_DP_SRC_SOURCE_MEMORY |
			 R128_GMC_CLR_CMP_CNTL_DIS | R128_GMC_WR_MSK_DIS);
		OUT_RING(dev_priv->depth_pitch_offset_c);	/* source */
		OUT_RING(dev_priv->span_pitch_offset_c);	/* dest */
		OUT_RING((x[i] << 16) | y[i]);	/* source pixel */
		OUT_RING((i << 16) | 0);	/* dest column i, row 0 */
		OUT_RING((1 << 16) | 1);	/* 1x1 blit */
		ADVANCE_RING();
	}

	drm_free(x, xbuf_size, DRM_MEM_BUFS);
	drm_free(y, ybuf_size, DRM_MEM_BUFS);
	return 0;
}
  986. /* ================================================================
  987. * Polygon stipple
  988. */
  989. static void r128_cce_dispatch_stipple(drm_device_t * dev, u32 * stipple)
  990. {
  991. drm_r128_private_t *dev_priv = dev->dev_private;
  992. int i;
  993. RING_LOCALS;
  994. DRM_DEBUG("%s\n", __FUNCTION__);
  995. BEGIN_RING(33);
  996. OUT_RING(CCE_PACKET0(R128_BRUSH_DATA0, 31));
  997. for (i = 0; i < 32; i++) {
  998. OUT_RING(stipple[i]);
  999. }
  1000. ADVANCE_RING();
  1001. }
  1002. /* ================================================================
  1003. * IOCTL functions
  1004. */
  1005. static int r128_cce_clear(DRM_IOCTL_ARGS)
  1006. {
  1007. DRM_DEVICE;
  1008. drm_r128_private_t *dev_priv = dev->dev_private;
  1009. drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
  1010. drm_r128_clear_t clear;
  1011. DRM_DEBUG("\n");
  1012. LOCK_TEST_WITH_RETURN(dev, filp);
  1013. DRM_COPY_FROM_USER_IOCTL(clear, (drm_r128_clear_t __user *) data,
  1014. sizeof(clear));
  1015. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1016. if (sarea_priv->nbox > R128_NR_SAREA_CLIPRECTS)
  1017. sarea_priv->nbox = R128_NR_SAREA_CLIPRECTS;
  1018. r128_cce_dispatch_clear(dev, &clear);
  1019. COMMIT_RING();
  1020. /* Make sure we restore the 3D state next time.
  1021. */
  1022. dev_priv->sarea_priv->dirty |= R128_UPLOAD_CONTEXT | R128_UPLOAD_MASKS;
  1023. return 0;
  1024. }
/* Begin page flipping: save the current CRTC offset registers so they
 * can be restored later, point the CRTC at the front buffer and enable
 * flip control.  Always returns 0.
 */
static int r128_do_init_pageflip(drm_device_t * dev)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	DRM_DEBUG("\n");

	/* Saved state is restored by r128_do_cleanup_pageflip(). */
	dev_priv->crtc_offset = R128_READ(R128_CRTC_OFFSET);
	dev_priv->crtc_offset_cntl = R128_READ(R128_CRTC_OFFSET_CNTL);

	R128_WRITE(R128_CRTC_OFFSET, dev_priv->front_offset);
	R128_WRITE(R128_CRTC_OFFSET_CNTL,
		   dev_priv->crtc_offset_cntl | R128_CRTC_OFFSET_FLIP_CNTL);

	dev_priv->page_flipping = 1;
	dev_priv->current_page = 0;
	/* Publish the current page through the shared SAREA. */
	dev_priv->sarea_priv->pfCurrentPage = dev_priv->current_page;

	return 0;
}
/* Stop page flipping: restore the CRTC registers saved by
 * r128_do_init_pageflip() and, if the back page is currently displayed,
 * flip once more so the front page is visible again.  Always returns 0.
 */
static int r128_do_cleanup_pageflip(drm_device_t * dev)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	DRM_DEBUG("\n");

	R128_WRITE(R128_CRTC_OFFSET, dev_priv->crtc_offset);
	R128_WRITE(R128_CRTC_OFFSET_CNTL, dev_priv->crtc_offset_cntl);

	/* Leave the front page on screen. */
	if (dev_priv->current_page != 0) {
		r128_cce_dispatch_flip(dev);
		COMMIT_RING();
	}

	dev_priv->page_flipping = 0;
	return 0;
}
  1052. /* Swapping and flipping are different operations, need different ioctls.
  1053. * They can & should be intermixed to support multiple 3d windows.
  1054. */
  1055. static int r128_cce_flip(DRM_IOCTL_ARGS)
  1056. {
  1057. DRM_DEVICE;
  1058. drm_r128_private_t *dev_priv = dev->dev_private;
  1059. DRM_DEBUG("%s\n", __FUNCTION__);
  1060. LOCK_TEST_WITH_RETURN(dev, filp);
  1061. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1062. if (!dev_priv->page_flipping)
  1063. r128_do_init_pageflip(dev);
  1064. r128_cce_dispatch_flip(dev);
  1065. COMMIT_RING();
  1066. return 0;
  1067. }
  1068. static int r128_cce_swap(DRM_IOCTL_ARGS)
  1069. {
  1070. DRM_DEVICE;
  1071. drm_r128_private_t *dev_priv = dev->dev_private;
  1072. drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
  1073. DRM_DEBUG("%s\n", __FUNCTION__);
  1074. LOCK_TEST_WITH_RETURN(dev, filp);
  1075. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1076. if (sarea_priv->nbox > R128_NR_SAREA_CLIPRECTS)
  1077. sarea_priv->nbox = R128_NR_SAREA_CLIPRECTS;
  1078. r128_cce_dispatch_swap(dev);
  1079. dev_priv->sarea_priv->dirty |= (R128_UPLOAD_CONTEXT |
  1080. R128_UPLOAD_MASKS);
  1081. COMMIT_RING();
  1082. return 0;
  1083. }
/* R128_VERTEX ioctl: queue a client-submitted vertex buffer for
 * hardware processing.  Validates the buffer index, primitive type,
 * buffer ownership and pending state before dispatching.
 */
static int r128_cce_vertex(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_device_dma_t *dma = dev->dma;
	drm_buf_t *buf;
	drm_r128_buf_priv_t *buf_priv;
	drm_r128_vertex_t vertex;

	LOCK_TEST_WITH_RETURN(dev, filp);

	if (!dev_priv) {
		DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
		return DRM_ERR(EINVAL);
	}

	DRM_COPY_FROM_USER_IOCTL(vertex, (drm_r128_vertex_t __user *) data,
				 sizeof(vertex));

	DRM_DEBUG("pid=%d index=%d count=%d discard=%d\n",
		  DRM_CURRENTPID, vertex.idx, vertex.count, vertex.discard);

	if (vertex.idx < 0 || vertex.idx >= dma->buf_count) {
		DRM_ERROR("buffer index %d (of %d max)\n",
			  vertex.idx, dma->buf_count - 1);
		return DRM_ERR(EINVAL);
	}
	if (vertex.prim < 0 ||
	    vertex.prim > R128_CCE_VC_CNTL_PRIM_TYPE_TRI_TYPE2) {
		DRM_ERROR("buffer prim %d\n", vertex.prim);
		return DRM_ERR(EINVAL);
	}

	RING_SPACE_TEST_WITH_RETURN(dev_priv);
	VB_AGE_TEST_WITH_RETURN(dev_priv);

	buf = dma->buflist[vertex.idx];
	buf_priv = buf->dev_private;

	/* Only the client that owns the buffer may submit it. */
	if (buf->filp != filp) {
		DRM_ERROR("process %d using buffer owned by %p\n",
			  DRM_CURRENTPID, buf->filp);
		return DRM_ERR(EINVAL);
	}
	if (buf->pending) {
		DRM_ERROR("sending pending buffer %d\n", vertex.idx);
		return DRM_ERR(EINVAL);
	}

	buf->used = vertex.count;
	buf_priv->prim = vertex.prim;
	buf_priv->discard = vertex.discard;

	r128_cce_dispatch_vertex(dev, buf);

	COMMIT_RING();
	return 0;
}
/* R128_INDICES ioctl: queue an indexed-primitive buffer for hardware
 * processing.  The buffer must contain a dispatch header before
 * elts.start, followed by 16-bit vertex indices up to elts.end.
 */
static int r128_cce_indices(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_device_dma_t *dma = dev->dma;
	drm_buf_t *buf;
	drm_r128_buf_priv_t *buf_priv;
	drm_r128_indices_t elts;
	int count;

	LOCK_TEST_WITH_RETURN(dev, filp);

	if (!dev_priv) {
		DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
		return DRM_ERR(EINVAL);
	}

	DRM_COPY_FROM_USER_IOCTL(elts, (drm_r128_indices_t __user *) data,
				 sizeof(elts));

	DRM_DEBUG("pid=%d buf=%d s=%d e=%d d=%d\n", DRM_CURRENTPID,
		  elts.idx, elts.start, elts.end, elts.discard);

	if (elts.idx < 0 || elts.idx >= dma->buf_count) {
		DRM_ERROR("buffer index %d (of %d max)\n",
			  elts.idx, dma->buf_count - 1);
		return DRM_ERR(EINVAL);
	}
	if (elts.prim < 0 || elts.prim > R128_CCE_VC_CNTL_PRIM_TYPE_TRI_TYPE2) {
		DRM_ERROR("buffer prim %d\n", elts.prim);
		return DRM_ERR(EINVAL);
	}

	RING_SPACE_TEST_WITH_RETURN(dev_priv);
	VB_AGE_TEST_WITH_RETURN(dev_priv);

	buf = dma->buflist[elts.idx];
	buf_priv = buf->dev_private;

	/* Only the client that owns the buffer may submit it. */
	if (buf->filp != filp) {
		DRM_ERROR("process %d using buffer owned by %p\n",
			  DRM_CURRENTPID, buf->filp);
		return DRM_ERR(EINVAL);
	}
	if (buf->pending) {
		DRM_ERROR("sending pending buffer %d\n", elts.idx);
		return DRM_ERR(EINVAL);
	}

	/* Number of 16-bit indices between start and end. */
	count = (elts.end - elts.start) / sizeof(u16);
	elts.start -= R128_INDEX_PRIM_OFFSET;

	if (elts.start & 0x7) {
		DRM_ERROR("misaligned buffer 0x%x\n", elts.start);
		return DRM_ERR(EINVAL);
	}
	/* The dispatch header must precede the index data. */
	if (elts.start < buf->used) {
		DRM_ERROR("no header 0x%x - 0x%x\n", elts.start, buf->used);
		return DRM_ERR(EINVAL);
	}
	/* NOTE(review): elts.end is not range-checked against the buffer
	 * size here — presumably validated elsewhere; confirm.
	 */

	buf->used = elts.end;
	buf_priv->prim = elts.prim;
	buf_priv->discard = elts.discard;

	r128_cce_dispatch_indices(dev, buf, elts.start, elts.end, count);

	COMMIT_RING();
	return 0;
}
  1188. static int r128_cce_blit(DRM_IOCTL_ARGS)
  1189. {
  1190. DRM_DEVICE;
  1191. drm_device_dma_t *dma = dev->dma;
  1192. drm_r128_private_t *dev_priv = dev->dev_private;
  1193. drm_r128_blit_t blit;
  1194. int ret;
  1195. LOCK_TEST_WITH_RETURN(dev, filp);
  1196. DRM_COPY_FROM_USER_IOCTL(blit, (drm_r128_blit_t __user *) data,
  1197. sizeof(blit));
  1198. DRM_DEBUG("pid=%d index=%d\n", DRM_CURRENTPID, blit.idx);
  1199. if (blit.idx < 0 || blit.idx >= dma->buf_count) {
  1200. DRM_ERROR("buffer index %d (of %d max)\n",
  1201. blit.idx, dma->buf_count - 1);
  1202. return DRM_ERR(EINVAL);
  1203. }
  1204. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1205. VB_AGE_TEST_WITH_RETURN(dev_priv);
  1206. ret = r128_cce_dispatch_blit(filp, dev, &blit);
  1207. COMMIT_RING();
  1208. return ret;
  1209. }
  1210. static int r128_cce_depth(DRM_IOCTL_ARGS)
  1211. {
  1212. DRM_DEVICE;
  1213. drm_r128_private_t *dev_priv = dev->dev_private;
  1214. drm_r128_depth_t depth;
  1215. int ret;
  1216. LOCK_TEST_WITH_RETURN(dev, filp);
  1217. DRM_COPY_FROM_USER_IOCTL(depth, (drm_r128_depth_t __user *) data,
  1218. sizeof(depth));
  1219. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1220. ret = DRM_ERR(EINVAL);
  1221. switch (depth.func) {
  1222. case R128_WRITE_SPAN:
  1223. ret = r128_cce_dispatch_write_span(dev, &depth);
  1224. break;
  1225. case R128_WRITE_PIXELS:
  1226. ret = r128_cce_dispatch_write_pixels(dev, &depth);
  1227. break;
  1228. case R128_READ_SPAN:
  1229. ret = r128_cce_dispatch_read_span(dev, &depth);
  1230. break;
  1231. case R128_READ_PIXELS:
  1232. ret = r128_cce_dispatch_read_pixels(dev, &depth);
  1233. break;
  1234. }
  1235. COMMIT_RING();
  1236. return ret;
  1237. }
  1238. static int r128_cce_stipple(DRM_IOCTL_ARGS)
  1239. {
  1240. DRM_DEVICE;
  1241. drm_r128_private_t *dev_priv = dev->dev_private;
  1242. drm_r128_stipple_t stipple;
  1243. u32 mask[32];
  1244. LOCK_TEST_WITH_RETURN(dev, filp);
  1245. DRM_COPY_FROM_USER_IOCTL(stipple, (drm_r128_stipple_t __user *) data,
  1246. sizeof(stipple));
  1247. if (DRM_COPY_FROM_USER(&mask, stipple.mask, 32 * sizeof(u32)))
  1248. return DRM_ERR(EFAULT);
  1249. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1250. r128_cce_dispatch_stipple(dev, mask);
  1251. COMMIT_RING();
  1252. return 0;
  1253. }
/* R128_INDIRECT ioctl: submit a raw command buffer from the X server.
 * Marked DRM_MASTER|DRM_ROOT_ONLY in the ioctl table because the buffer
 * contents are dispatched unverified.
 */
static int r128_cce_indirect(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_device_dma_t *dma = dev->dma;
	drm_buf_t *buf;
	drm_r128_buf_priv_t *buf_priv;
	drm_r128_indirect_t indirect;
#if 0
	RING_LOCALS;
#endif

	LOCK_TEST_WITH_RETURN(dev, filp);

	if (!dev_priv) {
		DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
		return DRM_ERR(EINVAL);
	}

	DRM_COPY_FROM_USER_IOCTL(indirect, (drm_r128_indirect_t __user *) data,
				 sizeof(indirect));

	DRM_DEBUG("indirect: idx=%d s=%d e=%d d=%d\n",
		  indirect.idx, indirect.start, indirect.end, indirect.discard);

	if (indirect.idx < 0 || indirect.idx >= dma->buf_count) {
		DRM_ERROR("buffer index %d (of %d max)\n",
			  indirect.idx, dma->buf_count - 1);
		return DRM_ERR(EINVAL);
	}

	buf = dma->buflist[indirect.idx];
	buf_priv = buf->dev_private;

	/* Only the client that owns the buffer may submit it. */
	if (buf->filp != filp) {
		DRM_ERROR("process %d using buffer owned by %p\n",
			  DRM_CURRENTPID, buf->filp);
		return DRM_ERR(EINVAL);
	}
	if (buf->pending) {
		DRM_ERROR("sending pending buffer %d\n", indirect.idx);
		return DRM_ERR(EINVAL);
	}
	/* New commands must start at or after the already-used region. */
	if (indirect.start < buf->used) {
		DRM_ERROR("reusing indirect: start=0x%x actual=0x%x\n",
			  indirect.start, buf->used);
		return DRM_ERR(EINVAL);
	}

	RING_SPACE_TEST_WITH_RETURN(dev_priv);
	VB_AGE_TEST_WITH_RETURN(dev_priv);

	buf->used = indirect.end;
	buf_priv->discard = indirect.discard;

#if 0
	/* Wait for the 3D stream to idle before the indirect buffer
	 * containing 2D acceleration commands is processed.
	 */
	BEGIN_RING(2);
	RADEON_WAIT_UNTIL_3D_IDLE();
	ADVANCE_RING();
#endif

	/* Dispatch the indirect buffer full of commands from the
	 * X server.  This is insecure and is thus only available to
	 * privileged clients.
	 */
	r128_cce_dispatch_indirect(dev, buf, indirect.start, indirect.end);

	COMMIT_RING();
	return 0;
}
  1315. static int r128_getparam(DRM_IOCTL_ARGS)
  1316. {
  1317. DRM_DEVICE;
  1318. drm_r128_private_t *dev_priv = dev->dev_private;
  1319. drm_r128_getparam_t param;
  1320. int value;
  1321. if (!dev_priv) {
  1322. DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
  1323. return DRM_ERR(EINVAL);
  1324. }
  1325. DRM_COPY_FROM_USER_IOCTL(param, (drm_r128_getparam_t __user *) data,
  1326. sizeof(param));
  1327. DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);
  1328. switch (param.param) {
  1329. case R128_PARAM_IRQ_NR:
  1330. value = dev->irq;
  1331. break;
  1332. default:
  1333. return DRM_ERR(EINVAL);
  1334. }
  1335. if (DRM_COPY_TO_USER(param.value, &value, sizeof(int))) {
  1336. DRM_ERROR("copy_to_user\n");
  1337. return DRM_ERR(EFAULT);
  1338. }
  1339. return 0;
  1340. }
  1341. void r128_driver_preclose(drm_device_t * dev, DRMFILE filp)
  1342. {
  1343. if (dev->dev_private) {
  1344. drm_r128_private_t *dev_priv = dev->dev_private;
  1345. if (dev_priv->page_flipping) {
  1346. r128_do_cleanup_pageflip(dev);
  1347. }
  1348. }
  1349. }
/* Called when the last file handle on the device is closed: tear down
 * the CCE engine and release its resources.
 */
void r128_driver_lastclose(drm_device_t * dev)
{
	r128_do_cleanup_cce(dev);
}
/* Ioctl dispatch table.  Engine setup/teardown and raw indirect-buffer
 * submission are restricted to the root-privileged DRM master; all
 * other entries only require DRM authentication (DRM_AUTH).
 */
drm_ioctl_desc_t r128_ioctls[] = {
	[DRM_IOCTL_NR(DRM_R128_INIT)] = {r128_cce_init, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY},
	[DRM_IOCTL_NR(DRM_R128_CCE_START)] = {r128_cce_start, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY},
	[DRM_IOCTL_NR(DRM_R128_CCE_STOP)] = {r128_cce_stop, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY},
	[DRM_IOCTL_NR(DRM_R128_CCE_RESET)] = {r128_cce_reset, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY},
	[DRM_IOCTL_NR(DRM_R128_CCE_IDLE)] = {r128_cce_idle, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_R128_RESET)] = {r128_engine_reset, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_R128_FULLSCREEN)] = {r128_fullscreen, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_R128_SWAP)] = {r128_cce_swap, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_R128_FLIP)] = {r128_cce_flip, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_R128_CLEAR)] = {r128_cce_clear, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_R128_VERTEX)] = {r128_cce_vertex, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_R128_INDICES)] = {r128_cce_indices, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_R128_BLIT)] = {r128_cce_blit, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_R128_DEPTH)] = {r128_cce_depth, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_R128_STIPPLE)] = {r128_cce_stipple, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_R128_INDIRECT)] = {r128_cce_indirect, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY},
	[DRM_IOCTL_NR(DRM_R128_GETPARAM)] = {r128_getparam, DRM_AUTH},
};
/* Number of entries in r128_ioctls. */
int r128_max_ioctl = DRM_ARRAY_SIZE(r128_ioctls);