r128_state.c 43 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370137113721373137413751376137713781379138013811382138313841385138613871388138913901391139213931394139513961397139813991400140114021403140414051406140714081409141014111412141314141415141614171418141914201421142214231424142514261427142814291430143114321433143414351436143714381439144014411442144314441445144614471448144914501451145214531454145514561457145814591460146114621463146414651466146714681469147014711472147314741475147614771478147914801481148214831484148514861487148814891490149114921493149414951496149714981499150015011502150315041505150615071508150915101511151215131514151515161517151815191520152115221523152415251526152715281529153015311532153315341535153615371538153915401541154215431544154515461547154815491550155115521553155415551556155715581559156015611562156315641565156615671568156915701571157215731574157515761577157815791580158115821583158415851586158715881589159015911592159315941595159615971598159916001601160216031604160516061607160816091610161116121613161416151616161716181619162016211622162316241625162616271628162916301631163216331634163516361637163816391640164116421643164416451646164716481649165016511652165316541655165616571658165916601661166216631664166516661667166816691670167116721673167416751676167716781679168016811682168316841685168616871688168916901691169216931694169516961697169816991700170117021703170417051706170717081709171017111712171317141715171617171718171917201721172217231724172517261727172817291730173117321733173417351736
  1. /* r128_state.c -- State support for r128 -*- linux-c -*-
  2. * Created: Thu Jan 27 02:53:43 2000 by gareth@valinux.com
  3. *
  4. * Copyright 2000 VA Linux Systems, Inc., Sunnyvale, California.
  5. * All Rights Reserved.
  6. *
  7. * Permission is hereby granted, free of charge, to any person obtaining a
  8. * copy of this software and associated documentation files (the "Software"),
  9. * to deal in the Software without restriction, including without limitation
  10. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  11. * and/or sell copies of the Software, and to permit persons to whom the
  12. * Software is furnished to do so, subject to the following conditions:
  13. *
  14. * The above copyright notice and this permission notice (including the next
  15. * paragraph) shall be included in all copies or substantial portions of the
  16. * Software.
  17. *
  18. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  19. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  20. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  21. * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
  22. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  23. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
  24. * DEALINGS IN THE SOFTWARE.
  25. *
  26. * Authors:
  27. * Gareth Hughes <gareth@valinux.com>
  28. */
  29. #include "drmP.h"
  30. #include "drm.h"
  31. #include "r128_drm.h"
  32. #include "r128_drv.h"
  33. /* ================================================================
  34. * CCE hardware state programming functions
  35. */
/* Program up to three hardware auxiliary scissor (clip) rectangles.
 *
 * The Rage 128 exposes three AUX scissor register sets, so only the
 * first three entries of 'boxes' are emitted here; callers with more
 * boxes loop over them three at a time.  DRM clip rects use exclusive
 * x2/y2, hence the "- 1" to convert to the inclusive hardware bounds.
 * AUX_SC_CNTL is written last to enable exactly the scissors that
 * were programmed (OR mode).
 */
static void r128_emit_clip_rects( drm_r128_private_t *dev_priv,
				  drm_clip_rect_t *boxes, int count )
{
	u32 aux_sc_cntl = 0x00000000;
	RING_LOCALS;
	DRM_DEBUG( " %s\n", __FUNCTION__ );

	/* 5 dwords per scissor plus the trailing AUX_SC_CNTL write. */
	BEGIN_RING( (count < 3? count: 3) * 5 + 2 );

	if ( count >= 1 ) {
		OUT_RING( CCE_PACKET0( R128_AUX1_SC_LEFT, 3 ) );
		OUT_RING( boxes[0].x1 );
		OUT_RING( boxes[0].x2 - 1 );
		OUT_RING( boxes[0].y1 );
		OUT_RING( boxes[0].y2 - 1 );
		aux_sc_cntl |= (R128_AUX1_SC_EN | R128_AUX1_SC_MODE_OR);
	}
	if ( count >= 2 ) {
		OUT_RING( CCE_PACKET0( R128_AUX2_SC_LEFT, 3 ) );
		OUT_RING( boxes[1].x1 );
		OUT_RING( boxes[1].x2 - 1 );
		OUT_RING( boxes[1].y1 );
		OUT_RING( boxes[1].y2 - 1 );
		aux_sc_cntl |= (R128_AUX2_SC_EN | R128_AUX2_SC_MODE_OR);
	}
	if ( count >= 3 ) {
		OUT_RING( CCE_PACKET0( R128_AUX3_SC_LEFT, 3 ) );
		OUT_RING( boxes[2].x1 );
		OUT_RING( boxes[2].x2 - 1 );
		OUT_RING( boxes[2].y1 );
		OUT_RING( boxes[2].y2 - 1 );
		aux_sc_cntl |= (R128_AUX3_SC_EN | R128_AUX3_SC_MODE_OR);
	}

	OUT_RING( CCE_PACKET0( R128_AUX_SC_CNTL, 0 ) );
	OUT_RING( aux_sc_cntl );

	ADVANCE_RING();
}
/* Emit the "core" state: the single SCALE_3D_CNTL register, taken
 * from the shared-area (SAREA) context state.
 */
static __inline__ void r128_emit_core( drm_r128_private_t *dev_priv )
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG( " %s\n", __FUNCTION__ );

	BEGIN_RING( 2 );

	OUT_RING( CCE_PACKET0( R128_SCALE_3D_CNTL, 0 ) );
	OUT_RING( ctx->scale_3d_cntl );

	ADVANCE_RING();
}
/* Emit the main 3D context state: a 12-register burst starting at
 * DST_PITCH_OFFSET_C.  The register values must be written in the
 * hardware's register-file order, matching the packet header count.
 */
static __inline__ void r128_emit_context( drm_r128_private_t *dev_priv )
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG( " %s\n", __FUNCTION__ );

	BEGIN_RING( 13 );

	OUT_RING( CCE_PACKET0( R128_DST_PITCH_OFFSET_C, 11 ) );
	OUT_RING( ctx->dst_pitch_offset_c );
	OUT_RING( ctx->dp_gui_master_cntl_c );
	OUT_RING( ctx->sc_top_left_c );
	OUT_RING( ctx->sc_bottom_right_c );
	OUT_RING( ctx->z_offset_c );
	OUT_RING( ctx->z_pitch_c );
	OUT_RING( ctx->z_sten_cntl_c );
	OUT_RING( ctx->tex_cntl_c );
	OUT_RING( ctx->misc_3d_state_cntl_reg );
	OUT_RING( ctx->texture_clr_cmp_clr_c );
	OUT_RING( ctx->texture_clr_cmp_msk_c );
	OUT_RING( ctx->fog_color_c );

	ADVANCE_RING();
}
/* Emit the vertex setup engine state (SETUP_CNTL and the FPU setup
 * register) via a type-1 packet addressing both registers.
 */
static __inline__ void r128_emit_setup( drm_r128_private_t *dev_priv )
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG( " %s\n", __FUNCTION__ );

	BEGIN_RING( 3 );

	OUT_RING( CCE_PACKET1( R128_SETUP_CNTL, R128_PM4_VC_FPU_SETUP ) );
	OUT_RING( ctx->setup_cntl );
	OUT_RING( ctx->pm4_vc_fpu_setup );

	ADVANCE_RING();
}
/* Emit the write masks: the 2D DP write mask, then the stencil
 * reference mask and 3D plane mask (two consecutive registers).
 */
static __inline__ void r128_emit_masks( drm_r128_private_t *dev_priv )
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG( " %s\n", __FUNCTION__ );

	BEGIN_RING( 5 );

	OUT_RING( CCE_PACKET0( R128_DP_WRITE_MASK, 0 ) );
	OUT_RING( ctx->dp_write_mask );

	OUT_RING( CCE_PACKET0( R128_STEN_REF_MASK_C, 1 ) );
	OUT_RING( ctx->sten_ref_mask_c );
	OUT_RING( ctx->plane_3d_mask_c );

	ADVANCE_RING();
}
/* Emit the window origin (WINDOW_XY_OFFSET) from the SAREA context
 * state.
 */
static __inline__ void r128_emit_window( drm_r128_private_t *dev_priv )
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG( " %s\n", __FUNCTION__ );

	BEGIN_RING( 2 );

	OUT_RING( CCE_PACKET0( R128_WINDOW_XY_OFFSET, 0 ) );
	OUT_RING( ctx->window_xy_offset );

	ADVANCE_RING();
}
/* Emit texture unit 0 state: primary texture control/combine
 * registers, size/pitch, the per-mipmap-level offsets, then the
 * constant color and border color as a second packet.
 */
static __inline__ void r128_emit_tex0( drm_r128_private_t *dev_priv )
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
	drm_r128_texture_regs_t *tex = &sarea_priv->tex_state[0];
	int i;
	RING_LOCALS;
	DRM_DEBUG( " %s\n", __FUNCTION__ );

	BEGIN_RING( 7 + R128_MAX_TEXTURE_LEVELS );

	/* 3 fixed registers + one offset per mipmap level. */
	OUT_RING( CCE_PACKET0( R128_PRIM_TEX_CNTL_C,
			       2 + R128_MAX_TEXTURE_LEVELS ) );
	OUT_RING( tex->tex_cntl );
	OUT_RING( tex->tex_combine_cntl );
	OUT_RING( ctx->tex_size_pitch_c );
	for ( i = 0 ; i < R128_MAX_TEXTURE_LEVELS ; i++ ) {
		OUT_RING( tex->tex_offset[i] );
	}

	OUT_RING( CCE_PACKET0( R128_CONSTANT_COLOR_C, 1 ) );
	OUT_RING( ctx->constant_color_c );
	OUT_RING( tex->tex_border_color );

	ADVANCE_RING();
}
/* Emit texture unit 1 state: secondary texture control/combine
 * registers and per-level offsets, then the secondary border color.
 */
static __inline__ void r128_emit_tex1( drm_r128_private_t *dev_priv )
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_r128_texture_regs_t *tex = &sarea_priv->tex_state[1];
	int i;
	RING_LOCALS;
	DRM_DEBUG( " %s\n", __FUNCTION__ );

	BEGIN_RING( 5 + R128_MAX_TEXTURE_LEVELS );

	/* 2 fixed registers + one offset per mipmap level. */
	OUT_RING( CCE_PACKET0( R128_SEC_TEX_CNTL_C,
			       1 + R128_MAX_TEXTURE_LEVELS ) );
	OUT_RING( tex->tex_cntl );
	OUT_RING( tex->tex_combine_cntl );
	for ( i = 0 ; i < R128_MAX_TEXTURE_LEVELS ; i++ ) {
		OUT_RING( tex->tex_offset[i] );
	}

	OUT_RING( CCE_PACKET0( R128_SEC_TEXTURE_BORDER_COLOR_C, 0 ) );
	OUT_RING( tex->tex_border_color );

	ADVANCE_RING();
}
/* Flush all dirty state from the SAREA to the hardware.
 *
 * Each R128_UPLOAD_* bit in sarea_priv->dirty selects one of the
 * emit helpers above; the bit is cleared as soon as that group has
 * been emitted.  After emission the one-shot texture-cache-flush
 * request and the quiescence flag are cleared as well.
 */
static __inline__ void r128_emit_state( drm_r128_private_t *dev_priv )
{
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int dirty = sarea_priv->dirty;

	DRM_DEBUG( "%s: dirty=0x%08x\n", __FUNCTION__, dirty );

	if ( dirty & R128_UPLOAD_CORE ) {
		r128_emit_core( dev_priv );
		sarea_priv->dirty &= ~R128_UPLOAD_CORE;
	}

	if ( dirty & R128_UPLOAD_CONTEXT ) {
		r128_emit_context( dev_priv );
		sarea_priv->dirty &= ~R128_UPLOAD_CONTEXT;
	}

	if ( dirty & R128_UPLOAD_SETUP ) {
		r128_emit_setup( dev_priv );
		sarea_priv->dirty &= ~R128_UPLOAD_SETUP;
	}

	if ( dirty & R128_UPLOAD_MASKS ) {
		r128_emit_masks( dev_priv );
		sarea_priv->dirty &= ~R128_UPLOAD_MASKS;
	}

	if ( dirty & R128_UPLOAD_WINDOW ) {
		r128_emit_window( dev_priv );
		sarea_priv->dirty &= ~R128_UPLOAD_WINDOW;
	}

	if ( dirty & R128_UPLOAD_TEX0 ) {
		r128_emit_tex0( dev_priv );
		sarea_priv->dirty &= ~R128_UPLOAD_TEX0;
	}

	if ( dirty & R128_UPLOAD_TEX1 ) {
		r128_emit_tex1( dev_priv );
		sarea_priv->dirty &= ~R128_UPLOAD_TEX1;
	}

	/* Turn off the texture cache flushing */
	sarea_priv->context_state.tex_cntl_c &= ~R128_TEX_CACHE_FLUSH;

	sarea_priv->dirty &= ~R128_REQUIRE_QUIESCENCE;
}
  219. #if R128_PERFORMANCE_BOXES
  220. /* ================================================================
  221. * Performance monitoring functions
  222. */
  223. static void r128_clear_box( drm_r128_private_t *dev_priv,
  224. int x, int y, int w, int h,
  225. int r, int g, int b )
  226. {
  227. u32 pitch, offset;
  228. u32 fb_bpp, color;
  229. RING_LOCALS;
  230. switch ( dev_priv->fb_bpp ) {
  231. case 16:
  232. fb_bpp = R128_GMC_DST_16BPP;
  233. color = (((r & 0xf8) << 8) |
  234. ((g & 0xfc) << 3) |
  235. ((b & 0xf8) >> 3));
  236. break;
  237. case 24:
  238. fb_bpp = R128_GMC_DST_24BPP;
  239. color = ((r << 16) | (g << 8) | b);
  240. break;
  241. case 32:
  242. fb_bpp = R128_GMC_DST_32BPP;
  243. color = (((0xff) << 24) | (r << 16) | (g << 8) | b);
  244. break;
  245. default:
  246. return;
  247. }
  248. offset = dev_priv->back_offset;
  249. pitch = dev_priv->back_pitch >> 3;
  250. BEGIN_RING( 6 );
  251. OUT_RING( CCE_PACKET3( R128_CNTL_PAINT_MULTI, 4 ) );
  252. OUT_RING( R128_GMC_DST_PITCH_OFFSET_CNTL |
  253. R128_GMC_BRUSH_SOLID_COLOR |
  254. fb_bpp |
  255. R128_GMC_SRC_DATATYPE_COLOR |
  256. R128_ROP3_P |
  257. R128_GMC_CLR_CMP_CNTL_DIS |
  258. R128_GMC_AUX_CLIP_DIS );
  259. OUT_RING( (pitch << 21) | (offset >> 5) );
  260. OUT_RING( color );
  261. OUT_RING( (x << 16) | y );
  262. OUT_RING( (w << 16) | h );
  263. ADVANCE_RING();
  264. }
  265. static void r128_cce_performance_boxes( drm_r128_private_t *dev_priv )
  266. {
  267. if ( atomic_read( &dev_priv->idle_count ) == 0 ) {
  268. r128_clear_box( dev_priv, 64, 4, 8, 8, 0, 255, 0 );
  269. } else {
  270. atomic_set( &dev_priv->idle_count, 0 );
  271. }
  272. }
  273. #endif
  274. /* ================================================================
  275. * CCE command dispatch functions
  276. */
  277. static void r128_print_dirty( const char *msg, unsigned int flags )
  278. {
  279. DRM_INFO( "%s: (0x%x) %s%s%s%s%s%s%s%s%s\n",
  280. msg,
  281. flags,
  282. (flags & R128_UPLOAD_CORE) ? "core, " : "",
  283. (flags & R128_UPLOAD_CONTEXT) ? "context, " : "",
  284. (flags & R128_UPLOAD_SETUP) ? "setup, " : "",
  285. (flags & R128_UPLOAD_TEX0) ? "tex0, " : "",
  286. (flags & R128_UPLOAD_TEX1) ? "tex1, " : "",
  287. (flags & R128_UPLOAD_MASKS) ? "masks, " : "",
  288. (flags & R128_UPLOAD_WINDOW) ? "window, " : "",
  289. (flags & R128_UPLOAD_CLIPRECTS) ? "cliprects, " : "",
  290. (flags & R128_REQUIRE_QUIESCENCE) ? "quiescence, " : "" );
  291. }
/* Dispatch a buffer-clear request: for every cliprect in the SAREA,
 * clear whichever of the front, back and depth buffers are selected
 * in clear->flags using solid-fill PAINT_MULTI packets.
 *
 * When page flipping is active and the back buffer is currently
 * displayed, the FRONT/BACK flags are swapped so the client's notion
 * of "front" and "back" still maps to the right physical buffers.
 */
static void r128_cce_dispatch_clear( drm_device_t *dev,
				     drm_r128_clear_t *clear )
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int nbox = sarea_priv->nbox;
	drm_clip_rect_t *pbox = sarea_priv->boxes;
	unsigned int flags = clear->flags;
	int i;
	RING_LOCALS;
	DRM_DEBUG( "%s\n", __FUNCTION__ );

	if ( dev_priv->page_flipping && dev_priv->current_page == 1 ) {
		unsigned int tmp = flags;

		/* Swap front/back when the pages are flipped. */
		flags &= ~(R128_FRONT | R128_BACK);
		if ( tmp & R128_FRONT ) flags |= R128_BACK;
		if ( tmp & R128_BACK )  flags |= R128_FRONT;
	}

	for ( i = 0 ; i < nbox ; i++ ) {
		int x = pbox[i].x1;
		int y = pbox[i].y1;
		int w = pbox[i].x2 - x;
		int h = pbox[i].y2 - y;

		DRM_DEBUG( "dispatch clear %d,%d-%d,%d flags 0x%x\n",
			   pbox[i].x1, pbox[i].y1, pbox[i].x2,
			   pbox[i].y2, flags );

		/* Color clears honor the client's color write mask. */
		if ( flags & (R128_FRONT | R128_BACK) ) {
			BEGIN_RING( 2 );

			OUT_RING( CCE_PACKET0( R128_DP_WRITE_MASK, 0 ) );
			OUT_RING( clear->color_mask );

			ADVANCE_RING();
		}

		if ( flags & R128_FRONT ) {
			BEGIN_RING( 6 );

			OUT_RING( CCE_PACKET3( R128_CNTL_PAINT_MULTI, 4 ) );
			OUT_RING( R128_GMC_DST_PITCH_OFFSET_CNTL |
				  R128_GMC_BRUSH_SOLID_COLOR |
				  (dev_priv->color_fmt << 8) |
				  R128_GMC_SRC_DATATYPE_COLOR |
				  R128_ROP3_P |
				  R128_GMC_CLR_CMP_CNTL_DIS |
				  R128_GMC_AUX_CLIP_DIS );

			OUT_RING( dev_priv->front_pitch_offset_c );
			OUT_RING( clear->clear_color );

			OUT_RING( (x << 16) | y );
			OUT_RING( (w << 16) | h );

			ADVANCE_RING();
		}

		if ( flags & R128_BACK ) {
			BEGIN_RING( 6 );

			OUT_RING( CCE_PACKET3( R128_CNTL_PAINT_MULTI, 4 ) );
			OUT_RING( R128_GMC_DST_PITCH_OFFSET_CNTL |
				  R128_GMC_BRUSH_SOLID_COLOR |
				  (dev_priv->color_fmt << 8) |
				  R128_GMC_SRC_DATATYPE_COLOR |
				  R128_ROP3_P |
				  R128_GMC_CLR_CMP_CNTL_DIS |
				  R128_GMC_AUX_CLIP_DIS );

			OUT_RING( dev_priv->back_pitch_offset_c );
			OUT_RING( clear->clear_color );

			OUT_RING( (x << 16) | y );
			OUT_RING( (w << 16) | h );

			ADVANCE_RING();
		}

		if ( flags & R128_DEPTH ) {
			/* Depth clear bypasses the write mask entirely. */
			BEGIN_RING( 6 );

			OUT_RING( CCE_PACKET3( R128_CNTL_PAINT_MULTI, 4 ) );
			OUT_RING( R128_GMC_DST_PITCH_OFFSET_CNTL |
				  R128_GMC_BRUSH_SOLID_COLOR |
				  (dev_priv->depth_fmt << 8) |
				  R128_GMC_SRC_DATATYPE_COLOR |
				  R128_ROP3_P |
				  R128_GMC_CLR_CMP_CNTL_DIS |
				  R128_GMC_AUX_CLIP_DIS |
				  R128_GMC_WR_MSK_DIS );

			OUT_RING( dev_priv->depth_pitch_offset_c );
			OUT_RING( clear->clear_depth );

			OUT_RING( (x << 16) | y );
			OUT_RING( (w << 16) | h );

			ADVANCE_RING();
		}
	}
}
/* Dispatch a swap-buffers request: blit the back buffer to the front
 * buffer for each cliprect, then bump the last_frame counter used by
 * the client-side driver for frame throttling.
 */
static void r128_cce_dispatch_swap( drm_device_t *dev )
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int nbox = sarea_priv->nbox;
	drm_clip_rect_t *pbox = sarea_priv->boxes;
	int i;
	RING_LOCALS;
	DRM_DEBUG( "%s\n", __FUNCTION__ );

#if R128_PERFORMANCE_BOXES
	/* Do some trivial performance monitoring...
	 */
	r128_cce_performance_boxes( dev_priv );
#endif

	for ( i = 0 ; i < nbox ; i++ ) {
		int x = pbox[i].x1;
		int y = pbox[i].y1;
		int w = pbox[i].x2 - x;
		int h = pbox[i].y2 - y;

		BEGIN_RING( 7 );

		OUT_RING( CCE_PACKET3( R128_CNTL_BITBLT_MULTI, 5 ) );
		OUT_RING( R128_GMC_SRC_PITCH_OFFSET_CNTL |
			  R128_GMC_DST_PITCH_OFFSET_CNTL |
			  R128_GMC_BRUSH_NONE |
			  (dev_priv->color_fmt << 8) |
			  R128_GMC_SRC_DATATYPE_COLOR |
			  R128_ROP3_S |
			  R128_DP_SRC_SOURCE_MEMORY |
			  R128_GMC_CLR_CMP_CNTL_DIS |
			  R128_GMC_AUX_CLIP_DIS |
			  R128_GMC_WR_MSK_DIS );

		/* Make this work even if front & back are flipped:
		 */
		if (dev_priv->current_page == 0) {
			OUT_RING( dev_priv->back_pitch_offset_c );
			OUT_RING( dev_priv->front_pitch_offset_c );
		}
		else {
			OUT_RING( dev_priv->front_pitch_offset_c );
			OUT_RING( dev_priv->back_pitch_offset_c );
		}

		/* Source and destination coordinates are identical. */
		OUT_RING( (x << 16) | y );
		OUT_RING( (x << 16) | y );
		OUT_RING( (w << 16) | h );

		ADVANCE_RING();
	}

	/* Increment the frame counter.  The client-side 3D driver must
	 * throttle the framerate by waiting for this value before
	 * performing the swapbuffer ioctl.
	 */
	dev_priv->sarea_priv->last_frame++;

	BEGIN_RING( 2 );

	OUT_RING( CCE_PACKET0( R128_LAST_FRAME_REG, 0 ) );
	OUT_RING( dev_priv->sarea_priv->last_frame );

	ADVANCE_RING();
}
/* Dispatch a page flip: wait for any pending flip to complete, point
 * CRTC_OFFSET at the other buffer, then toggle current_page (also
 * mirrored into the SAREA as pfCurrentPage) and bump last_frame for
 * client-side frame throttling.
 */
static void r128_cce_dispatch_flip( drm_device_t *dev )
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	RING_LOCALS;
	DRM_DEBUG("%s: page=%d pfCurrentPage=%d\n",
		  __FUNCTION__,
		  dev_priv->current_page,
		  dev_priv->sarea_priv->pfCurrentPage);

#if R128_PERFORMANCE_BOXES
	/* Do some trivial performance monitoring...
	 */
	r128_cce_performance_boxes( dev_priv );
#endif

	BEGIN_RING( 4 );

	R128_WAIT_UNTIL_PAGE_FLIPPED();
	OUT_RING( CCE_PACKET0( R128_CRTC_OFFSET, 0 ) );

	/* Display whichever buffer is not currently being scanned out. */
	if ( dev_priv->current_page == 0 ) {
		OUT_RING( dev_priv->back_offset );
	} else {
		OUT_RING( dev_priv->front_offset );
	}

	ADVANCE_RING();

	/* Increment the frame counter.  The client-side 3D driver must
	 * throttle the framerate by waiting for this value before
	 * performing the swapbuffer ioctl.
	 */
	dev_priv->sarea_priv->last_frame++;
	dev_priv->sarea_priv->pfCurrentPage = dev_priv->current_page =
					      1 - dev_priv->current_page;

	BEGIN_RING( 2 );

	OUT_RING( CCE_PACKET0( R128_LAST_FRAME_REG, 0 ) );
	OUT_RING( dev_priv->sarea_priv->last_frame );

	ADVANCE_RING();
}
/* Dispatch a client vertex buffer.
 *
 * Emits any dirty state first, then replays the vertex-buffer render
 * packet once per group of up to three cliprects (the hardware only
 * has three scissors — see r128_emit_clip_rects).  The do/while shape
 * guarantees at least one emission even when nbox == 0 (no clipping).
 * Discarded buffers are aged with LAST_DISPATCH so the driver can
 * tell when the hardware is done with them.
 */
static void r128_cce_dispatch_vertex( drm_device_t *dev,
				      drm_buf_t *buf )
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_buf_priv_t *buf_priv = buf->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int format = sarea_priv->vc_format;
	int offset = buf->bus_address;
	int size = buf->used;
	int prim = buf_priv->prim;
	int i = 0;
	RING_LOCALS;
	DRM_DEBUG( "buf=%d nbox=%d\n", buf->idx, sarea_priv->nbox );

	/* Debug aid: flip to 1 to trace the dirty flags per dispatch. */
	if ( 0 )
		r128_print_dirty( "dispatch_vertex", sarea_priv->dirty );

	if ( buf->used ) {
		buf_priv->dispatched = 1;

		if ( sarea_priv->dirty & ~R128_UPLOAD_CLIPRECTS ) {
			r128_emit_state( dev_priv );
		}

		do {
			/* Emit the next set of up to three cliprects */
			if ( i < sarea_priv->nbox ) {
				r128_emit_clip_rects( dev_priv,
						      &sarea_priv->boxes[i],
						      sarea_priv->nbox - i );
			}

			/* Emit the vertex buffer rendering commands */
			BEGIN_RING( 5 );

			OUT_RING( CCE_PACKET3( R128_3D_RNDR_GEN_INDX_PRIM, 3 ) );
			OUT_RING( offset );
			OUT_RING( size );
			OUT_RING( format );
			OUT_RING( prim | R128_CCE_VC_CNTL_PRIM_WALK_LIST |
				  (size << R128_CCE_VC_CNTL_NUM_SHIFT) );

			ADVANCE_RING();

			i += 3;
		} while ( i < sarea_priv->nbox );
	}

	if ( buf_priv->discard ) {
		buf_priv->age = dev_priv->sarea_priv->last_dispatch;

		/* Emit the vertex buffer age */
		BEGIN_RING( 2 );

		OUT_RING( CCE_PACKET0( R128_LAST_DISPATCH_REG, 0 ) );
		OUT_RING( buf_priv->age );

		ADVANCE_RING();

		buf->pending = 1;
		buf->used = 0;
		/* FIXME: Check dispatched field */
		buf_priv->dispatched = 0;
	}

	dev_priv->sarea_priv->last_dispatch++;

	sarea_priv->dirty &= ~R128_UPLOAD_CLIPRECTS;
	sarea_priv->nbox = 0;
}
/* Fire off the [start, end) byte range of an indirect buffer via the
 * PM4 indirect-buffer registers.  The engine requires an even number
 * of dwords, so an odd-length buffer is padded in place with a Type-2
 * (no-op) CCE packet.  As in dispatch_vertex, discarded buffers are
 * aged with LAST_DISPATCH.
 */
static void r128_cce_dispatch_indirect( drm_device_t *dev,
					drm_buf_t *buf,
					int start, int end )
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_buf_priv_t *buf_priv = buf->dev_private;
	RING_LOCALS;
	DRM_DEBUG( "indirect: buf=%d s=0x%x e=0x%x\n",
		   buf->idx, start, end );

	if ( start != end ) {
		int offset = buf->bus_address + start;
		/* Round the byte count up to whole dwords. */
		int dwords = (end - start + 3) / sizeof(u32);

		/* Indirect buffer data must be an even number of
		 * dwords, so if we've been given an odd number we must
		 * pad the data with a Type-2 CCE packet.
		 */
		if ( dwords & 1 ) {
			u32 *data = (u32 *)
				((char *)dev->agp_buffer_map->handle
				 + buf->offset + start);
			data[dwords++] = cpu_to_le32( R128_CCE_PACKET2 );
		}

		buf_priv->dispatched = 1;

		/* Fire off the indirect buffer */
		BEGIN_RING( 3 );

		OUT_RING( CCE_PACKET0( R128_PM4_IW_INDOFF, 1 ) );
		OUT_RING( offset );
		OUT_RING( dwords );

		ADVANCE_RING();
	}

	if ( buf_priv->discard ) {
		buf_priv->age = dev_priv->sarea_priv->last_dispatch;

		/* Emit the indirect buffer age */
		BEGIN_RING( 2 );

		OUT_RING( CCE_PACKET0( R128_LAST_DISPATCH_REG, 0 ) );
		OUT_RING( buf_priv->age );

		ADVANCE_RING();

		buf->pending = 1;
		buf->used = 0;
		/* FIXME: Check dispatched field */
		buf_priv->dispatched = 0;
	}

	dev_priv->sarea_priv->last_dispatch++;
}
/* Dispatch an indexed-primitive buffer.
 *
 * The render packet header and vertex-controller setup are written
 * directly into the client's buffer (data[0..4]) in front of the
 * 16-bit indices, then the buffer is replayed via dispatch_indirect
 * once per group of up to three cliprects.  An odd index count leaves
 * half of the last dword unused; it is masked to zero, with the live
 * half depending on byte order.
 */
static void r128_cce_dispatch_indices( drm_device_t *dev,
				       drm_buf_t *buf,
				       int start, int end,
				       int count )
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_buf_priv_t *buf_priv = buf->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int format = sarea_priv->vc_format;
	/* Vertex data offset relative to the CCE buffer aperture. */
	int offset = dev->agp_buffer_map->offset - dev_priv->cce_buffers_offset;
	int prim = buf_priv->prim;
	u32 *data;
	int dwords;
	int i = 0;
	RING_LOCALS;
	DRM_DEBUG( "indices: s=%d e=%d c=%d\n", start, end, count );

	/* Debug aid: flip to 1 to trace the dirty flags per dispatch. */
	if ( 0 )
		r128_print_dirty( "dispatch_indices", sarea_priv->dirty );

	if ( start != end ) {
		buf_priv->dispatched = 1;

		if ( sarea_priv->dirty & ~R128_UPLOAD_CLIPRECTS ) {
			r128_emit_state( dev_priv );
		}

		dwords = (end - start + 3) / sizeof(u32);

		data = (u32 *)((char *)dev->agp_buffer_map->handle
			       + buf->offset + start);

		/* Build the render packet in place ahead of the indices. */
		data[0] = cpu_to_le32( CCE_PACKET3( R128_3D_RNDR_GEN_INDX_PRIM,
						    dwords-2 ) );

		data[1] = cpu_to_le32( offset );
		data[2] = cpu_to_le32( R128_MAX_VB_VERTS );
		data[3] = cpu_to_le32( format );
		data[4] = cpu_to_le32( (prim | R128_CCE_VC_CNTL_PRIM_WALK_IND |
					(count << 16)) );

		if ( count & 0x1 ) {
			/* Zero the unused half of the final index dword. */
#ifdef __LITTLE_ENDIAN
			data[dwords-1] &= 0x0000ffff;
#else
			data[dwords-1] &= 0xffff0000;
#endif
		}

		do {
			/* Emit the next set of up to three cliprects */
			if ( i < sarea_priv->nbox ) {
				r128_emit_clip_rects( dev_priv,
						      &sarea_priv->boxes[i],
						      sarea_priv->nbox - i );
			}

			r128_cce_dispatch_indirect( dev, buf, start, end );

			i += 3;
		} while ( i < sarea_priv->nbox );
	}

	if ( buf_priv->discard ) {
		buf_priv->age = dev_priv->sarea_priv->last_dispatch;

		/* Emit the vertex buffer age */
		BEGIN_RING( 2 );

		OUT_RING( CCE_PACKET0( R128_LAST_DISPATCH_REG, 0 ) );
		OUT_RING( buf_priv->age );

		ADVANCE_RING();

		buf->pending = 1;
		/* FIXME: Check dispatched field */
		buf_priv->dispatched = 0;
	}

	dev_priv->sarea_priv->last_dispatch++;

	sarea_priv->dirty &= ~R128_UPLOAD_CLIPRECTS;
	sarea_priv->nbox = 0;
}
/* Dispatch a host-data blit (texture upload) from a client DMA
 * buffer.
 *
 * The HOSTDATA_BLT packet header is written into the buffer ahead of
 * the pixel data (data[0..7]) and the whole thing is fired as an
 * indirect buffer, bracketed by pixel-cache flushes so the blit does
 * not mix with in-flight rendering.  Returns 0 on success or a
 * DRM_ERR error code on invalid format/buffer.
 */
static int r128_cce_dispatch_blit( DRMFILE filp,
				   drm_device_t *dev,
				   drm_r128_blit_t *blit )
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_device_dma_t *dma = dev->dma;
	drm_buf_t *buf;
	drm_r128_buf_priv_t *buf_priv;
	u32 *data;
	int dword_shift, dwords;
	RING_LOCALS;
	DRM_DEBUG( "\n" );

	/* The compiler won't optimize away a division by a variable,
	 * even if the only legal values are powers of two.  Thus, we'll
	 * use a shift instead.
	 */
	switch ( blit->format ) {
	case R128_DATATYPE_ARGB8888:
		dword_shift = 0;		/* 1 pixel per dword */
		break;
	case R128_DATATYPE_ARGB1555:
	case R128_DATATYPE_RGB565:
	case R128_DATATYPE_ARGB4444:
	case R128_DATATYPE_YVYU422:
	case R128_DATATYPE_VYUY422:
		dword_shift = 1;		/* 2 pixels per dword */
		break;
	case R128_DATATYPE_CI8:
	case R128_DATATYPE_RGB8:
		dword_shift = 2;		/* 4 pixels per dword */
		break;
	default:
		DRM_ERROR( "invalid blit format %d\n", blit->format );
		return DRM_ERR(EINVAL);
	}

	/* Flush the pixel cache, and mark the contents as Read Invalid.
	 * This ensures no pixel data gets mixed up with the texture
	 * data from the host data blit, otherwise part of the texture
	 * image may be corrupted.
	 */
	BEGIN_RING( 2 );

	OUT_RING( CCE_PACKET0( R128_PC_GUI_CTLSTAT, 0 ) );
	OUT_RING( R128_PC_RI_GUI | R128_PC_FLUSH_GUI );

	ADVANCE_RING();

	/* Dispatch the indirect buffer.
	 */
	buf = dma->buflist[blit->idx];
	buf_priv = buf->dev_private;

	/* Only the owner of the buffer may dispatch it. */
	if ( buf->filp != filp ) {
		DRM_ERROR( "process %d using buffer owned by %p\n",
			   DRM_CURRENTPID, buf->filp );
		return DRM_ERR(EINVAL);
	}

	if ( buf->pending ) {
		DRM_ERROR( "sending pending buffer %d\n", blit->idx );
		return DRM_ERR(EINVAL);
	}

	buf_priv->discard = 1;

	dwords = (blit->width * blit->height) >> dword_shift;

	/* Build the HOSTDATA_BLT packet in front of the pixel data. */
	data = (u32 *)((char *)dev->agp_buffer_map->handle + buf->offset);

	data[0] = cpu_to_le32( CCE_PACKET3( R128_CNTL_HOSTDATA_BLT, dwords + 6 ) );
	data[1] = cpu_to_le32( (R128_GMC_DST_PITCH_OFFSET_CNTL |
				R128_GMC_BRUSH_NONE |
				(blit->format << 8) |
				R128_GMC_SRC_DATATYPE_COLOR |
				R128_ROP3_S |
				R128_DP_SRC_SOURCE_HOST_DATA |
				R128_GMC_CLR_CMP_CNTL_DIS |
				R128_GMC_AUX_CLIP_DIS |
				R128_GMC_WR_MSK_DIS) );

	data[2] = cpu_to_le32( (blit->pitch << 21) | (blit->offset >> 5) );
	data[3] = cpu_to_le32( 0xffffffff );	/* src/dst masks wide open */
	data[4] = cpu_to_le32( 0xffffffff );
	data[5] = cpu_to_le32( (blit->y << 16) | blit->x );
	data[6] = cpu_to_le32( (blit->height << 16) | blit->width );
	data[7] = cpu_to_le32( dwords );

	/* 8 header dwords plus the pixel payload. */
	buf->used = (dwords + 8) * sizeof(u32);

	r128_cce_dispatch_indirect( dev, buf, 0, buf->used );

	/* Flush the pixel cache after the blit completes.  This ensures
	 * the texture data is written out to memory before rendering
	 * continues.
	 */
	BEGIN_RING( 2 );

	OUT_RING( CCE_PACKET0( R128_PC_GUI_CTLSTAT, 0 ) );
	OUT_RING( R128_PC_FLUSH_GUI );

	ADVANCE_RING();

	return 0;
}
  717. /* ================================================================
  718. * Tiled depth buffer management
  719. *
  720. * FIXME: These should all set the destination write mask for when we
  721. * have hardware stencil support.
  722. */
/* Write a horizontal span of depth values into the depth buffer, one
 * pixel at a time via PAINT_MULTI packets.
 *
 * depth->x / depth->y point at a single user-space start coordinate;
 * depth->buffer holds depth->n 32-bit depth values, and depth->mask
 * (optional, may be NULL) holds one write-enable byte per pixel.
 *
 * Returns 0 on success, or EMSGSIZE (bad count), EFAULT (user copy
 * failed), or ENOMEM (temporary kernel buffer allocation failed).
 */
static int r128_cce_dispatch_write_span( drm_device_t *dev,
					 drm_r128_depth_t *depth )
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	int count, x, y;
	u32 *buffer;
	u8 *mask;
	int i, buffer_size, mask_size;
	RING_LOCALS;
	DRM_DEBUG( "\n" );

	/* Bound the element count before it is used to size allocations. */
	count = depth->n;
	if (count > 4096 || count <= 0)
		return DRM_ERR(EMSGSIZE);

	if ( DRM_COPY_FROM_USER( &x, depth->x, sizeof(x) ) ) {
		return DRM_ERR(EFAULT);
	}
	if ( DRM_COPY_FROM_USER( &y, depth->y, sizeof(y) ) ) {
		return DRM_ERR(EFAULT);
	}

	/* Pull the user's depth values into a kernel buffer. */
	buffer_size = depth->n * sizeof(u32);
	buffer = drm_alloc( buffer_size, DRM_MEM_BUFS );
	if ( buffer == NULL )
		return DRM_ERR(ENOMEM);
	if ( DRM_COPY_FROM_USER( buffer, depth->buffer, buffer_size ) ) {
		drm_free( buffer, buffer_size, DRM_MEM_BUFS);
		return DRM_ERR(EFAULT);
	}

	mask_size = depth->n * sizeof(u8);
	if ( depth->mask ) {
		/* Masked write: only emit a paint packet for pixels whose
		 * mask byte is non-zero.
		 */
		mask = drm_alloc( mask_size, DRM_MEM_BUFS );
		if ( mask == NULL ) {
			drm_free( buffer, buffer_size, DRM_MEM_BUFS );
			return DRM_ERR(ENOMEM);
		}
		if ( DRM_COPY_FROM_USER( mask, depth->mask, mask_size ) ) {
			drm_free( buffer, buffer_size, DRM_MEM_BUFS );
			drm_free( mask, mask_size, DRM_MEM_BUFS );
			return DRM_ERR(EFAULT);
		}
		for ( i = 0 ; i < count ; i++, x++ ) {
			if ( mask[i] ) {
				/* 1x1 solid fill at (x, y); the depth value
				 * is supplied as the brush color.
				 */
				BEGIN_RING( 6 );
				OUT_RING( CCE_PACKET3( R128_CNTL_PAINT_MULTI, 4 ) );
				OUT_RING( R128_GMC_DST_PITCH_OFFSET_CNTL |
					  R128_GMC_BRUSH_SOLID_COLOR |
					  (dev_priv->depth_fmt << 8) |
					  R128_GMC_SRC_DATATYPE_COLOR |
					  R128_ROP3_P |
					  R128_GMC_CLR_CMP_CNTL_DIS |
					  R128_GMC_WR_MSK_DIS );
				OUT_RING( dev_priv->depth_pitch_offset_c );
				OUT_RING( buffer[i] );
				OUT_RING( (x << 16) | y );
				OUT_RING( (1 << 16) | 1 );
				ADVANCE_RING();
			}
		}
		drm_free( mask, mask_size, DRM_MEM_BUFS );
	} else {
		/* Unmasked write: paint every pixel in the span. */
		for ( i = 0 ; i < count ; i++, x++ ) {
			BEGIN_RING( 6 );
			OUT_RING( CCE_PACKET3( R128_CNTL_PAINT_MULTI, 4 ) );
			OUT_RING( R128_GMC_DST_PITCH_OFFSET_CNTL |
				  R128_GMC_BRUSH_SOLID_COLOR |
				  (dev_priv->depth_fmt << 8) |
				  R128_GMC_SRC_DATATYPE_COLOR |
				  R128_ROP3_P |
				  R128_GMC_CLR_CMP_CNTL_DIS |
				  R128_GMC_WR_MSK_DIS );
			OUT_RING( dev_priv->depth_pitch_offset_c );
			OUT_RING( buffer[i] );
			OUT_RING( (x << 16) | y );
			OUT_RING( (1 << 16) | 1 );
			ADVANCE_RING();
		}
	}

	drm_free( buffer, buffer_size, DRM_MEM_BUFS );
	return 0;
}
  802. static int r128_cce_dispatch_write_pixels( drm_device_t *dev,
  803. drm_r128_depth_t *depth )
  804. {
  805. drm_r128_private_t *dev_priv = dev->dev_private;
  806. int count, *x, *y;
  807. u32 *buffer;
  808. u8 *mask;
  809. int i, xbuf_size, ybuf_size, buffer_size, mask_size;
  810. RING_LOCALS;
  811. DRM_DEBUG( "\n" );
  812. count = depth->n;
  813. if (count > 4096 || count <= 0)
  814. return DRM_ERR(EMSGSIZE);
  815. xbuf_size = count * sizeof(*x);
  816. ybuf_size = count * sizeof(*y);
  817. x = drm_alloc( xbuf_size, DRM_MEM_BUFS );
  818. if ( x == NULL ) {
  819. return DRM_ERR(ENOMEM);
  820. }
  821. y = drm_alloc( ybuf_size, DRM_MEM_BUFS );
  822. if ( y == NULL ) {
  823. drm_free( x, xbuf_size, DRM_MEM_BUFS );
  824. return DRM_ERR(ENOMEM);
  825. }
  826. if ( DRM_COPY_FROM_USER( x, depth->x, xbuf_size ) ) {
  827. drm_free( x, xbuf_size, DRM_MEM_BUFS );
  828. drm_free( y, ybuf_size, DRM_MEM_BUFS );
  829. return DRM_ERR(EFAULT);
  830. }
  831. if ( DRM_COPY_FROM_USER( y, depth->y, xbuf_size ) ) {
  832. drm_free( x, xbuf_size, DRM_MEM_BUFS );
  833. drm_free( y, ybuf_size, DRM_MEM_BUFS );
  834. return DRM_ERR(EFAULT);
  835. }
  836. buffer_size = depth->n * sizeof(u32);
  837. buffer = drm_alloc( buffer_size, DRM_MEM_BUFS );
  838. if ( buffer == NULL ) {
  839. drm_free( x, xbuf_size, DRM_MEM_BUFS );
  840. drm_free( y, ybuf_size, DRM_MEM_BUFS );
  841. return DRM_ERR(ENOMEM);
  842. }
  843. if ( DRM_COPY_FROM_USER( buffer, depth->buffer, buffer_size ) ) {
  844. drm_free( x, xbuf_size, DRM_MEM_BUFS );
  845. drm_free( y, ybuf_size, DRM_MEM_BUFS );
  846. drm_free( buffer, buffer_size, DRM_MEM_BUFS );
  847. return DRM_ERR(EFAULT);
  848. }
  849. if ( depth->mask ) {
  850. mask_size = depth->n * sizeof(u8);
  851. mask = drm_alloc( mask_size, DRM_MEM_BUFS );
  852. if ( mask == NULL ) {
  853. drm_free( x, xbuf_size, DRM_MEM_BUFS );
  854. drm_free( y, ybuf_size, DRM_MEM_BUFS );
  855. drm_free( buffer, buffer_size, DRM_MEM_BUFS );
  856. return DRM_ERR(ENOMEM);
  857. }
  858. if ( DRM_COPY_FROM_USER( mask, depth->mask, mask_size ) ) {
  859. drm_free( x, xbuf_size, DRM_MEM_BUFS );
  860. drm_free( y, ybuf_size, DRM_MEM_BUFS );
  861. drm_free( buffer, buffer_size, DRM_MEM_BUFS );
  862. drm_free( mask, mask_size, DRM_MEM_BUFS );
  863. return DRM_ERR(EFAULT);
  864. }
  865. for ( i = 0 ; i < count ; i++ ) {
  866. if ( mask[i] ) {
  867. BEGIN_RING( 6 );
  868. OUT_RING( CCE_PACKET3( R128_CNTL_PAINT_MULTI, 4 ) );
  869. OUT_RING( R128_GMC_DST_PITCH_OFFSET_CNTL |
  870. R128_GMC_BRUSH_SOLID_COLOR |
  871. (dev_priv->depth_fmt << 8) |
  872. R128_GMC_SRC_DATATYPE_COLOR |
  873. R128_ROP3_P |
  874. R128_GMC_CLR_CMP_CNTL_DIS |
  875. R128_GMC_WR_MSK_DIS );
  876. OUT_RING( dev_priv->depth_pitch_offset_c );
  877. OUT_RING( buffer[i] );
  878. OUT_RING( (x[i] << 16) | y[i] );
  879. OUT_RING( (1 << 16) | 1 );
  880. ADVANCE_RING();
  881. }
  882. }
  883. drm_free( mask, mask_size, DRM_MEM_BUFS );
  884. } else {
  885. for ( i = 0 ; i < count ; i++ ) {
  886. BEGIN_RING( 6 );
  887. OUT_RING( CCE_PACKET3( R128_CNTL_PAINT_MULTI, 4 ) );
  888. OUT_RING( R128_GMC_DST_PITCH_OFFSET_CNTL |
  889. R128_GMC_BRUSH_SOLID_COLOR |
  890. (dev_priv->depth_fmt << 8) |
  891. R128_GMC_SRC_DATATYPE_COLOR |
  892. R128_ROP3_P |
  893. R128_GMC_CLR_CMP_CNTL_DIS |
  894. R128_GMC_WR_MSK_DIS );
  895. OUT_RING( dev_priv->depth_pitch_offset_c );
  896. OUT_RING( buffer[i] );
  897. OUT_RING( (x[i] << 16) | y[i] );
  898. OUT_RING( (1 << 16) | 1 );
  899. ADVANCE_RING();
  900. }
  901. }
  902. drm_free( x, xbuf_size, DRM_MEM_BUFS );
  903. drm_free( y, ybuf_size, DRM_MEM_BUFS );
  904. drm_free( buffer, buffer_size, DRM_MEM_BUFS );
  905. return 0;
  906. }
/* Read a horizontal span of depth values: one BITBLT_MULTI packet
 * copies `count` pixels starting at (x, y) in the depth buffer into
 * the span buffer (dev_priv->span_pitch_offset_c) at (0, 0).
 *
 * depth->x / depth->y point at a single user-space start coordinate.
 *
 * NOTE(review): unlike r128_cce_dispatch_read_pixels(), count is not
 * clamped to dev_priv->depth_pitch here — confirm that the 4096 cap
 * alone cannot overrun the span buffer's row width.
 *
 * Returns 0 on success, or EMSGSIZE / EFAULT.
 */
static int r128_cce_dispatch_read_span( drm_device_t *dev,
					drm_r128_depth_t *depth )
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	int count, x, y;
	RING_LOCALS;
	DRM_DEBUG( "\n" );

	/* Bound the element count coming from user space. */
	count = depth->n;
	if (count > 4096 || count <= 0)
		return DRM_ERR(EMSGSIZE);

	if ( DRM_COPY_FROM_USER( &x, depth->x, sizeof(x) ) ) {
		return DRM_ERR(EFAULT);
	}
	if ( DRM_COPY_FROM_USER( &y, depth->y, sizeof(y) ) ) {
		return DRM_ERR(EFAULT);
	}

	/* Blit a count x 1 region from the depth buffer to the span
	 * buffer in a single packet.
	 */
	BEGIN_RING( 7 );
	OUT_RING( CCE_PACKET3( R128_CNTL_BITBLT_MULTI, 5 ) );
	OUT_RING( R128_GMC_SRC_PITCH_OFFSET_CNTL |
		  R128_GMC_DST_PITCH_OFFSET_CNTL |
		  R128_GMC_BRUSH_NONE |
		  (dev_priv->depth_fmt << 8) |
		  R128_GMC_SRC_DATATYPE_COLOR |
		  R128_ROP3_S |
		  R128_DP_SRC_SOURCE_MEMORY |
		  R128_GMC_CLR_CMP_CNTL_DIS |
		  R128_GMC_WR_MSK_DIS );
	OUT_RING( dev_priv->depth_pitch_offset_c );
	OUT_RING( dev_priv->span_pitch_offset_c );
	OUT_RING( (x << 16) | y );
	OUT_RING( (0 << 16) | 0 );
	OUT_RING( (count << 16) | 1 );
	ADVANCE_RING();

	return 0;
}
/* Read a scattered set of depth values: one BITBLT_MULTI packet per
 * pixel copies (x[i], y[i]) from the depth buffer into column i of
 * the span buffer.
 *
 * depth->x / depth->y are user-space arrays of depth->n coordinates.
 * count is clamped to dev_priv->depth_pitch so destination columns
 * stay within one span-buffer row.
 *
 * Returns 0 on success, or EMSGSIZE / EFAULT / ENOMEM.
 */
static int r128_cce_dispatch_read_pixels( drm_device_t *dev,
					  drm_r128_depth_t *depth )
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	int count, *x, *y;
	int i, xbuf_size, ybuf_size;
	RING_LOCALS;
	DRM_DEBUG( "%s\n", __FUNCTION__ );

	/* Bound the element count coming from user space. */
	count = depth->n;
	if (count > 4096 || count <= 0)
		return DRM_ERR(EMSGSIZE);

	/* Clamp to the span buffer's width. */
	if ( count > dev_priv->depth_pitch ) {
		count = dev_priv->depth_pitch;
	}

	xbuf_size = count * sizeof(*x);
	ybuf_size = count * sizeof(*y);
	x = drm_alloc( xbuf_size, DRM_MEM_BUFS );
	if ( x == NULL ) {
		return DRM_ERR(ENOMEM);
	}
	y = drm_alloc( ybuf_size, DRM_MEM_BUFS );
	if ( y == NULL ) {
		drm_free( x, xbuf_size, DRM_MEM_BUFS );
		return DRM_ERR(ENOMEM);
	}
	if ( DRM_COPY_FROM_USER( x, depth->x, xbuf_size ) ) {
		drm_free( x, xbuf_size, DRM_MEM_BUFS );
		drm_free( y, ybuf_size, DRM_MEM_BUFS );
		return DRM_ERR(EFAULT);
	}
	if ( DRM_COPY_FROM_USER( y, depth->y, ybuf_size ) ) {
		drm_free( x, xbuf_size, DRM_MEM_BUFS );
		drm_free( y, ybuf_size, DRM_MEM_BUFS );
		return DRM_ERR(EFAULT);
	}

	for ( i = 0 ; i < count ; i++ ) {
		/* 1x1 blit from (x[i], y[i]) to span-buffer column i. */
		BEGIN_RING( 7 );
		OUT_RING( CCE_PACKET3( R128_CNTL_BITBLT_MULTI, 5 ) );
		OUT_RING( R128_GMC_SRC_PITCH_OFFSET_CNTL |
			  R128_GMC_DST_PITCH_OFFSET_CNTL |
			  R128_GMC_BRUSH_NONE |
			  (dev_priv->depth_fmt << 8) |
			  R128_GMC_SRC_DATATYPE_COLOR |
			  R128_ROP3_S |
			  R128_DP_SRC_SOURCE_MEMORY |
			  R128_GMC_CLR_CMP_CNTL_DIS |
			  R128_GMC_WR_MSK_DIS );
		OUT_RING( dev_priv->depth_pitch_offset_c );
		OUT_RING( dev_priv->span_pitch_offset_c );
		OUT_RING( (x[i] << 16) | y[i] );
		OUT_RING( (i << 16) | 0 );
		OUT_RING( (1 << 16) | 1 );
		ADVANCE_RING();
	}

	drm_free( x, xbuf_size, DRM_MEM_BUFS );
	drm_free( y, ybuf_size, DRM_MEM_BUFS );
	return 0;
}
  1000. /* ================================================================
  1001. * Polygon stipple
  1002. */
  1003. static void r128_cce_dispatch_stipple( drm_device_t *dev, u32 *stipple )
  1004. {
  1005. drm_r128_private_t *dev_priv = dev->dev_private;
  1006. int i;
  1007. RING_LOCALS;
  1008. DRM_DEBUG( "%s\n", __FUNCTION__ );
  1009. BEGIN_RING( 33 );
  1010. OUT_RING( CCE_PACKET0( R128_BRUSH_DATA0, 31 ) );
  1011. for ( i = 0 ; i < 32 ; i++ ) {
  1012. OUT_RING( stipple[i] );
  1013. }
  1014. ADVANCE_RING();
  1015. }
  1016. /* ================================================================
  1017. * IOCTL functions
  1018. */
  1019. static int r128_cce_clear( DRM_IOCTL_ARGS )
  1020. {
  1021. DRM_DEVICE;
  1022. drm_r128_private_t *dev_priv = dev->dev_private;
  1023. drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
  1024. drm_r128_clear_t clear;
  1025. DRM_DEBUG( "\n" );
  1026. LOCK_TEST_WITH_RETURN( dev, filp );
  1027. DRM_COPY_FROM_USER_IOCTL( clear, (drm_r128_clear_t __user *) data,
  1028. sizeof(clear) );
  1029. RING_SPACE_TEST_WITH_RETURN( dev_priv );
  1030. if ( sarea_priv->nbox > R128_NR_SAREA_CLIPRECTS )
  1031. sarea_priv->nbox = R128_NR_SAREA_CLIPRECTS;
  1032. r128_cce_dispatch_clear( dev, &clear );
  1033. COMMIT_RING();
  1034. /* Make sure we restore the 3D state next time.
  1035. */
  1036. dev_priv->sarea_priv->dirty |= R128_UPLOAD_CONTEXT | R128_UPLOAD_MASKS;
  1037. return 0;
  1038. }
/* Enable page flipping: save the current CRTC offset registers so
 * they can be restored at cleanup, point the CRTC at the front
 * buffer, and mark page 0 as current in the SAREA.
 *
 * Always returns 0.
 */
static int r128_do_init_pageflip( drm_device_t *dev )
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	DRM_DEBUG( "\n" );

	/* Remember the pre-flip register state for cleanup. */
	dev_priv->crtc_offset =      R128_READ( R128_CRTC_OFFSET );
	dev_priv->crtc_offset_cntl = R128_READ( R128_CRTC_OFFSET_CNTL );

	R128_WRITE( R128_CRTC_OFFSET, dev_priv->front_offset );
	R128_WRITE( R128_CRTC_OFFSET_CNTL,
		    dev_priv->crtc_offset_cntl | R128_CRTC_OFFSET_FLIP_CNTL );

	dev_priv->page_flipping = 1;
	dev_priv->current_page = 0;
	/* Publish the current page so clients can track flips. */
	dev_priv->sarea_priv->pfCurrentPage = dev_priv->current_page;

	return 0;
}
/* Disable page flipping: restore the saved CRTC offset registers and,
 * if the back page is currently displayed, flip once more so the
 * front buffer is showing again.
 *
 * Always returns 0.
 */
int r128_do_cleanup_pageflip( drm_device_t *dev )
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	DRM_DEBUG( "\n" );

	R128_WRITE( R128_CRTC_OFFSET,      dev_priv->crtc_offset );
	R128_WRITE( R128_CRTC_OFFSET_CNTL, dev_priv->crtc_offset_cntl );

	/* Flip back to page 0 if we ended on the other page. */
	if (dev_priv->current_page != 0) {
		r128_cce_dispatch_flip( dev );
		COMMIT_RING();
	}

	dev_priv->page_flipping = 0;
	return 0;
}
  1066. /* Swapping and flipping are different operations, need different ioctls.
  1067. * They can & should be intermixed to support multiple 3d windows.
  1068. */
  1069. static int r128_cce_flip( DRM_IOCTL_ARGS )
  1070. {
  1071. DRM_DEVICE;
  1072. drm_r128_private_t *dev_priv = dev->dev_private;
  1073. DRM_DEBUG( "%s\n", __FUNCTION__ );
  1074. LOCK_TEST_WITH_RETURN( dev, filp );
  1075. RING_SPACE_TEST_WITH_RETURN( dev_priv );
  1076. if (!dev_priv->page_flipping)
  1077. r128_do_init_pageflip( dev );
  1078. r128_cce_dispatch_flip( dev );
  1079. COMMIT_RING();
  1080. return 0;
  1081. }
  1082. static int r128_cce_swap( DRM_IOCTL_ARGS )
  1083. {
  1084. DRM_DEVICE;
  1085. drm_r128_private_t *dev_priv = dev->dev_private;
  1086. drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
  1087. DRM_DEBUG( "%s\n", __FUNCTION__ );
  1088. LOCK_TEST_WITH_RETURN( dev, filp );
  1089. RING_SPACE_TEST_WITH_RETURN( dev_priv );
  1090. if ( sarea_priv->nbox > R128_NR_SAREA_CLIPRECTS )
  1091. sarea_priv->nbox = R128_NR_SAREA_CLIPRECTS;
  1092. r128_cce_dispatch_swap( dev );
  1093. dev_priv->sarea_priv->dirty |= (R128_UPLOAD_CONTEXT |
  1094. R128_UPLOAD_MASKS);
  1095. COMMIT_RING();
  1096. return 0;
  1097. }
/* DRM_R128_VERTEX ioctl: validate and dispatch a vertex DMA buffer.
 *
 * Rejects the request when the CCE is uninitialized, the buffer index
 * or primitive type is out of range, the buffer belongs to another
 * file handle, or the buffer is already queued for dispatch.
 */
static int r128_cce_vertex( DRM_IOCTL_ARGS )
{
	DRM_DEVICE;
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_device_dma_t *dma = dev->dma;
	drm_buf_t *buf;
	drm_r128_buf_priv_t *buf_priv;
	drm_r128_vertex_t vertex;

	LOCK_TEST_WITH_RETURN( dev, filp );

	if ( !dev_priv ) {
		DRM_ERROR( "%s called with no initialization\n", __FUNCTION__ );
		return DRM_ERR(EINVAL);
	}

	DRM_COPY_FROM_USER_IOCTL( vertex, (drm_r128_vertex_t __user *) data,
				  sizeof(vertex) );

	DRM_DEBUG( "pid=%d index=%d count=%d discard=%d\n",
		   DRM_CURRENTPID,
		   vertex.idx, vertex.count, vertex.discard );

	/* Validate the user-supplied buffer index. */
	if ( vertex.idx < 0 || vertex.idx >= dma->buf_count ) {
		DRM_ERROR( "buffer index %d (of %d max)\n",
			   vertex.idx, dma->buf_count - 1 );
		return DRM_ERR(EINVAL);
	}
	/* Validate the user-supplied primitive type. */
	if ( vertex.prim < 0 ||
	     vertex.prim > R128_CCE_VC_CNTL_PRIM_TYPE_TRI_TYPE2 ) {
		DRM_ERROR( "buffer prim %d\n", vertex.prim );
		return DRM_ERR(EINVAL);
	}

	RING_SPACE_TEST_WITH_RETURN( dev_priv );
	VB_AGE_TEST_WITH_RETURN( dev_priv );

	buf = dma->buflist[vertex.idx];
	buf_priv = buf->dev_private;

	/* A client may only dispatch buffers it owns. */
	if ( buf->filp != filp ) {
		DRM_ERROR( "process %d using buffer owned by %p\n",
			   DRM_CURRENTPID, buf->filp );
		return DRM_ERR(EINVAL);
	}
	if ( buf->pending ) {
		DRM_ERROR( "sending pending buffer %d\n", vertex.idx );
		return DRM_ERR(EINVAL);
	}

	buf->used = vertex.count;
	buf_priv->prim = vertex.prim;
	buf_priv->discard = vertex.discard;

	r128_cce_dispatch_vertex( dev, buf );

	COMMIT_RING();
	return 0;
}
/* DRM_R128_INDICES ioctl: validate and dispatch an indexed-primitive
 * DMA buffer covering [elts.start, elts.end) within buffer elts.idx.
 *
 * Rejects the request when the CCE is uninitialized, the buffer index
 * or primitive type is out of range, the buffer belongs to another
 * file handle, the buffer is already pending, the start offset is
 * misaligned, or the range does not leave room for the packet header.
 */
static int r128_cce_indices( DRM_IOCTL_ARGS )
{
	DRM_DEVICE;
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_device_dma_t *dma = dev->dma;
	drm_buf_t *buf;
	drm_r128_buf_priv_t *buf_priv;
	drm_r128_indices_t elts;
	int count;

	LOCK_TEST_WITH_RETURN( dev, filp );

	if ( !dev_priv ) {
		DRM_ERROR( "%s called with no initialization\n", __FUNCTION__ );
		return DRM_ERR(EINVAL);
	}

	DRM_COPY_FROM_USER_IOCTL( elts, (drm_r128_indices_t __user *) data,
				  sizeof(elts) );

	DRM_DEBUG( "pid=%d buf=%d s=%d e=%d d=%d\n", DRM_CURRENTPID,
		   elts.idx, elts.start, elts.end, elts.discard );

	/* Validate the user-supplied buffer index. */
	if ( elts.idx < 0 || elts.idx >= dma->buf_count ) {
		DRM_ERROR( "buffer index %d (of %d max)\n",
			   elts.idx, dma->buf_count - 1 );
		return DRM_ERR(EINVAL);
	}
	/* Validate the user-supplied primitive type. */
	if ( elts.prim < 0 ||
	     elts.prim > R128_CCE_VC_CNTL_PRIM_TYPE_TRI_TYPE2 ) {
		DRM_ERROR( "buffer prim %d\n", elts.prim );
		return DRM_ERR(EINVAL);
	}

	RING_SPACE_TEST_WITH_RETURN( dev_priv );
	VB_AGE_TEST_WITH_RETURN( dev_priv );

	buf = dma->buflist[elts.idx];
	buf_priv = buf->dev_private;

	/* A client may only dispatch buffers it owns. */
	if ( buf->filp != filp ) {
		DRM_ERROR( "process %d using buffer owned by %p\n",
			   DRM_CURRENTPID, buf->filp );
		return DRM_ERR(EINVAL);
	}
	if ( buf->pending ) {
		DRM_ERROR( "sending pending buffer %d\n", elts.idx );
		return DRM_ERR(EINVAL);
	}

	/* Number of 16-bit indices in the range.  Note: count is taken
	 * over the raw [start, end) range before start is rebased by
	 * R128_INDEX_PRIM_OFFSET below — presumably intentional, as the
	 * rebased start points at the packet header rather than at the
	 * index data; confirm against r128_cce_dispatch_indices().
	 */
	count = (elts.end - elts.start) / sizeof(u16);
	elts.start -= R128_INDEX_PRIM_OFFSET;

	if ( elts.start & 0x7 ) {
		DRM_ERROR( "misaligned buffer 0x%x\n", elts.start );
		return DRM_ERR(EINVAL);
	}
	if ( elts.start < buf->used ) {
		DRM_ERROR( "no header 0x%x - 0x%x\n", elts.start, buf->used );
		return DRM_ERR(EINVAL);
	}

	buf->used = elts.end;
	buf_priv->prim = elts.prim;
	buf_priv->discard = elts.discard;

	r128_cce_dispatch_indices( dev, buf, elts.start, elts.end, count );

	COMMIT_RING();
	return 0;
}
  1204. static int r128_cce_blit( DRM_IOCTL_ARGS )
  1205. {
  1206. DRM_DEVICE;
  1207. drm_device_dma_t *dma = dev->dma;
  1208. drm_r128_private_t *dev_priv = dev->dev_private;
  1209. drm_r128_blit_t blit;
  1210. int ret;
  1211. LOCK_TEST_WITH_RETURN( dev, filp );
  1212. DRM_COPY_FROM_USER_IOCTL( blit, (drm_r128_blit_t __user *) data,
  1213. sizeof(blit) );
  1214. DRM_DEBUG( "pid=%d index=%d\n", DRM_CURRENTPID, blit.idx );
  1215. if ( blit.idx < 0 || blit.idx >= dma->buf_count ) {
  1216. DRM_ERROR( "buffer index %d (of %d max)\n",
  1217. blit.idx, dma->buf_count - 1 );
  1218. return DRM_ERR(EINVAL);
  1219. }
  1220. RING_SPACE_TEST_WITH_RETURN( dev_priv );
  1221. VB_AGE_TEST_WITH_RETURN( dev_priv );
  1222. ret = r128_cce_dispatch_blit( filp, dev, &blit );
  1223. COMMIT_RING();
  1224. return ret;
  1225. }
  1226. static int r128_cce_depth( DRM_IOCTL_ARGS )
  1227. {
  1228. DRM_DEVICE;
  1229. drm_r128_private_t *dev_priv = dev->dev_private;
  1230. drm_r128_depth_t depth;
  1231. int ret;
  1232. LOCK_TEST_WITH_RETURN( dev, filp );
  1233. DRM_COPY_FROM_USER_IOCTL( depth, (drm_r128_depth_t __user *) data,
  1234. sizeof(depth) );
  1235. RING_SPACE_TEST_WITH_RETURN( dev_priv );
  1236. ret = DRM_ERR(EINVAL);
  1237. switch ( depth.func ) {
  1238. case R128_WRITE_SPAN:
  1239. ret = r128_cce_dispatch_write_span( dev, &depth );
  1240. break;
  1241. case R128_WRITE_PIXELS:
  1242. ret = r128_cce_dispatch_write_pixels( dev, &depth );
  1243. break;
  1244. case R128_READ_SPAN:
  1245. ret = r128_cce_dispatch_read_span( dev, &depth );
  1246. break;
  1247. case R128_READ_PIXELS:
  1248. ret = r128_cce_dispatch_read_pixels( dev, &depth );
  1249. break;
  1250. }
  1251. COMMIT_RING();
  1252. return ret;
  1253. }
  1254. static int r128_cce_stipple( DRM_IOCTL_ARGS )
  1255. {
  1256. DRM_DEVICE;
  1257. drm_r128_private_t *dev_priv = dev->dev_private;
  1258. drm_r128_stipple_t stipple;
  1259. u32 mask[32];
  1260. LOCK_TEST_WITH_RETURN( dev, filp );
  1261. DRM_COPY_FROM_USER_IOCTL( stipple, (drm_r128_stipple_t __user *) data,
  1262. sizeof(stipple) );
  1263. if ( DRM_COPY_FROM_USER( &mask, stipple.mask,
  1264. 32 * sizeof(u32) ) )
  1265. return DRM_ERR( EFAULT );
  1266. RING_SPACE_TEST_WITH_RETURN( dev_priv );
  1267. r128_cce_dispatch_stipple( dev, mask );
  1268. COMMIT_RING();
  1269. return 0;
  1270. }
/* DRM_R128_INDIRECT ioctl: dispatch a raw command buffer from the X
 * server.  The buffer contents are not verified, so this ioctl is
 * restricted to privileged clients (see the r128_ioctls table).
 */
static int r128_cce_indirect( DRM_IOCTL_ARGS )
{
	DRM_DEVICE;
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_device_dma_t *dma = dev->dma;
	drm_buf_t *buf;
	drm_r128_buf_priv_t *buf_priv;
	drm_r128_indirect_t indirect;
#if 0
	RING_LOCALS;
#endif

	LOCK_TEST_WITH_RETURN( dev, filp );

	if ( !dev_priv ) {
		DRM_ERROR( "%s called with no initialization\n", __FUNCTION__ );
		return DRM_ERR(EINVAL);
	}

	DRM_COPY_FROM_USER_IOCTL( indirect, (drm_r128_indirect_t __user *) data,
				  sizeof(indirect) );

	DRM_DEBUG( "indirect: idx=%d s=%d e=%d d=%d\n",
		   indirect.idx, indirect.start,
		   indirect.end, indirect.discard );

	/* Validate the user-supplied buffer index. */
	if ( indirect.idx < 0 || indirect.idx >= dma->buf_count ) {
		DRM_ERROR( "buffer index %d (of %d max)\n",
			   indirect.idx, dma->buf_count - 1 );
		return DRM_ERR(EINVAL);
	}

	buf = dma->buflist[indirect.idx];
	buf_priv = buf->dev_private;

	/* A client may only dispatch buffers it owns. */
	if ( buf->filp != filp ) {
		DRM_ERROR( "process %d using buffer owned by %p\n",
			   DRM_CURRENTPID, buf->filp );
		return DRM_ERR(EINVAL);
	}
	if ( buf->pending ) {
		DRM_ERROR( "sending pending buffer %d\n", indirect.idx );
		return DRM_ERR(EINVAL);
	}

	/* The dispatch range must begin at or after the already-used
	 * portion of the buffer.
	 */
	if ( indirect.start < buf->used ) {
		DRM_ERROR( "reusing indirect: start=0x%x actual=0x%x\n",
			   indirect.start, buf->used );
		return DRM_ERR(EINVAL);
	}

	RING_SPACE_TEST_WITH_RETURN( dev_priv );
	VB_AGE_TEST_WITH_RETURN( dev_priv );

	buf->used = indirect.end;
	buf_priv->discard = indirect.discard;

#if 0
	/* Wait for the 3D stream to idle before the indirect buffer
	 * containing 2D acceleration commands is processed.
	 */
	BEGIN_RING( 2 );
	RADEON_WAIT_UNTIL_3D_IDLE();
	ADVANCE_RING();
#endif

	/* Dispatch the indirect buffer full of commands from the
	 * X server.  This is insecure and is thus only available to
	 * privileged clients.
	 */
	r128_cce_dispatch_indirect( dev, buf, indirect.start, indirect.end );

	COMMIT_RING();
	return 0;
}
  1333. static int r128_getparam( DRM_IOCTL_ARGS )
  1334. {
  1335. DRM_DEVICE;
  1336. drm_r128_private_t *dev_priv = dev->dev_private;
  1337. drm_r128_getparam_t param;
  1338. int value;
  1339. if ( !dev_priv ) {
  1340. DRM_ERROR( "%s called with no initialization\n", __FUNCTION__ );
  1341. return DRM_ERR(EINVAL);
  1342. }
  1343. DRM_COPY_FROM_USER_IOCTL( param, (drm_r128_getparam_t __user *)data,
  1344. sizeof(param) );
  1345. DRM_DEBUG( "pid=%d\n", DRM_CURRENTPID );
  1346. switch( param.param ) {
  1347. case R128_PARAM_IRQ_NR:
  1348. value = dev->irq;
  1349. break;
  1350. default:
  1351. return DRM_ERR(EINVAL);
  1352. }
  1353. if ( DRM_COPY_TO_USER( param.value, &value, sizeof(int) ) ) {
  1354. DRM_ERROR( "copy_to_user\n" );
  1355. return DRM_ERR(EFAULT);
  1356. }
  1357. return 0;
  1358. }
  1359. void r128_driver_prerelease(drm_device_t *dev, DRMFILE filp)
  1360. {
  1361. if ( dev->dev_private ) {
  1362. drm_r128_private_t *dev_priv = dev->dev_private;
  1363. if ( dev_priv->page_flipping ) {
  1364. r128_do_cleanup_pageflip( dev );
  1365. }
  1366. }
  1367. }
/* Driver hook run just before device takedown: release all CCE
 * state and mappings.
 */
void r128_driver_pretakedown(drm_device_t *dev)
{
	r128_do_cleanup_cce( dev );
}
/* Ioctl dispatch table, indexed by ioctl number.  The two flags after
 * each handler are { auth_needed, root_only } as used by the DRM core
 * for this descriptor layout: init/start/stop/reset and indirect
 * dispatch are restricted to privileged (root) clients.
 */
drm_ioctl_desc_t r128_ioctls[] = {
	[DRM_IOCTL_NR(DRM_R128_INIT)]       = { r128_cce_init,     1, 1 },
	[DRM_IOCTL_NR(DRM_R128_CCE_START)]  = { r128_cce_start,    1, 1 },
	[DRM_IOCTL_NR(DRM_R128_CCE_STOP)]   = { r128_cce_stop,     1, 1 },
	[DRM_IOCTL_NR(DRM_R128_CCE_RESET)]  = { r128_cce_reset,    1, 1 },
	[DRM_IOCTL_NR(DRM_R128_CCE_IDLE)]   = { r128_cce_idle,     1, 0 },
	[DRM_IOCTL_NR(DRM_R128_RESET)]      = { r128_engine_reset, 1, 0 },
	[DRM_IOCTL_NR(DRM_R128_FULLSCREEN)] = { r128_fullscreen,   1, 0 },
	[DRM_IOCTL_NR(DRM_R128_SWAP)]       = { r128_cce_swap,     1, 0 },
	[DRM_IOCTL_NR(DRM_R128_FLIP)]       = { r128_cce_flip,     1, 0 },
	[DRM_IOCTL_NR(DRM_R128_CLEAR)]      = { r128_cce_clear,    1, 0 },
	[DRM_IOCTL_NR(DRM_R128_VERTEX)]     = { r128_cce_vertex,   1, 0 },
	[DRM_IOCTL_NR(DRM_R128_INDICES)]    = { r128_cce_indices,  1, 0 },
	[DRM_IOCTL_NR(DRM_R128_BLIT)]       = { r128_cce_blit,     1, 0 },
	[DRM_IOCTL_NR(DRM_R128_DEPTH)]      = { r128_cce_depth,    1, 0 },
	[DRM_IOCTL_NR(DRM_R128_STIPPLE)]    = { r128_cce_stipple,  1, 0 },
	[DRM_IOCTL_NR(DRM_R128_INDIRECT)]   = { r128_cce_indirect, 1, 1 },
	[DRM_IOCTL_NR(DRM_R128_GETPARAM)]   = { r128_getparam,     1, 0 },
};

/* Number of entries in r128_ioctls, exported to the DRM core. */
int r128_max_ioctl = DRM_ARRAY_SIZE(r128_ioctls);