r128_state.c 40 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370137113721373137413751376137713781379138013811382138313841385138613871388138913901391139213931394139513961397139813991400140114021403140414051406140714081409141014111412141314141415141614171418141914201421142214231424142514261427142814291430143114321433143414351436143714381439144014411442144314441445144614471448144914501451145214531454145514561457145814591460146114621463146414651466146714681469147014711472147314741475147614771478147914801481148214831484148514861487148814891490149114921493149414951496149714981499150015011502150315041505150615071508150915101511151215131514151515161517151815191520152115221523152415251526152715281529153015311532153315341535153615371538153915401541154215431544154515461547154815491550155115521553155415551556155715581559156015611562156315641565156615671568156915701571157215731574157515761577157815791580158115821583158415851586158715881589159015911592159315941595159615971598159916001601160216031604160516061607160816091610161116121613161416151616161716181619162016211622162316241625162616271628162916301631163216331634163516361637163816391640164116421643164416451646164716481649165016511652165316541655165616571658165916601661166216631664166516661667
  1. /* r128_state.c -- State support for r128 -*- linux-c -*-
  2. * Created: Thu Jan 27 02:53:43 2000 by gareth@valinux.com
  3. */
  4. /*
  5. * Copyright 2000 VA Linux Systems, Inc., Sunnyvale, California.
  6. * All Rights Reserved.
  7. *
  8. * Permission is hereby granted, free of charge, to any person obtaining a
  9. * copy of this software and associated documentation files (the "Software"),
  10. * to deal in the Software without restriction, including without limitation
  11. * the rights to use, copy, modify, merge, publish, distribute, sublicense,
  12. * and/or sell copies of the Software, and to permit persons to whom the
  13. * Software is furnished to do so, subject to the following conditions:
  14. *
  15. * The above copyright notice and this permission notice (including the next
  16. * paragraph) shall be included in all copies or substantial portions of the
  17. * Software.
  18. *
  19. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  20. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  21. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  22. * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
  23. * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  24. * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
  25. * DEALINGS IN THE SOFTWARE.
  26. *
  27. * Authors:
  28. * Gareth Hughes <gareth@valinux.com>
  29. */
  30. #include "drmP.h"
  31. #include "drm.h"
  32. #include "r128_drm.h"
  33. #include "r128_drv.h"
  34. /* ================================================================
  35. * CCE hardware state programming functions
  36. */
  37. static void r128_emit_clip_rects(drm_r128_private_t *dev_priv,
  38. struct drm_clip_rect *boxes, int count)
  39. {
  40. u32 aux_sc_cntl = 0x00000000;
  41. RING_LOCALS;
  42. DRM_DEBUG("\n");
  43. BEGIN_RING((count < 3 ? count : 3) * 5 + 2);
  44. if (count >= 1) {
  45. OUT_RING(CCE_PACKET0(R128_AUX1_SC_LEFT, 3));
  46. OUT_RING(boxes[0].x1);
  47. OUT_RING(boxes[0].x2 - 1);
  48. OUT_RING(boxes[0].y1);
  49. OUT_RING(boxes[0].y2 - 1);
  50. aux_sc_cntl |= (R128_AUX1_SC_EN | R128_AUX1_SC_MODE_OR);
  51. }
  52. if (count >= 2) {
  53. OUT_RING(CCE_PACKET0(R128_AUX2_SC_LEFT, 3));
  54. OUT_RING(boxes[1].x1);
  55. OUT_RING(boxes[1].x2 - 1);
  56. OUT_RING(boxes[1].y1);
  57. OUT_RING(boxes[1].y2 - 1);
  58. aux_sc_cntl |= (R128_AUX2_SC_EN | R128_AUX2_SC_MODE_OR);
  59. }
  60. if (count >= 3) {
  61. OUT_RING(CCE_PACKET0(R128_AUX3_SC_LEFT, 3));
  62. OUT_RING(boxes[2].x1);
  63. OUT_RING(boxes[2].x2 - 1);
  64. OUT_RING(boxes[2].y1);
  65. OUT_RING(boxes[2].y2 - 1);
  66. aux_sc_cntl |= (R128_AUX3_SC_EN | R128_AUX3_SC_MODE_OR);
  67. }
  68. OUT_RING(CCE_PACKET0(R128_AUX_SC_CNTL, 0));
  69. OUT_RING(aux_sc_cntl);
  70. ADVANCE_RING();
  71. }
  72. static __inline__ void r128_emit_core(drm_r128_private_t *dev_priv)
  73. {
  74. drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
  75. drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
  76. RING_LOCALS;
  77. DRM_DEBUG("\n");
  78. BEGIN_RING(2);
  79. OUT_RING(CCE_PACKET0(R128_SCALE_3D_CNTL, 0));
  80. OUT_RING(ctx->scale_3d_cntl);
  81. ADVANCE_RING();
  82. }
  83. static __inline__ void r128_emit_context(drm_r128_private_t *dev_priv)
  84. {
  85. drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
  86. drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
  87. RING_LOCALS;
  88. DRM_DEBUG("\n");
  89. BEGIN_RING(13);
  90. OUT_RING(CCE_PACKET0(R128_DST_PITCH_OFFSET_C, 11));
  91. OUT_RING(ctx->dst_pitch_offset_c);
  92. OUT_RING(ctx->dp_gui_master_cntl_c);
  93. OUT_RING(ctx->sc_top_left_c);
  94. OUT_RING(ctx->sc_bottom_right_c);
  95. OUT_RING(ctx->z_offset_c);
  96. OUT_RING(ctx->z_pitch_c);
  97. OUT_RING(ctx->z_sten_cntl_c);
  98. OUT_RING(ctx->tex_cntl_c);
  99. OUT_RING(ctx->misc_3d_state_cntl_reg);
  100. OUT_RING(ctx->texture_clr_cmp_clr_c);
  101. OUT_RING(ctx->texture_clr_cmp_msk_c);
  102. OUT_RING(ctx->fog_color_c);
  103. ADVANCE_RING();
  104. }
  105. static __inline__ void r128_emit_setup(drm_r128_private_t *dev_priv)
  106. {
  107. drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
  108. drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
  109. RING_LOCALS;
  110. DRM_DEBUG("\n");
  111. BEGIN_RING(3);
  112. OUT_RING(CCE_PACKET1(R128_SETUP_CNTL, R128_PM4_VC_FPU_SETUP));
  113. OUT_RING(ctx->setup_cntl);
  114. OUT_RING(ctx->pm4_vc_fpu_setup);
  115. ADVANCE_RING();
  116. }
  117. static __inline__ void r128_emit_masks(drm_r128_private_t *dev_priv)
  118. {
  119. drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
  120. drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
  121. RING_LOCALS;
  122. DRM_DEBUG("\n");
  123. BEGIN_RING(5);
  124. OUT_RING(CCE_PACKET0(R128_DP_WRITE_MASK, 0));
  125. OUT_RING(ctx->dp_write_mask);
  126. OUT_RING(CCE_PACKET0(R128_STEN_REF_MASK_C, 1));
  127. OUT_RING(ctx->sten_ref_mask_c);
  128. OUT_RING(ctx->plane_3d_mask_c);
  129. ADVANCE_RING();
  130. }
  131. static __inline__ void r128_emit_window(drm_r128_private_t *dev_priv)
  132. {
  133. drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
  134. drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
  135. RING_LOCALS;
  136. DRM_DEBUG("\n");
  137. BEGIN_RING(2);
  138. OUT_RING(CCE_PACKET0(R128_WINDOW_XY_OFFSET, 0));
  139. OUT_RING(ctx->window_xy_offset);
  140. ADVANCE_RING();
  141. }
  142. static __inline__ void r128_emit_tex0(drm_r128_private_t *dev_priv)
  143. {
  144. drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
  145. drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
  146. drm_r128_texture_regs_t *tex = &sarea_priv->tex_state[0];
  147. int i;
  148. RING_LOCALS;
  149. DRM_DEBUG("\n");
  150. BEGIN_RING(7 + R128_MAX_TEXTURE_LEVELS);
  151. OUT_RING(CCE_PACKET0(R128_PRIM_TEX_CNTL_C,
  152. 2 + R128_MAX_TEXTURE_LEVELS));
  153. OUT_RING(tex->tex_cntl);
  154. OUT_RING(tex->tex_combine_cntl);
  155. OUT_RING(ctx->tex_size_pitch_c);
  156. for (i = 0; i < R128_MAX_TEXTURE_LEVELS; i++)
  157. OUT_RING(tex->tex_offset[i]);
  158. OUT_RING(CCE_PACKET0(R128_CONSTANT_COLOR_C, 1));
  159. OUT_RING(ctx->constant_color_c);
  160. OUT_RING(tex->tex_border_color);
  161. ADVANCE_RING();
  162. }
  163. static __inline__ void r128_emit_tex1(drm_r128_private_t *dev_priv)
  164. {
  165. drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
  166. drm_r128_texture_regs_t *tex = &sarea_priv->tex_state[1];
  167. int i;
  168. RING_LOCALS;
  169. DRM_DEBUG("\n");
  170. BEGIN_RING(5 + R128_MAX_TEXTURE_LEVELS);
  171. OUT_RING(CCE_PACKET0(R128_SEC_TEX_CNTL_C, 1 + R128_MAX_TEXTURE_LEVELS));
  172. OUT_RING(tex->tex_cntl);
  173. OUT_RING(tex->tex_combine_cntl);
  174. for (i = 0; i < R128_MAX_TEXTURE_LEVELS; i++)
  175. OUT_RING(tex->tex_offset[i]);
  176. OUT_RING(CCE_PACKET0(R128_SEC_TEXTURE_BORDER_COLOR_C, 0));
  177. OUT_RING(tex->tex_border_color);
  178. ADVANCE_RING();
  179. }
  180. static void r128_emit_state(drm_r128_private_t *dev_priv)
  181. {
  182. drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
  183. unsigned int dirty = sarea_priv->dirty;
  184. DRM_DEBUG("dirty=0x%08x\n", dirty);
  185. if (dirty & R128_UPLOAD_CORE) {
  186. r128_emit_core(dev_priv);
  187. sarea_priv->dirty &= ~R128_UPLOAD_CORE;
  188. }
  189. if (dirty & R128_UPLOAD_CONTEXT) {
  190. r128_emit_context(dev_priv);
  191. sarea_priv->dirty &= ~R128_UPLOAD_CONTEXT;
  192. }
  193. if (dirty & R128_UPLOAD_SETUP) {
  194. r128_emit_setup(dev_priv);
  195. sarea_priv->dirty &= ~R128_UPLOAD_SETUP;
  196. }
  197. if (dirty & R128_UPLOAD_MASKS) {
  198. r128_emit_masks(dev_priv);
  199. sarea_priv->dirty &= ~R128_UPLOAD_MASKS;
  200. }
  201. if (dirty & R128_UPLOAD_WINDOW) {
  202. r128_emit_window(dev_priv);
  203. sarea_priv->dirty &= ~R128_UPLOAD_WINDOW;
  204. }
  205. if (dirty & R128_UPLOAD_TEX0) {
  206. r128_emit_tex0(dev_priv);
  207. sarea_priv->dirty &= ~R128_UPLOAD_TEX0;
  208. }
  209. if (dirty & R128_UPLOAD_TEX1) {
  210. r128_emit_tex1(dev_priv);
  211. sarea_priv->dirty &= ~R128_UPLOAD_TEX1;
  212. }
  213. /* Turn off the texture cache flushing */
  214. sarea_priv->context_state.tex_cntl_c &= ~R128_TEX_CACHE_FLUSH;
  215. sarea_priv->dirty &= ~R128_REQUIRE_QUIESCENCE;
  216. }
  217. #if R128_PERFORMANCE_BOXES
  218. /* ================================================================
  219. * Performance monitoring functions
  220. */
  221. static void r128_clear_box(drm_r128_private_t *dev_priv,
  222. int x, int y, int w, int h, int r, int g, int b)
  223. {
  224. u32 pitch, offset;
  225. u32 fb_bpp, color;
  226. RING_LOCALS;
  227. switch (dev_priv->fb_bpp) {
  228. case 16:
  229. fb_bpp = R128_GMC_DST_16BPP;
  230. color = (((r & 0xf8) << 8) |
  231. ((g & 0xfc) << 3) | ((b & 0xf8) >> 3));
  232. break;
  233. case 24:
  234. fb_bpp = R128_GMC_DST_24BPP;
  235. color = ((r << 16) | (g << 8) | b);
  236. break;
  237. case 32:
  238. fb_bpp = R128_GMC_DST_32BPP;
  239. color = (((0xff) << 24) | (r << 16) | (g << 8) | b);
  240. break;
  241. default:
  242. return;
  243. }
  244. offset = dev_priv->back_offset;
  245. pitch = dev_priv->back_pitch >> 3;
  246. BEGIN_RING(6);
  247. OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
  248. OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
  249. R128_GMC_BRUSH_SOLID_COLOR |
  250. fb_bpp |
  251. R128_GMC_SRC_DATATYPE_COLOR |
  252. R128_ROP3_P |
  253. R128_GMC_CLR_CMP_CNTL_DIS | R128_GMC_AUX_CLIP_DIS);
  254. OUT_RING((pitch << 21) | (offset >> 5));
  255. OUT_RING(color);
  256. OUT_RING((x << 16) | y);
  257. OUT_RING((w << 16) | h);
  258. ADVANCE_RING();
  259. }
  260. static void r128_cce_performance_boxes(drm_r128_private_t *dev_priv)
  261. {
  262. if (atomic_read(&dev_priv->idle_count) == 0)
  263. r128_clear_box(dev_priv, 64, 4, 8, 8, 0, 255, 0);
  264. else
  265. atomic_set(&dev_priv->idle_count, 0);
  266. }
  267. #endif
  268. /* ================================================================
  269. * CCE command dispatch functions
  270. */
/* Log which state-upload dirty bits are set in @flags, prefixed by
 * @msg.  Purely a debugging aid; the dispatch paths only call it under
 * an "if (0)" guard.
 */
static void r128_print_dirty(const char *msg, unsigned int flags)
{
	DRM_INFO("%s: (0x%x) %s%s%s%s%s%s%s%s%s\n",
		 msg,
		 flags,
		 (flags & R128_UPLOAD_CORE) ? "core, " : "",
		 (flags & R128_UPLOAD_CONTEXT) ? "context, " : "",
		 (flags & R128_UPLOAD_SETUP) ? "setup, " : "",
		 (flags & R128_UPLOAD_TEX0) ? "tex0, " : "",
		 (flags & R128_UPLOAD_TEX1) ? "tex1, " : "",
		 (flags & R128_UPLOAD_MASKS) ? "masks, " : "",
		 (flags & R128_UPLOAD_WINDOW) ? "window, " : "",
		 (flags & R128_UPLOAD_CLIPRECTS) ? "cliprects, " : "",
		 (flags & R128_REQUIRE_QUIESCENCE) ? "quiescence, " : "");
}
/* Clear the front/back color buffers and/or the depth buffer, one
 * solid-fill blit per cliprect per selected buffer.  The color clears
 * honour clear->color_mask; the depth clear disables the write mask.
 */
static void r128_cce_dispatch_clear(struct drm_device *dev,
				    drm_r128_clear_t *clear)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int nbox = sarea_priv->nbox;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	unsigned int flags = clear->flags;
	int i;
	RING_LOCALS;
	DRM_DEBUG("\n");

	/* When page-flipped, front and back have swapped roles, so swap
	 * the FRONT/BACK request bits to hit the buffer the user sees.
	 */
	if (dev_priv->page_flipping && dev_priv->current_page == 1) {
		unsigned int tmp = flags;

		flags &= ~(R128_FRONT | R128_BACK);
		if (tmp & R128_FRONT)
			flags |= R128_BACK;
		if (tmp & R128_BACK)
			flags |= R128_FRONT;
	}

	for (i = 0; i < nbox; i++) {
		int x = pbox[i].x1;
		int y = pbox[i].y1;
		int w = pbox[i].x2 - x;
		int h = pbox[i].y2 - y;

		DRM_DEBUG("dispatch clear %d,%d-%d,%d flags 0x%x\n",
			  pbox[i].x1, pbox[i].y1, pbox[i].x2,
			  pbox[i].y2, flags);

		/* Program the user's color write mask before any color
		 * buffer clear.
		 */
		if (flags & (R128_FRONT | R128_BACK)) {
			BEGIN_RING(2);
			OUT_RING(CCE_PACKET0(R128_DP_WRITE_MASK, 0));
			OUT_RING(clear->color_mask);
			ADVANCE_RING();
		}

		/* Solid fill of the front buffer with clear_color. */
		if (flags & R128_FRONT) {
			BEGIN_RING(6);
			OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
			OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
				 R128_GMC_BRUSH_SOLID_COLOR |
				 (dev_priv->color_fmt << 8) |
				 R128_GMC_SRC_DATATYPE_COLOR |
				 R128_ROP3_P |
				 R128_GMC_CLR_CMP_CNTL_DIS |
				 R128_GMC_AUX_CLIP_DIS);

			OUT_RING(dev_priv->front_pitch_offset_c);
			OUT_RING(clear->clear_color);

			OUT_RING((x << 16) | y);
			OUT_RING((w << 16) | h);

			ADVANCE_RING();
		}

		/* Same fill for the back buffer. */
		if (flags & R128_BACK) {
			BEGIN_RING(6);
			OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
			OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
				 R128_GMC_BRUSH_SOLID_COLOR |
				 (dev_priv->color_fmt << 8) |
				 R128_GMC_SRC_DATATYPE_COLOR |
				 R128_ROP3_P |
				 R128_GMC_CLR_CMP_CNTL_DIS |
				 R128_GMC_AUX_CLIP_DIS);

			OUT_RING(dev_priv->back_pitch_offset_c);
			OUT_RING(clear->clear_color);

			OUT_RING((x << 16) | y);
			OUT_RING((w << 16) | h);

			ADVANCE_RING();
		}

		/* Depth clear: note WR_MSK_DIS — the write mask is
		 * bypassed, unlike the color clears above.
		 */
		if (flags & R128_DEPTH) {
			BEGIN_RING(6);
			OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
			OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
				 R128_GMC_BRUSH_SOLID_COLOR |
				 (dev_priv->depth_fmt << 8) |
				 R128_GMC_SRC_DATATYPE_COLOR |
				 R128_ROP3_P |
				 R128_GMC_CLR_CMP_CNTL_DIS |
				 R128_GMC_AUX_CLIP_DIS | R128_GMC_WR_MSK_DIS);

			OUT_RING(dev_priv->depth_pitch_offset_c);
			OUT_RING(clear->clear_depth);

			OUT_RING((x << 16) | y);
			OUT_RING((w << 16) | h);

			ADVANCE_RING();
		}
	}
}
/* Copy the back buffer to the front buffer, one BITBLT per cliprect,
 * then bump last_frame (written through LAST_FRAME_REG) so clients can
 * throttle on it.
 */
static void r128_cce_dispatch_swap(struct drm_device *dev)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int nbox = sarea_priv->nbox;
	struct drm_clip_rect *pbox = sarea_priv->boxes;
	int i;
	RING_LOCALS;
	DRM_DEBUG("\n");

#if R128_PERFORMANCE_BOXES
	/* Do some trivial performance monitoring...
	 */
	r128_cce_performance_boxes(dev_priv);
#endif

	for (i = 0; i < nbox; i++) {
		int x = pbox[i].x1;
		int y = pbox[i].y1;
		int w = pbox[i].x2 - x;
		int h = pbox[i].y2 - y;

		BEGIN_RING(7);
		OUT_RING(CCE_PACKET3(R128_CNTL_BITBLT_MULTI, 5));
		OUT_RING(R128_GMC_SRC_PITCH_OFFSET_CNTL |
			 R128_GMC_DST_PITCH_OFFSET_CNTL |
			 R128_GMC_BRUSH_NONE |
			 (dev_priv->color_fmt << 8) |
			 R128_GMC_SRC_DATATYPE_COLOR |
			 R128_ROP3_S |
			 R128_DP_SRC_SOURCE_MEMORY |
			 R128_GMC_CLR_CMP_CNTL_DIS |
			 R128_GMC_AUX_CLIP_DIS | R128_GMC_WR_MSK_DIS);

		/* Make this work even if front & back are flipped:
		 */
		if (dev_priv->current_page == 0) {
			OUT_RING(dev_priv->back_pitch_offset_c);	/* src */
			OUT_RING(dev_priv->front_pitch_offset_c);	/* dst */
		} else {
			OUT_RING(dev_priv->front_pitch_offset_c);
			OUT_RING(dev_priv->back_pitch_offset_c);
		}

		OUT_RING((x << 16) | y);	/* source x/y */
		OUT_RING((x << 16) | y);	/* destination x/y */
		OUT_RING((w << 16) | h);

		ADVANCE_RING();
	}

	/* Increment the frame counter.  The client-side 3D driver must
	 * throttle the framerate by waiting for this value before
	 * performing the swapbuffer ioctl.
	 */
	dev_priv->sarea_priv->last_frame++;

	BEGIN_RING(2);
	OUT_RING(CCE_PACKET0(R128_LAST_FRAME_REG, 0));
	OUT_RING(dev_priv->sarea_priv->last_frame);
	ADVANCE_RING();
}
/* Page-flip by pointing CRTC_OFFSET at the other buffer, then toggle
 * current_page/pfCurrentPage and bump last_frame for client-side frame
 * throttling.
 */
static void r128_cce_dispatch_flip(struct drm_device *dev)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	RING_LOCALS;
	DRM_DEBUG("page=%d pfCurrentPage=%d\n",
		  dev_priv->current_page, dev_priv->sarea_priv->pfCurrentPage);

#if R128_PERFORMANCE_BOXES
	/* Do some trivial performance monitoring...
	 */
	r128_cce_performance_boxes(dev_priv);
#endif

	BEGIN_RING(4);

	/* Wait for any outstanding flip to finish before programming
	 * the next one.
	 */
	R128_WAIT_UNTIL_PAGE_FLIPPED();
	OUT_RING(CCE_PACKET0(R128_CRTC_OFFSET, 0));

	/* Scan out whichever buffer is not currently displayed. */
	if (dev_priv->current_page == 0)
		OUT_RING(dev_priv->back_offset);
	else
		OUT_RING(dev_priv->front_offset);

	ADVANCE_RING();

	/* Increment the frame counter.  The client-side 3D driver must
	 * throttle the framerate by waiting for this value before
	 * performing the swapbuffer ioctl.
	 */
	dev_priv->sarea_priv->last_frame++;
	dev_priv->sarea_priv->pfCurrentPage = dev_priv->current_page =
	    1 - dev_priv->current_page;

	BEGIN_RING(2);
	OUT_RING(CCE_PACKET0(R128_LAST_FRAME_REG, 0));
	OUT_RING(dev_priv->sarea_priv->last_frame);
	ADVANCE_RING();
}
/* Render the vertex buffer @buf, re-issuing the draw once per group of
 * up to three cliprects (the hardware has three scissors).  Dirty state
 * is flushed first; discarded buffers are aged via LAST_DISPATCH_REG so
 * the driver can tell when the hardware is done with them.
 */
static void r128_cce_dispatch_vertex(struct drm_device *dev, struct drm_buf *buf)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_buf_priv_t *buf_priv = buf->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int format = sarea_priv->vc_format;
	int offset = buf->bus_address;
	int size = buf->used;
	int prim = buf_priv->prim;
	int i = 0;
	RING_LOCALS;
	DRM_DEBUG("buf=%d nbox=%d\n", buf->idx, sarea_priv->nbox);

	/* Disabled debug dump of the dirty bits. */
	if (0)
		r128_print_dirty("dispatch_vertex", sarea_priv->dirty);

	if (buf->used) {
		buf_priv->dispatched = 1;

		/* Flush any dirty state other than cliprects before
		 * drawing.
		 */
		if (sarea_priv->dirty & ~R128_UPLOAD_CLIPRECTS)
			r128_emit_state(dev_priv);

		do {
			/* Emit the next set of up to three cliprects */
			if (i < sarea_priv->nbox) {
				r128_emit_clip_rects(dev_priv,
						     &sarea_priv->boxes[i],
						     sarea_priv->nbox - i);
			}

			/* Emit the vertex buffer rendering commands */
			BEGIN_RING(5);

			OUT_RING(CCE_PACKET3(R128_3D_RNDR_GEN_INDX_PRIM, 3));
			OUT_RING(offset);
			OUT_RING(size);
			OUT_RING(format);
			OUT_RING(prim | R128_CCE_VC_CNTL_PRIM_WALK_LIST |
				 (size << R128_CCE_VC_CNTL_NUM_SHIFT));

			ADVANCE_RING();

			i += 3;
		} while (i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		buf_priv->age = dev_priv->sarea_priv->last_dispatch;

		/* Emit the vertex buffer age */
		BEGIN_RING(2);
		OUT_RING(CCE_PACKET0(R128_LAST_DISPATCH_REG, 0));
		OUT_RING(buf_priv->age);
		ADVANCE_RING();

		buf->pending = 1;
		buf->used = 0;
		/* FIXME: Check dispatched field */
		buf_priv->dispatched = 0;
	}

	dev_priv->sarea_priv->last_dispatch++;

	sarea_priv->dirty &= ~R128_UPLOAD_CLIPRECTS;
	sarea_priv->nbox = 0;
}
/* Fire the [start, end) byte range of indirect buffer @buf at the
 * hardware via PM4_IW_INDOFF.  Pads odd dword counts with a Type-2
 * packet and ages discarded buffers through LAST_DISPATCH_REG.
 */
static void r128_cce_dispatch_indirect(struct drm_device *dev,
				       struct drm_buf *buf, int start, int end)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_buf_priv_t *buf_priv = buf->dev_private;
	RING_LOCALS;
	DRM_DEBUG("indirect: buf=%d s=0x%x e=0x%x\n", buf->idx, start, end);

	if (start != end) {
		int offset = buf->bus_address + start;
		/* Round the byte length up to whole dwords. */
		int dwords = (end - start + 3) / sizeof(u32);

		/* Indirect buffer data must be an even number of
		 * dwords, so if we've been given an odd number we must
		 * pad the data with a Type-2 CCE packet.
		 */
		if (dwords & 1) {
			u32 *data = (u32 *)
			    ((char *)dev->agp_buffer_map->handle
			     + buf->offset + start);
			data[dwords++] = cpu_to_le32(R128_CCE_PACKET2);
		}

		buf_priv->dispatched = 1;

		/* Fire off the indirect buffer */
		BEGIN_RING(3);

		OUT_RING(CCE_PACKET0(R128_PM4_IW_INDOFF, 1));
		OUT_RING(offset);
		OUT_RING(dwords);

		ADVANCE_RING();
	}

	if (buf_priv->discard) {
		buf_priv->age = dev_priv->sarea_priv->last_dispatch;

		/* Emit the indirect buffer age */
		BEGIN_RING(2);
		OUT_RING(CCE_PACKET0(R128_LAST_DISPATCH_REG, 0));
		OUT_RING(buf_priv->age);
		ADVANCE_RING();

		buf->pending = 1;
		buf->used = 0;
		/* FIXME: Check dispatched field */
		buf_priv->dispatched = 0;
	}

	dev_priv->sarea_priv->last_dispatch++;
}
/* Render indexed primitives.  Patches a GEN_INDX_PRIM packet header
 * directly into the buffer in front of the index data, then dispatches
 * that range as an indirect buffer once per group of up to three
 * cliprects.
 */
static void r128_cce_dispatch_indices(struct drm_device *dev,
				      struct drm_buf *buf,
				      int start, int end, int count)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	drm_r128_buf_priv_t *buf_priv = buf->dev_private;
	drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int format = sarea_priv->vc_format;
	/* Vertex data address relative to the CCE buffer aperture. */
	int offset = dev->agp_buffer_map->offset - dev_priv->cce_buffers_offset;
	int prim = buf_priv->prim;
	u32 *data;
	int dwords;
	int i = 0;
	RING_LOCALS;
	DRM_DEBUG("indices: s=%d e=%d c=%d\n", start, end, count);

	/* Disabled debug dump of the dirty bits. */
	if (0)
		r128_print_dirty("dispatch_indices", sarea_priv->dirty);

	if (start != end) {
		buf_priv->dispatched = 1;

		/* Flush any dirty state other than cliprects first. */
		if (sarea_priv->dirty & ~R128_UPLOAD_CLIPRECTS)
			r128_emit_state(dev_priv);

		dwords = (end - start + 3) / sizeof(u32);

		data = (u32 *) ((char *)dev->agp_buffer_map->handle
				+ buf->offset + start);

		/* Write the draw-command header in-place ahead of the
		 * index data (little-endian, as the CCE consumes it).
		 */
		data[0] = cpu_to_le32(CCE_PACKET3(R128_3D_RNDR_GEN_INDX_PRIM,
						  dwords - 2));

		data[1] = cpu_to_le32(offset);
		data[2] = cpu_to_le32(R128_MAX_VB_VERTS);
		data[3] = cpu_to_le32(format);
		data[4] = cpu_to_le32((prim | R128_CCE_VC_CNTL_PRIM_WALK_IND |
				       (count << 16)));

		/* Indices are 16-bit; an odd count leaves half of the
		 * last dword unused, so zero that half.
		 */
		if (count & 0x1) {
#ifdef __LITTLE_ENDIAN
			data[dwords - 1] &= 0x0000ffff;
#else
			data[dwords - 1] &= 0xffff0000;
#endif
		}

		do {
			/* Emit the next set of up to three cliprects */
			if (i < sarea_priv->nbox) {
				r128_emit_clip_rects(dev_priv,
						     &sarea_priv->boxes[i],
						     sarea_priv->nbox - i);
			}

			r128_cce_dispatch_indirect(dev, buf, start, end);

			i += 3;
		} while (i < sarea_priv->nbox);
	}

	if (buf_priv->discard) {
		buf_priv->age = dev_priv->sarea_priv->last_dispatch;

		/* Emit the vertex buffer age */
		BEGIN_RING(2);
		OUT_RING(CCE_PACKET0(R128_LAST_DISPATCH_REG, 0));
		OUT_RING(buf_priv->age);
		ADVANCE_RING();

		buf->pending = 1;
		/* FIXME: Check dispatched field */
		buf_priv->dispatched = 0;
	}

	dev_priv->sarea_priv->last_dispatch++;

	sarea_priv->dirty &= ~R128_UPLOAD_CLIPRECTS;
	sarea_priv->nbox = 0;
}
/* Upload texture data via a HOSTDATA_BLT built in the indirect buffer
 * selected by blit->idx.  Flushes the pixel cache before and after the
 * blit.  Returns 0 on success or a negative errno (-EINVAL on bad
 * format or buffer ownership/pending checks).
 */
static int r128_cce_dispatch_blit(struct drm_device *dev,
				  struct drm_file *file_priv,
				  drm_r128_blit_t *blit)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	struct drm_device_dma *dma = dev->dma;
	struct drm_buf *buf;
	drm_r128_buf_priv_t *buf_priv;
	u32 *data;
	int dword_shift, dwords;
	RING_LOCALS;
	DRM_DEBUG("\n");

	/* The compiler won't optimize away a division by a variable,
	 * even if the only legal values are powers of two.  Thus, we'll
	 * use a shift instead.
	 */
	switch (blit->format) {
	case R128_DATATYPE_ARGB8888:
		dword_shift = 0;	/* 1 pixel per dword */
		break;
	case R128_DATATYPE_ARGB1555:
	case R128_DATATYPE_RGB565:
	case R128_DATATYPE_ARGB4444:
	case R128_DATATYPE_YVYU422:
	case R128_DATATYPE_VYUY422:
		dword_shift = 1;	/* 2 pixels per dword */
		break;
	case R128_DATATYPE_CI8:
	case R128_DATATYPE_RGB8:
		dword_shift = 2;	/* 4 pixels per dword */
		break;
	default:
		DRM_ERROR("invalid blit format %d\n", blit->format);
		return -EINVAL;
	}

	/* Flush the pixel cache, and mark the contents as Read Invalid.
	 * This ensures no pixel data gets mixed up with the texture
	 * data from the host data blit, otherwise part of the texture
	 * image may be corrupted.
	 */
	BEGIN_RING(2);
	OUT_RING(CCE_PACKET0(R128_PC_GUI_CTLSTAT, 0));
	OUT_RING(R128_PC_RI_GUI | R128_PC_FLUSH_GUI);
	ADVANCE_RING();

	/* Dispatch the indirect buffer.
	 */
	buf = dma->buflist[blit->idx];
	buf_priv = buf->dev_private;

	/* The buffer must belong to this client and be idle. */
	if (buf->file_priv != file_priv) {
		DRM_ERROR("process %d using buffer owned by %p\n",
			  DRM_CURRENTPID, buf->file_priv);
		return -EINVAL;
	}
	if (buf->pending) {
		DRM_ERROR("sending pending buffer %d\n", blit->idx);
		return -EINVAL;
	}

	buf_priv->discard = 1;

	dwords = (blit->width * blit->height) >> dword_shift;

	/* Build the HOSTDATA_BLT packet in front of the pixel data
	 * already placed in the buffer by userspace (little-endian, as
	 * consumed by the CCE).
	 */
	data = (u32 *) ((char *)dev->agp_buffer_map->handle + buf->offset);

	data[0] = cpu_to_le32(CCE_PACKET3(R128_CNTL_HOSTDATA_BLT, dwords + 6));
	data[1] = cpu_to_le32((R128_GMC_DST_PITCH_OFFSET_CNTL |
			       R128_GMC_BRUSH_NONE |
			       (blit->format << 8) |
			       R128_GMC_SRC_DATATYPE_COLOR |
			       R128_ROP3_S |
			       R128_DP_SRC_SOURCE_HOST_DATA |
			       R128_GMC_CLR_CMP_CNTL_DIS |
			       R128_GMC_AUX_CLIP_DIS | R128_GMC_WR_MSK_DIS));

	data[2] = cpu_to_le32((blit->pitch << 21) | (blit->offset >> 5));
	data[3] = cpu_to_le32(0xffffffff);
	data[4] = cpu_to_le32(0xffffffff);
	data[5] = cpu_to_le32((blit->y << 16) | blit->x);
	data[6] = cpu_to_le32((blit->height << 16) | blit->width);
	data[7] = cpu_to_le32(dwords);

	buf->used = (dwords + 8) * sizeof(u32);

	r128_cce_dispatch_indirect(dev, buf, 0, buf->used);

	/* Flush the pixel cache after the blit completes.  This ensures
	 * the texture data is written out to memory before rendering
	 * continues.
	 */
	BEGIN_RING(2);
	OUT_RING(CCE_PACKET0(R128_PC_GUI_CTLSTAT, 0));
	OUT_RING(R128_PC_FLUSH_GUI);
	ADVANCE_RING();

	return 0;
}
  700. /* ================================================================
  701. * Tiled depth buffer management
  702. *
  703. * FIXME: These should all set the destination write mask for when we
  704. * have hardware stencil support.
  705. */
/* Write a horizontal span of depth values, copied from userspace, into
 * the (tiled) depth buffer — one 1x1 solid fill per pixel, starting at
 * (x, y) and advancing in x.  An optional per-pixel byte mask skips
 * pixels whose mask byte is zero.  Returns 0 or a negative errno.
 */
static int r128_cce_dispatch_write_span(struct drm_device *dev,
					drm_r128_depth_t *depth)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	int count, x, y;
	u32 *buffer;
	u8 *mask;
	int i, buffer_size, mask_size;
	RING_LOCALS;
	DRM_DEBUG("\n");

	/* Bound the request; count also sizes the copies below. */
	count = depth->n;
	if (count > 4096 || count <= 0)
		return -EMSGSIZE;

	if (DRM_COPY_FROM_USER(&x, depth->x, sizeof(x)))
		return -EFAULT;
	if (DRM_COPY_FROM_USER(&y, depth->y, sizeof(y)))
		return -EFAULT;

	/* Copy in the depth values. */
	buffer_size = depth->n * sizeof(u32);
	buffer = kmalloc(buffer_size, GFP_KERNEL);
	if (buffer == NULL)
		return -ENOMEM;
	if (DRM_COPY_FROM_USER(buffer, depth->buffer, buffer_size)) {
		kfree(buffer);
		return -EFAULT;
	}

	mask_size = depth->n * sizeof(u8);
	if (depth->mask) {
		/* Masked path: copy in the mask, emit only set pixels. */
		mask = kmalloc(mask_size, GFP_KERNEL);
		if (mask == NULL) {
			kfree(buffer);
			return -ENOMEM;
		}
		if (DRM_COPY_FROM_USER(mask, depth->mask, mask_size)) {
			kfree(buffer);
			kfree(mask);
			return -EFAULT;
		}

		for (i = 0; i < count; i++, x++) {
			if (mask[i]) {
				BEGIN_RING(6);

				OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
				OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
					 R128_GMC_BRUSH_SOLID_COLOR |
					 (dev_priv->depth_fmt << 8) |
					 R128_GMC_SRC_DATATYPE_COLOR |
					 R128_ROP3_P |
					 R128_GMC_CLR_CMP_CNTL_DIS |
					 R128_GMC_WR_MSK_DIS);

				OUT_RING(dev_priv->depth_pitch_offset_c);
				OUT_RING(buffer[i]);

				OUT_RING((x << 16) | y);
				OUT_RING((1 << 16) | 1);	/* 1x1 fill */

				ADVANCE_RING();
			}
		}

		kfree(mask);
	} else {
		/* Unmasked path: emit every pixel in the span. */
		for (i = 0; i < count; i++, x++) {
			BEGIN_RING(6);

			OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
			OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
				 R128_GMC_BRUSH_SOLID_COLOR |
				 (dev_priv->depth_fmt << 8) |
				 R128_GMC_SRC_DATATYPE_COLOR |
				 R128_ROP3_P |
				 R128_GMC_CLR_CMP_CNTL_DIS |
				 R128_GMC_WR_MSK_DIS);

			OUT_RING(dev_priv->depth_pitch_offset_c);
			OUT_RING(buffer[i]);

			OUT_RING((x << 16) | y);
			OUT_RING((1 << 16) | 1);	/* 1x1 fill */

			ADVANCE_RING();
		}
	}

	kfree(buffer);

	return 0;
}
  783. static int r128_cce_dispatch_write_pixels(struct drm_device *dev,
  784. drm_r128_depth_t *depth)
  785. {
  786. drm_r128_private_t *dev_priv = dev->dev_private;
  787. int count, *x, *y;
  788. u32 *buffer;
  789. u8 *mask;
  790. int i, xbuf_size, ybuf_size, buffer_size, mask_size;
  791. RING_LOCALS;
  792. DRM_DEBUG("\n");
  793. count = depth->n;
  794. if (count > 4096 || count <= 0)
  795. return -EMSGSIZE;
  796. xbuf_size = count * sizeof(*x);
  797. ybuf_size = count * sizeof(*y);
  798. x = kmalloc(xbuf_size, GFP_KERNEL);
  799. if (x == NULL)
  800. return -ENOMEM;
  801. y = kmalloc(ybuf_size, GFP_KERNEL);
  802. if (y == NULL) {
  803. kfree(x);
  804. return -ENOMEM;
  805. }
  806. if (DRM_COPY_FROM_USER(x, depth->x, xbuf_size)) {
  807. kfree(x);
  808. kfree(y);
  809. return -EFAULT;
  810. }
  811. if (DRM_COPY_FROM_USER(y, depth->y, xbuf_size)) {
  812. kfree(x);
  813. kfree(y);
  814. return -EFAULT;
  815. }
  816. buffer_size = depth->n * sizeof(u32);
  817. buffer = kmalloc(buffer_size, GFP_KERNEL);
  818. if (buffer == NULL) {
  819. kfree(x);
  820. kfree(y);
  821. return -ENOMEM;
  822. }
  823. if (DRM_COPY_FROM_USER(buffer, depth->buffer, buffer_size)) {
  824. kfree(x);
  825. kfree(y);
  826. kfree(buffer);
  827. return -EFAULT;
  828. }
  829. if (depth->mask) {
  830. mask_size = depth->n * sizeof(u8);
  831. mask = kmalloc(mask_size, GFP_KERNEL);
  832. if (mask == NULL) {
  833. kfree(x);
  834. kfree(y);
  835. kfree(buffer);
  836. return -ENOMEM;
  837. }
  838. if (DRM_COPY_FROM_USER(mask, depth->mask, mask_size)) {
  839. kfree(x);
  840. kfree(y);
  841. kfree(buffer);
  842. kfree(mask);
  843. return -EFAULT;
  844. }
  845. for (i = 0; i < count; i++) {
  846. if (mask[i]) {
  847. BEGIN_RING(6);
  848. OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
  849. OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
  850. R128_GMC_BRUSH_SOLID_COLOR |
  851. (dev_priv->depth_fmt << 8) |
  852. R128_GMC_SRC_DATATYPE_COLOR |
  853. R128_ROP3_P |
  854. R128_GMC_CLR_CMP_CNTL_DIS |
  855. R128_GMC_WR_MSK_DIS);
  856. OUT_RING(dev_priv->depth_pitch_offset_c);
  857. OUT_RING(buffer[i]);
  858. OUT_RING((x[i] << 16) | y[i]);
  859. OUT_RING((1 << 16) | 1);
  860. ADVANCE_RING();
  861. }
  862. }
  863. kfree(mask);
  864. } else {
  865. for (i = 0; i < count; i++) {
  866. BEGIN_RING(6);
  867. OUT_RING(CCE_PACKET3(R128_CNTL_PAINT_MULTI, 4));
  868. OUT_RING(R128_GMC_DST_PITCH_OFFSET_CNTL |
  869. R128_GMC_BRUSH_SOLID_COLOR |
  870. (dev_priv->depth_fmt << 8) |
  871. R128_GMC_SRC_DATATYPE_COLOR |
  872. R128_ROP3_P |
  873. R128_GMC_CLR_CMP_CNTL_DIS |
  874. R128_GMC_WR_MSK_DIS);
  875. OUT_RING(dev_priv->depth_pitch_offset_c);
  876. OUT_RING(buffer[i]);
  877. OUT_RING((x[i] << 16) | y[i]);
  878. OUT_RING((1 << 16) | 1);
  879. ADVANCE_RING();
  880. }
  881. }
  882. kfree(x);
  883. kfree(y);
  884. kfree(buffer);
  885. return 0;
  886. }
  887. static int r128_cce_dispatch_read_span(struct drm_device *dev,
  888. drm_r128_depth_t *depth)
  889. {
  890. drm_r128_private_t *dev_priv = dev->dev_private;
  891. int count, x, y;
  892. RING_LOCALS;
  893. DRM_DEBUG("\n");
  894. count = depth->n;
  895. if (count > 4096 || count <= 0)
  896. return -EMSGSIZE;
  897. if (DRM_COPY_FROM_USER(&x, depth->x, sizeof(x)))
  898. return -EFAULT;
  899. if (DRM_COPY_FROM_USER(&y, depth->y, sizeof(y)))
  900. return -EFAULT;
  901. BEGIN_RING(7);
  902. OUT_RING(CCE_PACKET3(R128_CNTL_BITBLT_MULTI, 5));
  903. OUT_RING(R128_GMC_SRC_PITCH_OFFSET_CNTL |
  904. R128_GMC_DST_PITCH_OFFSET_CNTL |
  905. R128_GMC_BRUSH_NONE |
  906. (dev_priv->depth_fmt << 8) |
  907. R128_GMC_SRC_DATATYPE_COLOR |
  908. R128_ROP3_S |
  909. R128_DP_SRC_SOURCE_MEMORY |
  910. R128_GMC_CLR_CMP_CNTL_DIS | R128_GMC_WR_MSK_DIS);
  911. OUT_RING(dev_priv->depth_pitch_offset_c);
  912. OUT_RING(dev_priv->span_pitch_offset_c);
  913. OUT_RING((x << 16) | y);
  914. OUT_RING((0 << 16) | 0);
  915. OUT_RING((count << 16) | 1);
  916. ADVANCE_RING();
  917. return 0;
  918. }
  919. static int r128_cce_dispatch_read_pixels(struct drm_device *dev,
  920. drm_r128_depth_t *depth)
  921. {
  922. drm_r128_private_t *dev_priv = dev->dev_private;
  923. int count, *x, *y;
  924. int i, xbuf_size, ybuf_size;
  925. RING_LOCALS;
  926. DRM_DEBUG("\n");
  927. count = depth->n;
  928. if (count > 4096 || count <= 0)
  929. return -EMSGSIZE;
  930. if (count > dev_priv->depth_pitch)
  931. count = dev_priv->depth_pitch;
  932. xbuf_size = count * sizeof(*x);
  933. ybuf_size = count * sizeof(*y);
  934. x = kmalloc(xbuf_size, GFP_KERNEL);
  935. if (x == NULL)
  936. return -ENOMEM;
  937. y = kmalloc(ybuf_size, GFP_KERNEL);
  938. if (y == NULL) {
  939. kfree(x);
  940. return -ENOMEM;
  941. }
  942. if (DRM_COPY_FROM_USER(x, depth->x, xbuf_size)) {
  943. kfree(x);
  944. kfree(y);
  945. return -EFAULT;
  946. }
  947. if (DRM_COPY_FROM_USER(y, depth->y, ybuf_size)) {
  948. kfree(x);
  949. kfree(y);
  950. return -EFAULT;
  951. }
  952. for (i = 0; i < count; i++) {
  953. BEGIN_RING(7);
  954. OUT_RING(CCE_PACKET3(R128_CNTL_BITBLT_MULTI, 5));
  955. OUT_RING(R128_GMC_SRC_PITCH_OFFSET_CNTL |
  956. R128_GMC_DST_PITCH_OFFSET_CNTL |
  957. R128_GMC_BRUSH_NONE |
  958. (dev_priv->depth_fmt << 8) |
  959. R128_GMC_SRC_DATATYPE_COLOR |
  960. R128_ROP3_S |
  961. R128_DP_SRC_SOURCE_MEMORY |
  962. R128_GMC_CLR_CMP_CNTL_DIS | R128_GMC_WR_MSK_DIS);
  963. OUT_RING(dev_priv->depth_pitch_offset_c);
  964. OUT_RING(dev_priv->span_pitch_offset_c);
  965. OUT_RING((x[i] << 16) | y[i]);
  966. OUT_RING((i << 16) | 0);
  967. OUT_RING((1 << 16) | 1);
  968. ADVANCE_RING();
  969. }
  970. kfree(x);
  971. kfree(y);
  972. return 0;
  973. }
  974. /* ================================================================
  975. * Polygon stipple
  976. */
  977. static void r128_cce_dispatch_stipple(struct drm_device *dev, u32 *stipple)
  978. {
  979. drm_r128_private_t *dev_priv = dev->dev_private;
  980. int i;
  981. RING_LOCALS;
  982. DRM_DEBUG("\n");
  983. BEGIN_RING(33);
  984. OUT_RING(CCE_PACKET0(R128_BRUSH_DATA0, 31));
  985. for (i = 0; i < 32; i++)
  986. OUT_RING(stipple[i]);
  987. ADVANCE_RING();
  988. }
  989. /* ================================================================
  990. * IOCTL functions
  991. */
  992. static int r128_cce_clear(struct drm_device *dev, void *data, struct drm_file *file_priv)
  993. {
  994. drm_r128_private_t *dev_priv = dev->dev_private;
  995. drm_r128_sarea_t *sarea_priv;
  996. drm_r128_clear_t *clear = data;
  997. DRM_DEBUG("\n");
  998. LOCK_TEST_WITH_RETURN(dev, file_priv);
  999. DEV_INIT_TEST_WITH_RETURN(dev_priv);
  1000. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1001. sarea_priv = dev_priv->sarea_priv;
  1002. if (sarea_priv->nbox > R128_NR_SAREA_CLIPRECTS)
  1003. sarea_priv->nbox = R128_NR_SAREA_CLIPRECTS;
  1004. r128_cce_dispatch_clear(dev, clear);
  1005. COMMIT_RING();
  1006. /* Make sure we restore the 3D state next time.
  1007. */
  1008. dev_priv->sarea_priv->dirty |= R128_UPLOAD_CONTEXT | R128_UPLOAD_MASKS;
  1009. return 0;
  1010. }
/* Enable page flipping: save the CRTC offset registers so cleanup can
 * restore them, point the CRTC at the front buffer and publish the
 * visible page in the SAREA.  Always returns 0.
 */
static int r128_do_init_pageflip(struct drm_device *dev)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	DRM_DEBUG("\n");

	/* Save register state for r128_do_cleanup_pageflip(). */
	dev_priv->crtc_offset = R128_READ(R128_CRTC_OFFSET);
	dev_priv->crtc_offset_cntl = R128_READ(R128_CRTC_OFFSET_CNTL);

	/* Scan out from the front buffer and enable the flip control bit. */
	R128_WRITE(R128_CRTC_OFFSET, dev_priv->front_offset);
	R128_WRITE(R128_CRTC_OFFSET_CNTL,
		   dev_priv->crtc_offset_cntl | R128_CRTC_OFFSET_FLIP_CNTL);

	dev_priv->page_flipping = 1;
	dev_priv->current_page = 0;
	/* Let clients see which page is currently visible. */
	dev_priv->sarea_priv->pfCurrentPage = dev_priv->current_page;

	return 0;
}
/* Disable page flipping and restore the CRTC registers saved by
 * r128_do_init_pageflip().  If the back page is currently visible,
 * queue one final flip so the front buffer is showing when flipping
 * stops.  Always returns 0.
 */
static int r128_do_cleanup_pageflip(struct drm_device *dev)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	DRM_DEBUG("\n");

	R128_WRITE(R128_CRTC_OFFSET, dev_priv->crtc_offset);
	R128_WRITE(R128_CRTC_OFFSET_CNTL, dev_priv->crtc_offset_cntl);

	/* current_page != 0 means the back page is on screen; flip back. */
	if (dev_priv->current_page != 0) {
		r128_cce_dispatch_flip(dev);
		COMMIT_RING();
	}

	dev_priv->page_flipping = 0;
	return 0;
}
  1038. /* Swapping and flipping are different operations, need different ioctls.
  1039. * They can & should be intermixed to support multiple 3d windows.
  1040. */
/* DRM_IOCTL_R128_FLIP handler: flip the visible page.
 * Page flipping is enabled lazily on the first flip request.
 * Always returns 0 once the lock/init/ring checks pass.
 */
static int r128_cce_flip(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	DRM_DEBUG("\n");

	LOCK_TEST_WITH_RETURN(dev, file_priv);
	DEV_INIT_TEST_WITH_RETURN(dev_priv);
	RING_SPACE_TEST_WITH_RETURN(dev_priv);

	/* First flip request turns page flipping on. */
	if (!dev_priv->page_flipping)
		r128_do_init_pageflip(dev);

	r128_cce_dispatch_flip(dev);

	COMMIT_RING();
	return 0;
}
  1054. static int r128_cce_swap(struct drm_device *dev, void *data, struct drm_file *file_priv)
  1055. {
  1056. drm_r128_private_t *dev_priv = dev->dev_private;
  1057. drm_r128_sarea_t *sarea_priv = dev_priv->sarea_priv;
  1058. DRM_DEBUG("\n");
  1059. LOCK_TEST_WITH_RETURN(dev, file_priv);
  1060. DEV_INIT_TEST_WITH_RETURN(dev_priv);
  1061. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1062. if (sarea_priv->nbox > R128_NR_SAREA_CLIPRECTS)
  1063. sarea_priv->nbox = R128_NR_SAREA_CLIPRECTS;
  1064. r128_cce_dispatch_swap(dev);
  1065. dev_priv->sarea_priv->dirty |= (R128_UPLOAD_CONTEXT |
  1066. R128_UPLOAD_MASKS);
  1067. COMMIT_RING();
  1068. return 0;
  1069. }
/* DRM_IOCTL_R128_VERTEX handler: submit a vertex DMA buffer.
 *
 * Validates the buffer index, the primitive type, buffer ownership and
 * pending state before handing the buffer to the CCE.  Returns 0 on
 * success or -EINVAL for any failed validation.
 */
static int r128_cce_vertex(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	struct drm_device_dma *dma = dev->dma;
	struct drm_buf *buf;
	drm_r128_buf_priv_t *buf_priv;
	drm_r128_vertex_t *vertex = data;

	LOCK_TEST_WITH_RETURN(dev, file_priv);
	DEV_INIT_TEST_WITH_RETURN(dev_priv);

	DRM_DEBUG("pid=%d index=%d count=%d discard=%d\n",
		  DRM_CURRENTPID, vertex->idx, vertex->count, vertex->discard);

	/* The index must name an existing DMA buffer. */
	if (vertex->idx < 0 || vertex->idx >= dma->buf_count) {
		DRM_ERROR("buffer index %d (of %d max)\n",
			  vertex->idx, dma->buf_count - 1);
		return -EINVAL;
	}
	/* Reject primitive types beyond the last defined value. */
	if (vertex->prim < 0 ||
	    vertex->prim > R128_CCE_VC_CNTL_PRIM_TYPE_TRI_TYPE2) {
		DRM_ERROR("buffer prim %d\n", vertex->prim);
		return -EINVAL;
	}

	RING_SPACE_TEST_WITH_RETURN(dev_priv);
	VB_AGE_TEST_WITH_RETURN(dev_priv);

	buf = dma->buflist[vertex->idx];
	buf_priv = buf->dev_private;

	/* Only the client that owns the buffer may submit it. */
	if (buf->file_priv != file_priv) {
		DRM_ERROR("process %d using buffer owned by %p\n",
			  DRM_CURRENTPID, buf->file_priv);
		return -EINVAL;
	}
	/* A buffer already queued to the hardware cannot be resubmitted. */
	if (buf->pending) {
		DRM_ERROR("sending pending buffer %d\n", vertex->idx);
		return -EINVAL;
	}

	buf->used = vertex->count;
	buf_priv->prim = vertex->prim;
	buf_priv->discard = vertex->discard;

	r128_cce_dispatch_vertex(dev, buf);

	COMMIT_RING();
	return 0;
}
/* DRM_IOCTL_R128_INDICES handler: submit an indexed-primitive DMA
 * buffer.
 *
 * Validates the buffer index, primitive type, ownership, pending state
 * and the alignment/placement of the index data before dispatch.
 * Returns 0 on success or -EINVAL for any failed validation.
 */
static int r128_cce_indices(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	struct drm_device_dma *dma = dev->dma;
	struct drm_buf *buf;
	drm_r128_buf_priv_t *buf_priv;
	drm_r128_indices_t *elts = data;
	int count;

	LOCK_TEST_WITH_RETURN(dev, file_priv);
	DEV_INIT_TEST_WITH_RETURN(dev_priv);

	DRM_DEBUG("pid=%d buf=%d s=%d e=%d d=%d\n", DRM_CURRENTPID,
		  elts->idx, elts->start, elts->end, elts->discard);

	/* The index must name an existing DMA buffer. */
	if (elts->idx < 0 || elts->idx >= dma->buf_count) {
		DRM_ERROR("buffer index %d (of %d max)\n",
			  elts->idx, dma->buf_count - 1);
		return -EINVAL;
	}
	/* Reject primitive types beyond the last defined value. */
	if (elts->prim < 0 ||
	    elts->prim > R128_CCE_VC_CNTL_PRIM_TYPE_TRI_TYPE2) {
		DRM_ERROR("buffer prim %d\n", elts->prim);
		return -EINVAL;
	}

	RING_SPACE_TEST_WITH_RETURN(dev_priv);
	VB_AGE_TEST_WITH_RETURN(dev_priv);

	buf = dma->buflist[elts->idx];
	buf_priv = buf->dev_private;

	/* Only the client that owns the buffer may submit it. */
	if (buf->file_priv != file_priv) {
		DRM_ERROR("process %d using buffer owned by %p\n",
			  DRM_CURRENTPID, buf->file_priv);
		return -EINVAL;
	}
	if (buf->pending) {
		DRM_ERROR("sending pending buffer %d\n", elts->idx);
		return -EINVAL;
	}

	/* Number of 16-bit indices between start and end. */
	count = (elts->end - elts->start) / sizeof(u16);
	/* Rebase start to cover the primitive header preceding the
	 * index data.
	 */
	elts->start -= R128_INDEX_PRIM_OFFSET;

	/* Index data must be 8-byte aligned. */
	if (elts->start & 0x7) {
		DRM_ERROR("misaligned buffer 0x%x\n", elts->start);
		return -EINVAL;
	}
	/* The header must lie within already-written buffer space. */
	if (elts->start < buf->used) {
		DRM_ERROR("no header 0x%x - 0x%x\n", elts->start, buf->used);
		return -EINVAL;
	}

	buf->used = elts->end;
	buf_priv->prim = elts->prim;
	buf_priv->discard = elts->discard;

	r128_cce_dispatch_indices(dev, buf, elts->start, elts->end, count);

	COMMIT_RING();
	return 0;
}
  1163. static int r128_cce_blit(struct drm_device *dev, void *data, struct drm_file *file_priv)
  1164. {
  1165. struct drm_device_dma *dma = dev->dma;
  1166. drm_r128_private_t *dev_priv = dev->dev_private;
  1167. drm_r128_blit_t *blit = data;
  1168. int ret;
  1169. LOCK_TEST_WITH_RETURN(dev, file_priv);
  1170. DEV_INIT_TEST_WITH_RETURN(dev_priv);
  1171. DRM_DEBUG("pid=%d index=%d\n", DRM_CURRENTPID, blit->idx);
  1172. if (blit->idx < 0 || blit->idx >= dma->buf_count) {
  1173. DRM_ERROR("buffer index %d (of %d max)\n",
  1174. blit->idx, dma->buf_count - 1);
  1175. return -EINVAL;
  1176. }
  1177. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1178. VB_AGE_TEST_WITH_RETURN(dev_priv);
  1179. ret = r128_cce_dispatch_blit(dev, file_priv, blit);
  1180. COMMIT_RING();
  1181. return ret;
  1182. }
  1183. static int r128_cce_depth(struct drm_device *dev, void *data, struct drm_file *file_priv)
  1184. {
  1185. drm_r128_private_t *dev_priv = dev->dev_private;
  1186. drm_r128_depth_t *depth = data;
  1187. int ret;
  1188. LOCK_TEST_WITH_RETURN(dev, file_priv);
  1189. DEV_INIT_TEST_WITH_RETURN(dev_priv);
  1190. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1191. ret = -EINVAL;
  1192. switch (depth->func) {
  1193. case R128_WRITE_SPAN:
  1194. ret = r128_cce_dispatch_write_span(dev, depth);
  1195. break;
  1196. case R128_WRITE_PIXELS:
  1197. ret = r128_cce_dispatch_write_pixels(dev, depth);
  1198. break;
  1199. case R128_READ_SPAN:
  1200. ret = r128_cce_dispatch_read_span(dev, depth);
  1201. break;
  1202. case R128_READ_PIXELS:
  1203. ret = r128_cce_dispatch_read_pixels(dev, depth);
  1204. break;
  1205. }
  1206. COMMIT_RING();
  1207. return ret;
  1208. }
  1209. static int r128_cce_stipple(struct drm_device *dev, void *data, struct drm_file *file_priv)
  1210. {
  1211. drm_r128_private_t *dev_priv = dev->dev_private;
  1212. drm_r128_stipple_t *stipple = data;
  1213. u32 mask[32];
  1214. LOCK_TEST_WITH_RETURN(dev, file_priv);
  1215. DEV_INIT_TEST_WITH_RETURN(dev_priv);
  1216. if (DRM_COPY_FROM_USER(&mask, stipple->mask, 32 * sizeof(u32)))
  1217. return -EFAULT;
  1218. RING_SPACE_TEST_WITH_RETURN(dev_priv);
  1219. r128_cce_dispatch_stipple(dev, mask);
  1220. COMMIT_RING();
  1221. return 0;
  1222. }
/* DRM_IOCTL_R128_INDIRECT handler: submit a raw command buffer.
 *
 * The buffer contents are not validated, so this ioctl is restricted
 * to privileged (master/root) clients in the ioctl table.  Returns 0
 * on success or -EINVAL for any failed validation.
 */
static int r128_cce_indirect(struct drm_device *dev, void *data, struct drm_file *file_priv)
{
	drm_r128_private_t *dev_priv = dev->dev_private;
	struct drm_device_dma *dma = dev->dma;
	struct drm_buf *buf;
	drm_r128_buf_priv_t *buf_priv;
	drm_r128_indirect_t *indirect = data;
#if 0
	RING_LOCALS;
#endif

	LOCK_TEST_WITH_RETURN(dev, file_priv);
	DEV_INIT_TEST_WITH_RETURN(dev_priv);

	DRM_DEBUG("idx=%d s=%d e=%d d=%d\n",
		  indirect->idx, indirect->start, indirect->end,
		  indirect->discard);

	/* The index must name an existing DMA buffer. */
	if (indirect->idx < 0 || indirect->idx >= dma->buf_count) {
		DRM_ERROR("buffer index %d (of %d max)\n",
			  indirect->idx, dma->buf_count - 1);
		return -EINVAL;
	}

	buf = dma->buflist[indirect->idx];
	buf_priv = buf->dev_private;

	/* Only the client that owns the buffer may submit it. */
	if (buf->file_priv != file_priv) {
		DRM_ERROR("process %d using buffer owned by %p\n",
			  DRM_CURRENTPID, buf->file_priv);
		return -EINVAL;
	}
	if (buf->pending) {
		DRM_ERROR("sending pending buffer %d\n", indirect->idx);
		return -EINVAL;
	}
	/* Submissions must append to the buffer, never rewind it. */
	if (indirect->start < buf->used) {
		DRM_ERROR("reusing indirect: start=0x%x actual=0x%x\n",
			  indirect->start, buf->used);
		return -EINVAL;
	}

	RING_SPACE_TEST_WITH_RETURN(dev_priv);
	VB_AGE_TEST_WITH_RETURN(dev_priv);

	buf->used = indirect->end;
	buf_priv->discard = indirect->discard;

#if 0
	/* Wait for the 3D stream to idle before the indirect buffer
	 * containing 2D acceleration commands is processed.
	 */
	BEGIN_RING(2);
	RADEON_WAIT_UNTIL_3D_IDLE();
	ADVANCE_RING();
#endif

	/* Dispatch the indirect buffer full of commands from the
	 * X server.  This is insecure and is thus only available to
	 * privileged clients.
	 */
	r128_cce_dispatch_indirect(dev, buf, indirect->start, indirect->end);

	COMMIT_RING();
	return 0;
}
  1279. static int r128_getparam(struct drm_device *dev, void *data, struct drm_file *file_priv)
  1280. {
  1281. drm_r128_private_t *dev_priv = dev->dev_private;
  1282. drm_r128_getparam_t *param = data;
  1283. int value;
  1284. DEV_INIT_TEST_WITH_RETURN(dev_priv);
  1285. DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);
  1286. switch (param->param) {
  1287. case R128_PARAM_IRQ_NR:
  1288. value = drm_dev_to_irq(dev);
  1289. break;
  1290. default:
  1291. return -EINVAL;
  1292. }
  1293. if (DRM_COPY_TO_USER(param->value, &value, sizeof(int))) {
  1294. DRM_ERROR("copy_to_user\n");
  1295. return -EFAULT;
  1296. }
  1297. return 0;
  1298. }
  1299. void r128_driver_preclose(struct drm_device *dev, struct drm_file *file_priv)
  1300. {
  1301. if (dev->dev_private) {
  1302. drm_r128_private_t *dev_priv = dev->dev_private;
  1303. if (dev_priv->page_flipping)
  1304. r128_do_cleanup_pageflip(dev);
  1305. }
  1306. }
/* Device lastclose hook: tear down the CCE engine state when the last
 * client closes the device.
 */
void r128_driver_lastclose(struct drm_device *dev)
{
	r128_do_cleanup_cce(dev);
}
/* Ioctl dispatch table.  Setup ioctls and the unvalidated indirect
 * submission path carry DRM_MASTER|DRM_ROOT_ONLY; everything else
 * requires only DRM_AUTH.
 */
struct drm_ioctl_desc r128_ioctls[] = {
	DRM_IOCTL_DEF_DRV(R128_INIT, r128_cce_init, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(R128_CCE_START, r128_cce_start, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(R128_CCE_STOP, r128_cce_stop, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(R128_CCE_RESET, r128_cce_reset, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(R128_CCE_IDLE, r128_cce_idle, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_RESET, r128_engine_reset, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_FULLSCREEN, r128_fullscreen, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_SWAP, r128_cce_swap, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_FLIP, r128_cce_flip, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_CLEAR, r128_cce_clear, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_VERTEX, r128_cce_vertex, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_INDICES, r128_cce_indices, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_BLIT, r128_cce_blit, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_DEPTH, r128_cce_depth, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_STIPPLE, r128_cce_stipple, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(R128_INDIRECT, r128_cce_indirect, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(R128_GETPARAM, r128_getparam, DRM_AUTH),
};

int r128_max_ioctl = DRM_ARRAY_SIZE(r128_ioctls);