intel_overlay.c

/*
 * Copyright © 2009
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 * Authors:
 *    Daniel Vetter <daniel@ffwll.ch>
 *
 * Derived from Xorg ddx, xf86-video-intel, src/i830_video.c
 */

#include <linux/seq_file.h>
#include "drmP.h"
#include "drm.h"
#include "i915_drm.h"
#include "i915_drv.h"
#include "i915_reg.h"
#include "intel_drv.h"

/* Limits for overlay size. According to the intel docs, the real limits are:
 * Y width: 4095, UV width (planar): 2047, Y height: 2047,
 * UV height (planar): 1023. But the xorg driver thinks 2048 for both height
 * and width. Use the minimum of both. */
#define IMAGE_MAX_WIDTH 2048
#define IMAGE_MAX_HEIGHT 2046 /* 2 * 1023 */
/* on 830 and 845 these large limits result in the card hanging */
#define IMAGE_MAX_WIDTH_LEGACY 1024
#define IMAGE_MAX_HEIGHT_LEGACY 1088

/* overlay register definitions */
/* OCMD register */
#define OCMD_TILED_SURFACE (0x1<<19)
#define OCMD_MIRROR_MASK (0x3<<17)
#define OCMD_MIRROR_MODE (0x3<<17)
#define OCMD_MIRROR_HORIZONTAL (0x1<<17)
#define OCMD_MIRROR_VERTICAL (0x2<<17)
#define OCMD_MIRROR_BOTH (0x3<<17)
#define OCMD_BYTEORDER_MASK (0x3<<14) /* zero for YUYV or FOURCC YUY2 */
#define OCMD_UV_SWAP (0x1<<14) /* YVYU */
#define OCMD_Y_SWAP (0x2<<14) /* UYVY or FOURCC UYVY */
#define OCMD_Y_AND_UV_SWAP (0x3<<14) /* VYUY */
#define OCMD_SOURCE_FORMAT_MASK (0xf<<10)
#define OCMD_RGB_888 (0x1<<10) /* not in i965 Intel docs */
#define OCMD_RGB_555 (0x2<<10) /* not in i965 Intel docs */
#define OCMD_RGB_565 (0x3<<10) /* not in i965 Intel docs */
#define OCMD_YUV_422_PACKED (0x8<<10)
#define OCMD_YUV_411_PACKED (0x9<<10) /* not in i965 Intel docs */
#define OCMD_YUV_420_PLANAR (0xc<<10)
#define OCMD_YUV_422_PLANAR (0xd<<10)
#define OCMD_YUV_410_PLANAR (0xe<<10) /* also 411 */
#define OCMD_TVSYNCFLIP_PARITY (0x1<<9)
#define OCMD_TVSYNCFLIP_ENABLE (0x1<<7)
#define OCMD_BUF_TYPE_MASK (0x1<<5)
#define OCMD_BUF_TYPE_FRAME (0x0<<5)
#define OCMD_BUF_TYPE_FIELD (0x1<<5)
#define OCMD_TEST_MODE (0x1<<4)
#define OCMD_BUFFER_SELECT (0x3<<2)
#define OCMD_BUFFER0 (0x0<<2)
#define OCMD_BUFFER1 (0x1<<2)
#define OCMD_FIELD_SELECT (0x1<<2)
#define OCMD_FIELD0 (0x0<<1)
#define OCMD_FIELD1 (0x1<<1)
#define OCMD_ENABLE (0x1<<0)

/* OCONFIG register */
#define OCONF_PIPE_MASK (0x1<<18)
#define OCONF_PIPE_A (0x0<<18)
#define OCONF_PIPE_B (0x1<<18)
#define OCONF_GAMMA2_ENABLE (0x1<<16)
#define OCONF_CSC_MODE_BT601 (0x0<<5)
#define OCONF_CSC_MODE_BT709 (0x1<<5)
#define OCONF_CSC_BYPASS (0x1<<4)
#define OCONF_CC_OUT_8BIT (0x1<<3)
#define OCONF_TEST_MODE (0x1<<2)
#define OCONF_THREE_LINE_BUFFER (0x1<<0)
#define OCONF_TWO_LINE_BUFFER (0x0<<0)

/* DCLRKM (dst-key) register */
#define DST_KEY_ENABLE (0x1<<31)
#define CLK_RGB24_MASK 0x0
#define CLK_RGB16_MASK 0x070307
#define CLK_RGB15_MASK 0x070707
#define CLK_RGB8I_MASK 0xffffff

#define RGB16_TO_COLORKEY(c) \
	(((c & 0xF800) << 8) | ((c & 0x07E0) << 5) | ((c & 0x001F) << 3))
#define RGB15_TO_COLORKEY(c) \
	(((c & 0x7c00) << 9) | ((c & 0x03E0) << 6) | ((c & 0x001F) << 3))
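
/*
 * Illustrative note (not part of the original source): the colorkey macros
 * above expand a 16 bpp key into the 8:8:8 layout expected by DCLRKV, with
 * each component left-justified in its byte. E.g. a 5:6:5 magenta key of
 * 0xF81F becomes
 *	((0xF800 << 8) | (0x0000 << 5) | (0x001F << 3)) = 0x00F800F8,
 * leaving the low bits of each byte zero; CLK_RGB16_MASK = 0x070307 masks
 * exactly those undefined low bits during the key compare.
 */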

/* overlay flip addr flag */
#define OFC_UPDATE 0x1

/* polyphase filter coefficients */
#define N_HORIZ_Y_TAPS 5
#define N_VERT_Y_TAPS 3
#define N_HORIZ_UV_TAPS 3
#define N_VERT_UV_TAPS 3
#define N_PHASES 17
#define MAX_TAPS 5
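
/*
 * Illustrative note: with 17 phases the horizontal filter tables further
 * down hold N_HORIZ_Y_TAPS * N_PHASES = 5 * 17 = 85 Y coefficients and
 * N_HORIZ_UV_TAPS * N_PHASES = 3 * 17 = 51 UV coefficients, which is why
 * y_static_hcoeffs[] and uv_static_hcoeffs[] have exactly those element
 * counts.
 */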

/* memory buffered overlay registers */
struct overlay_registers {
	u32 OBUF_0Y;
	u32 OBUF_1Y;
	u32 OBUF_0U;
	u32 OBUF_0V;
	u32 OBUF_1U;
	u32 OBUF_1V;
	u32 OSTRIDE;
	u32 YRGB_VPH;
	u32 UV_VPH;
	u32 HORZ_PH;
	u32 INIT_PHS;
	u32 DWINPOS;
	u32 DWINSZ;
	u32 SWIDTH;
	u32 SWIDTHSW;
	u32 SHEIGHT;
	u32 YRGBSCALE;
	u32 UVSCALE;
	u32 OCLRC0;
	u32 OCLRC1;
	u32 DCLRKV;
	u32 DCLRKM;
	u32 SCLRKVH;
	u32 SCLRKVL;
	u32 SCLRKEN;
	u32 OCONFIG;
	u32 OCMD;
	u32 RESERVED1; /* 0x6C */
	u32 OSTART_0Y;
	u32 OSTART_1Y;
	u32 OSTART_0U;
	u32 OSTART_0V;
	u32 OSTART_1U;
	u32 OSTART_1V;
	u32 OTILEOFF_0Y;
	u32 OTILEOFF_1Y;
	u32 OTILEOFF_0U;
	u32 OTILEOFF_0V;
	u32 OTILEOFF_1U;
	u32 OTILEOFF_1V;
	u32 FASTHSCALE; /* 0xA0 */
	u32 UVSCALEV; /* 0xA4 */
	u32 RESERVEDC[(0x200 - 0xA8) / 4]; /* 0xA8 - 0x1FC */
	u16 Y_VCOEFS[N_VERT_Y_TAPS * N_PHASES]; /* 0x200 */
	u16 RESERVEDD[0x100 / 2 - N_VERT_Y_TAPS * N_PHASES];
	u16 Y_HCOEFS[N_HORIZ_Y_TAPS * N_PHASES]; /* 0x300 */
	u16 RESERVEDE[0x200 / 2 - N_HORIZ_Y_TAPS * N_PHASES];
	u16 UV_VCOEFS[N_VERT_UV_TAPS * N_PHASES]; /* 0x500 */
	u16 RESERVEDF[0x100 / 2 - N_VERT_UV_TAPS * N_PHASES];
	u16 UV_HCOEFS[N_HORIZ_UV_TAPS * N_PHASES]; /* 0x600 */
	u16 RESERVEDG[0x100 / 2 - N_HORIZ_UV_TAPS * N_PHASES];
};
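
/*
 * Sketch (not part of the driver): the struct above mirrors the overlay
 * register file byte for byte, so its layout can be cross-checked against
 * the offsets noted in the comments. Assuming BUILD_BUG_ON()/offsetof()
 * are available via the already-included kernel headers, checks like the
 * following could live inside an init function such as intel_setup_overlay():
 */
#if 0
	BUILD_BUG_ON(offsetof(struct overlay_registers, OCMD) != 0x68);
	BUILD_BUG_ON(offsetof(struct overlay_registers, FASTHSCALE) != 0xA0);
	BUILD_BUG_ON(offsetof(struct overlay_registers, Y_VCOEFS) != 0x200);
	BUILD_BUG_ON(offsetof(struct overlay_registers, Y_HCOEFS) != 0x300);
	BUILD_BUG_ON(offsetof(struct overlay_registers, UV_VCOEFS) != 0x500);
	BUILD_BUG_ON(offsetof(struct overlay_registers, UV_HCOEFS) != 0x600);
#endif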

static struct overlay_registers *
intel_overlay_map_regs_atomic(struct intel_overlay *overlay,
			      int slot)
{
	drm_i915_private_t *dev_priv = overlay->dev->dev_private;
	struct overlay_registers *regs;

	/* no recursive mappings */
	BUG_ON(overlay->virt_addr);

	if (OVERLAY_NEEDS_PHYSICAL(overlay->dev)) {
		regs = overlay->reg_bo->phys_obj->handle->vaddr;
	} else {
		regs = io_mapping_map_atomic_wc(dev_priv->mm.gtt_mapping,
						overlay->reg_bo->gtt_offset,
						slot);

		if (!regs) {
			DRM_ERROR("failed to map overlay regs in GTT\n");
			return NULL;
		}
	}

	return overlay->virt_addr = regs;
}

static void intel_overlay_unmap_regs_atomic(struct intel_overlay *overlay,
					    int slot)
{
	if (!OVERLAY_NEEDS_PHYSICAL(overlay->dev))
		io_mapping_unmap_atomic(overlay->virt_addr, slot);

	overlay->virt_addr = NULL;
}

static struct overlay_registers *
intel_overlay_map_regs(struct intel_overlay *overlay)
{
	drm_i915_private_t *dev_priv = overlay->dev->dev_private;
	struct overlay_registers *regs;

	/* no recursive mappings */
	BUG_ON(overlay->virt_addr);

	if (OVERLAY_NEEDS_PHYSICAL(overlay->dev)) {
		regs = overlay->reg_bo->phys_obj->handle->vaddr;
	} else {
		regs = io_mapping_map_wc(dev_priv->mm.gtt_mapping,
					 overlay->reg_bo->gtt_offset);

		if (!regs) {
			DRM_ERROR("failed to map overlay regs in GTT\n");
			return NULL;
		}
	}

	return overlay->virt_addr = regs;
}

static void intel_overlay_unmap_regs(struct intel_overlay *overlay)
{
	if (!OVERLAY_NEEDS_PHYSICAL(overlay->dev))
		io_mapping_unmap(overlay->virt_addr);

	overlay->virt_addr = NULL;
}

/* overlay needs to be disabled in OCMD reg */
static int intel_overlay_on(struct intel_overlay *overlay)
{
	struct drm_device *dev = overlay->dev;
	int ret;
	drm_i915_private_t *dev_priv = dev->dev_private;

	BUG_ON(overlay->active);

	overlay->active = 1;
	overlay->hw_wedged = NEEDS_WAIT_FOR_FLIP;

	BEGIN_LP_RING(4);
	OUT_RING(MI_OVERLAY_FLIP | MI_OVERLAY_ON);
	OUT_RING(overlay->flip_addr | OFC_UPDATE);
	OUT_RING(MI_WAIT_FOR_EVENT | MI_WAIT_FOR_OVERLAY_FLIP);
	OUT_RING(MI_NOOP);
	ADVANCE_LP_RING();

	overlay->last_flip_req =
		i915_add_request(dev, NULL, &dev_priv->render_ring);
	if (overlay->last_flip_req == 0)
		return -ENOMEM;

	ret = i915_do_wait_request(dev,
				   overlay->last_flip_req, true,
				   &dev_priv->render_ring);
	if (ret != 0)
		return ret;

	overlay->hw_wedged = 0;
	overlay->last_flip_req = 0;
	return 0;
}

/* overlay needs to be enabled in OCMD reg */
static void intel_overlay_continue(struct intel_overlay *overlay,
				   bool load_polyphase_filter)
{
	struct drm_device *dev = overlay->dev;
	drm_i915_private_t *dev_priv = dev->dev_private;
	u32 flip_addr = overlay->flip_addr;
	u32 tmp;

	BUG_ON(!overlay->active);

	if (load_polyphase_filter)
		flip_addr |= OFC_UPDATE;

	/* check for underruns */
	tmp = I915_READ(DOVSTA);
	if (tmp & (1 << 17))
		DRM_DEBUG("overlay underrun, DOVSTA: %x\n", tmp);

	BEGIN_LP_RING(2);
	OUT_RING(MI_OVERLAY_FLIP | MI_OVERLAY_CONTINUE);
	OUT_RING(flip_addr);
	ADVANCE_LP_RING();

	overlay->last_flip_req =
		i915_add_request(dev, NULL, &dev_priv->render_ring);
}

static int intel_overlay_wait_flip(struct intel_overlay *overlay)
{
	struct drm_device *dev = overlay->dev;
	drm_i915_private_t *dev_priv = dev->dev_private;
	int ret;
	u32 tmp;

	if (overlay->last_flip_req != 0) {
		ret = i915_do_wait_request(dev,
					   overlay->last_flip_req, true,
					   &dev_priv->render_ring);
		if (ret == 0) {
			overlay->last_flip_req = 0;

			tmp = I915_READ(ISR);

			if (!(tmp & I915_OVERLAY_PLANE_FLIP_PENDING_INTERRUPT))
				return 0;
		}
	}

	/* synchronous slowpath */
	overlay->hw_wedged = RELEASE_OLD_VID;

	BEGIN_LP_RING(2);
	OUT_RING(MI_WAIT_FOR_EVENT | MI_WAIT_FOR_OVERLAY_FLIP);
	OUT_RING(MI_NOOP);
	ADVANCE_LP_RING();

	overlay->last_flip_req =
		i915_add_request(dev, NULL, &dev_priv->render_ring);
	if (overlay->last_flip_req == 0)
		return -ENOMEM;

	ret = i915_do_wait_request(dev,
				   overlay->last_flip_req, true,
				   &dev_priv->render_ring);
	if (ret != 0)
		return ret;

	overlay->hw_wedged = 0;
	overlay->last_flip_req = 0;
	return 0;
}

/* overlay needs to be disabled in OCMD reg */
static int intel_overlay_off(struct intel_overlay *overlay)
{
	u32 flip_addr = overlay->flip_addr;
	struct drm_device *dev = overlay->dev;
	drm_i915_private_t *dev_priv = dev->dev_private;
	int ret;

	BUG_ON(!overlay->active);

	/* According to intel docs the overlay hw may hang (when switching
	 * off) without loading the filter coeffs. It is however unclear whether
	 * this applies to the disabling of the overlay or to the switching off
	 * of the hw. Do it in both cases */
	flip_addr |= OFC_UPDATE;

	/* wait for overlay to go idle */
	overlay->hw_wedged = SWITCH_OFF_STAGE_1;

	BEGIN_LP_RING(4);
	OUT_RING(MI_OVERLAY_FLIP | MI_OVERLAY_CONTINUE);
	OUT_RING(flip_addr);
	OUT_RING(MI_WAIT_FOR_EVENT | MI_WAIT_FOR_OVERLAY_FLIP);
	OUT_RING(MI_NOOP);
	ADVANCE_LP_RING();

	overlay->last_flip_req =
		i915_add_request(dev, NULL, &dev_priv->render_ring);
	if (overlay->last_flip_req == 0)
		return -ENOMEM;

	ret = i915_do_wait_request(dev,
				   overlay->last_flip_req, true,
				   &dev_priv->render_ring);
	if (ret != 0)
		return ret;

	/* turn overlay off */
	overlay->hw_wedged = SWITCH_OFF_STAGE_2;

	BEGIN_LP_RING(4);
	OUT_RING(MI_OVERLAY_FLIP | MI_OVERLAY_OFF);
	OUT_RING(flip_addr);
	OUT_RING(MI_WAIT_FOR_EVENT | MI_WAIT_FOR_OVERLAY_FLIP);
	OUT_RING(MI_NOOP);
	ADVANCE_LP_RING();

	overlay->last_flip_req =
		i915_add_request(dev, NULL, &dev_priv->render_ring);
	if (overlay->last_flip_req == 0)
		return -ENOMEM;

	ret = i915_do_wait_request(dev,
				   overlay->last_flip_req, true,
				   &dev_priv->render_ring);
	if (ret != 0)
		return ret;

	overlay->hw_wedged = 0;
	overlay->last_flip_req = 0;
	return ret;
}

static void intel_overlay_off_tail(struct intel_overlay *overlay)
{
	struct drm_gem_object *obj;

	/* never have the overlay hw on without showing a frame */
	BUG_ON(!overlay->vid_bo);
	obj = &overlay->vid_bo->base;

	i915_gem_object_unpin(obj);
	drm_gem_object_unreference(obj);
	overlay->vid_bo = NULL;

	overlay->crtc->overlay = NULL;
	overlay->crtc = NULL;
	overlay->active = 0;
}

/* recover from an interruption due to a signal
 * We have to be careful not to repeat work forever and to make forward progress. */
int intel_overlay_recover_from_interrupt(struct intel_overlay *overlay,
					 bool interruptible)
{
	struct drm_device *dev = overlay->dev;
	struct drm_gem_object *obj;
	drm_i915_private_t *dev_priv = dev->dev_private;
	u32 flip_addr;
	int ret;

	if (overlay->hw_wedged == HW_WEDGED)
		return -EIO;

	if (overlay->last_flip_req == 0) {
		overlay->last_flip_req =
			i915_add_request(dev, NULL, &dev_priv->render_ring);
		if (overlay->last_flip_req == 0)
			return -ENOMEM;
	}

	ret = i915_do_wait_request(dev, overlay->last_flip_req,
				   interruptible, &dev_priv->render_ring);
	if (ret != 0)
		return ret;

	switch (overlay->hw_wedged) {
	case RELEASE_OLD_VID:
		obj = &overlay->old_vid_bo->base;
		i915_gem_object_unpin(obj);
		drm_gem_object_unreference(obj);
		overlay->old_vid_bo = NULL;
		break;
	case SWITCH_OFF_STAGE_1:
		flip_addr = overlay->flip_addr;
		flip_addr |= OFC_UPDATE;

		overlay->hw_wedged = SWITCH_OFF_STAGE_2;

		BEGIN_LP_RING(4);
		OUT_RING(MI_OVERLAY_FLIP | MI_OVERLAY_OFF);
		OUT_RING(flip_addr);
		OUT_RING(MI_WAIT_FOR_EVENT | MI_WAIT_FOR_OVERLAY_FLIP);
		OUT_RING(MI_NOOP);
		ADVANCE_LP_RING();

		overlay->last_flip_req =
			i915_add_request(dev, NULL,
					 &dev_priv->render_ring);
		if (overlay->last_flip_req == 0)
			return -ENOMEM;

		ret = i915_do_wait_request(dev, overlay->last_flip_req,
					   interruptible,
					   &dev_priv->render_ring);
		if (ret != 0)
			return ret;

		/* fall through to the stage 2 cleanup */
	case SWITCH_OFF_STAGE_2:
		intel_overlay_off_tail(overlay);
		break;
	default:
		BUG_ON(overlay->hw_wedged != NEEDS_WAIT_FOR_FLIP);
	}

	overlay->hw_wedged = 0;
	overlay->last_flip_req = 0;
	return 0;
}

/* Wait for pending overlay flip and release old frame.
 * Needs to be called before the overlay registers are changed
 * via intel_overlay_(un)map_regs
 */
static int intel_overlay_release_old_vid(struct intel_overlay *overlay)
{
	int ret;
	struct drm_gem_object *obj;

	/* only wait if there is actually an old frame to release to
	 * guarantee forward progress */
	if (!overlay->old_vid_bo)
		return 0;

	ret = intel_overlay_wait_flip(overlay);
	if (ret != 0)
		return ret;

	obj = &overlay->old_vid_bo->base;
	i915_gem_object_unpin(obj);
	drm_gem_object_unreference(obj);
	overlay->old_vid_bo = NULL;

	return 0;
}

struct put_image_params {
	int format;
	short dst_x;
	short dst_y;
	short dst_w;
	short dst_h;
	short src_w;
	short src_scan_h;
	short src_scan_w;
	short src_h;
	short stride_Y;
	short stride_UV;
	int offset_Y;
	int offset_U;
	int offset_V;
};

static int packed_depth_bytes(u32 format)
{
	switch (format & I915_OVERLAY_DEPTH_MASK) {
	case I915_OVERLAY_YUV422:
		return 4;
	case I915_OVERLAY_YUV411:
		/* return 6; not implemented */
	default:
		return -EINVAL;
	}
}

static int packed_width_bytes(u32 format, short width)
{
	switch (format & I915_OVERLAY_DEPTH_MASK) {
	case I915_OVERLAY_YUV422:
		return width << 1;
	default:
		return -EINVAL;
	}
}

static int uv_hsubsampling(u32 format)
{
	switch (format & I915_OVERLAY_DEPTH_MASK) {
	case I915_OVERLAY_YUV422:
	case I915_OVERLAY_YUV420:
		return 2;
	case I915_OVERLAY_YUV411:
	case I915_OVERLAY_YUV410:
		return 4;
	default:
		return -EINVAL;
	}
}

static int uv_vsubsampling(u32 format)
{
	switch (format & I915_OVERLAY_DEPTH_MASK) {
	case I915_OVERLAY_YUV420:
	case I915_OVERLAY_YUV410:
		return 2;
	case I915_OVERLAY_YUV422:
	case I915_OVERLAY_YUV411:
		return 1;
	default:
		return -EINVAL;
	}
}

static u32 calc_swidthsw(struct drm_device *dev, u32 offset, u32 width)
{
	u32 mask, shift, ret;

	if (IS_I9XX(dev)) {
		mask = 0x3f;
		shift = 6;
	} else {
		mask = 0x1f;
		shift = 5;
	}

	ret = ((offset + width + mask) >> shift) - (offset >> shift);
	if (IS_I9XX(dev))
		ret <<= 1;
	ret -= 1;

	return ret << 2;
}
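
/*
 * Worked example (illustrative, values not from the original source): on an
 * i9xx part mask = 0x3f and shift = 6, so for offset = 0 and a 1024-byte-wide
 * Y plane the function computes ((0 + 1024 + 63) >> 6) - (0 >> 6) = 16,
 * doubles that to 32, subtracts one and shifts left by two, yielding 124 as
 * the value programmed into the SWIDTHSW field.
 */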

static const u16 y_static_hcoeffs[N_HORIZ_Y_TAPS * N_PHASES] = {
	0x3000, 0xb4a0, 0x1930, 0x1920, 0xb4a0,
	0x3000, 0xb500, 0x19d0, 0x1880, 0xb440,
	0x3000, 0xb540, 0x1a88, 0x2f80, 0xb3e0,
	0x3000, 0xb580, 0x1b30, 0x2e20, 0xb380,
	0x3000, 0xb5c0, 0x1bd8, 0x2cc0, 0xb320,
	0x3020, 0xb5e0, 0x1c60, 0x2b80, 0xb2c0,
	0x3020, 0xb5e0, 0x1cf8, 0x2a20, 0xb260,
	0x3020, 0xb5e0, 0x1d80, 0x28e0, 0xb200,
	0x3020, 0xb5c0, 0x1e08, 0x3f40, 0xb1c0,
	0x3020, 0xb580, 0x1e78, 0x3ce0, 0xb160,
	0x3040, 0xb520, 0x1ed8, 0x3aa0, 0xb120,
	0x3040, 0xb4a0, 0x1f30, 0x3880, 0xb0e0,
	0x3040, 0xb400, 0x1f78, 0x3680, 0xb0a0,
	0x3020, 0xb340, 0x1fb8, 0x34a0, 0xb060,
	0x3020, 0xb240, 0x1fe0, 0x32e0, 0xb040,
	0x3020, 0xb140, 0x1ff8, 0x3160, 0xb020,
	0xb000, 0x3000, 0x0800, 0x3000, 0xb000
};

static const u16 uv_static_hcoeffs[N_HORIZ_UV_TAPS * N_PHASES] = {
	0x3000, 0x1800, 0x1800, 0xb000, 0x18d0, 0x2e60,
	0xb000, 0x1990, 0x2ce0, 0xb020, 0x1a68, 0x2b40,
	0xb040, 0x1b20, 0x29e0, 0xb060, 0x1bd8, 0x2880,
	0xb080, 0x1c88, 0x3e60, 0xb0a0, 0x1d28, 0x3c00,
	0xb0c0, 0x1db8, 0x39e0, 0xb0e0, 0x1e40, 0x37e0,
	0xb100, 0x1eb8, 0x3620, 0xb100, 0x1f18, 0x34a0,
	0xb100, 0x1f68, 0x3360, 0xb0e0, 0x1fa8, 0x3240,
	0xb0c0, 0x1fe0, 0x3140, 0xb060, 0x1ff0, 0x30a0,
	0x3000, 0x0800, 0x3000
};

static void update_polyphase_filter(struct overlay_registers *regs)
{
	memcpy(regs->Y_HCOEFS, y_static_hcoeffs, sizeof(y_static_hcoeffs));
	memcpy(regs->UV_HCOEFS, uv_static_hcoeffs, sizeof(uv_static_hcoeffs));
}

static bool update_scaling_factors(struct intel_overlay *overlay,
				   struct overlay_registers *regs,
				   struct put_image_params *params)
{
	/* fixed point with a 12 bit shift */
	u32 xscale, yscale, xscale_UV, yscale_UV;
#define FP_SHIFT 12
#define FRACT_MASK 0xfff
	bool scale_changed = false;
	int uv_hscale = uv_hsubsampling(params->format);
	int uv_vscale = uv_vsubsampling(params->format);

	if (params->dst_w > 1)
		xscale = ((params->src_scan_w - 1) << FP_SHIFT)
			/ (params->dst_w);
	else
		xscale = 1 << FP_SHIFT;

	if (params->dst_h > 1)
		yscale = ((params->src_scan_h - 1) << FP_SHIFT)
			/ (params->dst_h);
	else
		yscale = 1 << FP_SHIFT;

	/*if (params->format & I915_OVERLAY_YUV_PLANAR) {*/
	xscale_UV = xscale/uv_hscale;
	yscale_UV = yscale/uv_vscale;
	/* make the Y scale to UV scale ratio an exact multiple */
	xscale = xscale_UV * uv_hscale;
	yscale = yscale_UV * uv_vscale;
	/*} else {
	  xscale_UV = 0;
	  yscale_UV = 0;
	  }*/

	if (xscale != overlay->old_xscale || yscale != overlay->old_yscale)
		scale_changed = true;
	overlay->old_xscale = xscale;
	overlay->old_yscale = yscale;

	regs->YRGBSCALE = (((yscale & FRACT_MASK) << 20) |
			   ((xscale >> FP_SHIFT) << 16) |
			   ((xscale & FRACT_MASK) << 3));
	regs->UVSCALE = (((yscale_UV & FRACT_MASK) << 20) |
			 ((xscale_UV >> FP_SHIFT) << 16) |
			 ((xscale_UV & FRACT_MASK) << 3));
	regs->UVSCALEV = ((((yscale >> FP_SHIFT) << 16) |
			   ((yscale_UV >> FP_SHIFT) << 0)));

	if (scale_changed)
		update_polyphase_filter(regs);

	return scale_changed;
}
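
/*
 * Worked example (illustrative, values not from the original source): for a
 * YUV 4:2:0 source with src_scan_w = 1280 scaled to dst_w = 640, the Y
 * horizontal scale is ((1280 - 1) << 12) / 640 = 8185, roughly 1.998 in
 * 12.12 fixed point. With uv_hscale = 2 the UV scale becomes 8185 / 2 = 4092
 * and the Y scale is re-derived as 4092 * 2 = 8184 so that the Y/UV ratio is
 * exact; the integer parts (bits above FP_SHIFT) and the 12-bit fractions are
 * then packed into YRGBSCALE, UVSCALE and UVSCALEV as above.
 */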

static void update_colorkey(struct intel_overlay *overlay,
			    struct overlay_registers *regs)
{
	u32 key = overlay->color_key;

	switch (overlay->crtc->base.fb->bits_per_pixel) {
	case 8:
		regs->DCLRKV = 0;
		regs->DCLRKM = CLK_RGB8I_MASK | DST_KEY_ENABLE;
		break;
	case 16:
		if (overlay->crtc->base.fb->depth == 15) {
			regs->DCLRKV = RGB15_TO_COLORKEY(key);
			regs->DCLRKM = CLK_RGB15_MASK | DST_KEY_ENABLE;
		} else {
			regs->DCLRKV = RGB16_TO_COLORKEY(key);
			regs->DCLRKM = CLK_RGB16_MASK | DST_KEY_ENABLE;
		}
		break;
	case 24:
	case 32:
		regs->DCLRKV = key;
		regs->DCLRKM = CLK_RGB24_MASK | DST_KEY_ENABLE;
		break;
	}
}

static u32 overlay_cmd_reg(struct put_image_params *params)
{
	u32 cmd = OCMD_ENABLE | OCMD_BUF_TYPE_FRAME | OCMD_BUFFER0;

	if (params->format & I915_OVERLAY_YUV_PLANAR) {
		switch (params->format & I915_OVERLAY_DEPTH_MASK) {
		case I915_OVERLAY_YUV422:
			cmd |= OCMD_YUV_422_PLANAR;
			break;
		case I915_OVERLAY_YUV420:
			cmd |= OCMD_YUV_420_PLANAR;
			break;
		case I915_OVERLAY_YUV411:
		case I915_OVERLAY_YUV410:
			cmd |= OCMD_YUV_410_PLANAR;
			break;
		}
	} else { /* YUV packed */
		switch (params->format & I915_OVERLAY_DEPTH_MASK) {
		case I915_OVERLAY_YUV422:
			cmd |= OCMD_YUV_422_PACKED;
			break;
		case I915_OVERLAY_YUV411:
			cmd |= OCMD_YUV_411_PACKED;
			break;
		}

		switch (params->format & I915_OVERLAY_SWAP_MASK) {
		case I915_OVERLAY_NO_SWAP:
			break;
		case I915_OVERLAY_UV_SWAP:
			cmd |= OCMD_UV_SWAP;
			break;
		case I915_OVERLAY_Y_SWAP:
			cmd |= OCMD_Y_SWAP;
			break;
		case I915_OVERLAY_Y_AND_UV_SWAP:
			cmd |= OCMD_Y_AND_UV_SWAP;
			break;
		}
	}

	return cmd;
}
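
/*
 * Illustrative example: for a planar YUV 4:2:0 frame with no component
 * swapping this builds OCMD_ENABLE | OCMD_BUF_TYPE_FRAME | OCMD_BUFFER0 |
 * OCMD_YUV_420_PLANAR = 0x1 | 0x0 | 0x0 | (0xc << 10) = 0x3001.
 */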

int intel_overlay_do_put_image(struct intel_overlay *overlay,
			       struct drm_gem_object *new_bo,
			       struct put_image_params *params)
{
	int ret, tmp_width;
	struct overlay_registers *regs;
	bool scale_changed = false;
	struct drm_i915_gem_object *bo_priv = to_intel_bo(new_bo);
	struct drm_device *dev = overlay->dev;

	BUG_ON(!mutex_is_locked(&dev->struct_mutex));
	BUG_ON(!mutex_is_locked(&dev->mode_config.mutex));
	BUG_ON(!overlay);

	ret = intel_overlay_release_old_vid(overlay);
	if (ret != 0)
		return ret;

	ret = i915_gem_object_pin(new_bo, PAGE_SIZE);
	if (ret != 0)
		return ret;

	ret = i915_gem_object_set_to_gtt_domain(new_bo, 0);
	if (ret != 0)
		goto out_unpin;

	if (!overlay->active) {
		regs = intel_overlay_map_regs(overlay);
		if (!regs) {
			ret = -ENOMEM;
			goto out_unpin;
		}

		regs->OCONFIG = OCONF_CC_OUT_8BIT;
		if (IS_I965GM(overlay->dev))
			regs->OCONFIG |= OCONF_CSC_MODE_BT709;
		regs->OCONFIG |= overlay->crtc->pipe == 0 ?
			OCONF_PIPE_A : OCONF_PIPE_B;
		intel_overlay_unmap_regs(overlay);

		ret = intel_overlay_on(overlay);
		if (ret != 0)
			goto out_unpin;
	}

	regs = intel_overlay_map_regs(overlay);
	if (!regs) {
		ret = -ENOMEM;
		goto out_unpin;
	}

	regs->DWINPOS = (params->dst_y << 16) | params->dst_x;
	regs->DWINSZ = (params->dst_h << 16) | params->dst_w;

	if (params->format & I915_OVERLAY_YUV_PACKED)
		tmp_width = packed_width_bytes(params->format, params->src_w);
	else
		tmp_width = params->src_w;

	regs->SWIDTH = params->src_w;
	regs->SWIDTHSW = calc_swidthsw(overlay->dev,
				       params->offset_Y, tmp_width);
	regs->SHEIGHT = params->src_h;
	regs->OBUF_0Y = bo_priv->gtt_offset + params->offset_Y;
	regs->OSTRIDE = params->stride_Y;

	if (params->format & I915_OVERLAY_YUV_PLANAR) {
		int uv_hscale = uv_hsubsampling(params->format);
		int uv_vscale = uv_vsubsampling(params->format);
		u32 tmp_U, tmp_V;
		regs->SWIDTH |= (params->src_w/uv_hscale) << 16;
		tmp_U = calc_swidthsw(overlay->dev, params->offset_U,
				      params->src_w/uv_hscale);
		tmp_V = calc_swidthsw(overlay->dev, params->offset_V,
				      params->src_w/uv_hscale);
		regs->SWIDTHSW |= max_t(u32, tmp_U, tmp_V) << 16;
		regs->SHEIGHT |= (params->src_h/uv_vscale) << 16;
		regs->OBUF_0U = bo_priv->gtt_offset + params->offset_U;
		regs->OBUF_0V = bo_priv->gtt_offset + params->offset_V;
		regs->OSTRIDE |= params->stride_UV << 16;
	}

	scale_changed = update_scaling_factors(overlay, regs, params);

	update_colorkey(overlay, regs);

	regs->OCMD = overlay_cmd_reg(params);

	intel_overlay_unmap_regs(overlay);

	intel_overlay_continue(overlay, scale_changed);

	overlay->old_vid_bo = overlay->vid_bo;
	overlay->vid_bo = to_intel_bo(new_bo);

	return 0;

out_unpin:
	i915_gem_object_unpin(new_bo);
	return ret;
}

int intel_overlay_switch_off(struct intel_overlay *overlay)
{
	int ret;
	struct overlay_registers *regs;
	struct drm_device *dev = overlay->dev;

	BUG_ON(!mutex_is_locked(&dev->struct_mutex));
	BUG_ON(!mutex_is_locked(&dev->mode_config.mutex));

	if (overlay->hw_wedged) {
		ret = intel_overlay_recover_from_interrupt(overlay, 1);
		if (ret != 0)
			return ret;
	}

	if (!overlay->active)
		return 0;

	ret = intel_overlay_release_old_vid(overlay);
	if (ret != 0)
		return ret;

	regs = intel_overlay_map_regs(overlay);
	regs->OCMD = 0;
	intel_overlay_unmap_regs(overlay);

	ret = intel_overlay_off(overlay);
	if (ret != 0)
		return ret;

	intel_overlay_off_tail(overlay);

	return 0;
}

static int check_overlay_possible_on_crtc(struct intel_overlay *overlay,
					  struct intel_crtc *crtc)
{
	drm_i915_private_t *dev_priv = overlay->dev->dev_private;
	u32 pipeconf;
	int pipeconf_reg = (crtc->pipe == 0) ? PIPEACONF : PIPEBCONF;

	if (!crtc->base.enabled || crtc->dpms_mode != DRM_MODE_DPMS_ON)
		return -EINVAL;

	pipeconf = I915_READ(pipeconf_reg);

	/* can't use the overlay with double wide pipe */
	if (!IS_I965G(overlay->dev) && pipeconf & PIPEACONF_DOUBLE_WIDE)
		return -EINVAL;

	return 0;
}

static void update_pfit_vscale_ratio(struct intel_overlay *overlay)
{
	struct drm_device *dev = overlay->dev;
	drm_i915_private_t *dev_priv = dev->dev_private;
	u32 ratio;
	u32 pfit_control = I915_READ(PFIT_CONTROL);

	/* XXX: This is not the same logic as in the xorg driver, but more in
	 * line with the intel documentation for the i965 */
	if (!IS_I965G(dev) && (pfit_control & VERT_AUTO_SCALE)) {
		ratio = I915_READ(PFIT_AUTO_RATIOS) >> PFIT_VERT_SCALE_SHIFT;
	} else { /* on i965 use the PGM reg to read out the autoscaler values */
		ratio = I915_READ(PFIT_PGM_RATIOS);
		if (IS_I965G(dev))
			ratio >>= PFIT_VERT_SCALE_SHIFT_965;
		else
			ratio >>= PFIT_VERT_SCALE_SHIFT;
	}

	overlay->pfit_vscale_ratio = ratio;
}

static int check_overlay_dst(struct intel_overlay *overlay,
			     struct drm_intel_overlay_put_image *rec)
{
	struct drm_display_mode *mode = &overlay->crtc->base.mode;

	if (rec->dst_x < mode->crtc_hdisplay &&
	    rec->dst_x + rec->dst_width <= mode->crtc_hdisplay &&
	    rec->dst_y < mode->crtc_vdisplay &&
	    rec->dst_y + rec->dst_height <= mode->crtc_vdisplay)
		return 0;
	else
		return -EINVAL;
}

static int check_overlay_scaling(struct put_image_params *rec)
{
	u32 tmp;

	/* downscaling limit is 8.0 */
	tmp = ((rec->src_scan_h << 16) / rec->dst_h) >> 16;
	if (tmp > 7)
		return -EINVAL;
	tmp = ((rec->src_scan_w << 16) / rec->dst_w) >> 16;
	if (tmp > 7)
		return -EINVAL;

	return 0;
}
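
/*
 * Illustrative example: scanning 1080 source lines into 135 destination
 * lines gives ((1080 << 16) / 135) >> 16 = 8, which is rejected, while
 * 1080 lines into 136 gives 7 and passes, i.e. the effective limit is a
 * downscale factor strictly below 8.
 */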

static int check_overlay_src(struct drm_device *dev,
			     struct drm_intel_overlay_put_image *rec,
			     struct drm_gem_object *new_bo)
{
	u32 stride_mask;
	int depth;
	int uv_hscale = uv_hsubsampling(rec->flags);
	int uv_vscale = uv_vsubsampling(rec->flags);
	size_t tmp;

	/* check src dimensions */
	if (IS_845G(dev) || IS_I830(dev)) {
		if (rec->src_height > IMAGE_MAX_HEIGHT_LEGACY ||
		    rec->src_width > IMAGE_MAX_WIDTH_LEGACY)
			return -EINVAL;
	} else {
		if (rec->src_height > IMAGE_MAX_HEIGHT ||
		    rec->src_width > IMAGE_MAX_WIDTH)
			return -EINVAL;
	}
	/* better safe than sorry, use 4 as the maximal subsampling ratio */
	if (rec->src_height < N_VERT_Y_TAPS*4 ||
	    rec->src_width < N_HORIZ_Y_TAPS*4)
		return -EINVAL;

	/* check alignment constraints */
	switch (rec->flags & I915_OVERLAY_TYPE_MASK) {
	case I915_OVERLAY_RGB:
		/* not implemented */
		return -EINVAL;
	case I915_OVERLAY_YUV_PACKED:
		depth = packed_depth_bytes(rec->flags);
		if (uv_vscale != 1)
			return -EINVAL;
		if (depth < 0)
			return depth;
		/* ignore UV planes */
		rec->stride_UV = 0;
		rec->offset_U = 0;
		rec->offset_V = 0;
		/* check pixel alignment */
		if (rec->offset_Y % depth)
			return -EINVAL;
		break;
	case I915_OVERLAY_YUV_PLANAR:
		if (uv_vscale < 0 || uv_hscale < 0)
			return -EINVAL;
		/* no offset restrictions for planar formats */
		break;
	default:
		return -EINVAL;
	}

	if (rec->src_width % uv_hscale)
		return -EINVAL;

	/* stride checking */
	if (IS_I830(dev) || IS_845G(dev))
		stride_mask = 255;
	else
		stride_mask = 63;

	if (rec->stride_Y & stride_mask || rec->stride_UV & stride_mask)
		return -EINVAL;
	if (IS_I965G(dev) && rec->stride_Y < 512)
		return -EINVAL;

	tmp = (rec->flags & I915_OVERLAY_TYPE_MASK) == I915_OVERLAY_YUV_PLANAR ?
		4 : 8;
	if (rec->stride_Y > tmp*1024 || rec->stride_UV > 2*1024)
		return -EINVAL;

	/* check buffer dimensions */
	switch (rec->flags & I915_OVERLAY_TYPE_MASK) {
	case I915_OVERLAY_RGB:
	case I915_OVERLAY_YUV_PACKED:
		/* always 4 Y values per depth pixels */
		if (packed_width_bytes(rec->flags, rec->src_width) > rec->stride_Y)
			return -EINVAL;

		tmp = rec->stride_Y*rec->src_height;
		if (rec->offset_Y + tmp > new_bo->size)
			return -EINVAL;
		break;
	case I915_OVERLAY_YUV_PLANAR:
		if (rec->src_width > rec->stride_Y)
			return -EINVAL;
		if (rec->src_width/uv_hscale > rec->stride_UV)
			return -EINVAL;

		tmp = rec->stride_Y*rec->src_height;
		if (rec->offset_Y + tmp > new_bo->size)
			return -EINVAL;
		tmp = rec->stride_UV*rec->src_height;
		tmp /= uv_vscale;
		if (rec->offset_U + tmp > new_bo->size ||
		    rec->offset_V + tmp > new_bo->size)
			return -EINVAL;
		break;
	}

	return 0;
}
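
/*
 * Illustrative example: a packed YUV 4:2:2 source 1920 pixels wide needs
 * packed_width_bytes() = 1920 * 2 = 3840 bytes per line, so stride_Y must
 * be at least 3840, a multiple of 64 on non-830/845 hardware (stride_mask
 * = 63), and no larger than the 8 KiB packed-stride limit checked above.
 */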

int intel_overlay_put_image(struct drm_device *dev, void *data,
			    struct drm_file *file_priv)
{
	struct drm_intel_overlay_put_image *put_image_rec = data;
	drm_i915_private_t *dev_priv = dev->dev_private;
	struct intel_overlay *overlay;
	struct drm_mode_object *drmmode_obj;
	struct intel_crtc *crtc;
	struct drm_gem_object *new_bo;
	struct put_image_params *params;
	int ret;

	if (!dev_priv) {
		DRM_ERROR("called with no initialization\n");
		return -EINVAL;
	}

	overlay = dev_priv->overlay;
	if (!overlay) {
		DRM_DEBUG("userspace bug: no overlay\n");
		return -ENODEV;
	}

	if (!(put_image_rec->flags & I915_OVERLAY_ENABLE)) {
		mutex_lock(&dev->mode_config.mutex);
		mutex_lock(&dev->struct_mutex);

		ret = intel_overlay_switch_off(overlay);

		mutex_unlock(&dev->struct_mutex);
		mutex_unlock(&dev->mode_config.mutex);

		return ret;
	}

	params = kmalloc(sizeof(struct put_image_params), GFP_KERNEL);
	if (!params)
		return -ENOMEM;

	drmmode_obj = drm_mode_object_find(dev, put_image_rec->crtc_id,
					   DRM_MODE_OBJECT_CRTC);
	if (!drmmode_obj) {
		ret = -ENOENT;
		goto out_free;
	}
	crtc = to_intel_crtc(obj_to_crtc(drmmode_obj));

	new_bo = drm_gem_object_lookup(dev, file_priv,
				       put_image_rec->bo_handle);
	if (!new_bo) {
		ret = -ENOENT;
		goto out_free;
	}

	mutex_lock(&dev->mode_config.mutex);
	mutex_lock(&dev->struct_mutex);

	if (overlay->hw_wedged) {
		ret = intel_overlay_recover_from_interrupt(overlay, 1);
		if (ret != 0)
			goto out_unlock;
	}

	if (overlay->crtc != crtc) {
		struct drm_display_mode *mode = &crtc->base.mode;
		ret = intel_overlay_switch_off(overlay);
		if (ret != 0)
			goto out_unlock;

		ret = check_overlay_possible_on_crtc(overlay, crtc);
		if (ret != 0)
			goto out_unlock;

		overlay->crtc = crtc;
		crtc->overlay = overlay;

		if (intel_panel_fitter_pipe(dev) == crtc->pipe
		    /* and line too wide, i.e. one-line mode */
		    && mode->hdisplay > 1024) {
			overlay->pfit_active = 1;
			update_pfit_vscale_ratio(overlay);
		} else
			overlay->pfit_active = 0;
	}

	ret = check_overlay_dst(overlay, put_image_rec);
	if (ret != 0)
		goto out_unlock;

	if (overlay->pfit_active) {
		params->dst_y = ((((u32)put_image_rec->dst_y) << 12) /
				 overlay->pfit_vscale_ratio);
		/* shifting right rounds downwards, so add 1 */
		params->dst_h = ((((u32)put_image_rec->dst_height) << 12) /
				 overlay->pfit_vscale_ratio) + 1;
	} else {
		params->dst_y = put_image_rec->dst_y;
		params->dst_h = put_image_rec->dst_height;
	}
	params->dst_x = put_image_rec->dst_x;
	params->dst_w = put_image_rec->dst_width;

	params->src_w = put_image_rec->src_width;
	params->src_h = put_image_rec->src_height;
	params->src_scan_w = put_image_rec->src_scan_width;
	params->src_scan_h = put_image_rec->src_scan_height;

	if (params->src_scan_h > params->src_h ||
	    params->src_scan_w > params->src_w) {
		ret = -EINVAL;
		goto out_unlock;
	}

	ret = check_overlay_src(dev, put_image_rec, new_bo);
	if (ret != 0)
		goto out_unlock;
	params->format = put_image_rec->flags & ~I915_OVERLAY_FLAGS_MASK;
	params->stride_Y = put_image_rec->stride_Y;
	params->stride_UV = put_image_rec->stride_UV;
	params->offset_Y = put_image_rec->offset_Y;
	params->offset_U = put_image_rec->offset_U;
	params->offset_V = put_image_rec->offset_V;

	/* Check scaling after src size to prevent a divide-by-zero. */
	ret = check_overlay_scaling(params);
	if (ret != 0)
		goto out_unlock;

	ret = intel_overlay_do_put_image(overlay, new_bo, params);
	if (ret != 0)
		goto out_unlock;

	mutex_unlock(&dev->struct_mutex);
	mutex_unlock(&dev->mode_config.mutex);

	kfree(params);

	return 0;

out_unlock:
	mutex_unlock(&dev->struct_mutex);
	mutex_unlock(&dev->mode_config.mutex);
	drm_gem_object_unreference_unlocked(new_bo);
out_free:
	kfree(params);

	return ret;
}

static void update_reg_attrs(struct intel_overlay *overlay,
			     struct overlay_registers *regs)
{
	regs->OCLRC0 = (overlay->contrast << 18) | (overlay->brightness & 0xff);
	regs->OCLRC1 = overlay->saturation;
}
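
/*
 * Illustrative example using the defaults programmed in
 * intel_setup_overlay() below (brightness = -19, contrast = 75,
 * saturation = 146): OCLRC0 = (75 << 18) | (-19 & 0xff)
 * = 0x012c0000 | 0xed = 0x012c00ed, and OCLRC1 = 146 = 0x92.
 */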

static bool check_gamma_bounds(u32 gamma1, u32 gamma2)
{
	int i;

	if (gamma1 & 0xff000000 || gamma2 & 0xff000000)
		return false;

	for (i = 0; i < 3; i++) {
		if (((gamma1 >> i*8) & 0xff) >= ((gamma2 >> i*8) & 0xff))
			return false;
	}

	return true;
}

static bool check_gamma5_errata(u32 gamma5)
{
	int i;

	for (i = 0; i < 3; i++) {
		if (((gamma5 >> i*8) & 0xff) == 0x80)
			return false;
	}

	return true;
}

static int check_gamma(struct drm_intel_overlay_attrs *attrs)
{
	if (!check_gamma_bounds(0, attrs->gamma0) ||
	    !check_gamma_bounds(attrs->gamma0, attrs->gamma1) ||
	    !check_gamma_bounds(attrs->gamma1, attrs->gamma2) ||
	    !check_gamma_bounds(attrs->gamma2, attrs->gamma3) ||
	    !check_gamma_bounds(attrs->gamma3, attrs->gamma4) ||
	    !check_gamma_bounds(attrs->gamma4, attrs->gamma5) ||
	    !check_gamma_bounds(attrs->gamma5, 0x00ffffff))
		return -EINVAL;

	if (!check_gamma5_errata(attrs->gamma5))
		return -EINVAL;

	return 0;
}
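
/*
 * Illustrative example: a ramp such as gamma0 = 0x00202020,
 * gamma1 = 0x00404040, increasing in steps of 0x20 up to
 * gamma5 = 0x00c0c0c0, passes check_gamma(): every value fits in 24 bits,
 * each colour byte is strictly greater than the one before it (and strictly
 * between 0x00 and 0xff at the ends), and no byte of gamma5 hits the 0x80
 * value excluded by the errata check.
 */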

int intel_overlay_attrs(struct drm_device *dev, void *data,
			struct drm_file *file_priv)
{
	struct drm_intel_overlay_attrs *attrs = data;
	drm_i915_private_t *dev_priv = dev->dev_private;
	struct intel_overlay *overlay;
	struct overlay_registers *regs;
	int ret;

	if (!dev_priv) {
		DRM_ERROR("called with no initialization\n");
		return -EINVAL;
	}

	overlay = dev_priv->overlay;
	if (!overlay) {
		DRM_DEBUG("userspace bug: no overlay\n");
		return -ENODEV;
	}

	mutex_lock(&dev->mode_config.mutex);
	mutex_lock(&dev->struct_mutex);

	ret = -EINVAL;
	if (!(attrs->flags & I915_OVERLAY_UPDATE_ATTRS)) {
		attrs->color_key = overlay->color_key;
		attrs->brightness = overlay->brightness;
		attrs->contrast = overlay->contrast;
		attrs->saturation = overlay->saturation;

		if (IS_I9XX(dev)) {
			attrs->gamma0 = I915_READ(OGAMC0);
			attrs->gamma1 = I915_READ(OGAMC1);
			attrs->gamma2 = I915_READ(OGAMC2);
			attrs->gamma3 = I915_READ(OGAMC3);
			attrs->gamma4 = I915_READ(OGAMC4);
			attrs->gamma5 = I915_READ(OGAMC5);
		}
	} else {
		if (attrs->brightness < -128 || attrs->brightness > 127)
			goto out_unlock;
		if (attrs->contrast > 255)
			goto out_unlock;
		if (attrs->saturation > 1023)
			goto out_unlock;

		overlay->color_key = attrs->color_key;
		overlay->brightness = attrs->brightness;
		overlay->contrast = attrs->contrast;
		overlay->saturation = attrs->saturation;

		regs = intel_overlay_map_regs(overlay);
		if (!regs) {
			ret = -ENOMEM;
			goto out_unlock;
		}

		update_reg_attrs(overlay, regs);

		intel_overlay_unmap_regs(overlay);

		if (attrs->flags & I915_OVERLAY_UPDATE_GAMMA) {
			if (!IS_I9XX(dev))
				goto out_unlock;

			if (overlay->active) {
				ret = -EBUSY;
				goto out_unlock;
			}

			ret = check_gamma(attrs);
			if (ret)
				goto out_unlock;

			I915_WRITE(OGAMC0, attrs->gamma0);
			I915_WRITE(OGAMC1, attrs->gamma1);
			I915_WRITE(OGAMC2, attrs->gamma2);
			I915_WRITE(OGAMC3, attrs->gamma3);
			I915_WRITE(OGAMC4, attrs->gamma4);
			I915_WRITE(OGAMC5, attrs->gamma5);
		}
	}

	ret = 0;
out_unlock:
	mutex_unlock(&dev->struct_mutex);
	mutex_unlock(&dev->mode_config.mutex);

	return ret;
}

void intel_setup_overlay(struct drm_device *dev)
{
	drm_i915_private_t *dev_priv = dev->dev_private;
	struct intel_overlay *overlay;
	struct drm_gem_object *reg_bo;
	struct overlay_registers *regs;
	int ret;

	if (!HAS_OVERLAY(dev))
		return;

	overlay = kzalloc(sizeof(struct intel_overlay), GFP_KERNEL);
	if (!overlay)
		return;
	overlay->dev = dev;

	reg_bo = i915_gem_alloc_object(dev, PAGE_SIZE);
	if (!reg_bo)
		goto out_free;
	overlay->reg_bo = to_intel_bo(reg_bo);

	if (OVERLAY_NEEDS_PHYSICAL(dev)) {
		ret = i915_gem_attach_phys_object(dev, reg_bo,
						  I915_GEM_PHYS_OVERLAY_REGS,
						  PAGE_SIZE);
		if (ret) {
			DRM_ERROR("failed to attach phys overlay regs\n");
			goto out_free_bo;
		}
		overlay->flip_addr = overlay->reg_bo->phys_obj->handle->busaddr;
	} else {
		ret = i915_gem_object_pin(reg_bo, PAGE_SIZE);
		if (ret) {
			DRM_ERROR("failed to pin overlay register bo\n");
			goto out_free_bo;
		}
		overlay->flip_addr = overlay->reg_bo->gtt_offset;

		ret = i915_gem_object_set_to_gtt_domain(reg_bo, true);
		if (ret) {
			DRM_ERROR("failed to move overlay register bo into the GTT\n");
			goto out_unpin_bo;
		}
	}

	/* init all values */
	overlay->color_key = 0x0101fe;
	overlay->brightness = -19;
	overlay->contrast = 75;
	overlay->saturation = 146;

	regs = intel_overlay_map_regs(overlay);
	if (!regs)
		goto out_free_bo;

	memset(regs, 0, sizeof(struct overlay_registers));
	update_polyphase_filter(regs);

	update_reg_attrs(overlay, regs);

	intel_overlay_unmap_regs(overlay);

	dev_priv->overlay = overlay;
	DRM_INFO("initialized overlay support\n");
	return;

out_unpin_bo:
	i915_gem_object_unpin(reg_bo);
out_free_bo:
	drm_gem_object_unreference(reg_bo);
out_free:
	kfree(overlay);
	return;
}

void intel_cleanup_overlay(struct drm_device *dev)
{
	drm_i915_private_t *dev_priv = dev->dev_private;

	if (!dev_priv->overlay)
		return;

	/* The BOs should already be freed by the generic code.
	 * Furthermore modesetting teardown happens beforehand, so the
	 * hardware should be off already */
	BUG_ON(dev_priv->overlay->active);

	drm_gem_object_unreference_unlocked(&dev_priv->overlay->reg_bo->base);
	kfree(dev_priv->overlay);
}

struct intel_overlay_error_state {
	struct overlay_registers regs;
	unsigned long base;
	u32 dovsta;
	u32 isr;
};

struct intel_overlay_error_state *
intel_overlay_capture_error_state(struct drm_device *dev)
{
	drm_i915_private_t *dev_priv = dev->dev_private;
	struct intel_overlay *overlay = dev_priv->overlay;
	struct intel_overlay_error_state *error;
	struct overlay_registers __iomem *regs;

	if (!overlay || !overlay->active)
		return NULL;

	error = kmalloc(sizeof(*error), GFP_ATOMIC);
	if (error == NULL)
		return NULL;

	error->dovsta = I915_READ(DOVSTA);
	error->isr = I915_READ(ISR);
	if (OVERLAY_NEEDS_PHYSICAL(overlay->dev))
		error->base = (long) overlay->reg_bo->phys_obj->handle->vaddr;
	else
		error->base = (long) overlay->reg_bo->gtt_offset;

	regs = intel_overlay_map_regs_atomic(overlay, KM_IRQ0);
	if (!regs)
		goto err;

	memcpy_fromio(&error->regs, regs, sizeof(struct overlay_registers));
	intel_overlay_unmap_regs_atomic(overlay, KM_IRQ0);

	return error;

err:
	kfree(error);
	return NULL;
}

void
intel_overlay_print_error_state(struct seq_file *m, struct intel_overlay_error_state *error)
{
	seq_printf(m, "Overlay, status: 0x%08x, interrupt: 0x%08x\n",
		   error->dovsta, error->isr);
	seq_printf(m, " Register file at 0x%08lx:\n",
		   error->base);

#define P(x) seq_printf(m, " " #x ": 0x%08x\n", error->regs.x)
	P(OBUF_0Y);
	P(OBUF_1Y);
	P(OBUF_0U);
	P(OBUF_0V);
	P(OBUF_1U);
	P(OBUF_1V);
	P(OSTRIDE);
	P(YRGB_VPH);
	P(UV_VPH);
	P(HORZ_PH);
	P(INIT_PHS);
	P(DWINPOS);
	P(DWINSZ);
	P(SWIDTH);
	P(SWIDTHSW);
	P(SHEIGHT);
	P(YRGBSCALE);
	P(UVSCALE);
	P(OCLRC0);
	P(OCLRC1);
	P(DCLRKV);
	P(DCLRKM);
	P(SCLRKVH);
	P(SCLRKVL);
	P(SCLRKEN);
	P(OCONFIG);
	P(OCMD);
	P(OSTART_0Y);
	P(OSTART_1Y);
	P(OSTART_0U);
	P(OSTART_0V);
	P(OSTART_1U);
	P(OSTART_1V);
	P(OTILEOFF_0Y);
	P(OTILEOFF_1Y);
	P(OTILEOFF_0U);
	P(OTILEOFF_0V);
	P(OTILEOFF_1U);
	P(OTILEOFF_1V);
	P(FASTHSCALE);
	P(UVSCALEV);
#undef P
}