exynos_mixer.c

/*
 * Copyright (C) 2011 Samsung Electronics Co.Ltd
 * Authors:
 * Seung-Woo Kim <sw0312.kim@samsung.com>
 *	Inki Dae <inki.dae@samsung.com>
 *	Joonyoung Shim <jy0922.shim@samsung.com>
 *
 * Based on drivers/media/video/s5p-tv/mixer_reg.c
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 *
 */

#include <drm/drmP.h>

#include "regs-mixer.h"
#include "regs-vp.h"

#include <linux/kernel.h>
#include <linux/spinlock.h>
#include <linux/wait.h>
#include <linux/i2c.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/interrupt.h>
#include <linux/irq.h>
#include <linux/delay.h>
#include <linux/pm_runtime.h>
#include <linux/clk.h>
#include <linux/regulator/consumer.h>

#include <drm/exynos_drm.h>
#include "exynos_drm_drv.h"
#include "exynos_drm_crtc.h"
#include "exynos_drm_hdmi.h"
#include "exynos_drm_iommu.h"

#define get_mixer_context(dev)	platform_get_drvdata(to_platform_device(dev))

struct hdmi_win_data {
	dma_addr_t dma_addr;
	dma_addr_t chroma_dma_addr;
	uint32_t pixel_format;
	unsigned int bpp;
	unsigned int crtc_x;
	unsigned int crtc_y;
	unsigned int crtc_width;
	unsigned int crtc_height;
	unsigned int fb_x;
	unsigned int fb_y;
	unsigned int fb_width;
	unsigned int fb_height;
	unsigned int src_width;
	unsigned int src_height;
	unsigned int mode_width;
	unsigned int mode_height;
	unsigned int scan_flags;
	bool enabled;
	bool resume;
};

struct mixer_resources {
	int irq;
	void __iomem *mixer_regs;
	void __iomem *vp_regs;
	spinlock_t reg_slock;
	struct clk *mixer;
	struct clk *vp;
	struct clk *sclk_mixer;
	struct clk *sclk_hdmi;
	struct clk *sclk_dac;
};

enum mixer_version_id {
	MXR_VER_0_0_0_16,
	MXR_VER_16_0_33_0,
};

struct mixer_context {
	struct device *dev;
	struct drm_device *drm_dev;
	int pipe;
	bool interlace;
	bool powered;
	bool vp_enabled;
	u32 int_en;

	struct mutex mixer_mutex;
	struct mixer_resources mixer_res;
	struct hdmi_win_data win_data[MIXER_WIN_NR];
	enum mixer_version_id mxr_ver;
	void *parent_ctx;
	wait_queue_head_t wait_vsync_queue;
	atomic_t wait_vsync_event;
};

struct mixer_drv_data {
	enum mixer_version_id version;
	bool is_vp_enabled;
};

static const u8 filter_y_horiz_tap8[] = {
	0, -1, -1, -1, -1, -1, -1, -1,
	-1, -1, -1, -1, -1, 0, 0, 0,
	0, 2, 4, 5, 6, 6, 6, 6,
	6, 5, 5, 4, 3, 2, 1, 1,
	0, -6, -12, -16, -18, -20, -21, -20,
	-20, -18, -16, -13, -10, -8, -5, -2,
	127, 126, 125, 121, 114, 107, 99, 89,
	79, 68, 57, 46, 35, 25, 16, 8,
};

static const u8 filter_y_vert_tap4[] = {
	0, -3, -6, -8, -8, -8, -8, -7,
	-6, -5, -4, -3, -2, -1, -1, 0,
	127, 126, 124, 118, 111, 102, 92, 81,
	70, 59, 48, 37, 27, 19, 11, 5,
	0, 5, 11, 19, 27, 37, 48, 59,
	70, 81, 92, 102, 111, 118, 124, 126,
	0, 0, -1, -1, -2, -3, -4, -5,
	-6, -7, -8, -8, -8, -8, -6, -3,
};

static const u8 filter_cr_horiz_tap4[] = {
	0, -3, -6, -8, -8, -8, -8, -7,
	-6, -5, -4, -3, -2, -1, -1, 0,
	127, 126, 124, 118, 111, 102, 92, 81,
	70, 59, 48, 37, 27, 19, 11, 5,
};

static inline u32 vp_reg_read(struct mixer_resources *res, u32 reg_id)
{
	return readl(res->vp_regs + reg_id);
}

static inline void vp_reg_write(struct mixer_resources *res, u32 reg_id,
				u32 val)
{
	writel(val, res->vp_regs + reg_id);
}

static inline void vp_reg_writemask(struct mixer_resources *res, u32 reg_id,
				u32 val, u32 mask)
{
	u32 old = vp_reg_read(res, reg_id);

	val = (val & mask) | (old & ~mask);
	writel(val, res->vp_regs + reg_id);
}

static inline u32 mixer_reg_read(struct mixer_resources *res, u32 reg_id)
{
	return readl(res->mixer_regs + reg_id);
}

static inline void mixer_reg_write(struct mixer_resources *res, u32 reg_id,
				u32 val)
{
	writel(val, res->mixer_regs + reg_id);
}

static inline void mixer_reg_writemask(struct mixer_resources *res,
				u32 reg_id, u32 val, u32 mask)
{
	u32 old = mixer_reg_read(res, reg_id);

	val = (val & mask) | (old & ~mask);
	writel(val, res->mixer_regs + reg_id);
}

static void mixer_regs_dump(struct mixer_context *ctx)
{
#define DUMPREG(reg_id) \
do { \
	DRM_DEBUG_KMS(#reg_id " = %08x\n", \
		(u32)readl(ctx->mixer_res.mixer_regs + reg_id)); \
} while (0)

	DUMPREG(MXR_STATUS);
	DUMPREG(MXR_CFG);
	DUMPREG(MXR_INT_EN);
	DUMPREG(MXR_INT_STATUS);

	DUMPREG(MXR_LAYER_CFG);
	DUMPREG(MXR_VIDEO_CFG);

	DUMPREG(MXR_GRAPHIC0_CFG);
	DUMPREG(MXR_GRAPHIC0_BASE);
	DUMPREG(MXR_GRAPHIC0_SPAN);
	DUMPREG(MXR_GRAPHIC0_WH);
	DUMPREG(MXR_GRAPHIC0_SXY);
	DUMPREG(MXR_GRAPHIC0_DXY);

	DUMPREG(MXR_GRAPHIC1_CFG);
	DUMPREG(MXR_GRAPHIC1_BASE);
	DUMPREG(MXR_GRAPHIC1_SPAN);
	DUMPREG(MXR_GRAPHIC1_WH);
	DUMPREG(MXR_GRAPHIC1_SXY);
	DUMPREG(MXR_GRAPHIC1_DXY);
#undef DUMPREG
}

static void vp_regs_dump(struct mixer_context *ctx)
{
#define DUMPREG(reg_id) \
do { \
	DRM_DEBUG_KMS(#reg_id " = %08x\n", \
		(u32) readl(ctx->mixer_res.vp_regs + reg_id)); \
} while (0)

	DUMPREG(VP_ENABLE);
	DUMPREG(VP_SRESET);
	DUMPREG(VP_SHADOW_UPDATE);
	DUMPREG(VP_FIELD_ID);
	DUMPREG(VP_MODE);
	DUMPREG(VP_IMG_SIZE_Y);
	DUMPREG(VP_IMG_SIZE_C);
	DUMPREG(VP_PER_RATE_CTRL);
	DUMPREG(VP_TOP_Y_PTR);
	DUMPREG(VP_BOT_Y_PTR);
	DUMPREG(VP_TOP_C_PTR);
	DUMPREG(VP_BOT_C_PTR);
	DUMPREG(VP_ENDIAN_MODE);
	DUMPREG(VP_SRC_H_POSITION);
	DUMPREG(VP_SRC_V_POSITION);
	DUMPREG(VP_SRC_WIDTH);
	DUMPREG(VP_SRC_HEIGHT);
	DUMPREG(VP_DST_H_POSITION);
	DUMPREG(VP_DST_V_POSITION);
	DUMPREG(VP_DST_WIDTH);
	DUMPREG(VP_DST_HEIGHT);
	DUMPREG(VP_H_RATIO);
	DUMPREG(VP_V_RATIO);
#undef DUMPREG
}

static inline void vp_filter_set(struct mixer_resources *res,
		int reg_id, const u8 *data, unsigned int size)
{
	/* assure 4-byte align */
	BUG_ON(size & 3);
	for (; size; size -= 4, reg_id += 4, data += 4) {
		u32 val = (data[0] << 24) | (data[1] << 16) |
			(data[2] << 8) | data[3];
		vp_reg_write(res, reg_id, val);
	}
}
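
/*
 * vp_filter_set() packs four consecutive 8-bit filter coefficients
 * MSB-first into each 32-bit VP register. For example, the first four
 * entries of filter_cr_horiz_tap4 (0, -3, -6, -8) end up as a single
 * write of 0x00fdfaf8. The example value is illustrative; the packing
 * is read from the loop above, not from hardware documentation.
 */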

static void vp_default_filter(struct mixer_resources *res)
{
	vp_filter_set(res, VP_POLY8_Y0_LL,
		filter_y_horiz_tap8, sizeof(filter_y_horiz_tap8));
	vp_filter_set(res, VP_POLY4_Y0_LL,
		filter_y_vert_tap4, sizeof(filter_y_vert_tap4));
	vp_filter_set(res, VP_POLY4_C0_LL,
		filter_cr_horiz_tap4, sizeof(filter_cr_horiz_tap4));
}

static void mixer_vsync_set_update(struct mixer_context *ctx, bool enable)
{
	struct mixer_resources *res = &ctx->mixer_res;

	/* block update on vsync */
	mixer_reg_writemask(res, MXR_STATUS, enable ?
			MXR_STATUS_SYNC_ENABLE : 0, MXR_STATUS_SYNC_ENABLE);

	if (ctx->vp_enabled)
		vp_reg_write(res, VP_SHADOW_UPDATE, enable ?
			VP_SHADOW_UPDATE_ENABLE : 0);
}
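
/*
 * Callers below bracket their register programming with
 * mixer_vsync_set_update(ctx, false) ... mixer_vsync_set_update(ctx, true)
 * while holding reg_slock, which appears to hold off latching of the
 * shadowed registers until the whole batch is written and the next vsync
 * occurs. The typical sequence in this file looks like:
 *
 *	spin_lock_irqsave(&res->reg_slock, flags);
 *	mixer_vsync_set_update(ctx, false);
 *	... program format, geometry and base address registers ...
 *	mixer_vsync_set_update(ctx, true);
 *	spin_unlock_irqrestore(&res->reg_slock, flags);
 */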

static void mixer_cfg_scan(struct mixer_context *ctx, unsigned int height)
{
	struct mixer_resources *res = &ctx->mixer_res;
	u32 val;

	/* choosing between interlace and progressive mode */
	val = (ctx->interlace ? MXR_CFG_SCAN_INTERLACE :
				MXR_CFG_SCAN_PROGRASSIVE);

	/* choosing between proper HD and SD mode */
	if (height == 480)
		val |= MXR_CFG_SCAN_NTSC | MXR_CFG_SCAN_SD;
	else if (height == 576)
		val |= MXR_CFG_SCAN_PAL | MXR_CFG_SCAN_SD;
	else if (height == 720)
		val |= MXR_CFG_SCAN_HD_720 | MXR_CFG_SCAN_HD;
	else if (height == 1080)
		val |= MXR_CFG_SCAN_HD_1080 | MXR_CFG_SCAN_HD;
	else
		val |= MXR_CFG_SCAN_HD_720 | MXR_CFG_SCAN_HD;

	mixer_reg_writemask(res, MXR_CFG, val, MXR_CFG_SCAN_MASK);
}

static void mixer_cfg_rgb_fmt(struct mixer_context *ctx, unsigned int height)
{
	struct mixer_resources *res = &ctx->mixer_res;
	u32 val;

	if (height == 480) {
		val = MXR_CFG_RGB601_0_255;
	} else if (height == 576) {
		val = MXR_CFG_RGB601_0_255;
	} else if (height == 720) {
		val = MXR_CFG_RGB709_16_235;
		mixer_reg_write(res, MXR_CM_COEFF_Y,
				(1 << 30) | (94 << 20) | (314 << 10) |
				(32 << 0));
		mixer_reg_write(res, MXR_CM_COEFF_CB,
				(972 << 20) | (851 << 10) | (225 << 0));
		mixer_reg_write(res, MXR_CM_COEFF_CR,
				(225 << 20) | (820 << 10) | (1004 << 0));
	} else if (height == 1080) {
		val = MXR_CFG_RGB709_16_235;
		mixer_reg_write(res, MXR_CM_COEFF_Y,
				(1 << 30) | (94 << 20) | (314 << 10) |
				(32 << 0));
		mixer_reg_write(res, MXR_CM_COEFF_CB,
				(972 << 20) | (851 << 10) | (225 << 0));
		mixer_reg_write(res, MXR_CM_COEFF_CR,
				(225 << 20) | (820 << 10) | (1004 << 0));
	} else {
		val = MXR_CFG_RGB709_16_235;
		mixer_reg_write(res, MXR_CM_COEFF_Y,
				(1 << 30) | (94 << 20) | (314 << 10) |
				(32 << 0));
		mixer_reg_write(res, MXR_CM_COEFF_CB,
				(972 << 20) | (851 << 10) | (225 << 0));
		mixer_reg_write(res, MXR_CM_COEFF_CR,
				(225 << 20) | (820 << 10) | (1004 << 0));
	}

	mixer_reg_writemask(res, MXR_CFG, val, MXR_CFG_RGB_FMT_MASK);
}
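
/*
 * mixer_cfg_rgb_fmt() above appears to select full-range BT.601 output for
 * SD heights (480/576) and limited-range BT.709 for HD heights, loading
 * colour-space conversion coefficients into MXR_CM_COEFF_{Y,CB,CR} in the
 * HD cases. The packed coefficient values are taken as-is from the code;
 * their field layout is hardware specific and is not documented here.
 */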

static void mixer_cfg_layer(struct mixer_context *ctx, int win, bool enable)
{
	struct mixer_resources *res = &ctx->mixer_res;
	u32 val = enable ? ~0 : 0;

	switch (win) {
	case 0:
		mixer_reg_writemask(res, MXR_CFG, val, MXR_CFG_GRP0_ENABLE);
		break;
	case 1:
		mixer_reg_writemask(res, MXR_CFG, val, MXR_CFG_GRP1_ENABLE);
		break;
	case 2:
		if (ctx->vp_enabled) {
			vp_reg_writemask(res, VP_ENABLE, val, VP_ENABLE_ON);
			mixer_reg_writemask(res, MXR_CFG, val,
				MXR_CFG_VP_ENABLE);
		}
		break;
	}
}

static void mixer_run(struct mixer_context *ctx)
{
	struct mixer_resources *res = &ctx->mixer_res;

	mixer_reg_writemask(res, MXR_STATUS, ~0, MXR_STATUS_REG_RUN);

	mixer_regs_dump(ctx);
}

static void vp_video_buffer(struct mixer_context *ctx, int win)
{
	struct mixer_resources *res = &ctx->mixer_res;
	unsigned long flags;
	struct hdmi_win_data *win_data;
	unsigned int x_ratio, y_ratio;
	unsigned int buf_num;
	dma_addr_t luma_addr[2], chroma_addr[2];
	bool tiled_mode = false;
	bool crcb_mode = false;
	u32 val;

	win_data = &ctx->win_data[win];

	switch (win_data->pixel_format) {
	case DRM_FORMAT_NV12MT:
		tiled_mode = true;
		/* fall through: handled like NV12 apart from tiling */
	case DRM_FORMAT_NV12:
		crcb_mode = false;
		buf_num = 2;
		break;
	/* TODO: single buffer format NV12, NV21 */
	default:
		/* ignore pixel format at disable time */
		if (!win_data->dma_addr)
			break;

		DRM_ERROR("pixel format for vp is wrong [%d].\n",
				win_data->pixel_format);
		return;
	}

	/* scaling feature: (src << 16) / dst */
	x_ratio = (win_data->src_width << 16) / win_data->crtc_width;
	y_ratio = (win_data->src_height << 16) / win_data->crtc_height;
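
	/*
	 * The ratios above are 16.16 fixed point. As a worked example, a
	 * 1920-pixel wide source scaled into a 1280-pixel wide window gives
	 * (1920 << 16) / 1280 = 0x18000, i.e. a ratio of 1.5. The numbers
	 * are illustrative only.
	 */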

	if (buf_num == 2) {
		luma_addr[0] = win_data->dma_addr;
		chroma_addr[0] = win_data->chroma_dma_addr;
	} else {
		luma_addr[0] = win_data->dma_addr;
		chroma_addr[0] = win_data->dma_addr
			+ (win_data->fb_width * win_data->fb_height);
	}

	if (win_data->scan_flags & DRM_MODE_FLAG_INTERLACE) {
		ctx->interlace = true;
		if (tiled_mode) {
			luma_addr[1] = luma_addr[0] + 0x40;
			chroma_addr[1] = chroma_addr[0] + 0x40;
		} else {
			luma_addr[1] = luma_addr[0] + win_data->fb_width;
			chroma_addr[1] = chroma_addr[0] + win_data->fb_width;
		}
	} else {
		ctx->interlace = false;
		luma_addr[1] = 0;
		chroma_addr[1] = 0;
	}

	spin_lock_irqsave(&res->reg_slock, flags);
	mixer_vsync_set_update(ctx, false);

	/* interlace or progressive scan mode */
	val = (ctx->interlace ? ~0 : 0);
	vp_reg_writemask(res, VP_MODE, val, VP_MODE_LINE_SKIP);

	/* setup format */
	val = (crcb_mode ? VP_MODE_NV21 : VP_MODE_NV12);
	val |= (tiled_mode ? VP_MODE_MEM_TILED : VP_MODE_MEM_LINEAR);
	vp_reg_writemask(res, VP_MODE, val, VP_MODE_FMT_MASK);

	/* setting size of input image */
	vp_reg_write(res, VP_IMG_SIZE_Y, VP_IMG_HSIZE(win_data->fb_width) |
		VP_IMG_VSIZE(win_data->fb_height));
	/* chroma height has to be halved to avoid chroma distortion */
	vp_reg_write(res, VP_IMG_SIZE_C, VP_IMG_HSIZE(win_data->fb_width) |
		VP_IMG_VSIZE(win_data->fb_height / 2));

	vp_reg_write(res, VP_SRC_WIDTH, win_data->src_width);
	vp_reg_write(res, VP_SRC_HEIGHT, win_data->src_height);
	vp_reg_write(res, VP_SRC_H_POSITION,
			VP_SRC_H_POSITION_VAL(win_data->fb_x));
	vp_reg_write(res, VP_SRC_V_POSITION, win_data->fb_y);

	vp_reg_write(res, VP_DST_WIDTH, win_data->crtc_width);
	vp_reg_write(res, VP_DST_H_POSITION, win_data->crtc_x);
	if (ctx->interlace) {
		vp_reg_write(res, VP_DST_HEIGHT, win_data->crtc_height / 2);
		vp_reg_write(res, VP_DST_V_POSITION, win_data->crtc_y / 2);
	} else {
		vp_reg_write(res, VP_DST_HEIGHT, win_data->crtc_height);
		vp_reg_write(res, VP_DST_V_POSITION, win_data->crtc_y);
	}

	vp_reg_write(res, VP_H_RATIO, x_ratio);
	vp_reg_write(res, VP_V_RATIO, y_ratio);

	vp_reg_write(res, VP_ENDIAN_MODE, VP_ENDIAN_MODE_LITTLE);

	/* set buffer address to vp */
	vp_reg_write(res, VP_TOP_Y_PTR, luma_addr[0]);
	vp_reg_write(res, VP_BOT_Y_PTR, luma_addr[1]);
	vp_reg_write(res, VP_TOP_C_PTR, chroma_addr[0]);
	vp_reg_write(res, VP_BOT_C_PTR, chroma_addr[1]);

	mixer_cfg_scan(ctx, win_data->mode_height);
	mixer_cfg_rgb_fmt(ctx, win_data->mode_height);
	mixer_cfg_layer(ctx, win, true);
	mixer_run(ctx);

	mixer_vsync_set_update(ctx, true);
	spin_unlock_irqrestore(&res->reg_slock, flags);

	vp_regs_dump(ctx);
}

static void mixer_layer_update(struct mixer_context *ctx)
{
	struct mixer_resources *res = &ctx->mixer_res;
	u32 val;

	val = mixer_reg_read(res, MXR_CFG);

	/* allow one update per vsync only */
	if (!(val & MXR_CFG_LAYER_UPDATE_COUNT_MASK))
		mixer_reg_writemask(res, MXR_CFG, ~0, MXR_CFG_LAYER_UPDATE);
}

static void mixer_graph_buffer(struct mixer_context *ctx, int win)
{
	struct mixer_resources *res = &ctx->mixer_res;
	unsigned long flags;
	struct hdmi_win_data *win_data;
	unsigned int x_ratio, y_ratio;
	unsigned int src_x_offset, src_y_offset, dst_x_offset, dst_y_offset;
	dma_addr_t dma_addr;
	unsigned int fmt;
	u32 val;

	win_data = &ctx->win_data[win];

#define RGB565 4
#define ARGB1555 5
#define ARGB4444 6
#define ARGB8888 7

	switch (win_data->bpp) {
	case 16:
		fmt = ARGB4444;
		break;
	case 32:
		fmt = ARGB8888;
		break;
	default:
		fmt = ARGB8888;
	}

	/* 2x scaling feature */
	x_ratio = 0;
	y_ratio = 0;

	dst_x_offset = win_data->crtc_x;
	dst_y_offset = win_data->crtc_y;

	/* converting dma address base and source offset */
	dma_addr = win_data->dma_addr
		+ (win_data->fb_x * win_data->bpp >> 3)
		+ (win_data->fb_y * win_data->fb_width * win_data->bpp >> 3);
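
	/*
	 * The base address is advanced to the (fb_x, fb_y) origin of the
	 * source rectangle: fb_x pixels of bpp/8 bytes each plus fb_y full
	 * lines of fb_width pixels. For instance, with bpp = 32, fb_x = 8,
	 * fb_y = 2 and fb_width = 1920 this adds 8*4 + 2*1920*4 = 15392
	 * bytes; the figures are illustrative only.
	 */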

	src_x_offset = 0;
	src_y_offset = 0;

	if (win_data->scan_flags & DRM_MODE_FLAG_INTERLACE)
		ctx->interlace = true;
	else
		ctx->interlace = false;

	spin_lock_irqsave(&res->reg_slock, flags);
	mixer_vsync_set_update(ctx, false);

	/* setup format */
	mixer_reg_writemask(res, MXR_GRAPHIC_CFG(win),
		MXR_GRP_CFG_FORMAT_VAL(fmt), MXR_GRP_CFG_FORMAT_MASK);

	/* setup geometry */
	mixer_reg_write(res, MXR_GRAPHIC_SPAN(win), win_data->fb_width);

	val = MXR_GRP_WH_WIDTH(win_data->crtc_width);
	val |= MXR_GRP_WH_HEIGHT(win_data->crtc_height);
	val |= MXR_GRP_WH_H_SCALE(x_ratio);
	val |= MXR_GRP_WH_V_SCALE(y_ratio);
	mixer_reg_write(res, MXR_GRAPHIC_WH(win), val);

	/* setup offsets in source image */
	val = MXR_GRP_SXY_SX(src_x_offset);
	val |= MXR_GRP_SXY_SY(src_y_offset);
	mixer_reg_write(res, MXR_GRAPHIC_SXY(win), val);

	/* setup offsets in display image */
	val = MXR_GRP_DXY_DX(dst_x_offset);
	val |= MXR_GRP_DXY_DY(dst_y_offset);
	mixer_reg_write(res, MXR_GRAPHIC_DXY(win), val);

	/* set buffer address to mixer */
	mixer_reg_write(res, MXR_GRAPHIC_BASE(win), dma_addr);

	mixer_cfg_scan(ctx, win_data->mode_height);
	mixer_cfg_rgb_fmt(ctx, win_data->mode_height);
	mixer_cfg_layer(ctx, win, true);

	/* layer update mandatory for mixer 16.0.33.0 */
	if (ctx->mxr_ver == MXR_VER_16_0_33_0)
		mixer_layer_update(ctx);

	mixer_run(ctx);

	mixer_vsync_set_update(ctx, true);
	spin_unlock_irqrestore(&res->reg_slock, flags);
}

static void vp_win_reset(struct mixer_context *ctx)
{
	struct mixer_resources *res = &ctx->mixer_res;
	int tries = 100;

	vp_reg_write(res, VP_SRESET, VP_SRESET_PROCESSING);
	for (tries = 100; tries; --tries) {
		/* waiting until VP_SRESET_PROCESSING is 0 */
		if (~vp_reg_read(res, VP_SRESET) & VP_SRESET_PROCESSING)
			break;
		mdelay(10);
	}
	WARN(tries == 0, "failed to reset Video Processor\n");
}

static void mixer_win_reset(struct mixer_context *ctx)
{
	struct mixer_resources *res = &ctx->mixer_res;
	unsigned long flags;
	u32 val; /* value stored to register */

	spin_lock_irqsave(&res->reg_slock, flags);
	mixer_vsync_set_update(ctx, false);

	mixer_reg_writemask(res, MXR_CFG, MXR_CFG_DST_HDMI, MXR_CFG_DST_MASK);

	/* set output in RGB888 mode */
	mixer_reg_writemask(res, MXR_CFG, MXR_CFG_OUT_RGB888, MXR_CFG_OUT_MASK);

	/* 16 beat burst in DMA */
	mixer_reg_writemask(res, MXR_STATUS, MXR_STATUS_16_BURST,
		MXR_STATUS_BURST_MASK);

	/* setting default layer priority: layer1 > layer0 > video
	 * because typical usage scenario would be
	 * layer1 - OSD
	 * layer0 - framebuffer
	 * video - video overlay
	 */
	val = MXR_LAYER_CFG_GRP1_VAL(3);
	val |= MXR_LAYER_CFG_GRP0_VAL(2);
	if (ctx->vp_enabled)
		val |= MXR_LAYER_CFG_VP_VAL(1);
	mixer_reg_write(res, MXR_LAYER_CFG, val);

	/* setting background color */
	mixer_reg_write(res, MXR_BG_COLOR0, 0x008080);
	mixer_reg_write(res, MXR_BG_COLOR1, 0x008080);
	mixer_reg_write(res, MXR_BG_COLOR2, 0x008080);

	/* setting graphical layers */
	val = MXR_GRP_CFG_COLOR_KEY_DISABLE; /* no blank key */
	val |= MXR_GRP_CFG_WIN_BLEND_EN;
	val |= MXR_GRP_CFG_BLEND_PRE_MUL;
	val |= MXR_GRP_CFG_PIXEL_BLEND_EN;
	val |= MXR_GRP_CFG_ALPHA_VAL(0xff); /* non-transparent alpha */

	/* the same configuration for both layers */
	mixer_reg_write(res, MXR_GRAPHIC_CFG(0), val);
	mixer_reg_write(res, MXR_GRAPHIC_CFG(1), val);

	/* setting video layers */
	val = MXR_GRP_CFG_ALPHA_VAL(0);
	mixer_reg_write(res, MXR_VIDEO_CFG, val);

	if (ctx->vp_enabled) {
		/* configuration of Video Processor Registers */
		vp_win_reset(ctx);
		vp_default_filter(res);
	}

	/* disable all layers */
	mixer_reg_writemask(res, MXR_CFG, 0, MXR_CFG_GRP0_ENABLE);
	mixer_reg_writemask(res, MXR_CFG, 0, MXR_CFG_GRP1_ENABLE);
	if (ctx->vp_enabled)
		mixer_reg_writemask(res, MXR_CFG, 0, MXR_CFG_VP_ENABLE);

	mixer_vsync_set_update(ctx, true);
	spin_unlock_irqrestore(&res->reg_slock, flags);
}

static int mixer_iommu_on(void *ctx, bool enable)
{
	struct exynos_drm_hdmi_context *drm_hdmi_ctx;
	struct mixer_context *mdata = ctx;
	struct drm_device *drm_dev;

	drm_hdmi_ctx = mdata->parent_ctx;
	drm_dev = drm_hdmi_ctx->drm_dev;

	if (is_drm_iommu_supported(drm_dev)) {
		if (enable)
			return drm_iommu_attach_device(drm_dev, mdata->dev);

		drm_iommu_detach_device(drm_dev, mdata->dev);
	}
	return 0;
}

static int mixer_enable_vblank(void *ctx, int pipe)
{
	struct mixer_context *mixer_ctx = ctx;
	struct mixer_resources *res = &mixer_ctx->mixer_res;

	DRM_DEBUG_KMS("[%d] %s\n", __LINE__, __func__);

	mixer_ctx->pipe = pipe;

	/* enable vsync interrupt */
	mixer_reg_writemask(res, MXR_INT_EN, MXR_INT_EN_VSYNC,
			MXR_INT_EN_VSYNC);

	return 0;
}

static void mixer_disable_vblank(void *ctx)
{
	struct mixer_context *mixer_ctx = ctx;
	struct mixer_resources *res = &mixer_ctx->mixer_res;

	DRM_DEBUG_KMS("[%d] %s\n", __LINE__, __func__);

	/* disable vsync interrupt */
	mixer_reg_writemask(res, MXR_INT_EN, 0, MXR_INT_EN_VSYNC);
}

static void mixer_win_mode_set(void *ctx,
			struct exynos_drm_overlay *overlay)
{
	struct mixer_context *mixer_ctx = ctx;
	struct hdmi_win_data *win_data;
	int win;

	DRM_DEBUG_KMS("[%d] %s\n", __LINE__, __func__);

	if (!overlay) {
		DRM_ERROR("overlay is NULL\n");
		return;
	}

	DRM_DEBUG_KMS("set [%d]x[%d] at (%d,%d) to [%d]x[%d] at (%d,%d)\n",
				overlay->fb_width, overlay->fb_height,
				overlay->fb_x, overlay->fb_y,
				overlay->crtc_width, overlay->crtc_height,
				overlay->crtc_x, overlay->crtc_y);

	win = overlay->zpos;
	if (win == DEFAULT_ZPOS)
		win = MIXER_DEFAULT_WIN;

	/* win_data[] holds MIXER_WIN_NR entries, so the index must stay below it */
	if (win < 0 || win >= MIXER_WIN_NR) {
		DRM_ERROR("mixer window[%d] is wrong\n", win);
		return;
	}

	win_data = &mixer_ctx->win_data[win];

	win_data->dma_addr = overlay->dma_addr[0];
	win_data->chroma_dma_addr = overlay->dma_addr[1];
	win_data->pixel_format = overlay->pixel_format;
	win_data->bpp = overlay->bpp;

	win_data->crtc_x = overlay->crtc_x;
	win_data->crtc_y = overlay->crtc_y;
	win_data->crtc_width = overlay->crtc_width;
	win_data->crtc_height = overlay->crtc_height;

	win_data->fb_x = overlay->fb_x;
	win_data->fb_y = overlay->fb_y;
	win_data->fb_width = overlay->fb_width;
	win_data->fb_height = overlay->fb_height;
	win_data->src_width = overlay->src_width;
	win_data->src_height = overlay->src_height;

	win_data->mode_width = overlay->mode_width;
	win_data->mode_height = overlay->mode_height;

	win_data->scan_flags = overlay->scan_flag;
}

static void mixer_win_commit(void *ctx, int win)
{
	struct mixer_context *mixer_ctx = ctx;

	DRM_DEBUG_KMS("[%d] %s, win: %d\n", __LINE__, __func__, win);

	if (win > 1 && mixer_ctx->vp_enabled)
		vp_video_buffer(mixer_ctx, win);
	else
		mixer_graph_buffer(mixer_ctx, win);

	mixer_ctx->win_data[win].enabled = true;
}

static void mixer_win_disable(void *ctx, int win)
{
	struct mixer_context *mixer_ctx = ctx;
	struct mixer_resources *res = &mixer_ctx->mixer_res;
	unsigned long flags;

	DRM_DEBUG_KMS("[%d] %s, win: %d\n", __LINE__, __func__, win);

	mutex_lock(&mixer_ctx->mixer_mutex);
	if (!mixer_ctx->powered) {
		mutex_unlock(&mixer_ctx->mixer_mutex);
		mixer_ctx->win_data[win].resume = false;
		return;
	}
	mutex_unlock(&mixer_ctx->mixer_mutex);

	spin_lock_irqsave(&res->reg_slock, flags);
	mixer_vsync_set_update(mixer_ctx, false);

	mixer_cfg_layer(mixer_ctx, win, false);

	mixer_vsync_set_update(mixer_ctx, true);
	spin_unlock_irqrestore(&res->reg_slock, flags);

	mixer_ctx->win_data[win].enabled = false;
}

static void mixer_wait_for_vblank(void *ctx)
{
	struct mixer_context *mixer_ctx = ctx;

	mutex_lock(&mixer_ctx->mixer_mutex);
	if (!mixer_ctx->powered) {
		mutex_unlock(&mixer_ctx->mixer_mutex);
		return;
	}
	mutex_unlock(&mixer_ctx->mixer_mutex);

	atomic_set(&mixer_ctx->wait_vsync_event, 1);

	/*
	 * wait for MIXER to signal VSYNC interrupt or return after
	 * timeout which is set to 50ms (refresh rate of 20).
	 */
	if (!wait_event_timeout(mixer_ctx->wait_vsync_queue,
				!atomic_read(&mixer_ctx->wait_vsync_event),
				DRM_HZ/20))
		DRM_DEBUG_KMS("vblank wait timed out.\n");
}
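
/*
 * The vblank wait is a flag-and-waitqueue handshake: this function sets
 * wait_vsync_event and sleeps on wait_vsync_queue, and mixer_irq_handler()
 * clears the flag and wakes the queue on the next VSYNC interrupt. The
 * DRM_HZ/20 timeout (50ms) only bounds the sleep if that interrupt never
 * arrives.
 */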

static void mixer_window_suspend(struct mixer_context *ctx)
{
	struct hdmi_win_data *win_data;
	int i;

	for (i = 0; i < MIXER_WIN_NR; i++) {
		win_data = &ctx->win_data[i];
		win_data->resume = win_data->enabled;
		mixer_win_disable(ctx, i);
	}
	mixer_wait_for_vblank(ctx);
}

static void mixer_window_resume(struct mixer_context *ctx)
{
	struct hdmi_win_data *win_data;
	int i;

	for (i = 0; i < MIXER_WIN_NR; i++) {
		win_data = &ctx->win_data[i];
		win_data->enabled = win_data->resume;
		win_data->resume = false;
	}
}

static void mixer_poweron(struct mixer_context *ctx)
{
	struct mixer_resources *res = &ctx->mixer_res;

	DRM_DEBUG_KMS("[%d] %s\n", __LINE__, __func__);

	mutex_lock(&ctx->mixer_mutex);
	if (ctx->powered) {
		mutex_unlock(&ctx->mixer_mutex);
		return;
	}
	ctx->powered = true;
	mutex_unlock(&ctx->mixer_mutex);

	clk_enable(res->mixer);
	if (ctx->vp_enabled) {
		clk_enable(res->vp);
		clk_enable(res->sclk_mixer);
	}

	mixer_reg_write(res, MXR_INT_EN, ctx->int_en);
	mixer_win_reset(ctx);

	mixer_window_resume(ctx);
}

static void mixer_poweroff(struct mixer_context *ctx)
{
	struct mixer_resources *res = &ctx->mixer_res;

	DRM_DEBUG_KMS("[%d] %s\n", __LINE__, __func__);

	mutex_lock(&ctx->mixer_mutex);
	if (!ctx->powered)
		goto out;
	mutex_unlock(&ctx->mixer_mutex);

	mixer_window_suspend(ctx);

	ctx->int_en = mixer_reg_read(res, MXR_INT_EN);

	clk_disable(res->mixer);
	if (ctx->vp_enabled) {
		clk_disable(res->vp);
		clk_disable(res->sclk_mixer);
	}

	mutex_lock(&ctx->mixer_mutex);
	ctx->powered = false;

out:
	mutex_unlock(&ctx->mixer_mutex);
}

static void mixer_dpms(void *ctx, int mode)
{
	struct mixer_context *mixer_ctx = ctx;

	DRM_DEBUG_KMS("[%d] %s\n", __LINE__, __func__);

	switch (mode) {
	case DRM_MODE_DPMS_ON:
		if (pm_runtime_suspended(mixer_ctx->dev))
			pm_runtime_get_sync(mixer_ctx->dev);
		break;
	case DRM_MODE_DPMS_STANDBY:
	case DRM_MODE_DPMS_SUSPEND:
	case DRM_MODE_DPMS_OFF:
		if (!pm_runtime_suspended(mixer_ctx->dev))
			pm_runtime_put_sync(mixer_ctx->dev);
		break;
	default:
		DRM_DEBUG_KMS("unknown dpms mode: %d\n", mode);
		break;
	}
}

static struct exynos_mixer_ops mixer_ops = {
	/* manager */
	.iommu_on = mixer_iommu_on,
	.enable_vblank = mixer_enable_vblank,
	.disable_vblank = mixer_disable_vblank,
	.wait_for_vblank = mixer_wait_for_vblank,
	.dpms = mixer_dpms,

	/* overlay */
	.win_mode_set = mixer_win_mode_set,
	.win_commit = mixer_win_commit,
	.win_disable = mixer_win_disable,
};

static irqreturn_t mixer_irq_handler(int irq, void *arg)
{
	struct exynos_drm_hdmi_context *drm_hdmi_ctx = arg;
	struct mixer_context *ctx = drm_hdmi_ctx->ctx;
	struct mixer_resources *res = &ctx->mixer_res;
	u32 val, base, shadow;

	spin_lock(&res->reg_slock);

	/* read interrupt status for handling and clearing flags for VSYNC */
	val = mixer_reg_read(res, MXR_INT_STATUS);

	/* handling VSYNC */
	if (val & MXR_INT_STATUS_VSYNC) {
		/* interlaced scan needs to check the shadow registers */
		if (ctx->interlace) {
			base = mixer_reg_read(res, MXR_GRAPHIC_BASE(0));
			shadow = mixer_reg_read(res, MXR_GRAPHIC_BASE_S(0));
			if (base != shadow)
				goto out;

			base = mixer_reg_read(res, MXR_GRAPHIC_BASE(1));
			shadow = mixer_reg_read(res, MXR_GRAPHIC_BASE_S(1));
			if (base != shadow)
				goto out;
		}

		drm_handle_vblank(drm_hdmi_ctx->drm_dev, ctx->pipe);
		exynos_drm_crtc_finish_pageflip(drm_hdmi_ctx->drm_dev,
				ctx->pipe);

		/* set wait vsync event to zero and wake up queue. */
		if (atomic_read(&ctx->wait_vsync_event)) {
			atomic_set(&ctx->wait_vsync_event, 0);
			DRM_WAKEUP(&ctx->wait_vsync_queue);
		}
	}

out:
	/* clear interrupts */
	if (~val & MXR_INT_EN_VSYNC) {
		/* vsync interrupt use different bit for read and clear */
		val &= ~MXR_INT_EN_VSYNC;
		val |= MXR_INT_CLEAR_VSYNC;
	}
	mixer_reg_write(res, MXR_INT_STATUS, val);

	spin_unlock(&res->reg_slock);

	return IRQ_HANDLED;
}
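
/*
 * In interlaced modes the handler above only completes the vblank and
 * pageflip once each graphic layer's MXR_GRAPHIC_BASE register matches its
 * shadow copy (MXR_GRAPHIC_BASE_S), i.e. once the hardware has actually
 * latched the newly programmed base address; until then the interrupt is
 * acknowledged but the flip is left pending. This reading is inferred from
 * the code, not from hardware documentation.
 */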

static int mixer_resources_init(struct exynos_drm_hdmi_context *ctx,
				struct platform_device *pdev)
{
	struct mixer_context *mixer_ctx = ctx->ctx;
	struct device *dev = &pdev->dev;
	struct mixer_resources *mixer_res = &mixer_ctx->mixer_res;
	struct resource *res;
	int ret;

	spin_lock_init(&mixer_res->reg_slock);

	mixer_res->mixer = devm_clk_get(dev, "mixer");
	if (IS_ERR_OR_NULL(mixer_res->mixer)) {
		dev_err(dev, "failed to get clock 'mixer'\n");
		return -ENODEV;
	}

	mixer_res->sclk_hdmi = devm_clk_get(dev, "sclk_hdmi");
	if (IS_ERR_OR_NULL(mixer_res->sclk_hdmi)) {
		dev_err(dev, "failed to get clock 'sclk_hdmi'\n");
		return -ENODEV;
	}

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (res == NULL) {
		dev_err(dev, "get memory resource failed.\n");
		return -ENXIO;
	}

	mixer_res->mixer_regs = devm_ioremap(&pdev->dev, res->start,
							resource_size(res));
	if (mixer_res->mixer_regs == NULL) {
		dev_err(dev, "register mapping failed.\n");
		return -ENXIO;
	}

	res = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (res == NULL) {
		dev_err(dev, "get interrupt resource failed.\n");
		return -ENXIO;
	}

	ret = devm_request_irq(&pdev->dev, res->start, mixer_irq_handler,
							0, "drm_mixer", ctx);
	if (ret) {
		dev_err(dev, "request interrupt failed.\n");
		return ret;
	}
	mixer_res->irq = res->start;

	return 0;
}

static int vp_resources_init(struct exynos_drm_hdmi_context *ctx,
			     struct platform_device *pdev)
{
	struct mixer_context *mixer_ctx = ctx->ctx;
	struct device *dev = &pdev->dev;
	struct mixer_resources *mixer_res = &mixer_ctx->mixer_res;
	struct resource *res;

	mixer_res->vp = devm_clk_get(dev, "vp");
	if (IS_ERR_OR_NULL(mixer_res->vp)) {
		dev_err(dev, "failed to get clock 'vp'\n");
		return -ENODEV;
	}

	mixer_res->sclk_mixer = devm_clk_get(dev, "sclk_mixer");
	if (IS_ERR_OR_NULL(mixer_res->sclk_mixer)) {
		dev_err(dev, "failed to get clock 'sclk_mixer'\n");
		return -ENODEV;
	}

	mixer_res->sclk_dac = devm_clk_get(dev, "sclk_dac");
	if (IS_ERR_OR_NULL(mixer_res->sclk_dac)) {
		dev_err(dev, "failed to get clock 'sclk_dac'\n");
		return -ENODEV;
	}

	if (mixer_res->sclk_hdmi)
		clk_set_parent(mixer_res->sclk_mixer, mixer_res->sclk_hdmi);

	res = platform_get_resource(pdev, IORESOURCE_MEM, 1);
	if (res == NULL) {
		dev_err(dev, "get memory resource failed.\n");
		return -ENXIO;
	}

	mixer_res->vp_regs = devm_ioremap(&pdev->dev, res->start,
							resource_size(res));
	if (mixer_res->vp_regs == NULL) {
		dev_err(dev, "register mapping failed.\n");
		return -ENXIO;
	}

	return 0;
}

static struct mixer_drv_data exynos5_mxr_drv_data = {
	.version = MXR_VER_16_0_33_0,
	.is_vp_enabled = 0,
};

static struct mixer_drv_data exynos4_mxr_drv_data = {
	.version = MXR_VER_0_0_0_16,
	.is_vp_enabled = 1,
};

static struct platform_device_id mixer_driver_types[] = {
	{
		.name = "s5p-mixer",
		.driver_data = (unsigned long)&exynos4_mxr_drv_data,
	}, {
		.name = "exynos5-mixer",
		.driver_data = (unsigned long)&exynos5_mxr_drv_data,
	}, {
		/* end node */
	}
};

static struct of_device_id mixer_match_types[] = {
	{
		.compatible = "samsung,exynos5-mixer",
		.data = &exynos5_mxr_drv_data,
	}, {
		/* end node */
	}
};

static int mixer_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct exynos_drm_hdmi_context *drm_hdmi_ctx;
	struct mixer_context *ctx;
	struct mixer_drv_data *drv;
	int ret;

	dev_info(dev, "probe start\n");

	drm_hdmi_ctx = devm_kzalloc(&pdev->dev, sizeof(*drm_hdmi_ctx),
								GFP_KERNEL);
	if (!drm_hdmi_ctx) {
		DRM_ERROR("failed to allocate common hdmi context.\n");
		return -ENOMEM;
	}

	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
	if (!ctx) {
		DRM_ERROR("failed to alloc mixer context.\n");
		return -ENOMEM;
	}

	mutex_init(&ctx->mixer_mutex);

	if (dev->of_node) {
		const struct of_device_id *match;

		match = of_match_node(of_match_ptr(mixer_match_types),
							pdev->dev.of_node);
		drv = (struct mixer_drv_data *)match->data;
	} else {
		drv = (struct mixer_drv_data *)
			platform_get_device_id(pdev)->driver_data;
	}

	ctx->dev = &pdev->dev;
	ctx->parent_ctx = (void *)drm_hdmi_ctx;
	drm_hdmi_ctx->ctx = (void *)ctx;
	ctx->vp_enabled = drv->is_vp_enabled;
	ctx->mxr_ver = drv->version;
	DRM_INIT_WAITQUEUE(&ctx->wait_vsync_queue);
	atomic_set(&ctx->wait_vsync_event, 0);

	platform_set_drvdata(pdev, drm_hdmi_ctx);

	/* acquire resources: regs, irqs, clocks */
	ret = mixer_resources_init(drm_hdmi_ctx, pdev);
	if (ret) {
		DRM_ERROR("mixer_resources_init failed\n");
		goto fail;
	}

	if (ctx->vp_enabled) {
		/* acquire vp resources: regs, irqs, clocks */
		ret = vp_resources_init(drm_hdmi_ctx, pdev);
		if (ret) {
			DRM_ERROR("vp_resources_init failed\n");
			goto fail;
		}
	}

	/* attach mixer driver to common hdmi. */
	exynos_mixer_drv_attach(drm_hdmi_ctx);

	/* register specific callback point to common hdmi. */
	exynos_mixer_ops_register(&mixer_ops);

	pm_runtime_enable(dev);

	return 0;

fail:
	dev_info(dev, "probe failed\n");
	return ret;
}

static int mixer_remove(struct platform_device *pdev)
{
	dev_info(&pdev->dev, "remove successful\n");

	pm_runtime_disable(&pdev->dev);

	return 0;
}

#ifdef CONFIG_PM_SLEEP
static int mixer_suspend(struct device *dev)
{
	struct exynos_drm_hdmi_context *drm_hdmi_ctx = get_mixer_context(dev);
	struct mixer_context *ctx = drm_hdmi_ctx->ctx;

	DRM_DEBUG_KMS("[%d] %s\n", __LINE__, __func__);

	if (pm_runtime_suspended(dev)) {
		DRM_DEBUG_KMS("%s : Already suspended\n", __func__);
		return 0;
	}

	mixer_poweroff(ctx);

	return 0;
}

static int mixer_resume(struct device *dev)
{
	struct exynos_drm_hdmi_context *drm_hdmi_ctx = get_mixer_context(dev);
	struct mixer_context *ctx = drm_hdmi_ctx->ctx;

	DRM_DEBUG_KMS("[%d] %s\n", __LINE__, __func__);

	if (!pm_runtime_suspended(dev)) {
		DRM_DEBUG_KMS("%s : Already resumed\n", __func__);
		return 0;
	}

	mixer_poweron(ctx);

	return 0;
}
#endif

#ifdef CONFIG_PM_RUNTIME
static int mixer_runtime_suspend(struct device *dev)
{
	struct exynos_drm_hdmi_context *drm_hdmi_ctx = get_mixer_context(dev);
	struct mixer_context *ctx = drm_hdmi_ctx->ctx;

	DRM_DEBUG_KMS("[%d] %s\n", __LINE__, __func__);

	mixer_poweroff(ctx);

	return 0;
}

static int mixer_runtime_resume(struct device *dev)
{
	struct exynos_drm_hdmi_context *drm_hdmi_ctx = get_mixer_context(dev);
	struct mixer_context *ctx = drm_hdmi_ctx->ctx;

	DRM_DEBUG_KMS("[%d] %s\n", __LINE__, __func__);

	mixer_poweron(ctx);

	return 0;
}
#endif

static const struct dev_pm_ops mixer_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(mixer_suspend, mixer_resume)
	SET_RUNTIME_PM_OPS(mixer_runtime_suspend, mixer_runtime_resume, NULL)
};

struct platform_driver mixer_driver = {
	.driver = {
		.name = "exynos-mixer",
		.owner = THIS_MODULE,
		.pm = &mixer_pm_ops,
		.of_match_table = mixer_match_types,
	},
	.probe = mixer_probe,
	.remove = mixer_remove,
	.id_table = mixer_driver_types,
};