exynos_mixer.c

/*
 * Copyright (C) 2011 Samsung Electronics Co.Ltd
 * Authors:
 *	Seung-Woo Kim <sw0312.kim@samsung.com>
 *	Inki Dae <inki.dae@samsung.com>
 *	Joonyoung Shim <jy0922.shim@samsung.com>
 *
 * Based on drivers/media/video/s5p-tv/mixer_reg.c
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 *
 */

#include <drm/drmP.h>

#include "regs-mixer.h"
#include "regs-vp.h"

#include <linux/kernel.h>
#include <linux/spinlock.h>
#include <linux/wait.h>
#include <linux/i2c.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/interrupt.h>
#include <linux/irq.h>
#include <linux/delay.h>
#include <linux/pm_runtime.h>
#include <linux/clk.h>
#include <linux/regulator/consumer.h>

#include <drm/exynos_drm.h>

#include "exynos_drm_drv.h"
#include "exynos_drm_crtc.h"
#include "exynos_drm_hdmi.h"
#include "exynos_drm_iommu.h"

#define get_mixer_context(dev)	platform_get_drvdata(to_platform_device(dev))

struct hdmi_win_data {
	dma_addr_t		dma_addr;
	dma_addr_t		chroma_dma_addr;
	uint32_t		pixel_format;
	unsigned int		bpp;
	unsigned int		crtc_x;
	unsigned int		crtc_y;
	unsigned int		crtc_width;
	unsigned int		crtc_height;
	unsigned int		fb_x;
	unsigned int		fb_y;
	unsigned int		fb_width;
	unsigned int		fb_height;
	unsigned int		src_width;
	unsigned int		src_height;
	unsigned int		mode_width;
	unsigned int		mode_height;
	unsigned int		scan_flags;
	bool			enabled;
	bool			resume;
};

struct mixer_resources {
	int			irq;
	void __iomem		*mixer_regs;
	void __iomem		*vp_regs;
	spinlock_t		reg_slock;
	struct clk		*mixer;
	struct clk		*vp;
	struct clk		*sclk_mixer;
	struct clk		*sclk_hdmi;
	struct clk		*sclk_dac;
};

enum mixer_version_id {
	MXR_VER_0_0_0_16,
	MXR_VER_16_0_33_0,
};

struct mixer_context {
	struct device		*dev;
	struct drm_device	*drm_dev;
	int			pipe;
	bool			interlace;
	bool			powered;
	bool			vp_enabled;
	u32			int_en;

	struct mutex		mixer_mutex;
	struct mixer_resources	mixer_res;
	struct hdmi_win_data	win_data[MIXER_WIN_NR];
	enum mixer_version_id	mxr_ver;
	void			*parent_ctx;
	wait_queue_head_t	wait_vsync_queue;
	atomic_t		wait_vsync_event;
};

struct mixer_drv_data {
	enum mixer_version_id	version;
	bool			is_vp_enabled;
};

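/*
 * Default scaler filter coefficient tables for the video processor (VP),
 * carried over from the s5p-tv driver this file is based on: an 8-tap
 * horizontal and a 4-tap vertical luma filter plus a 4-tap chroma filter.
 * They are loaded verbatim into the VP_POLY* register banks by
 * vp_default_filter() below.
 */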
static const u8 filter_y_horiz_tap8[] = {
	0,	-1,	-1,	-1,	-1,	-1,	-1,	-1,
	-1,	-1,	-1,	-1,	-1,	0,	0,	0,
	0,	2,	4,	5,	6,	6,	6,	6,
	6,	5,	5,	4,	3,	2,	1,	1,
	0,	-6,	-12,	-16,	-18,	-20,	-21,	-20,
	-20,	-18,	-16,	-13,	-10,	-8,	-5,	-2,
	127,	126,	125,	121,	114,	107,	99,	89,
	79,	68,	57,	46,	35,	25,	16,	8,
};

static const u8 filter_y_vert_tap4[] = {
	0,	-3,	-6,	-8,	-8,	-8,	-8,	-7,
	-6,	-5,	-4,	-3,	-2,	-1,	-1,	0,
	127,	126,	124,	118,	111,	102,	92,	81,
	70,	59,	48,	37,	27,	19,	11,	5,
	0,	5,	11,	19,	27,	37,	48,	59,
	70,	81,	92,	102,	111,	118,	124,	126,
	0,	0,	-1,	-1,	-2,	-3,	-4,	-5,
	-6,	-7,	-8,	-8,	-8,	-8,	-6,	-3,
};

static const u8 filter_cr_horiz_tap4[] = {
	0,	-3,	-6,	-8,	-8,	-8,	-8,	-7,
	-6,	-5,	-4,	-3,	-2,	-1,	-1,	0,
	127,	126,	124,	118,	111,	102,	92,	81,
	70,	59,	48,	37,	27,	19,	11,	5,
};

static inline u32 vp_reg_read(struct mixer_resources *res, u32 reg_id)
{
	return readl(res->vp_regs + reg_id);
}

static inline void vp_reg_write(struct mixer_resources *res, u32 reg_id,
				u32 val)
{
	writel(val, res->vp_regs + reg_id);
}

static inline void vp_reg_writemask(struct mixer_resources *res, u32 reg_id,
				u32 val, u32 mask)
{
	u32 old = vp_reg_read(res, reg_id);

	val = (val & mask) | (old & ~mask);
	writel(val, res->vp_regs + reg_id);
}

static inline u32 mixer_reg_read(struct mixer_resources *res, u32 reg_id)
{
	return readl(res->mixer_regs + reg_id);
}

static inline void mixer_reg_write(struct mixer_resources *res, u32 reg_id,
				u32 val)
{
	writel(val, res->mixer_regs + reg_id);
}

static inline void mixer_reg_writemask(struct mixer_resources *res,
				u32 reg_id, u32 val, u32 mask)
{
	u32 old = mixer_reg_read(res, reg_id);

	val = (val & mask) | (old & ~mask);
	writel(val, res->mixer_regs + reg_id);
}

static void mixer_regs_dump(struct mixer_context *ctx)
{
#define DUMPREG(reg_id) \
do { \
	DRM_DEBUG_KMS(#reg_id " = %08x\n", \
		(u32)readl(ctx->mixer_res.mixer_regs + reg_id)); \
} while (0)

	DUMPREG(MXR_STATUS);
	DUMPREG(MXR_CFG);
	DUMPREG(MXR_INT_EN);
	DUMPREG(MXR_INT_STATUS);

	DUMPREG(MXR_LAYER_CFG);
	DUMPREG(MXR_VIDEO_CFG);

	DUMPREG(MXR_GRAPHIC0_CFG);
	DUMPREG(MXR_GRAPHIC0_BASE);
	DUMPREG(MXR_GRAPHIC0_SPAN);
	DUMPREG(MXR_GRAPHIC0_WH);
	DUMPREG(MXR_GRAPHIC0_SXY);
	DUMPREG(MXR_GRAPHIC0_DXY);

	DUMPREG(MXR_GRAPHIC1_CFG);
	DUMPREG(MXR_GRAPHIC1_BASE);
	DUMPREG(MXR_GRAPHIC1_SPAN);
	DUMPREG(MXR_GRAPHIC1_WH);
	DUMPREG(MXR_GRAPHIC1_SXY);
	DUMPREG(MXR_GRAPHIC1_DXY);
#undef DUMPREG
}

static void vp_regs_dump(struct mixer_context *ctx)
{
#define DUMPREG(reg_id) \
do { \
	DRM_DEBUG_KMS(#reg_id " = %08x\n", \
		(u32) readl(ctx->mixer_res.vp_regs + reg_id)); \
} while (0)

	DUMPREG(VP_ENABLE);
	DUMPREG(VP_SRESET);
	DUMPREG(VP_SHADOW_UPDATE);
	DUMPREG(VP_FIELD_ID);
	DUMPREG(VP_MODE);
	DUMPREG(VP_IMG_SIZE_Y);
	DUMPREG(VP_IMG_SIZE_C);
	DUMPREG(VP_PER_RATE_CTRL);
	DUMPREG(VP_TOP_Y_PTR);
	DUMPREG(VP_BOT_Y_PTR);
	DUMPREG(VP_TOP_C_PTR);
	DUMPREG(VP_BOT_C_PTR);
	DUMPREG(VP_ENDIAN_MODE);
	DUMPREG(VP_SRC_H_POSITION);
	DUMPREG(VP_SRC_V_POSITION);
	DUMPREG(VP_SRC_WIDTH);
	DUMPREG(VP_SRC_HEIGHT);
	DUMPREG(VP_DST_H_POSITION);
	DUMPREG(VP_DST_V_POSITION);
	DUMPREG(VP_DST_WIDTH);
	DUMPREG(VP_DST_HEIGHT);
	DUMPREG(VP_H_RATIO);
	DUMPREG(VP_V_RATIO);
#undef DUMPREG
}

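/*
 * Load a table of 8-bit filter coefficients into consecutive 32-bit VP
 * registers, packing four bytes per register with the first byte in the
 * most significant position.
 */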
static inline void vp_filter_set(struct mixer_resources *res,
		int reg_id, const u8 *data, unsigned int size)
{
	/* the coefficient table must be a whole number of 32-bit words */
	BUG_ON(size & 3);
	for (; size; size -= 4, reg_id += 4, data += 4) {
		u32 val = (data[0] << 24) |  (data[1] << 16) |
			(data[2] << 8) | data[3];
		vp_reg_write(res, reg_id, val);
	}
}

static void vp_default_filter(struct mixer_resources *res)
{
	vp_filter_set(res, VP_POLY8_Y0_LL,
		filter_y_horiz_tap8, sizeof(filter_y_horiz_tap8));
	vp_filter_set(res, VP_POLY4_Y0_LL,
		filter_y_vert_tap4, sizeof(filter_y_vert_tap4));
	vp_filter_set(res, VP_POLY4_C0_LL,
		filter_cr_horiz_tap4, sizeof(filter_cr_horiz_tap4));
}

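/*
 * Gate shadow-register updates around register programming: callers below
 * disable synchronized updates before writing a batch of registers and
 * re-enable them afterwards, so the hardware latches a complete, consistent
 * configuration on vsync rather than a partially written one.
 */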
static void mixer_vsync_set_update(struct mixer_context *ctx, bool enable)
{
	struct mixer_resources *res = &ctx->mixer_res;

	/* block update on vsync */
	mixer_reg_writemask(res, MXR_STATUS, enable ?
			MXR_STATUS_SYNC_ENABLE : 0, MXR_STATUS_SYNC_ENABLE);

	if (ctx->vp_enabled)
		vp_reg_write(res, VP_SHADOW_UPDATE, enable ?
			VP_SHADOW_UPDATE_ENABLE : 0);
}

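/*
 * Pick the scan-related MXR_CFG bits from the mode's vertical resolution:
 * interlaced vs. progressive plus the SD (480/576-line) or HD (720/1080-line)
 * timing class; any other resolution falls back to 720-line HD.
 */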
static void mixer_cfg_scan(struct mixer_context *ctx, unsigned int height)
{
	struct mixer_resources *res = &ctx->mixer_res;
	u32 val;

	/* choosing between interlace and progressive mode */
	val = (ctx->interlace ? MXR_CFG_SCAN_INTERLACE :
				MXR_CFG_SCAN_PROGRASSIVE);

	/* choosing between proper HD and SD mode */
	if (height == 480)
		val |= MXR_CFG_SCAN_NTSC | MXR_CFG_SCAN_SD;
	else if (height == 576)
		val |= MXR_CFG_SCAN_PAL | MXR_CFG_SCAN_SD;
	else if (height == 720)
		val |= MXR_CFG_SCAN_HD_720 | MXR_CFG_SCAN_HD;
	else if (height == 1080)
		val |= MXR_CFG_SCAN_HD_1080 | MXR_CFG_SCAN_HD;
	else
		val |= MXR_CFG_SCAN_HD_720 | MXR_CFG_SCAN_HD;

	mixer_reg_writemask(res, MXR_CFG, val, MXR_CFG_SCAN_MASK);
}

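/*
 * Select the RGB range/colorimetry for the output based on the mode height:
 * full-range BT.601 for SD modes (480/576 lines), limited-range BT.709 for
 * HD modes, where the color-matrix coefficient registers are also programmed.
 */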
static void mixer_cfg_rgb_fmt(struct mixer_context *ctx, unsigned int height)
{
	struct mixer_resources *res = &ctx->mixer_res;
	u32 val;

	if (height == 480) {
		val = MXR_CFG_RGB601_0_255;
	} else if (height == 576) {
		val = MXR_CFG_RGB601_0_255;
	} else if (height == 720) {
		val = MXR_CFG_RGB709_16_235;
		mixer_reg_write(res, MXR_CM_COEFF_Y,
				(1 << 30) | (94 << 20) | (314 << 10) |
				(32 << 0));
		mixer_reg_write(res, MXR_CM_COEFF_CB,
				(972 << 20) | (851 << 10) | (225 << 0));
		mixer_reg_write(res, MXR_CM_COEFF_CR,
				(225 << 20) | (820 << 10) | (1004 << 0));
	} else if (height == 1080) {
		val = MXR_CFG_RGB709_16_235;
		mixer_reg_write(res, MXR_CM_COEFF_Y,
				(1 << 30) | (94 << 20) | (314 << 10) |
				(32 << 0));
		mixer_reg_write(res, MXR_CM_COEFF_CB,
				(972 << 20) | (851 << 10) | (225 << 0));
		mixer_reg_write(res, MXR_CM_COEFF_CR,
				(225 << 20) | (820 << 10) | (1004 << 0));
	} else {
		val = MXR_CFG_RGB709_16_235;
		mixer_reg_write(res, MXR_CM_COEFF_Y,
				(1 << 30) | (94 << 20) | (314 << 10) |
				(32 << 0));
		mixer_reg_write(res, MXR_CM_COEFF_CB,
				(972 << 20) | (851 << 10) | (225 << 0));
		mixer_reg_write(res, MXR_CM_COEFF_CR,
				(225 << 20) | (820 << 10) | (1004 << 0));
	}

	mixer_reg_writemask(res, MXR_CFG, val, MXR_CFG_RGB_FMT_MASK);
}

static void mixer_cfg_layer(struct mixer_context *ctx, int win, bool enable)
{
	struct mixer_resources *res = &ctx->mixer_res;
	u32 val = enable ? ~0 : 0;

	switch (win) {
	case 0:
		mixer_reg_writemask(res, MXR_CFG, val, MXR_CFG_GRP0_ENABLE);
		break;
	case 1:
		mixer_reg_writemask(res, MXR_CFG, val, MXR_CFG_GRP1_ENABLE);
		break;
	case 2:
		if (ctx->vp_enabled) {
			vp_reg_writemask(res, VP_ENABLE, val, VP_ENABLE_ON);
			mixer_reg_writemask(res, MXR_CFG, val,
				MXR_CFG_VP_ENABLE);
		}
		break;
	}
}

static void mixer_run(struct mixer_context *ctx)
{
	struct mixer_resources *res = &ctx->mixer_res;

	mixer_reg_writemask(res, MXR_STATUS, ~0, MXR_STATUS_REG_RUN);

	mixer_regs_dump(ctx);
}

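/*
 * Program window 'win' of the video processor (VP) layer: NV12 or tiled
 * NV12MT sources, scaling ratios in 16.16 fixed point ((src << 16) / dst),
 * and separate top/bottom field base addresses when the mode is interlaced.
 * All register writes happen under reg_slock with shadow updates disabled,
 * so the new configuration is latched as one unit.
 */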
static void vp_video_buffer(struct mixer_context *ctx, int win)
{
	struct mixer_resources *res = &ctx->mixer_res;
	unsigned long flags;
	struct hdmi_win_data *win_data;
	unsigned int x_ratio, y_ratio;
	unsigned int buf_num;
	dma_addr_t luma_addr[2], chroma_addr[2];
	bool tiled_mode = false;
	bool crcb_mode = false;
	u32 val;

	win_data = &ctx->win_data[win];

	switch (win_data->pixel_format) {
	case DRM_FORMAT_NV12MT:
		tiled_mode = true;
		/* fall through */
	case DRM_FORMAT_NV12:
		crcb_mode = false;
		buf_num = 2;
		break;
	/* TODO: single buffer format NV12, NV21 */
	default:
		/* ignore pixel format at disable time */
		if (!win_data->dma_addr)
			break;

		DRM_ERROR("pixel format for vp is wrong [%d].\n",
				win_data->pixel_format);
		return;
	}

	/* scaling feature: (src << 16) / dst */
	x_ratio = (win_data->src_width << 16) / win_data->crtc_width;
	y_ratio = (win_data->src_height << 16) / win_data->crtc_height;

	if (buf_num == 2) {
		luma_addr[0] = win_data->dma_addr;
		chroma_addr[0] = win_data->chroma_dma_addr;
	} else {
		luma_addr[0] = win_data->dma_addr;
		chroma_addr[0] = win_data->dma_addr
			+ (win_data->fb_width * win_data->fb_height);
	}

	if (win_data->scan_flags & DRM_MODE_FLAG_INTERLACE) {
		ctx->interlace = true;
		if (tiled_mode) {
			luma_addr[1] = luma_addr[0] + 0x40;
			chroma_addr[1] = chroma_addr[0] + 0x40;
		} else {
			luma_addr[1] = luma_addr[0] + win_data->fb_width;
			chroma_addr[1] = chroma_addr[0] + win_data->fb_width;
		}
	} else {
		ctx->interlace = false;
		luma_addr[1] = 0;
		chroma_addr[1] = 0;
	}

	spin_lock_irqsave(&res->reg_slock, flags);
	mixer_vsync_set_update(ctx, false);

	/* interlace or progressive scan mode */
	val = (ctx->interlace ? ~0 : 0);
	vp_reg_writemask(res, VP_MODE, val, VP_MODE_LINE_SKIP);

	/* setup format */
	val = (crcb_mode ? VP_MODE_NV21 : VP_MODE_NV12);
	val |= (tiled_mode ? VP_MODE_MEM_TILED : VP_MODE_MEM_LINEAR);
	vp_reg_writemask(res, VP_MODE, val, VP_MODE_FMT_MASK);

	/* setting size of input image */
	vp_reg_write(res, VP_IMG_SIZE_Y, VP_IMG_HSIZE(win_data->fb_width) |
		VP_IMG_VSIZE(win_data->fb_height));
	/* chroma height has to be halved to avoid chroma distortions */
	vp_reg_write(res, VP_IMG_SIZE_C, VP_IMG_HSIZE(win_data->fb_width) |
		VP_IMG_VSIZE(win_data->fb_height / 2));

	vp_reg_write(res, VP_SRC_WIDTH, win_data->src_width);
	vp_reg_write(res, VP_SRC_HEIGHT, win_data->src_height);
	vp_reg_write(res, VP_SRC_H_POSITION,
			VP_SRC_H_POSITION_VAL(win_data->fb_x));
	vp_reg_write(res, VP_SRC_V_POSITION, win_data->fb_y);

	vp_reg_write(res, VP_DST_WIDTH, win_data->crtc_width);
	vp_reg_write(res, VP_DST_H_POSITION, win_data->crtc_x);
	if (ctx->interlace) {
		vp_reg_write(res, VP_DST_HEIGHT, win_data->crtc_height / 2);
		vp_reg_write(res, VP_DST_V_POSITION, win_data->crtc_y / 2);
	} else {
		vp_reg_write(res, VP_DST_HEIGHT, win_data->crtc_height);
		vp_reg_write(res, VP_DST_V_POSITION, win_data->crtc_y);
	}

	vp_reg_write(res, VP_H_RATIO, x_ratio);
	vp_reg_write(res, VP_V_RATIO, y_ratio);

	vp_reg_write(res, VP_ENDIAN_MODE, VP_ENDIAN_MODE_LITTLE);

	/* set buffer address to vp */
	vp_reg_write(res, VP_TOP_Y_PTR, luma_addr[0]);
	vp_reg_write(res, VP_BOT_Y_PTR, luma_addr[1]);
	vp_reg_write(res, VP_TOP_C_PTR, chroma_addr[0]);
	vp_reg_write(res, VP_BOT_C_PTR, chroma_addr[1]);

	mixer_cfg_scan(ctx, win_data->mode_height);
	mixer_cfg_rgb_fmt(ctx, win_data->mode_height);
	mixer_cfg_layer(ctx, win, true);
	mixer_run(ctx);

	mixer_vsync_set_update(ctx, true);
	spin_unlock_irqrestore(&res->reg_slock, flags);

	vp_regs_dump(ctx);
}

static void mixer_layer_update(struct mixer_context *ctx)
{
	struct mixer_resources *res = &ctx->mixer_res;
	u32 val;

	val = mixer_reg_read(res, MXR_CFG);

	/* allow one update per vsync only */
	if (!(val & MXR_CFG_LAYER_UPDATE_COUNT_MASK))
		mixer_reg_writemask(res, MXR_CFG, ~0, MXR_CFG_LAYER_UPDATE);
}

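/*
 * Program graphics window 'win': pick an ARGB format from the overlay's bpp,
 * compute the DMA base address from the fb_x/fb_y offset into the
 * framebuffer, and set the span, size and source/destination offsets. The 2x
 * hardware scaling feature is left disabled (x_ratio/y_ratio = 0).
 */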
static void mixer_graph_buffer(struct mixer_context *ctx, int win)
{
	struct mixer_resources *res = &ctx->mixer_res;
	unsigned long flags;
	struct hdmi_win_data *win_data;
	unsigned int x_ratio, y_ratio;
	unsigned int src_x_offset, src_y_offset, dst_x_offset, dst_y_offset;
	dma_addr_t dma_addr;
	unsigned int fmt;
	u32 val;

	win_data = &ctx->win_data[win];

#define RGB565 4
#define ARGB1555 5
#define ARGB4444 6
#define ARGB8888 7

	switch (win_data->bpp) {
	case 16:
		fmt = ARGB4444;
		break;
	case 32:
		fmt = ARGB8888;
		break;
	default:
		fmt = ARGB8888;
	}

	/* 2x scaling feature */
	x_ratio = 0;
	y_ratio = 0;

	dst_x_offset = win_data->crtc_x;
	dst_y_offset = win_data->crtc_y;

	/* converting dma address base and source offset */
	dma_addr = win_data->dma_addr
		+ (win_data->fb_x * win_data->bpp >> 3)
		+ (win_data->fb_y * win_data->fb_width * win_data->bpp >> 3);
	src_x_offset = 0;
	src_y_offset = 0;

	if (win_data->scan_flags & DRM_MODE_FLAG_INTERLACE)
		ctx->interlace = true;
	else
		ctx->interlace = false;

	spin_lock_irqsave(&res->reg_slock, flags);
	mixer_vsync_set_update(ctx, false);

	/* setup format */
	mixer_reg_writemask(res, MXR_GRAPHIC_CFG(win),
		MXR_GRP_CFG_FORMAT_VAL(fmt), MXR_GRP_CFG_FORMAT_MASK);

	/* setup geometry */
	mixer_reg_write(res, MXR_GRAPHIC_SPAN(win), win_data->fb_width);

	val  = MXR_GRP_WH_WIDTH(win_data->crtc_width);
	val |= MXR_GRP_WH_HEIGHT(win_data->crtc_height);
	val |= MXR_GRP_WH_H_SCALE(x_ratio);
	val |= MXR_GRP_WH_V_SCALE(y_ratio);
	mixer_reg_write(res, MXR_GRAPHIC_WH(win), val);

	/* setup offsets in source image */
	val  = MXR_GRP_SXY_SX(src_x_offset);
	val |= MXR_GRP_SXY_SY(src_y_offset);
	mixer_reg_write(res, MXR_GRAPHIC_SXY(win), val);

	/* setup offsets in display image */
	val  = MXR_GRP_DXY_DX(dst_x_offset);
	val |= MXR_GRP_DXY_DY(dst_y_offset);
	mixer_reg_write(res, MXR_GRAPHIC_DXY(win), val);

	/* set buffer address to mixer */
	mixer_reg_write(res, MXR_GRAPHIC_BASE(win), dma_addr);

	mixer_cfg_scan(ctx, win_data->mode_height);
	mixer_cfg_rgb_fmt(ctx, win_data->mode_height);
	mixer_cfg_layer(ctx, win, true);

	/* layer update mandatory for mixer 16.0.33.0 */
	if (ctx->mxr_ver == MXR_VER_16_0_33_0)
		mixer_layer_update(ctx);

	mixer_run(ctx);

	mixer_vsync_set_update(ctx, true);
	spin_unlock_irqrestore(&res->reg_slock, flags);
}

static void vp_win_reset(struct mixer_context *ctx)
{
	struct mixer_resources *res = &ctx->mixer_res;
	int tries = 100;

	vp_reg_write(res, VP_SRESET, VP_SRESET_PROCESSING);
	for (tries = 100; tries; --tries) {
		/* waiting until VP_SRESET_PROCESSING is 0 */
		if (~vp_reg_read(res, VP_SRESET) & VP_SRESET_PROCESSING)
			break;
		usleep_range(10000, 12000);
	}
	WARN(tries == 0, "failed to reset Video Processor\n");
}

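/*
 * Bring the mixer back to a known state: HDMI output path, RGB888 output,
 * 16-beat DMA bursts, default layer priority (layer1 > layer0 > video), a
 * default background color, opaque blending on both graphics layers, and
 * all layers disabled until a window is committed.
 */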
static void mixer_win_reset(struct mixer_context *ctx)
{
	struct mixer_resources *res = &ctx->mixer_res;
	unsigned long flags;
	u32 val; /* value stored to register */

	spin_lock_irqsave(&res->reg_slock, flags);
	mixer_vsync_set_update(ctx, false);

	mixer_reg_writemask(res, MXR_CFG, MXR_CFG_DST_HDMI, MXR_CFG_DST_MASK);

	/* set output in RGB888 mode */
	mixer_reg_writemask(res, MXR_CFG, MXR_CFG_OUT_RGB888, MXR_CFG_OUT_MASK);

	/* 16 beat burst in DMA */
	mixer_reg_writemask(res, MXR_STATUS, MXR_STATUS_16_BURST,
		MXR_STATUS_BURST_MASK);

	/* setting default layer priority: layer1 > layer0 > video
	 * because typical usage scenario would be
	 * layer1 - OSD
	 * layer0 - framebuffer
	 * video - video overlay
	 */
	val = MXR_LAYER_CFG_GRP1_VAL(3);
	val |= MXR_LAYER_CFG_GRP0_VAL(2);
	if (ctx->vp_enabled)
		val |= MXR_LAYER_CFG_VP_VAL(1);
	mixer_reg_write(res, MXR_LAYER_CFG, val);

	/* setting background color */
	mixer_reg_write(res, MXR_BG_COLOR0, 0x008080);
	mixer_reg_write(res, MXR_BG_COLOR1, 0x008080);
	mixer_reg_write(res, MXR_BG_COLOR2, 0x008080);

	/* setting graphical layers */
	val  = MXR_GRP_CFG_COLOR_KEY_DISABLE; /* no blank key */
	val |= MXR_GRP_CFG_WIN_BLEND_EN;
	val |= MXR_GRP_CFG_BLEND_PRE_MUL;
	val |= MXR_GRP_CFG_PIXEL_BLEND_EN;
	val |= MXR_GRP_CFG_ALPHA_VAL(0xff); /* non-transparent alpha */

	/* the same configuration for both layers */
	mixer_reg_write(res, MXR_GRAPHIC_CFG(0), val);
	mixer_reg_write(res, MXR_GRAPHIC_CFG(1), val);

	/* setting video layers */
	val = MXR_GRP_CFG_ALPHA_VAL(0);
	mixer_reg_write(res, MXR_VIDEO_CFG, val);

	if (ctx->vp_enabled) {
		/* configuration of Video Processor Registers */
		vp_win_reset(ctx);
		vp_default_filter(res);
	}

	/* disable all layers */
	mixer_reg_writemask(res, MXR_CFG, 0, MXR_CFG_GRP0_ENABLE);
	mixer_reg_writemask(res, MXR_CFG, 0, MXR_CFG_GRP1_ENABLE);
	if (ctx->vp_enabled)
		mixer_reg_writemask(res, MXR_CFG, 0, MXR_CFG_VP_ENABLE);

	mixer_vsync_set_update(ctx, true);
	spin_unlock_irqrestore(&res->reg_slock, flags);
}

static int mixer_iommu_on(void *ctx, bool enable)
{
	struct exynos_drm_hdmi_context *drm_hdmi_ctx;
	struct mixer_context *mdata = ctx;
	struct drm_device *drm_dev;

	drm_hdmi_ctx = mdata->parent_ctx;
	drm_dev = drm_hdmi_ctx->drm_dev;

	if (is_drm_iommu_supported(drm_dev)) {
		if (enable)
			return drm_iommu_attach_device(drm_dev, mdata->dev);

		drm_iommu_detach_device(drm_dev, mdata->dev);
	}
	return 0;
}

static int mixer_enable_vblank(void *ctx, int pipe)
{
	struct mixer_context *mixer_ctx = ctx;
	struct mixer_resources *res = &mixer_ctx->mixer_res;

	DRM_DEBUG_KMS("[%d] %s\n", __LINE__, __func__);

	mixer_ctx->pipe = pipe;

	/* enable vsync interrupt */
	mixer_reg_writemask(res, MXR_INT_EN, MXR_INT_EN_VSYNC,
			MXR_INT_EN_VSYNC);

	return 0;
}

static void mixer_disable_vblank(void *ctx)
{
	struct mixer_context *mixer_ctx = ctx;
	struct mixer_resources *res = &mixer_ctx->mixer_res;

	DRM_DEBUG_KMS("[%d] %s\n", __LINE__, __func__);

	/* disable vsync interrupt */
	mixer_reg_writemask(res, MXR_INT_EN, 0, MXR_INT_EN_VSYNC);
}

static void mixer_win_mode_set(void *ctx,
			struct exynos_drm_overlay *overlay)
{
	struct mixer_context *mixer_ctx = ctx;
	struct hdmi_win_data *win_data;
	int win;

	DRM_DEBUG_KMS("[%d] %s\n", __LINE__, __func__);

	if (!overlay) {
		DRM_ERROR("overlay is NULL\n");
		return;
	}

	DRM_DEBUG_KMS("set [%d]x[%d] at (%d,%d) to [%d]x[%d] at (%d,%d)\n",
				 overlay->fb_width, overlay->fb_height,
				 overlay->fb_x, overlay->fb_y,
				 overlay->crtc_width, overlay->crtc_height,
				 overlay->crtc_x, overlay->crtc_y);

	win = overlay->zpos;
	if (win == DEFAULT_ZPOS)
		win = MIXER_DEFAULT_WIN;

	/* win_data[] has only MIXER_WIN_NR slots */
	if (win < 0 || win >= MIXER_WIN_NR) {
		DRM_ERROR("mixer window[%d] is wrong\n", win);
		return;
	}

	win_data = &mixer_ctx->win_data[win];

	win_data->dma_addr = overlay->dma_addr[0];
	win_data->chroma_dma_addr = overlay->dma_addr[1];
	win_data->pixel_format = overlay->pixel_format;
	win_data->bpp = overlay->bpp;

	win_data->crtc_x = overlay->crtc_x;
	win_data->crtc_y = overlay->crtc_y;
	win_data->crtc_width = overlay->crtc_width;
	win_data->crtc_height = overlay->crtc_height;

	win_data->fb_x = overlay->fb_x;
	win_data->fb_y = overlay->fb_y;
	win_data->fb_width = overlay->fb_width;
	win_data->fb_height = overlay->fb_height;
	win_data->src_width = overlay->src_width;
	win_data->src_height = overlay->src_height;

	win_data->mode_width = overlay->mode_width;
	win_data->mode_height = overlay->mode_height;

	win_data->scan_flags = overlay->scan_flag;
}

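/*
 * Flush the cached window state to hardware. Window 2 is the video processor
 * overlay (when the SoC has one); windows 0 and 1 are the graphics layers.
 * Nothing is written while the mixer is powered down.
 */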
static void mixer_win_commit(void *ctx, int win)
{
	struct mixer_context *mixer_ctx = ctx;

	DRM_DEBUG_KMS("[%d] %s, win: %d\n", __LINE__, __func__, win);

	mutex_lock(&mixer_ctx->mixer_mutex);
	if (!mixer_ctx->powered) {
		mutex_unlock(&mixer_ctx->mixer_mutex);
		return;
	}
	mutex_unlock(&mixer_ctx->mixer_mutex);

	if (win > 1 && mixer_ctx->vp_enabled)
		vp_video_buffer(mixer_ctx, win);
	else
		mixer_graph_buffer(mixer_ctx, win);

	mixer_ctx->win_data[win].enabled = true;
}

static void mixer_win_disable(void *ctx, int win)
{
	struct mixer_context *mixer_ctx = ctx;
	struct mixer_resources *res = &mixer_ctx->mixer_res;
	unsigned long flags;

	DRM_DEBUG_KMS("[%d] %s, win: %d\n", __LINE__, __func__, win);

	mutex_lock(&mixer_ctx->mixer_mutex);
	if (!mixer_ctx->powered) {
		mutex_unlock(&mixer_ctx->mixer_mutex);
		mixer_ctx->win_data[win].resume = false;
		return;
	}
	mutex_unlock(&mixer_ctx->mixer_mutex);

	spin_lock_irqsave(&res->reg_slock, flags);
	mixer_vsync_set_update(mixer_ctx, false);

	mixer_cfg_layer(mixer_ctx, win, false);

	mixer_vsync_set_update(mixer_ctx, true);
	spin_unlock_irqrestore(&res->reg_slock, flags);

	mixer_ctx->win_data[win].enabled = false;
}

static void mixer_wait_for_vblank(void *ctx)
{
	struct mixer_context *mixer_ctx = ctx;

	mutex_lock(&mixer_ctx->mixer_mutex);
	if (!mixer_ctx->powered) {
		mutex_unlock(&mixer_ctx->mixer_mutex);
		return;
	}
	mutex_unlock(&mixer_ctx->mixer_mutex);

	atomic_set(&mixer_ctx->wait_vsync_event, 1);

	/*
	 * wait for MIXER to signal VSYNC interrupt or return after
	 * a timeout of 50ms (i.e. a 20Hz refresh rate).
	 */
	if (!wait_event_timeout(mixer_ctx->wait_vsync_queue,
				!atomic_read(&mixer_ctx->wait_vsync_event),
				DRM_HZ/20))
		DRM_DEBUG_KMS("vblank wait timed out.\n");
}

static void mixer_window_suspend(struct mixer_context *ctx)
{
	struct hdmi_win_data *win_data;
	int i;

	for (i = 0; i < MIXER_WIN_NR; i++) {
		win_data = &ctx->win_data[i];
		win_data->resume = win_data->enabled;
		mixer_win_disable(ctx, i);
	}
	mixer_wait_for_vblank(ctx);
}

static void mixer_window_resume(struct mixer_context *ctx)
{
	struct hdmi_win_data *win_data;
	int i;

	for (i = 0; i < MIXER_WIN_NR; i++) {
		win_data = &ctx->win_data[i];
		win_data->enabled = win_data->resume;
		win_data->resume = false;
	}
}

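/*
 * Power handling: mixer_poweron() enables the clocks, restores the saved
 * interrupt mask, re-initializes the hardware via mixer_win_reset() and
 * restores the windows' enabled flags from their saved resume state;
 * mixer_poweroff() does the reverse after saving MXR_INT_EN. Both are
 * reached through runtime PM and system sleep via mixer_dpms() and the
 * dev_pm_ops at the bottom of this file.
 */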
static void mixer_poweron(struct mixer_context *ctx)
{
	struct mixer_resources *res = &ctx->mixer_res;

	DRM_DEBUG_KMS("[%d] %s\n", __LINE__, __func__);

	mutex_lock(&ctx->mixer_mutex);
	if (ctx->powered) {
		mutex_unlock(&ctx->mixer_mutex);
		return;
	}
	ctx->powered = true;
	mutex_unlock(&ctx->mixer_mutex);

	clk_enable(res->mixer);
	if (ctx->vp_enabled) {
		clk_enable(res->vp);
		clk_enable(res->sclk_mixer);
	}

	mixer_reg_write(res, MXR_INT_EN, ctx->int_en);
	mixer_win_reset(ctx);

	mixer_window_resume(ctx);
}

static void mixer_poweroff(struct mixer_context *ctx)
{
	struct mixer_resources *res = &ctx->mixer_res;

	DRM_DEBUG_KMS("[%d] %s\n", __LINE__, __func__);

	mutex_lock(&ctx->mixer_mutex);
	if (!ctx->powered)
		goto out;
	mutex_unlock(&ctx->mixer_mutex);

	mixer_window_suspend(ctx);

	ctx->int_en = mixer_reg_read(res, MXR_INT_EN);

	clk_disable(res->mixer);
	if (ctx->vp_enabled) {
		clk_disable(res->vp);
		clk_disable(res->sclk_mixer);
	}

	mutex_lock(&ctx->mixer_mutex);
	ctx->powered = false;

out:
	mutex_unlock(&ctx->mixer_mutex);
}

static void mixer_dpms(void *ctx, int mode)
{
	struct mixer_context *mixer_ctx = ctx;

	DRM_DEBUG_KMS("[%d] %s\n", __LINE__, __func__);

	switch (mode) {
	case DRM_MODE_DPMS_ON:
		if (pm_runtime_suspended(mixer_ctx->dev))
			pm_runtime_get_sync(mixer_ctx->dev);
		break;
	case DRM_MODE_DPMS_STANDBY:
	case DRM_MODE_DPMS_SUSPEND:
	case DRM_MODE_DPMS_OFF:
		if (!pm_runtime_suspended(mixer_ctx->dev))
			pm_runtime_put_sync(mixer_ctx->dev);
		break;
	default:
		DRM_DEBUG_KMS("unknown dpms mode: %d\n", mode);
		break;
	}
}

static struct exynos_mixer_ops mixer_ops = {
	/* manager */
	.iommu_on		= mixer_iommu_on,
	.enable_vblank		= mixer_enable_vblank,
	.disable_vblank		= mixer_disable_vblank,
	.wait_for_vblank	= mixer_wait_for_vblank,
	.dpms			= mixer_dpms,

	/* overlay */
	.win_mode_set		= mixer_win_mode_set,
	.win_commit		= mixer_win_commit,
	.win_disable		= mixer_win_disable,
};

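/*
 * Vsync interrupt handler. For interlaced output the vblank is only reported
 * once the shadow copies of both graphics base addresses match the
 * programmed values, i.e. once the flip has actually latched. The handler
 * then signals DRM, finishes any pending page flip and wakes
 * mixer_wait_for_vblank() waiters before clearing the interrupt (vsync uses
 * a separate MXR_INT_CLEAR_VSYNC bit for clearing).
 */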
static irqreturn_t mixer_irq_handler(int irq, void *arg)
{
	struct exynos_drm_hdmi_context *drm_hdmi_ctx = arg;
	struct mixer_context *ctx = drm_hdmi_ctx->ctx;
	struct mixer_resources *res = &ctx->mixer_res;
	u32 val, base, shadow;

	spin_lock(&res->reg_slock);

	/* read interrupt status for handling and clearing flags for VSYNC */
	val = mixer_reg_read(res, MXR_INT_STATUS);

	/* handling VSYNC */
	if (val & MXR_INT_STATUS_VSYNC) {
		/* interlaced scan needs to check the shadow registers */
		if (ctx->interlace) {
			base = mixer_reg_read(res, MXR_GRAPHIC_BASE(0));
			shadow = mixer_reg_read(res, MXR_GRAPHIC_BASE_S(0));
			if (base != shadow)
				goto out;

			base = mixer_reg_read(res, MXR_GRAPHIC_BASE(1));
			shadow = mixer_reg_read(res, MXR_GRAPHIC_BASE_S(1));
			if (base != shadow)
				goto out;
		}

		drm_handle_vblank(drm_hdmi_ctx->drm_dev, ctx->pipe);
		exynos_drm_crtc_finish_pageflip(drm_hdmi_ctx->drm_dev,
				ctx->pipe);

		/* set wait vsync event to zero and wake up queue. */
		if (atomic_read(&ctx->wait_vsync_event)) {
			atomic_set(&ctx->wait_vsync_event, 0);
			DRM_WAKEUP(&ctx->wait_vsync_queue);
		}
	}

out:
	/* clear interrupts */
	if (~val & MXR_INT_EN_VSYNC) {
		/* the vsync interrupt uses different bits for read and clear */
		val &= ~MXR_INT_EN_VSYNC;
		val |= MXR_INT_CLEAR_VSYNC;
	}
	mixer_reg_write(res, MXR_INT_STATUS, val);

	spin_unlock(&res->reg_slock);

	return IRQ_HANDLED;
}

static int mixer_resources_init(struct exynos_drm_hdmi_context *ctx,
				struct platform_device *pdev)
{
	struct mixer_context *mixer_ctx = ctx->ctx;
	struct device *dev = &pdev->dev;
	struct mixer_resources *mixer_res = &mixer_ctx->mixer_res;
	struct resource *res;
	int ret;

	spin_lock_init(&mixer_res->reg_slock);

	mixer_res->mixer = devm_clk_get(dev, "mixer");
	if (IS_ERR_OR_NULL(mixer_res->mixer)) {
		dev_err(dev, "failed to get clock 'mixer'\n");
		return -ENODEV;
	}

	mixer_res->sclk_hdmi = devm_clk_get(dev, "sclk_hdmi");
	if (IS_ERR_OR_NULL(mixer_res->sclk_hdmi)) {
		dev_err(dev, "failed to get clock 'sclk_hdmi'\n");
		return -ENODEV;
	}

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (res == NULL) {
		dev_err(dev, "get memory resource failed.\n");
		return -ENXIO;
	}

	mixer_res->mixer_regs = devm_ioremap(&pdev->dev, res->start,
							resource_size(res));
	if (mixer_res->mixer_regs == NULL) {
		dev_err(dev, "register mapping failed.\n");
		return -ENXIO;
	}

	res = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (res == NULL) {
		dev_err(dev, "get interrupt resource failed.\n");
		return -ENXIO;
	}

	ret = devm_request_irq(&pdev->dev, res->start, mixer_irq_handler,
						0, "drm_mixer", ctx);
	if (ret) {
		dev_err(dev, "request interrupt failed.\n");
		return ret;
	}
	mixer_res->irq = res->start;

	return 0;
}

static int vp_resources_init(struct exynos_drm_hdmi_context *ctx,
			     struct platform_device *pdev)
{
	struct mixer_context *mixer_ctx = ctx->ctx;
	struct device *dev = &pdev->dev;
	struct mixer_resources *mixer_res = &mixer_ctx->mixer_res;
	struct resource *res;

	mixer_res->vp = devm_clk_get(dev, "vp");
	if (IS_ERR_OR_NULL(mixer_res->vp)) {
		dev_err(dev, "failed to get clock 'vp'\n");
		return -ENODEV;
	}

	mixer_res->sclk_mixer = devm_clk_get(dev, "sclk_mixer");
	if (IS_ERR_OR_NULL(mixer_res->sclk_mixer)) {
		dev_err(dev, "failed to get clock 'sclk_mixer'\n");
		return -ENODEV;
	}

	mixer_res->sclk_dac = devm_clk_get(dev, "sclk_dac");
	if (IS_ERR_OR_NULL(mixer_res->sclk_dac)) {
		dev_err(dev, "failed to get clock 'sclk_dac'\n");
		return -ENODEV;
	}

	if (mixer_res->sclk_hdmi)
		clk_set_parent(mixer_res->sclk_mixer, mixer_res->sclk_hdmi);

	res = platform_get_resource(pdev, IORESOURCE_MEM, 1);
	if (res == NULL) {
		dev_err(dev, "get memory resource failed.\n");
		return -ENXIO;
	}

	mixer_res->vp_regs = devm_ioremap(&pdev->dev, res->start,
							resource_size(res));
	if (mixer_res->vp_regs == NULL) {
		dev_err(dev, "register mapping failed.\n");
		return -ENXIO;
	}

	return 0;
}

static struct mixer_drv_data exynos5_mxr_drv_data = {
	.version = MXR_VER_16_0_33_0,
	.is_vp_enabled = 0,
};

static struct mixer_drv_data exynos4_mxr_drv_data = {
	.version = MXR_VER_0_0_0_16,
	.is_vp_enabled = 1,
};

static struct platform_device_id mixer_driver_types[] = {
	{
		.name		= "s5p-mixer",
		.driver_data	= (unsigned long)&exynos4_mxr_drv_data,
	}, {
		.name		= "exynos5-mixer",
		.driver_data	= (unsigned long)&exynos5_mxr_drv_data,
	}, {
		/* end node */
	}
};

static struct of_device_id mixer_match_types[] = {
	{
		.compatible = "samsung,exynos5-mixer",
		.data	= &exynos5_mxr_drv_data,
	}, {
		/* end node */
	}
};

static int mixer_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct exynos_drm_hdmi_context *drm_hdmi_ctx;
	struct mixer_context *ctx;
	struct mixer_drv_data *drv;
	int ret;

	dev_info(dev, "probe start\n");

	drm_hdmi_ctx = devm_kzalloc(&pdev->dev, sizeof(*drm_hdmi_ctx),
								GFP_KERNEL);
	if (!drm_hdmi_ctx) {
		DRM_ERROR("failed to allocate common hdmi context.\n");
		return -ENOMEM;
	}

	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
	if (!ctx) {
		DRM_ERROR("failed to alloc mixer context.\n");
		return -ENOMEM;
	}

	mutex_init(&ctx->mixer_mutex);

	if (dev->of_node) {
		const struct of_device_id *match;
		match = of_match_node(of_match_ptr(mixer_match_types),
							  pdev->dev.of_node);
		drv = (struct mixer_drv_data *)match->data;
	} else {
		drv = (struct mixer_drv_data *)
			platform_get_device_id(pdev)->driver_data;
	}

	ctx->dev = &pdev->dev;
	ctx->parent_ctx = (void *)drm_hdmi_ctx;
	drm_hdmi_ctx->ctx = (void *)ctx;
	ctx->vp_enabled = drv->is_vp_enabled;
	ctx->mxr_ver = drv->version;
	DRM_INIT_WAITQUEUE(&ctx->wait_vsync_queue);
	atomic_set(&ctx->wait_vsync_event, 0);

	platform_set_drvdata(pdev, drm_hdmi_ctx);

	/* acquire resources: regs, irqs, clocks */
	ret = mixer_resources_init(drm_hdmi_ctx, pdev);
	if (ret) {
		DRM_ERROR("mixer_resources_init failed\n");
		goto fail;
	}

	if (ctx->vp_enabled) {
		/* acquire vp resources: regs, irqs, clocks */
		ret = vp_resources_init(drm_hdmi_ctx, pdev);
		if (ret) {
			DRM_ERROR("vp_resources_init failed\n");
			goto fail;
		}
	}

	/* attach mixer driver to common hdmi. */
	exynos_mixer_drv_attach(drm_hdmi_ctx);

	/* register mixer-specific callbacks to common hdmi. */
	exynos_mixer_ops_register(&mixer_ops);

	pm_runtime_enable(dev);

	return 0;

fail:
	dev_info(dev, "probe failed\n");
	return ret;
}

static int mixer_remove(struct platform_device *pdev)
{
	dev_info(&pdev->dev, "remove successful\n");

	pm_runtime_disable(&pdev->dev);

	return 0;
}

#ifdef CONFIG_PM_SLEEP
static int mixer_suspend(struct device *dev)
{
	struct exynos_drm_hdmi_context *drm_hdmi_ctx = get_mixer_context(dev);
	struct mixer_context *ctx = drm_hdmi_ctx->ctx;

	DRM_DEBUG_KMS("[%d] %s\n", __LINE__, __func__);

	if (pm_runtime_suspended(dev)) {
		DRM_DEBUG_KMS("%s : Already suspended\n", __func__);
		return 0;
	}

	mixer_poweroff(ctx);

	return 0;
}

static int mixer_resume(struct device *dev)
{
	struct exynos_drm_hdmi_context *drm_hdmi_ctx = get_mixer_context(dev);
	struct mixer_context *ctx = drm_hdmi_ctx->ctx;

	DRM_DEBUG_KMS("[%d] %s\n", __LINE__, __func__);

	if (!pm_runtime_suspended(dev)) {
		DRM_DEBUG_KMS("%s : Already resumed\n", __func__);
		return 0;
	}

	mixer_poweron(ctx);

	return 0;
}
#endif

#ifdef CONFIG_PM_RUNTIME
static int mixer_runtime_suspend(struct device *dev)
{
	struct exynos_drm_hdmi_context *drm_hdmi_ctx = get_mixer_context(dev);
	struct mixer_context *ctx = drm_hdmi_ctx->ctx;

	DRM_DEBUG_KMS("[%d] %s\n", __LINE__, __func__);

	mixer_poweroff(ctx);

	return 0;
}

static int mixer_runtime_resume(struct device *dev)
{
	struct exynos_drm_hdmi_context *drm_hdmi_ctx = get_mixer_context(dev);
	struct mixer_context *ctx = drm_hdmi_ctx->ctx;

	DRM_DEBUG_KMS("[%d] %s\n", __LINE__, __func__);

	mixer_poweron(ctx);

	return 0;
}
#endif

static const struct dev_pm_ops mixer_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(mixer_suspend, mixer_resume)
	SET_RUNTIME_PM_OPS(mixer_runtime_suspend, mixer_runtime_resume, NULL)
};

struct platform_driver mixer_driver = {
	.driver = {
		.name = "exynos-mixer",
		.owner = THIS_MODULE,
		.pm = &mixer_pm_ops,
		.of_match_table = mixer_match_types,
	},
	.probe = mixer_probe,
	.remove = mixer_remove,
	.id_table = mixer_driver_types,
};