fimc-core.c

/*
 * Samsung S5P/EXYNOS4 SoC series camera interface (video postprocessor) driver
 *
 * Copyright (C) 2010-2011 Samsung Electronics Co., Ltd.
 * Contact: Sylwester Nawrocki, <s.nawrocki@samsung.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published
 * by the Free Software Foundation, either version 2 of the License,
 * or (at your option) any later version.
 */

#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/errno.h>
#include <linux/bug.h>
#include <linux/interrupt.h>
#include <linux/device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/list.h>
#include <linux/io.h>
#include <linux/slab.h>
#include <linux/clk.h>

#include <media/v4l2-ioctl.h>
#include <media/videobuf2-core.h>
#include <media/videobuf2-dma-contig.h>

#include "fimc-core.h"
#include "fimc-mdevice.h"

static char *fimc_clocks[MAX_FIMC_CLOCKS] = {
	"sclk_fimc", "fimc"
};

static struct fimc_fmt fimc_formats[] = {
	{
		.name = "RGB565",
		.fourcc = V4L2_PIX_FMT_RGB565X,
		.depth = { 16 },
		.color = S5P_FIMC_RGB565,
		.memplanes = 1,
		.colplanes = 1,
		.flags = FMT_FLAGS_M2M,
	}, {
		.name = "BGR666",
		.fourcc = V4L2_PIX_FMT_BGR666,
		.depth = { 32 },
		.color = S5P_FIMC_RGB666,
		.memplanes = 1,
		.colplanes = 1,
		.flags = FMT_FLAGS_M2M,
	}, {
		.name = "XRGB-8-8-8-8, 32 bpp",
		.fourcc = V4L2_PIX_FMT_RGB32,
		.depth = { 32 },
		.color = S5P_FIMC_RGB888,
		.memplanes = 1,
		.colplanes = 1,
		.flags = FMT_FLAGS_M2M,
	}, {
		.name = "YUV 4:2:2 packed, YCbYCr",
		.fourcc = V4L2_PIX_FMT_YUYV,
		.depth = { 16 },
		.color = S5P_FIMC_YCBYCR422,
		.memplanes = 1,
		.colplanes = 1,
		.mbus_code = V4L2_MBUS_FMT_YUYV8_2X8,
		.flags = FMT_FLAGS_M2M | FMT_FLAGS_CAM,
	}, {
		.name = "YUV 4:2:2 packed, CbYCrY",
		.fourcc = V4L2_PIX_FMT_UYVY,
		.depth = { 16 },
		.color = S5P_FIMC_CBYCRY422,
		.memplanes = 1,
		.colplanes = 1,
		.mbus_code = V4L2_MBUS_FMT_UYVY8_2X8,
		.flags = FMT_FLAGS_M2M | FMT_FLAGS_CAM,
	}, {
		.name = "YUV 4:2:2 packed, CrYCbY",
		.fourcc = V4L2_PIX_FMT_VYUY,
		.depth = { 16 },
		.color = S5P_FIMC_CRYCBY422,
		.memplanes = 1,
		.colplanes = 1,
		.mbus_code = V4L2_MBUS_FMT_VYUY8_2X8,
		.flags = FMT_FLAGS_M2M | FMT_FLAGS_CAM,
	}, {
		.name = "YUV 4:2:2 packed, YCrYCb",
		.fourcc = V4L2_PIX_FMT_YVYU,
		.depth = { 16 },
		.color = S5P_FIMC_YCRYCB422,
		.memplanes = 1,
		.colplanes = 1,
		.mbus_code = V4L2_MBUS_FMT_YVYU8_2X8,
		.flags = FMT_FLAGS_M2M | FMT_FLAGS_CAM,
	}, {
		.name = "YUV 4:2:2 planar, Y/Cb/Cr",
		.fourcc = V4L2_PIX_FMT_YUV422P,
		.depth = { 12 },
		.color = S5P_FIMC_YCBYCR422,
		.memplanes = 1,
		.colplanes = 3,
		.flags = FMT_FLAGS_M2M,
	}, {
		.name = "YUV 4:2:2 planar, Y/CbCr",
		.fourcc = V4L2_PIX_FMT_NV16,
		.depth = { 16 },
		.color = S5P_FIMC_YCBYCR422,
		.memplanes = 1,
		.colplanes = 2,
		.flags = FMT_FLAGS_M2M,
	}, {
		.name = "YUV 4:2:2 planar, Y/CrCb",
		.fourcc = V4L2_PIX_FMT_NV61,
		.depth = { 16 },
		.color = S5P_FIMC_YCRYCB422,
		.memplanes = 1,
		.colplanes = 2,
		.flags = FMT_FLAGS_M2M,
	}, {
		.name = "YUV 4:2:0 planar, YCbCr",
		.fourcc = V4L2_PIX_FMT_YUV420,
		.depth = { 12 },
		.color = S5P_FIMC_YCBCR420,
		.memplanes = 1,
		.colplanes = 3,
		.flags = FMT_FLAGS_M2M,
	}, {
		.name = "YUV 4:2:0 planar, Y/CbCr",
		.fourcc = V4L2_PIX_FMT_NV12,
		.depth = { 12 },
		.color = S5P_FIMC_YCBCR420,
		.memplanes = 1,
		.colplanes = 2,
		.flags = FMT_FLAGS_M2M,
	}, {
		.name = "YUV 4:2:0 non-contiguous 2-planar, Y/CbCr",
		.fourcc = V4L2_PIX_FMT_NV12M,
		.color = S5P_FIMC_YCBCR420,
		.depth = { 8, 4 },
		.memplanes = 2,
		.colplanes = 2,
		.flags = FMT_FLAGS_M2M,
	}, {
		.name = "YUV 4:2:0 non-contiguous 3-planar, Y/Cb/Cr",
		.fourcc = V4L2_PIX_FMT_YUV420M,
		.color = S5P_FIMC_YCBCR420,
		.depth = { 8, 2, 2 },
		.memplanes = 3,
		.colplanes = 3,
		.flags = FMT_FLAGS_M2M,
	}, {
		.name = "YUV 4:2:0 non-contiguous 2-planar, Y/CbCr, tiled",
		.fourcc = V4L2_PIX_FMT_NV12MT,
		.color = S5P_FIMC_YCBCR420,
		.depth = { 8, 4 },
		.memplanes = 2,
		.colplanes = 2,
		.flags = FMT_FLAGS_M2M,
	},
};
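
/*
 * Verify that the scaling ratio implied by the source (sw x sh) and
 * destination (dw x dh) sizes stays within the hardware limits.
 * For 90/270 degree rotation the destination dimensions are swapped
 * before the check.
 */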
int fimc_check_scaler_ratio(int sw, int sh, int dw, int dh, int rot)
{
	int tx, ty;

	if (rot == 90 || rot == 270) {
		ty = dw;
		tx = dh;
	} else {
		tx = dw;
		ty = dh;
	}

	if ((sw >= SCALER_MAX_HRATIO * tx) || (sh >= SCALER_MAX_VRATIO * ty))
		return -EINVAL;

	return 0;
}
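
/*
 * Find the pre-scaler ratio and shift for a source/target dimension pair:
 * the largest power of two (up to 32) by which the source can be
 * pre-divided while still remaining at least as large as the target.
 */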
static int fimc_get_scaler_factor(u32 src, u32 tar, u32 *ratio, u32 *shift)
{
	u32 sh = 6;

	if (src >= 64 * tar)
		return -EINVAL;

	while (sh--) {
		u32 tmp = 1 << sh;
		if (src >= tar * tmp) {
			*shift = sh, *ratio = tmp;
			return 0;
		}
	}
	*shift = 0, *ratio = 1;
	return 0;
}
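
/*
 * Set up the pre-scaler and main scaler coefficients for the context,
 * based on the source/destination frame sizes and the rotation setting.
 * Returns -EINVAL for invalid dimensions or an out-of-range ratio.
 */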
int fimc_set_scaler_info(struct fimc_ctx *ctx)
{
	struct samsung_fimc_variant *variant = ctx->fimc_dev->variant;
	struct device *dev = &ctx->fimc_dev->pdev->dev;
	struct fimc_scaler *sc = &ctx->scaler;
	struct fimc_frame *s_frame = &ctx->s_frame;
	struct fimc_frame *d_frame = &ctx->d_frame;
	int tx, ty, sx, sy;
	int ret;

	if (ctx->rotation == 90 || ctx->rotation == 270) {
		ty = d_frame->width;
		tx = d_frame->height;
	} else {
		tx = d_frame->width;
		ty = d_frame->height;
	}
	if (tx <= 0 || ty <= 0) {
		dev_err(dev, "Invalid target size: %dx%d", tx, ty);
		return -EINVAL;
	}

	sx = s_frame->width;
	sy = s_frame->height;
	if (sx <= 0 || sy <= 0) {
		dev_err(dev, "Invalid source size: %dx%d", sx, sy);
		return -EINVAL;
	}
	sc->real_width = sx;
	sc->real_height = sy;

	ret = fimc_get_scaler_factor(sx, tx, &sc->pre_hratio, &sc->hfactor);
	if (ret)
		return ret;

	ret = fimc_get_scaler_factor(sy, ty, &sc->pre_vratio, &sc->vfactor);
	if (ret)
		return ret;

	sc->pre_dst_width = sx / sc->pre_hratio;
	sc->pre_dst_height = sy / sc->pre_vratio;

	if (variant->has_mainscaler_ext) {
		sc->main_hratio = (sx << 14) / (tx << sc->hfactor);
		sc->main_vratio = (sy << 14) / (ty << sc->vfactor);
	} else {
		sc->main_hratio = (sx << 8) / (tx << sc->hfactor);
		sc->main_vratio = (sy << 8) / (ty << sc->vfactor);
	}

	sc->scaleup_h = (tx >= sx) ? 1 : 0;
	sc->scaleup_v = (ty >= sy) ? 1 : 0;

	/* check to see if input and output size/format differ */
	if (s_frame->fmt->color == d_frame->fmt->color
	    && s_frame->width == d_frame->width
	    && s_frame->height == d_frame->height)
		sc->copy_mode = 1;
	else
		sc->copy_mode = 0;

	return 0;
}
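
/*
 * Remove the current source/destination buffer pair from the M2M queues,
 * mark both buffers with @vb_state and signal job completion to the
 * V4L2 mem-to-mem framework.
 */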
static void fimc_m2m_job_finish(struct fimc_ctx *ctx, int vb_state)
{
	struct vb2_buffer *src_vb, *dst_vb;

	if (!ctx || !ctx->m2m_ctx)
		return;

	src_vb = v4l2_m2m_src_buf_remove(ctx->m2m_ctx);
	dst_vb = v4l2_m2m_dst_buf_remove(ctx->m2m_ctx);

	if (src_vb && dst_vb) {
		v4l2_m2m_buf_done(src_vb, vb_state);
		v4l2_m2m_buf_done(dst_vb, vb_state);
		v4l2_m2m_job_finish(ctx->fimc_dev->m2m.m2m_dev,
				    ctx->m2m_ctx);
	}
}

/* Complete the transaction which has been scheduled for execution. */
static int fimc_m2m_shutdown(struct fimc_ctx *ctx)
{
	struct fimc_dev *fimc = ctx->fimc_dev;
	int ret;

	if (!fimc_m2m_pending(fimc))
		return 0;

	fimc_ctx_state_lock_set(FIMC_CTX_SHUT, ctx);

	ret = wait_event_timeout(fimc->irq_queue,
			!fimc_ctx_state_is_set(FIMC_CTX_SHUT, ctx),
			FIMC_SHUTDOWN_TIMEOUT);

	return ret == 0 ? -ETIMEDOUT : ret;
}

static int start_streaming(struct vb2_queue *q, unsigned int count)
{
	struct fimc_ctx *ctx = q->drv_priv;
	int ret;

	ret = pm_runtime_get_sync(&ctx->fimc_dev->pdev->dev);
	return ret > 0 ? 0 : ret;
}

static int stop_streaming(struct vb2_queue *q)
{
	struct fimc_ctx *ctx = q->drv_priv;
	int ret;

	ret = fimc_m2m_shutdown(ctx);
	if (ret == -ETIMEDOUT)
		fimc_m2m_job_finish(ctx, VB2_BUF_STATE_ERROR);

	pm_runtime_put(&ctx->fimc_dev->pdev->dev);
	return 0;
}
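
/*
 * Capture path interrupt handling: time-stamp and return the completed
 * buffer to userspace, then program the next pending buffer address
 * into the hardware output DMA registers.
 */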
static void fimc_capture_irq_handler(struct fimc_dev *fimc)
{
	struct fimc_vid_cap *cap = &fimc->vid_cap;
	struct fimc_vid_buffer *v_buf;
	struct timeval *tv;
	struct timespec ts;

	if (!list_empty(&cap->active_buf_q) &&
	    test_bit(ST_CAPT_RUN, &fimc->state)) {
		ktime_get_real_ts(&ts);
		v_buf = active_queue_pop(cap);

		tv = &v_buf->vb.v4l2_buf.timestamp;
		tv->tv_sec = ts.tv_sec;
		tv->tv_usec = ts.tv_nsec / NSEC_PER_USEC;
		v_buf->vb.v4l2_buf.sequence = cap->frame_count++;

		vb2_buffer_done(&v_buf->vb, VB2_BUF_STATE_DONE);
	}

	if (test_and_clear_bit(ST_CAPT_SHUT, &fimc->state)) {
		wake_up(&fimc->irq_queue);
		return;
	}

	if (!list_empty(&cap->pending_buf_q)) {
		v_buf = pending_queue_pop(cap);
		fimc_hw_set_output_addr(fimc, &v_buf->paddr, cap->buf_index);
		v_buf->index = cap->buf_index;

		/* Move the buffer to the capture active queue */
		active_queue_add(cap, v_buf);

		dbg("next frame: %d, done frame: %d",
		    fimc_hw_get_frame_index(fimc), v_buf->index);

		if (++cap->buf_index >= FIMC_MAX_OUT_BUFS)
			cap->buf_index = 0;
	}

	if (cap->active_buf_cnt == 0) {
		clear_bit(ST_CAPT_RUN, &fimc->state);

		if (++cap->buf_index >= FIMC_MAX_OUT_BUFS)
			cap->buf_index = 0;
	} else {
		set_bit(ST_CAPT_RUN, &fimc->state);
	}

	fimc_capture_config_update(cap->ctx);

	dbg("frame: %d, active_buf_cnt: %d",
	    fimc_hw_get_frame_index(fimc), cap->active_buf_cnt);
}
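
/* Main FIMC interrupt handler, shared by the M2M and capture paths. */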
static irqreturn_t fimc_irq_handler(int irq, void *priv)
{
	struct fimc_dev *fimc = priv;
	struct fimc_vid_cap *cap = &fimc->vid_cap;
	struct fimc_ctx *ctx;

	fimc_hw_clear_irq(fimc);

	spin_lock(&fimc->slock);

	if (test_and_clear_bit(ST_M2M_PEND, &fimc->state)) {
		if (test_and_clear_bit(ST_M2M_SUSPENDING, &fimc->state)) {
			set_bit(ST_M2M_SUSPENDED, &fimc->state);
			wake_up(&fimc->irq_queue);
			goto out;
		}
		ctx = v4l2_m2m_get_curr_priv(fimc->m2m.m2m_dev);
		if (ctx != NULL) {
			spin_unlock(&fimc->slock);
			fimc_m2m_job_finish(ctx, VB2_BUF_STATE_DONE);

			spin_lock(&ctx->slock);
			if (ctx->state & FIMC_CTX_SHUT) {
				ctx->state &= ~FIMC_CTX_SHUT;
				wake_up(&fimc->irq_queue);
			}
			spin_unlock(&ctx->slock);
		}
		return IRQ_HANDLED;
	} else {
		if (test_bit(ST_CAPT_PEND, &fimc->state)) {
			fimc_capture_irq_handler(fimc);
			if (cap->active_buf_cnt == 1) {
				fimc_deactivate_capture(fimc);
				clear_bit(ST_CAPT_STREAM, &fimc->state);
			}
		}
	}
out:
	spin_unlock(&fimc->slock);
	return IRQ_HANDLED;
}

/* The color format (colplanes, memplanes) must already be configured. */
int fimc_prepare_addr(struct fimc_ctx *ctx, struct vb2_buffer *vb,
		      struct fimc_frame *frame, struct fimc_addr *paddr)
{
	int ret = 0;
	u32 pix_size;

	if (vb == NULL || frame == NULL)
		return -EINVAL;

	pix_size = frame->width * frame->height;

	dbg("memplanes= %d, colplanes= %d, pix_size= %d",
	    frame->fmt->memplanes, frame->fmt->colplanes, pix_size);

	paddr->y = vb2_dma_contig_plane_dma_addr(vb, 0);

	if (frame->fmt->memplanes == 1) {
		switch (frame->fmt->colplanes) {
		case 1:
			paddr->cb = 0;
			paddr->cr = 0;
			break;
		case 2:
			/* decompose Y into Y/Cb */
			paddr->cb = (u32)(paddr->y + pix_size);
			paddr->cr = 0;
			break;
		case 3:
			paddr->cb = (u32)(paddr->y + pix_size);
			/* decompose Y into Y/Cb/Cr */
			if (S5P_FIMC_YCBCR420 == frame->fmt->color)
				paddr->cr = (u32)(paddr->cb
						+ (pix_size >> 2));
			else /* 422 */
				paddr->cr = (u32)(paddr->cb
						+ (pix_size >> 1));
			break;
		default:
			return -EINVAL;
		}
	} else {
		if (frame->fmt->memplanes >= 2)
			paddr->cb = vb2_dma_contig_plane_dma_addr(vb, 1);

		if (frame->fmt->memplanes == 3)
			paddr->cr = vb2_dma_contig_plane_dma_addr(vb, 2);
	}

	dbg("PHYS_ADDR: y= 0x%X cb= 0x%X cr= 0x%X ret= %d",
	    paddr->y, paddr->cb, paddr->cr, ret);

	return ret;
}

/* Set order for 1 and 2 plane YCBCR 4:2:2 formats. */
void fimc_set_yuv_order(struct fimc_ctx *ctx)
{
	/* The only 2-plane order supported by the SoC. */
	ctx->in_order_2p = S5P_FIMC_LSB_CRCB;
	ctx->out_order_2p = S5P_FIMC_LSB_CRCB;

	/* Set order for 1 plane input formats. */
	switch (ctx->s_frame.fmt->color) {
	case S5P_FIMC_YCRYCB422:
		ctx->in_order_1p = S5P_MSCTRL_ORDER422_CBYCRY;
		break;
	case S5P_FIMC_CBYCRY422:
		ctx->in_order_1p = S5P_MSCTRL_ORDER422_YCRYCB;
		break;
	case S5P_FIMC_CRYCBY422:
		ctx->in_order_1p = S5P_MSCTRL_ORDER422_YCBYCR;
		break;
	case S5P_FIMC_YCBYCR422:
	default:
		ctx->in_order_1p = S5P_MSCTRL_ORDER422_CRYCBY;
		break;
	}
	dbg("ctx->in_order_1p= %d", ctx->in_order_1p);

	switch (ctx->d_frame.fmt->color) {
	case S5P_FIMC_YCRYCB422:
		ctx->out_order_1p = S5P_CIOCTRL_ORDER422_CBYCRY;
		break;
	case S5P_FIMC_CBYCRY422:
		ctx->out_order_1p = S5P_CIOCTRL_ORDER422_YCRYCB;
		break;
	case S5P_FIMC_CRYCBY422:
		ctx->out_order_1p = S5P_CIOCTRL_ORDER422_YCBYCR;
		break;
	case S5P_FIMC_YCBYCR422:
	default:
		ctx->out_order_1p = S5P_CIOCTRL_ORDER422_CRYCBY;
		break;
	}
	dbg("ctx->out_order_1p= %d", ctx->out_order_1p);
}
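
/*
 * Calculate the luma/chroma DMA offsets for frame @f, taking into account
 * whether the variant supports pixel-granularity horizontal offsets and
 * how the chroma planes are subsampled.
 */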
void fimc_prepare_dma_offset(struct fimc_ctx *ctx, struct fimc_frame *f)
{
	struct samsung_fimc_variant *variant = ctx->fimc_dev->variant;
	u32 i, depth = 0;

	for (i = 0; i < f->fmt->colplanes; i++)
		depth += f->fmt->depth[i];

	f->dma_offset.y_h = f->offs_h;
	if (!variant->pix_hoff)
		f->dma_offset.y_h *= (depth >> 3);

	f->dma_offset.y_v = f->offs_v;

	f->dma_offset.cb_h = f->offs_h;
	f->dma_offset.cb_v = f->offs_v;

	f->dma_offset.cr_h = f->offs_h;
	f->dma_offset.cr_v = f->offs_v;

	if (!variant->pix_hoff) {
		if (f->fmt->colplanes == 3) {
			f->dma_offset.cb_h >>= 1;
			f->dma_offset.cr_h >>= 1;
		}
		if (f->fmt->color == S5P_FIMC_YCBCR420) {
			f->dma_offset.cb_v >>= 1;
			f->dma_offset.cr_v >>= 1;
		}
	}

	dbg("in_offset: color= %d, y_h= %d, y_v= %d",
	    f->fmt->color, f->dma_offset.y_h, f->dma_offset.y_v);
}

/**
 * fimc_prepare_config - check dimensions, operation and color mode
 *			 and pre-calculate offsets and the scaling coefficients
 * @ctx: hardware context information
 * @flags: flags indicating which parameters to check/update
 *
 * Return: 0 if dimensions are valid, non-zero otherwise.
 */
int fimc_prepare_config(struct fimc_ctx *ctx, u32 flags)
{
	struct fimc_frame *s_frame, *d_frame;
	struct vb2_buffer *vb = NULL;
	int ret = 0;

	s_frame = &ctx->s_frame;
	d_frame = &ctx->d_frame;

	if (flags & FIMC_PARAMS) {
		/* Prepare the DMA offsets for the scaler. */
		fimc_prepare_dma_offset(ctx, &ctx->s_frame);
		fimc_prepare_dma_offset(ctx, &ctx->d_frame);

		if (s_frame->height > (SCALER_MAX_VRATIO * d_frame->height) ||
		    s_frame->width > (SCALER_MAX_HRATIO * d_frame->width)) {
			err("out of scaler range");
			return -EINVAL;
		}
		fimc_set_yuv_order(ctx);
	}

	/* Input DMA mode is not allowed when the scaler is disabled. */
	ctx->scaler.enabled = 1;

	if (flags & FIMC_SRC_ADDR) {
		vb = v4l2_m2m_next_src_buf(ctx->m2m_ctx);
		ret = fimc_prepare_addr(ctx, vb, s_frame, &s_frame->paddr);
		if (ret)
			return ret;
	}

	if (flags & FIMC_DST_ADDR) {
		vb = v4l2_m2m_next_dst_buf(ctx->m2m_ctx);
		ret = fimc_prepare_addr(ctx, vb, d_frame, &d_frame->paddr);
	}

	return ret;
}
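
/*
 * m2m device_run callback: program the source/destination addresses and,
 * when the context has changed since the previous run, reconfigure the
 * input path, scaler, rotator and output DMA before starting the transfer.
 */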
static void fimc_dma_run(void *priv)
{
	struct fimc_ctx *ctx = priv;
	struct fimc_dev *fimc;
	unsigned long flags;
	u32 ret;

	if (WARN(!ctx, "null hardware context\n"))
		return;

	fimc = ctx->fimc_dev;
	spin_lock_irqsave(&fimc->slock, flags);
	set_bit(ST_M2M_PEND, &fimc->state);

	spin_lock(&ctx->slock);
	ctx->state |= (FIMC_SRC_ADDR | FIMC_DST_ADDR);
	ret = fimc_prepare_config(ctx, ctx->state);
	if (ret)
		goto dma_unlock;

	/* Reconfigure hardware if the context has changed. */
	if (fimc->m2m.ctx != ctx) {
		ctx->state |= FIMC_PARAMS;
		fimc->m2m.ctx = ctx;
	}
	fimc_hw_set_input_addr(fimc, &ctx->s_frame.paddr);

	if (ctx->state & FIMC_PARAMS) {
		fimc_hw_set_input_path(ctx);
		fimc_hw_set_in_dma(ctx);
		ret = fimc_set_scaler_info(ctx);
		if (ret)
			goto dma_unlock;
		fimc_hw_set_prescaler(ctx);
		fimc_hw_set_mainscaler(ctx);
		fimc_hw_set_target_format(ctx);
		fimc_hw_set_rotation(ctx);
		fimc_hw_set_effect(ctx);
	}

	fimc_hw_set_output_path(ctx);
	if (ctx->state & (FIMC_DST_ADDR | FIMC_PARAMS))
		fimc_hw_set_output_addr(fimc, &ctx->d_frame.paddr, -1);

	if (ctx->state & FIMC_PARAMS)
		fimc_hw_set_out_dma(ctx);

	fimc_activate_capture(ctx);

	ctx->state &= (FIMC_CTX_M2M | FIMC_CTX_CAP |
		       FIMC_SRC_FMT | FIMC_DST_FMT);
	fimc_hw_activate_input_dma(fimc, true);

dma_unlock:
	spin_unlock(&ctx->slock);
	spin_unlock_irqrestore(&fimc->slock, flags);
}

static void fimc_job_abort(void *priv)
{
	fimc_m2m_shutdown(priv);
}
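
/*
 * videobuf2 queue_setup callback: report the number of memory planes and
 * the per-plane buffer sizes for the currently configured format.
 */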
static int fimc_queue_setup(struct vb2_queue *vq, unsigned int *num_buffers,
			    unsigned int *num_planes, unsigned int sizes[],
			    void *allocators[])
{
	struct fimc_ctx *ctx = vb2_get_drv_priv(vq);
	struct fimc_frame *f;
	int i;

	f = ctx_get_frame(ctx, vq->type);
	if (IS_ERR(f))
		return PTR_ERR(f);

	/*
	 * Return the number of non-contiguous planes (plane buffers)
	 * depending on the configured color format.
	 */
	if (!f->fmt)
		return -EINVAL;

	*num_planes = f->fmt->memplanes;

	for (i = 0; i < f->fmt->memplanes; i++) {
		sizes[i] = (f->f_width * f->f_height * f->fmt->depth[i]) / 8;
		allocators[i] = ctx->fimc_dev->alloc_ctx;
	}
	return 0;
}

static int fimc_buf_prepare(struct vb2_buffer *vb)
{
	struct fimc_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
	struct fimc_frame *frame;
	int i;

	frame = ctx_get_frame(ctx, vb->vb2_queue->type);
	if (IS_ERR(frame))
		return PTR_ERR(frame);

	for (i = 0; i < frame->fmt->memplanes; i++)
		vb2_set_plane_payload(vb, i, frame->payload[i]);

	return 0;
}

static void fimc_buf_queue(struct vb2_buffer *vb)
{
	struct fimc_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);

	dbg("ctx: %p, ctx->state: 0x%x", ctx, ctx->state);

	if (ctx->m2m_ctx)
		v4l2_m2m_buf_queue(ctx->m2m_ctx, vb);
}

static void fimc_lock(struct vb2_queue *vq)
{
	struct fimc_ctx *ctx = vb2_get_drv_priv(vq);
	mutex_lock(&ctx->fimc_dev->lock);
}

static void fimc_unlock(struct vb2_queue *vq)
{
	struct fimc_ctx *ctx = vb2_get_drv_priv(vq);
	mutex_unlock(&ctx->fimc_dev->lock);
}

static struct vb2_ops fimc_qops = {
	.queue_setup = fimc_queue_setup,
	.buf_prepare = fimc_buf_prepare,
	.buf_queue = fimc_buf_queue,
	.wait_prepare = fimc_unlock,
	.wait_finish = fimc_lock,
	.stop_streaming = stop_streaming,
	.start_streaming = start_streaming,
};

/*
 * V4L2 controls handling
 */
#define ctrl_to_ctx(__ctrl) \
	container_of((__ctrl)->handler, struct fimc_ctx, ctrl_handler)

static int fimc_s_ctrl(struct v4l2_ctrl *ctrl)
{
	struct fimc_ctx *ctx = ctrl_to_ctx(ctrl);
	struct fimc_dev *fimc = ctx->fimc_dev;
	struct samsung_fimc_variant *variant = fimc->variant;
	unsigned long flags;
	int ret = 0;

	if (ctrl->flags & V4L2_CTRL_FLAG_INACTIVE)
		return 0;

	switch (ctrl->id) {
	case V4L2_CID_HFLIP:
		spin_lock_irqsave(&ctx->slock, flags);
		ctx->hflip = ctrl->val;
		break;

	case V4L2_CID_VFLIP:
		spin_lock_irqsave(&ctx->slock, flags);
		ctx->vflip = ctrl->val;
		break;

	case V4L2_CID_ROTATE:
		if (fimc_capture_pending(fimc) ||
		    fimc_ctx_state_is_set(FIMC_DST_FMT | FIMC_SRC_FMT, ctx)) {
			ret = fimc_check_scaler_ratio(ctx->s_frame.width,
					ctx->s_frame.height, ctx->d_frame.width,
					ctx->d_frame.height, ctrl->val);
		}
		if (ret) {
			v4l2_err(fimc->m2m.vfd, "Out of scaler range\n");
			return -EINVAL;
		}
		if ((ctrl->val == 90 || ctrl->val == 270) &&
		    !variant->has_out_rot)
			return -EINVAL;
		spin_lock_irqsave(&ctx->slock, flags);
		ctx->rotation = ctrl->val;
		break;

	default:
		v4l2_err(fimc->v4l2_dev, "Invalid control: 0x%X\n", ctrl->id);
		return -EINVAL;
	}
	ctx->state |= FIMC_PARAMS;
	set_bit(ST_CAPT_APPLY_CFG, &fimc->state);
	spin_unlock_irqrestore(&ctx->slock, flags);

	return 0;
}

static const struct v4l2_ctrl_ops fimc_ctrl_ops = {
	.s_ctrl = fimc_s_ctrl,
};

int fimc_ctrls_create(struct fimc_ctx *ctx)
{
	if (ctx->ctrls_rdy)
		return 0;

	v4l2_ctrl_handler_init(&ctx->ctrl_handler, 3);

	ctx->ctrl_hflip = v4l2_ctrl_new_std(&ctx->ctrl_handler, &fimc_ctrl_ops,
					    V4L2_CID_HFLIP, 0, 1, 1, 0);
	ctx->ctrl_vflip = v4l2_ctrl_new_std(&ctx->ctrl_handler, &fimc_ctrl_ops,
					    V4L2_CID_VFLIP, 0, 1, 1, 0);
	ctx->ctrl_rotate = v4l2_ctrl_new_std(&ctx->ctrl_handler, &fimc_ctrl_ops,
					     V4L2_CID_ROTATE, 0, 270, 90, 0);
	ctx->ctrls_rdy = ctx->ctrl_handler.error == 0;

	return ctx->ctrl_handler.error;
}

void fimc_ctrls_delete(struct fimc_ctx *ctx)
{
	if (ctx->ctrls_rdy) {
		v4l2_ctrl_handler_free(&ctx->ctrl_handler);
		ctx->ctrls_rdy = false;
	}
}

void fimc_ctrls_activate(struct fimc_ctx *ctx, bool active)
{
	if (!ctx->ctrls_rdy)
		return;

	mutex_lock(&ctx->ctrl_handler.lock);
	v4l2_ctrl_activate(ctx->ctrl_rotate, active);
	v4l2_ctrl_activate(ctx->ctrl_hflip, active);
	v4l2_ctrl_activate(ctx->ctrl_vflip, active);

	if (active) {
		ctx->rotation = ctx->ctrl_rotate->val;
		ctx->hflip = ctx->ctrl_hflip->val;
		ctx->vflip = ctx->ctrl_vflip->val;
	} else {
		ctx->rotation = 0;
		ctx->hflip = 0;
		ctx->vflip = 0;
	}
	mutex_unlock(&ctx->ctrl_handler.lock);
}

/*
 * V4L2 ioctl handlers
 */
static int fimc_m2m_querycap(struct file *file, void *fh,
			     struct v4l2_capability *cap)
{
	struct fimc_ctx *ctx = fh_to_ctx(fh);
	struct fimc_dev *fimc = ctx->fimc_dev;

	strncpy(cap->driver, fimc->pdev->name, sizeof(cap->driver) - 1);
	strncpy(cap->card, fimc->pdev->name, sizeof(cap->card) - 1);
	cap->bus_info[0] = 0;
	cap->capabilities = V4L2_CAP_STREAMING |
		V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_OUTPUT |
		V4L2_CAP_VIDEO_CAPTURE_MPLANE | V4L2_CAP_VIDEO_OUTPUT_MPLANE;

	return 0;
}

static int fimc_m2m_enum_fmt_mplane(struct file *file, void *priv,
				    struct v4l2_fmtdesc *f)
{
	struct fimc_fmt *fmt;

	fmt = fimc_find_format(NULL, NULL, FMT_FLAGS_M2M, f->index);
	if (!fmt)
		return -EINVAL;

	strncpy(f->description, fmt->name, sizeof(f->description) - 1);
	f->pixelformat = fmt->fourcc;
	return 0;
}

int fimc_fill_format(struct fimc_frame *frame, struct v4l2_format *f)
{
	struct v4l2_pix_format_mplane *pixm = &f->fmt.pix_mp;
	int i;

	pixm->width = frame->o_width;
	pixm->height = frame->o_height;
	pixm->field = V4L2_FIELD_NONE;
	pixm->pixelformat = frame->fmt->fourcc;
	pixm->colorspace = V4L2_COLORSPACE_JPEG;
	pixm->num_planes = frame->fmt->memplanes;

	for (i = 0; i < pixm->num_planes; ++i) {
		int bpl = frame->f_width;
		if (frame->fmt->colplanes == 1) /* packed formats */
			bpl = (bpl * frame->fmt->depth[0]) / 8;
		pixm->plane_fmt[i].bytesperline = bpl;
		pixm->plane_fmt[i].sizeimage = (frame->o_width *
				frame->o_height * frame->fmt->depth[i]) / 8;
	}
	return 0;
}

void fimc_fill_frame(struct fimc_frame *frame, struct v4l2_format *f)
{
	struct v4l2_pix_format_mplane *pixm = &f->fmt.pix_mp;

	frame->f_width = pixm->plane_fmt[0].bytesperline;
	if (frame->fmt->colplanes == 1)
		frame->f_width = (frame->f_width * 8) / frame->fmt->depth[0];
	frame->f_height = pixm->height;
	frame->width = pixm->width;
	frame->height = pixm->height;
	frame->o_width = pixm->width;
	frame->o_height = pixm->height;
	frame->offs_h = 0;
	frame->offs_v = 0;
}

/**
 * fimc_adjust_mplane_format - adjust bytesperline/sizeimage for each plane
 * @fmt: fimc pixel format description (input)
 * @width: requested pixel width
 * @height: requested pixel height
 * @pix: multi-plane format to adjust
 */
void fimc_adjust_mplane_format(struct fimc_fmt *fmt, u32 width, u32 height,
			       struct v4l2_pix_format_mplane *pix)
{
	u32 bytesperline = 0;
	int i;

	pix->colorspace = V4L2_COLORSPACE_JPEG;
	pix->field = V4L2_FIELD_NONE;
	pix->num_planes = fmt->memplanes;
	pix->height = height;
	pix->width = width;

	for (i = 0; i < pix->num_planes; ++i) {
		u32 bpl = pix->plane_fmt[i].bytesperline;
		u32 *sizeimage = &pix->plane_fmt[i].sizeimage;

		if (fmt->colplanes > 1 && (bpl == 0 || bpl < pix->width))
			bpl = pix->width; /* Planar */

		if (fmt->colplanes == 1 && /* Packed */
		    (bpl == 0 || ((bpl * 8) / fmt->depth[i]) < pix->width))
			bpl = (pix->width * fmt->depth[0]) / 8;

		if (i == 0) /* Same bytesperline for each plane. */
			bytesperline = bpl;

		pix->plane_fmt[i].bytesperline = bytesperline;
		*sizeimage = (pix->width * pix->height * fmt->depth[i]) / 8;
	}
}

static int fimc_m2m_g_fmt_mplane(struct file *file, void *fh,
				 struct v4l2_format *f)
{
	struct fimc_ctx *ctx = fh_to_ctx(fh);
	struct fimc_frame *frame = ctx_get_frame(ctx, f->type);

	if (IS_ERR(frame))
		return PTR_ERR(frame);

	return fimc_fill_format(frame, f);
}

/**
 * fimc_find_format - lookup fimc color format by fourcc or media bus format
 * @pixelformat: fourcc to match, ignored if null
 * @mbus_code: media bus code to match, ignored if null
 * @mask: the color flags to match
 * @index: offset in the fimc_formats array, ignored if negative
 */
struct fimc_fmt *fimc_find_format(u32 *pixelformat, u32 *mbus_code,
				  unsigned int mask, int index)
{
	struct fimc_fmt *fmt, *def_fmt = NULL;
	unsigned int i;
	int id = 0;

	if (index >= ARRAY_SIZE(fimc_formats))
		return NULL;

	for (i = 0; i < ARRAY_SIZE(fimc_formats); ++i) {
		fmt = &fimc_formats[i];
		if (!(fmt->flags & mask))
			continue;
		if (pixelformat && fmt->fourcc == *pixelformat)
			return fmt;
		if (mbus_code && fmt->mbus_code == *mbus_code)
			return fmt;
		if (index == id)
			def_fmt = fmt;
		id++;
	}
	return def_fmt;
}
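
/*
 * Adjust the requested multi-plane format to the hardware constraints:
 * width/height alignment (stricter for tiled formats), maximum width and
 * the per-plane bytesperline/sizeimage values.
 */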
static int fimc_try_fmt_mplane(struct fimc_ctx *ctx, struct v4l2_format *f)
{
	struct fimc_dev *fimc = ctx->fimc_dev;
	struct samsung_fimc_variant *variant = fimc->variant;
	struct v4l2_pix_format_mplane *pix = &f->fmt.pix_mp;
	struct fimc_fmt *fmt;
	u32 max_w, mod_x, mod_y;

	if (!IS_M2M(f->type))
		return -EINVAL;

	dbg("w: %d, h: %d", pix->width, pix->height);

	fmt = fimc_find_format(&pix->pixelformat, NULL, FMT_FLAGS_M2M, 0);
	if (WARN(fmt == NULL, "Pixel format lookup failed"))
		return -EINVAL;

	if (pix->field == V4L2_FIELD_ANY)
		pix->field = V4L2_FIELD_NONE;
	else if (pix->field != V4L2_FIELD_NONE)
		return -EINVAL;

	if (f->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
		max_w = variant->pix_limit->scaler_dis_w;
		mod_x = ffs(variant->min_inp_pixsize) - 1;
	} else {
		max_w = variant->pix_limit->out_rot_dis_w;
		mod_x = ffs(variant->min_out_pixsize) - 1;
	}

	if (tiled_fmt(fmt)) {
		mod_x = 6; /* 64 x 32 pixels tile */
		mod_y = 5;
	} else {
		if (fimc->id == 1 && variant->pix_hoff)
			mod_y = fimc_fmt_is_rgb(fmt->color) ? 0 : 1;
		else
			mod_y = mod_x;
	}

	dbg("mod_x: %d, mod_y: %d, max_w: %d", mod_x, mod_y, max_w);

	v4l_bound_align_image(&pix->width, 16, max_w, mod_x,
		&pix->height, 8, variant->pix_limit->scaler_dis_w, mod_y, 0);

	fimc_adjust_mplane_format(fmt, pix->width, pix->height, &f->fmt.pix_mp);

	return 0;
}

static int fimc_m2m_try_fmt_mplane(struct file *file, void *fh,
				   struct v4l2_format *f)
{
	struct fimc_ctx *ctx = fh_to_ctx(fh);
	return fimc_try_fmt_mplane(ctx, f);
}

static int fimc_m2m_s_fmt_mplane(struct file *file, void *fh,
				 struct v4l2_format *f)
{
	struct fimc_ctx *ctx = fh_to_ctx(fh);
	struct fimc_dev *fimc = ctx->fimc_dev;
	struct vb2_queue *vq;
	struct fimc_frame *frame;
	struct v4l2_pix_format_mplane *pix;
	int i, ret = 0;

	ret = fimc_try_fmt_mplane(ctx, f);
	if (ret)
		return ret;

	vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);

	if (vb2_is_busy(vq)) {
		v4l2_err(fimc->m2m.vfd, "queue (%d) busy\n", f->type);
		return -EBUSY;
	}

	if (f->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
		frame = &ctx->s_frame;
	else
		frame = &ctx->d_frame;

	pix = &f->fmt.pix_mp;
	frame->fmt = fimc_find_format(&pix->pixelformat, NULL,
				      FMT_FLAGS_M2M, 0);
	if (!frame->fmt)
		return -EINVAL;

	for (i = 0; i < frame->fmt->colplanes; i++) {
		frame->payload[i] =
			(pix->width * pix->height * frame->fmt->depth[i]) / 8;
	}

	fimc_fill_frame(frame, f);

	if (f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
		fimc_ctx_state_lock_set(FIMC_PARAMS | FIMC_DST_FMT, ctx);
	else
		fimc_ctx_state_lock_set(FIMC_PARAMS | FIMC_SRC_FMT, ctx);

	dbg("f_w: %d, f_h: %d", frame->f_width, frame->f_height);

	return 0;
}

static int fimc_m2m_reqbufs(struct file *file, void *fh,
			    struct v4l2_requestbuffers *reqbufs)
{
	struct fimc_ctx *ctx = fh_to_ctx(fh);
	return v4l2_m2m_reqbufs(file, ctx->m2m_ctx, reqbufs);
}

static int fimc_m2m_querybuf(struct file *file, void *fh,
			     struct v4l2_buffer *buf)
{
	struct fimc_ctx *ctx = fh_to_ctx(fh);
	return v4l2_m2m_querybuf(file, ctx->m2m_ctx, buf);
}

static int fimc_m2m_qbuf(struct file *file, void *fh,
			 struct v4l2_buffer *buf)
{
	struct fimc_ctx *ctx = fh_to_ctx(fh);
	return v4l2_m2m_qbuf(file, ctx->m2m_ctx, buf);
}

static int fimc_m2m_dqbuf(struct file *file, void *fh,
			  struct v4l2_buffer *buf)
{
	struct fimc_ctx *ctx = fh_to_ctx(fh);
	return v4l2_m2m_dqbuf(file, ctx->m2m_ctx, buf);
}

static int fimc_m2m_streamon(struct file *file, void *fh,
			     enum v4l2_buf_type type)
{
	struct fimc_ctx *ctx = fh_to_ctx(fh);

	/* The source and target color formats need to be set */
	if (V4L2_TYPE_IS_OUTPUT(type)) {
		if (!fimc_ctx_state_is_set(FIMC_SRC_FMT, ctx))
			return -EINVAL;
	} else if (!fimc_ctx_state_is_set(FIMC_DST_FMT, ctx)) {
		return -EINVAL;
	}

	return v4l2_m2m_streamon(file, ctx->m2m_ctx, type);
}

static int fimc_m2m_streamoff(struct file *file, void *fh,
			      enum v4l2_buf_type type)
{
	struct fimc_ctx *ctx = fh_to_ctx(fh);
	return v4l2_m2m_streamoff(file, ctx->m2m_ctx, type);
}

static int fimc_m2m_cropcap(struct file *file, void *fh,
			    struct v4l2_cropcap *cr)
{
	struct fimc_ctx *ctx = fh_to_ctx(fh);
	struct fimc_frame *frame;

	frame = ctx_get_frame(ctx, cr->type);
	if (IS_ERR(frame))
		return PTR_ERR(frame);

	cr->bounds.left = 0;
	cr->bounds.top = 0;
	cr->bounds.width = frame->o_width;
	cr->bounds.height = frame->o_height;
	cr->defrect = cr->bounds;

	return 0;
}

static int fimc_m2m_g_crop(struct file *file, void *fh, struct v4l2_crop *cr)
{
	struct fimc_ctx *ctx = fh_to_ctx(fh);
	struct fimc_frame *frame;

	frame = ctx_get_frame(ctx, cr->type);
	if (IS_ERR(frame))
		return PTR_ERR(frame);

	cr->c.left = frame->offs_h;
	cr->c.top = frame->offs_v;
	cr->c.width = frame->width;
	cr->c.height = frame->height;

	return 0;
}

static int fimc_m2m_try_crop(struct fimc_ctx *ctx, struct v4l2_crop *cr)
{
	struct fimc_dev *fimc = ctx->fimc_dev;
	struct fimc_frame *f;
	u32 min_size, halign, depth = 0;
	int i;

	if (cr->c.top < 0 || cr->c.left < 0) {
		v4l2_err(fimc->m2m.vfd,
			"doesn't support negative values for top & left\n");
		return -EINVAL;
	}

	if (cr->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
		f = &ctx->d_frame;
	else if (cr->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
		f = &ctx->s_frame;
	else
		return -EINVAL;

	min_size = (f == &ctx->s_frame) ?
		fimc->variant->min_inp_pixsize : fimc->variant->min_out_pixsize;

	/* Get pixel alignment constraints. */
	if (fimc->id == 1 && fimc->variant->pix_hoff)
		halign = fimc_fmt_is_rgb(f->fmt->color) ? 0 : 1;
	else
		halign = ffs(min_size) - 1;

	for (i = 0; i < f->fmt->colplanes; i++)
		depth += f->fmt->depth[i];

	v4l_bound_align_image(&cr->c.width, min_size, f->o_width,
			      ffs(min_size) - 1,
			      &cr->c.height, min_size, f->o_height,
			      halign, 64 / (ALIGN(depth, 8)));

	/* adjust left/top if cropping rectangle is out of bounds */
	if (cr->c.left + cr->c.width > f->o_width)
		cr->c.left = f->o_width - cr->c.width;
	if (cr->c.top + cr->c.height > f->o_height)
		cr->c.top = f->o_height - cr->c.height;

	cr->c.left = round_down(cr->c.left, min_size);
	cr->c.top = round_down(cr->c.top, fimc->variant->hor_offs_align);

	dbg("l:%d, t:%d, w:%d, h:%d, f_w: %d, f_h: %d",
	    cr->c.left, cr->c.top, cr->c.width, cr->c.height,
	    f->f_width, f->f_height);

	return 0;
}

static int fimc_m2m_s_crop(struct file *file, void *fh, struct v4l2_crop *cr)
{
	struct fimc_ctx *ctx = fh_to_ctx(fh);
	struct fimc_dev *fimc = ctx->fimc_dev;
	struct fimc_frame *f;
	int ret;

	ret = fimc_m2m_try_crop(ctx, cr);
	if (ret)
		return ret;

	f = (cr->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) ?
		&ctx->s_frame : &ctx->d_frame;

	/* Check to see if scaling ratio is within supported range */
	if (fimc_ctx_state_is_set(FIMC_DST_FMT | FIMC_SRC_FMT, ctx)) {
		if (cr->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
			ret = fimc_check_scaler_ratio(cr->c.width, cr->c.height,
						      ctx->d_frame.width,
						      ctx->d_frame.height,
						      ctx->rotation);
		} else {
			ret = fimc_check_scaler_ratio(ctx->s_frame.width,
						      ctx->s_frame.height,
						      cr->c.width, cr->c.height,
						      ctx->rotation);
		}
		if (ret) {
			v4l2_err(fimc->m2m.vfd, "Out of scaler range\n");
			return -EINVAL;
		}
	}

	f->offs_h = cr->c.left;
	f->offs_v = cr->c.top;
	f->width = cr->c.width;
	f->height = cr->c.height;

	fimc_ctx_state_lock_set(FIMC_PARAMS, ctx);

	return 0;
}

static const struct v4l2_ioctl_ops fimc_m2m_ioctl_ops = {
	.vidioc_querycap = fimc_m2m_querycap,
	.vidioc_enum_fmt_vid_cap_mplane = fimc_m2m_enum_fmt_mplane,
	.vidioc_enum_fmt_vid_out_mplane = fimc_m2m_enum_fmt_mplane,
	.vidioc_g_fmt_vid_cap_mplane = fimc_m2m_g_fmt_mplane,
	.vidioc_g_fmt_vid_out_mplane = fimc_m2m_g_fmt_mplane,
	.vidioc_try_fmt_vid_cap_mplane = fimc_m2m_try_fmt_mplane,
	.vidioc_try_fmt_vid_out_mplane = fimc_m2m_try_fmt_mplane,
	.vidioc_s_fmt_vid_cap_mplane = fimc_m2m_s_fmt_mplane,
	.vidioc_s_fmt_vid_out_mplane = fimc_m2m_s_fmt_mplane,
	.vidioc_reqbufs = fimc_m2m_reqbufs,
	.vidioc_querybuf = fimc_m2m_querybuf,
	.vidioc_qbuf = fimc_m2m_qbuf,
	.vidioc_dqbuf = fimc_m2m_dqbuf,
	.vidioc_streamon = fimc_m2m_streamon,
	.vidioc_streamoff = fimc_m2m_streamoff,
	.vidioc_g_crop = fimc_m2m_g_crop,
	.vidioc_s_crop = fimc_m2m_s_crop,
	.vidioc_cropcap = fimc_m2m_cropcap
};
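
/* Initialize the source and destination videobuf2 queues of an M2M context. */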
static int queue_init(void *priv, struct vb2_queue *src_vq,
		      struct vb2_queue *dst_vq)
{
	struct fimc_ctx *ctx = priv;
	int ret;

	memset(src_vq, 0, sizeof(*src_vq));
	src_vq->type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
	src_vq->io_modes = VB2_MMAP | VB2_USERPTR;
	src_vq->drv_priv = ctx;
	src_vq->ops = &fimc_qops;
	src_vq->mem_ops = &vb2_dma_contig_memops;
	src_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);

	ret = vb2_queue_init(src_vq);
	if (ret)
		return ret;

	memset(dst_vq, 0, sizeof(*dst_vq));
	dst_vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	dst_vq->io_modes = VB2_MMAP | VB2_USERPTR;
	dst_vq->drv_priv = ctx;
	dst_vq->ops = &fimc_qops;
	dst_vq->mem_ops = &vb2_dma_contig_memops;
	dst_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);

	return vb2_queue_init(dst_vq);
}

static int fimc_m2m_open(struct file *file)
{
	struct fimc_dev *fimc = video_drvdata(file);
	struct fimc_ctx *ctx;
	int ret;

	dbg("pid: %d, state: 0x%lx, refcnt: %d",
	    task_pid_nr(current), fimc->state, fimc->vid_cap.refcnt);

	/*
	 * Return if the corresponding video capture node
	 * is already opened.
	 */
	if (fimc->vid_cap.refcnt > 0)
		return -EBUSY;

	ctx = kzalloc(sizeof *ctx, GFP_KERNEL);
	if (!ctx)
		return -ENOMEM;

	v4l2_fh_init(&ctx->fh, fimc->m2m.vfd);
	ret = fimc_ctrls_create(ctx);
	if (ret)
		goto error_fh;

	/* Use separate control handler per file handle */
	ctx->fh.ctrl_handler = &ctx->ctrl_handler;
	file->private_data = &ctx->fh;
	v4l2_fh_add(&ctx->fh);

	ctx->fimc_dev = fimc;
	/* Default color format */
	ctx->s_frame.fmt = &fimc_formats[0];
	ctx->d_frame.fmt = &fimc_formats[0];
	/* Setup the device context for memory-to-memory mode */
	ctx->state = FIMC_CTX_M2M;
	ctx->flags = 0;
	ctx->in_path = FIMC_DMA;
	ctx->out_path = FIMC_DMA;
	spin_lock_init(&ctx->slock);

	ctx->m2m_ctx = v4l2_m2m_ctx_init(fimc->m2m.m2m_dev, ctx, queue_init);
	if (IS_ERR(ctx->m2m_ctx)) {
		ret = PTR_ERR(ctx->m2m_ctx);
		goto error_c;
	}

	if (fimc->m2m.refcnt++ == 0)
		set_bit(ST_M2M_RUN, &fimc->state);
	return 0;

error_c:
	fimc_ctrls_delete(ctx);
error_fh:
	v4l2_fh_del(&ctx->fh);
	v4l2_fh_exit(&ctx->fh);
	kfree(ctx);
	return ret;
}

static int fimc_m2m_release(struct file *file)
{
	struct fimc_ctx *ctx = fh_to_ctx(file->private_data);
	struct fimc_dev *fimc = ctx->fimc_dev;

	dbg("pid: %d, state: 0x%lx, refcnt= %d",
	    task_pid_nr(current), fimc->state, fimc->m2m.refcnt);

	v4l2_m2m_ctx_release(ctx->m2m_ctx);
	fimc_ctrls_delete(ctx);
	v4l2_fh_del(&ctx->fh);
	v4l2_fh_exit(&ctx->fh);

	if (--fimc->m2m.refcnt <= 0)
		clear_bit(ST_M2M_RUN, &fimc->state);
	kfree(ctx);
	return 0;
}

static unsigned int fimc_m2m_poll(struct file *file,
				  struct poll_table_struct *wait)
{
	struct fimc_ctx *ctx = fh_to_ctx(file->private_data);
	return v4l2_m2m_poll(file, ctx->m2m_ctx, wait);
}

static int fimc_m2m_mmap(struct file *file, struct vm_area_struct *vma)
{
	struct fimc_ctx *ctx = fh_to_ctx(file->private_data);
	return v4l2_m2m_mmap(file, ctx->m2m_ctx, vma);
}

static const struct v4l2_file_operations fimc_m2m_fops = {
	.owner = THIS_MODULE,
	.open = fimc_m2m_open,
	.release = fimc_m2m_release,
	.poll = fimc_m2m_poll,
	.unlocked_ioctl = video_ioctl2,
	.mmap = fimc_m2m_mmap,
};

static struct v4l2_m2m_ops m2m_ops = {
	.device_run = fimc_dma_run,
	.job_abort = fimc_job_abort,
};

int fimc_register_m2m_device(struct fimc_dev *fimc,
			     struct v4l2_device *v4l2_dev)
{
	struct video_device *vfd;
	struct platform_device *pdev;
	int ret = 0;

	if (!fimc)
		return -ENODEV;

	pdev = fimc->pdev;
	fimc->v4l2_dev = v4l2_dev;

	vfd = video_device_alloc();
	if (!vfd) {
		v4l2_err(v4l2_dev, "Failed to allocate video device\n");
		return -ENOMEM;
	}

	vfd->fops = &fimc_m2m_fops;
	vfd->ioctl_ops = &fimc_m2m_ioctl_ops;
	vfd->v4l2_dev = v4l2_dev;
	vfd->minor = -1;
	vfd->release = video_device_release;
	vfd->lock = &fimc->lock;

	snprintf(vfd->name, sizeof(vfd->name), "%s.m2m", dev_name(&pdev->dev));
	video_set_drvdata(vfd, fimc);

	fimc->m2m.vfd = vfd;
	fimc->m2m.m2m_dev = v4l2_m2m_init(&m2m_ops);
	if (IS_ERR(fimc->m2m.m2m_dev)) {
		v4l2_err(v4l2_dev, "failed to initialize v4l2-m2m device\n");
		ret = PTR_ERR(fimc->m2m.m2m_dev);
		goto err_init;
	}

	ret = media_entity_init(&vfd->entity, 0, NULL, 0);
	if (!ret)
		return 0;

	v4l2_m2m_release(fimc->m2m.m2m_dev);
err_init:
	video_device_release(fimc->m2m.vfd);
	return ret;
}

void fimc_unregister_m2m_device(struct fimc_dev *fimc)
{
	if (!fimc)
		return;

	if (fimc->m2m.m2m_dev)
		v4l2_m2m_release(fimc->m2m.m2m_dev);

	if (fimc->m2m.vfd) {
		media_entity_cleanup(&fimc->m2m.vfd->entity);
		/* Can also be called if video device wasn't registered */
		video_unregister_device(fimc->m2m.vfd);
	}
}

static void fimc_clk_put(struct fimc_dev *fimc)
{
	int i;

	for (i = 0; i < fimc->num_clocks; i++) {
		if (fimc->clock[i])
			clk_put(fimc->clock[i]);
	}
}

static int fimc_clk_get(struct fimc_dev *fimc)
{
	int i;

	for (i = 0; i < fimc->num_clocks; i++) {
		fimc->clock[i] = clk_get(&fimc->pdev->dev, fimc_clocks[i]);
		if (!IS_ERR_OR_NULL(fimc->clock[i]))
			continue;
		dev_err(&fimc->pdev->dev, "failed to get fimc clock: %s\n",
			fimc_clocks[i]);
		return -ENXIO;
	}
	return 0;
}
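
/*
 * Wait for a pending M2M job to finish before suspending; returns
 * -EAGAIN if the hardware did not become idle within the timeout.
 */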
static int fimc_m2m_suspend(struct fimc_dev *fimc)
{
	unsigned long flags;
	int timeout;

	spin_lock_irqsave(&fimc->slock, flags);
	if (!fimc_m2m_pending(fimc)) {
		spin_unlock_irqrestore(&fimc->slock, flags);
		return 0;
	}
	clear_bit(ST_M2M_SUSPENDED, &fimc->state);
	set_bit(ST_M2M_SUSPENDING, &fimc->state);
	spin_unlock_irqrestore(&fimc->slock, flags);

	timeout = wait_event_timeout(fimc->irq_queue,
				     test_bit(ST_M2M_SUSPENDED, &fimc->state),
				     FIMC_SHUTDOWN_TIMEOUT);

	clear_bit(ST_M2M_SUSPENDING, &fimc->state);
	return timeout == 0 ? -EAGAIN : 0;
}

static int fimc_m2m_resume(struct fimc_dev *fimc)
{
	unsigned long flags;

	spin_lock_irqsave(&fimc->slock, flags);
	/* Clear for full H/W setup in first run after resume */
	fimc->m2m.ctx = NULL;
	spin_unlock_irqrestore(&fimc->slock, flags);

	if (test_and_clear_bit(ST_M2M_SUSPENDED, &fimc->state))
		fimc_m2m_job_finish(fimc->m2m.ctx,
				    VB2_BUF_STATE_ERROR);
	return 0;
}
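
/*
 * Probe: map the register resource, acquire the clocks and the interrupt
 * line, enable runtime PM and set up the contiguous DMA allocator context.
 */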
static int fimc_probe(struct platform_device *pdev)
{
	struct fimc_dev *fimc;
	struct resource *res;
	struct samsung_fimc_driverdata *drv_data;
	struct s5p_platform_fimc *pdata;
	int ret = 0;

	dev_dbg(&pdev->dev, "%s():\n", __func__);

	drv_data = (struct samsung_fimc_driverdata *)
		platform_get_device_id(pdev)->driver_data;

	if (pdev->id >= drv_data->num_entities) {
		dev_err(&pdev->dev, "Invalid platform device id: %d\n",
			pdev->id);
		return -EINVAL;
	}

	fimc = kzalloc(sizeof(struct fimc_dev), GFP_KERNEL);
	if (!fimc)
		return -ENOMEM;

	fimc->id = pdev->id;

	fimc->variant = drv_data->variant[fimc->id];
	fimc->pdev = pdev;
	pdata = pdev->dev.platform_data;
	fimc->pdata = pdata;
	set_bit(ST_LPM, &fimc->state);

	init_waitqueue_head(&fimc->irq_queue);
	spin_lock_init(&fimc->slock);
	mutex_init(&fimc->lock);

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!res) {
		dev_err(&pdev->dev, "failed to find the registers\n");
		ret = -ENOENT;
		goto err_info;
	}

	fimc->regs_res = request_mem_region(res->start, resource_size(res),
			dev_name(&pdev->dev));
	if (!fimc->regs_res) {
		dev_err(&pdev->dev, "failed to obtain register region\n");
		ret = -ENOENT;
		goto err_info;
	}

	fimc->regs = ioremap(res->start, resource_size(res));
	if (!fimc->regs) {
		dev_err(&pdev->dev, "failed to map registers\n");
		ret = -ENXIO;
		goto err_req_region;
	}

	res = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (!res) {
		dev_err(&pdev->dev, "failed to get IRQ resource\n");
		ret = -ENXIO;
		goto err_regs_unmap;
	}
	fimc->irq = res->start;

	fimc->num_clocks = MAX_FIMC_CLOCKS;
	ret = fimc_clk_get(fimc);
	if (ret)
		goto err_regs_unmap;
	clk_set_rate(fimc->clock[CLK_BUS], drv_data->lclk_frequency);
	clk_enable(fimc->clock[CLK_BUS]);

	platform_set_drvdata(pdev, fimc);

	ret = request_irq(fimc->irq, fimc_irq_handler, 0, pdev->name, fimc);
	if (ret) {
		dev_err(&pdev->dev, "failed to install irq (%d)\n", ret);
		goto err_clk;
	}

	pm_runtime_enable(&pdev->dev);
	ret = pm_runtime_get_sync(&pdev->dev);
	if (ret < 0)
		goto err_irq;

	/* Initialize contiguous memory allocator */
	fimc->alloc_ctx = vb2_dma_contig_init_ctx(&pdev->dev);
	if (IS_ERR(fimc->alloc_ctx)) {
		ret = PTR_ERR(fimc->alloc_ctx);
		goto err_pm;
	}

	dev_dbg(&pdev->dev, "FIMC.%d registered successfully\n", fimc->id);

	pm_runtime_put(&pdev->dev);
	return 0;

err_pm:
	pm_runtime_put(&pdev->dev);
err_irq:
	free_irq(fimc->irq, fimc);
err_clk:
	fimc_clk_put(fimc);
err_regs_unmap:
	iounmap(fimc->regs);
err_req_region:
	release_resource(fimc->regs_res);
	kfree(fimc->regs_res);
err_info:
	kfree(fimc);
	return ret;
}
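
/*
 * Runtime PM resume: re-enable the gate clock, reset the hardware and
 * restore the capture or mem-to-mem state as needed.
 */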
static int fimc_runtime_resume(struct device *dev)
{
        struct fimc_dev *fimc = dev_get_drvdata(dev);

        dbg("fimc%d: state: 0x%lx", fimc->id, fimc->state);

        /* Enable clocks and perform basic initialization */
        clk_enable(fimc->clock[CLK_GATE]);
        fimc_hw_reset(fimc);
        if (fimc->variant->out_buf_count > 4)
                fimc_hw_set_dma_seq(fimc, 0xF);

        /* Resume the capture or mem-to-mem device */
        if (fimc_capture_busy(fimc))
                return fimc_capture_resume(fimc);
        else if (fimc_m2m_pending(fimc))
                return fimc_m2m_resume(fimc);

        return 0;
}

static int fimc_runtime_suspend(struct device *dev)
{
        struct fimc_dev *fimc = dev_get_drvdata(dev);
        int ret = 0;

        if (fimc_capture_busy(fimc))
                ret = fimc_capture_suspend(fimc);
        else
                ret = fimc_m2m_suspend(fimc);
        if (!ret)
                clk_disable(fimc->clock[CLK_GATE]);

        dbg("fimc%d: state: 0x%lx", fimc->id, fimc->state);
        return ret;
}

#ifdef CONFIG_PM_SLEEP
static int fimc_resume(struct device *dev)
{
        struct fimc_dev *fimc = dev_get_drvdata(dev);
        unsigned long flags;

        dbg("fimc%d: state: 0x%lx", fimc->id, fimc->state);

        /* Do not resume if the device was idle before system suspend */
        spin_lock_irqsave(&fimc->slock, flags);
        if (!test_and_clear_bit(ST_LPM, &fimc->state) ||
            (!fimc_m2m_active(fimc) && !fimc_capture_busy(fimc))) {
                spin_unlock_irqrestore(&fimc->slock, flags);
                return 0;
        }
        fimc_hw_reset(fimc);
        if (fimc->variant->out_buf_count > 4)
                fimc_hw_set_dma_seq(fimc, 0xF);
        spin_unlock_irqrestore(&fimc->slock, flags);

        if (fimc_capture_busy(fimc))
                return fimc_capture_resume(fimc);

        return fimc_m2m_resume(fimc);
}

static int fimc_suspend(struct device *dev)
{
        struct fimc_dev *fimc = dev_get_drvdata(dev);

        dbg("fimc%d: state: 0x%lx", fimc->id, fimc->state);

        if (test_and_set_bit(ST_LPM, &fimc->state))
                return 0;
        if (fimc_capture_busy(fimc))
                return fimc_capture_suspend(fimc);

        return fimc_m2m_suspend(fimc);
}
#endif /* CONFIG_PM_SLEEP */

static int __devexit fimc_remove(struct platform_device *pdev)
{
        struct fimc_dev *fimc = platform_get_drvdata(pdev);

        pm_runtime_disable(&pdev->dev);
        fimc_runtime_suspend(&pdev->dev);
        pm_runtime_set_suspended(&pdev->dev);

        vb2_dma_contig_cleanup_ctx(fimc->alloc_ctx);

        clk_disable(fimc->clock[CLK_BUS]);
        fimc_clk_put(fimc);
        free_irq(fimc->irq, fimc);
        iounmap(fimc->regs);
        release_resource(fimc->regs_res);
        kfree(fimc->regs_res);
        kfree(fimc);

        dev_info(&pdev->dev, "driver unloaded\n");
        return 0;
}

/* Image pixel limits, similar across several FIMC HW revisions. */
static struct fimc_pix_limit s5p_pix_limit[4] = {
        [0] = {
                .scaler_en_w = 3264,
                .scaler_dis_w = 8192,
                .in_rot_en_h = 1920,
                .in_rot_dis_w = 8192,
                .out_rot_en_w = 1920,
                .out_rot_dis_w = 4224,
        },
        [1] = {
                .scaler_en_w = 4224,
                .scaler_dis_w = 8192,
                .in_rot_en_h = 1920,
                .in_rot_dis_w = 8192,
                .out_rot_en_w = 1920,
                .out_rot_dis_w = 4224,
        },
        [2] = {
                .scaler_en_w = 1920,
                .scaler_dis_w = 8192,
                .in_rot_en_h = 1280,
                .in_rot_dis_w = 8192,
                .out_rot_en_w = 1280,
                .out_rot_dis_w = 1920,
        },
        [3] = {
                .scaler_en_w = 1920,
                .scaler_dis_w = 8192,
                .in_rot_en_h = 1366,
                .in_rot_dis_w = 8192,
                .out_rot_en_w = 1366,
                .out_rot_dis_w = 1920,
        },
};

static struct samsung_fimc_variant fimc0_variant_s5p = {
        .has_inp_rot = 1,
        .has_out_rot = 1,
        .has_cam_if = 1,
        .min_inp_pixsize = 16,
        .min_out_pixsize = 16,
        .hor_offs_align = 8,
        .out_buf_count = 4,
        .pix_limit = &s5p_pix_limit[0],
};

static struct samsung_fimc_variant fimc2_variant_s5p = {
        .has_cam_if = 1,
        .min_inp_pixsize = 16,
        .min_out_pixsize = 16,
        .hor_offs_align = 8,
        .out_buf_count = 4,
        .pix_limit = &s5p_pix_limit[1],
};

static struct samsung_fimc_variant fimc0_variant_s5pv210 = {
        .pix_hoff = 1,
        .has_inp_rot = 1,
        .has_out_rot = 1,
        .has_cam_if = 1,
        .min_inp_pixsize = 16,
        .min_out_pixsize = 16,
        .hor_offs_align = 8,
        .out_buf_count = 4,
        .pix_limit = &s5p_pix_limit[1],
};

static struct samsung_fimc_variant fimc1_variant_s5pv210 = {
        .pix_hoff = 1,
        .has_inp_rot = 1,
        .has_out_rot = 1,
        .has_cam_if = 1,
        .has_mainscaler_ext = 1,
        .min_inp_pixsize = 16,
        .min_out_pixsize = 16,
        .hor_offs_align = 1,
        .out_buf_count = 4,
        .pix_limit = &s5p_pix_limit[2],
};

static struct samsung_fimc_variant fimc2_variant_s5pv210 = {
        .has_cam_if = 1,
        .pix_hoff = 1,
        .min_inp_pixsize = 16,
        .min_out_pixsize = 16,
        .hor_offs_align = 8,
        .out_buf_count = 4,
        .pix_limit = &s5p_pix_limit[2],
};

static struct samsung_fimc_variant fimc0_variant_exynos4 = {
        .pix_hoff = 1,
        .has_inp_rot = 1,
        .has_out_rot = 1,
        .has_cam_if = 1,
        .has_cistatus2 = 1,
        .has_mainscaler_ext = 1,
        .min_inp_pixsize = 16,
        .min_out_pixsize = 16,
        .hor_offs_align = 1,
        .out_buf_count = 32,
        .pix_limit = &s5p_pix_limit[1],
};

static struct samsung_fimc_variant fimc3_variant_exynos4 = {
        .pix_hoff = 1,
        .has_cam_if = 1,
        .has_cistatus2 = 1,
        .has_mainscaler_ext = 1,
        .min_inp_pixsize = 16,
        .min_out_pixsize = 16,
        .hor_offs_align = 1,
        .out_buf_count = 32,
        .pix_limit = &s5p_pix_limit[3],
};

/* S5PC100 */
static struct samsung_fimc_driverdata fimc_drvdata_s5p = {
        .variant = {
                [0] = &fimc0_variant_s5p,
                [1] = &fimc0_variant_s5p,
                [2] = &fimc2_variant_s5p,
        },
        .num_entities = 3,
        .lclk_frequency = 133000000UL,
};

/* S5PV210, S5PC110 */
static struct samsung_fimc_driverdata fimc_drvdata_s5pv210 = {
        .variant = {
                [0] = &fimc0_variant_s5pv210,
                [1] = &fimc1_variant_s5pv210,
                [2] = &fimc2_variant_s5pv210,
        },
        .num_entities = 3,
        .lclk_frequency = 166000000UL,
};

/* S5PV310, S5PC210 */
static struct samsung_fimc_driverdata fimc_drvdata_exynos4 = {
        .variant = {
                [0] = &fimc0_variant_exynos4,
                [1] = &fimc0_variant_exynos4,
                [2] = &fimc0_variant_exynos4,
                [3] = &fimc3_variant_exynos4,
        },
        .num_entities = 4,
        .lclk_frequency = 166000000UL,
};

static struct platform_device_id fimc_driver_ids[] = {
        {
                .name = "s5p-fimc",
                .driver_data = (unsigned long)&fimc_drvdata_s5p,
        }, {
                .name = "s5pv210-fimc",
                .driver_data = (unsigned long)&fimc_drvdata_s5pv210,
        }, {
                .name = "exynos4-fimc",
                .driver_data = (unsigned long)&fimc_drvdata_exynos4,
        },
        {},
};
MODULE_DEVICE_TABLE(platform, fimc_driver_ids);

static const struct dev_pm_ops fimc_pm_ops = {
        SET_SYSTEM_SLEEP_PM_OPS(fimc_suspend, fimc_resume)
        SET_RUNTIME_PM_OPS(fimc_runtime_suspend, fimc_runtime_resume, NULL)
};

static struct platform_driver fimc_driver = {
        .probe = fimc_probe,
        .remove = __devexit_p(fimc_remove),
        .id_table = fimc_driver_ids,
        .driver = {
                .name = FIMC_MODULE_NAME,
                .owner = THIS_MODULE,
                .pm = &fimc_pm_ops,
        }
};

int __init fimc_register_driver(void)
{
        return platform_driver_probe(&fimc_driver, fimc_probe);
}

void __exit fimc_unregister_driver(void)
{
        platform_driver_unregister(&fimc_driver);
}