mcam-core.c

  1. /*
  2. * The Marvell camera core. This device appears in a number of settings,
  3. * so it needs platform-specific support outside of the core.
  4. *
  5. * Copyright 2011 Jonathan Corbet corbet@lwn.net
  6. */
  7. #include <linux/kernel.h>
  8. #include <linux/module.h>
  9. #include <linux/fs.h>
  10. #include <linux/mm.h>
  11. #include <linux/i2c.h>
  12. #include <linux/interrupt.h>
  13. #include <linux/spinlock.h>
  14. #include <linux/slab.h>
  15. #include <linux/device.h>
  16. #include <linux/wait.h>
  17. #include <linux/list.h>
  18. #include <linux/dma-mapping.h>
  19. #include <linux/delay.h>
  20. #include <linux/vmalloc.h>
  21. #include <linux/io.h>
  22. #include <linux/videodev2.h>
  23. #include <media/v4l2-device.h>
  24. #include <media/v4l2-ioctl.h>
  25. #include <media/v4l2-ctrls.h>
  26. #include <media/ov7670.h>
  27. #include <media/videobuf2-vmalloc.h>
  28. #include <media/videobuf2-dma-contig.h>
  29. #include <media/videobuf2-dma-sg.h>
  30. #include "mcam-core.h"
  31. #ifdef MCAM_MODE_VMALLOC
  32. /*
  33. * Internal DMA buffer management. Since the controller cannot do S/G I/O,
  34. * we must have physically contiguous buffers to bring frames into.
  35. * These parameters control how many buffers we use, whether we
  36. * allocate them at load time (better chance of success, but nails down
  37. * memory) or when somebody tries to use the camera (riskier), and,
  38. * for load-time allocation, how big they should be.
  39. *
  40. * The controller can cycle through three buffers. We could use
  41. * more by flipping pointers around, but it probably makes little
  42. * sense.
  43. */
  44. static bool alloc_bufs_at_read;
  45. module_param(alloc_bufs_at_read, bool, 0444);
  46. MODULE_PARM_DESC(alloc_bufs_at_read,
  47. "Non-zero value causes DMA buffers to be allocated when the "
  48. "video capture device is read, rather than at module load "
  49. "time. This saves memory, but decreases the chances of "
  50. "successfully getting those buffers. This parameter is "
  51. "only used in the vmalloc buffer mode");
  52. static uint n_dma_bufs = 3;
  53. module_param(n_dma_bufs, uint, 0644);
  54. MODULE_PARM_DESC(n_dma_bufs,
  55. "The number of DMA buffers to allocate. Can be either two "
  56. "(saves memory, makes timing tighter) or three.");
  57. static uint dma_buf_size = VGA_WIDTH * VGA_HEIGHT * 2; /* Worst case */
  58. module_param(dma_buf_size, uint, 0444);
  59. MODULE_PARM_DESC(dma_buf_size,
  60. "The size of the allocated DMA buffers. If actual operating "
  61. "parameters require larger buffers, an attempt to reallocate "
  62. "will be made.");
  63. #else /* MCAM_MODE_VMALLOC */
  64. static const bool alloc_bufs_at_read = false;
  65. static const int n_dma_bufs = 3; /* Used by S/G_PARM */
  66. #endif /* MCAM_MODE_VMALLOC */
  67. static bool flip;
  68. module_param(flip, bool, 0444);
  69. MODULE_PARM_DESC(flip,
  70. "If set, the sensor will be instructed to flip the image "
  71. "vertically.");
  72. static int buffer_mode = -1;
  73. module_param(buffer_mode, int, 0444);
  74. MODULE_PARM_DESC(buffer_mode,
  75. "Set the buffer mode to be used; default is to go with what "
  76. "the platform driver asks for. Set to 0 for vmalloc, 1 for "
  77. "DMA contiguous.");
  78. /*
  79. * Status flags. Always manipulated with bit operations.
  80. */
  81. #define CF_BUF0_VALID 0 /* Buffers valid - first three */
  82. #define CF_BUF1_VALID 1
  83. #define CF_BUF2_VALID 2
  84. #define CF_DMA_ACTIVE 3 /* A frame is incoming */
  85. #define CF_CONFIG_NEEDED 4 /* Must configure hardware */
  86. #define CF_SINGLE_BUFFER 5 /* Running with a single buffer */
  87. #define CF_SG_RESTART 6 /* SG restart needed */
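/* Shorthand for invoking an operation on the attached sensor subdev. */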
  88. #define sensor_call(cam, o, f, args...) \
  89. v4l2_subdev_call(cam->sensor, o, f, ##args)
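/*
 * The pixel formats this driver can deliver, each paired with the media
 * bus code that must be requested from the sensor to produce it.
 */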
  90. static struct mcam_format_struct {
  91. __u8 *desc;
  92. __u32 pixelformat;
  93. int bpp; /* Bytes per pixel */
  94. enum v4l2_mbus_pixelcode mbus_code;
  95. } mcam_formats[] = {
  96. {
  97. .desc = "YUYV 4:2:2",
  98. .pixelformat = V4L2_PIX_FMT_YUYV,
  99. .mbus_code = V4L2_MBUS_FMT_YUYV8_2X8,
  100. .bpp = 2,
  101. },
  102. {
  103. .desc = "RGB 444",
  104. .pixelformat = V4L2_PIX_FMT_RGB444,
  105. .mbus_code = V4L2_MBUS_FMT_RGB444_2X8_PADHI_LE,
  106. .bpp = 2,
  107. },
  108. {
  109. .desc = "RGB 565",
  110. .pixelformat = V4L2_PIX_FMT_RGB565,
  111. .mbus_code = V4L2_MBUS_FMT_RGB565_2X8_LE,
  112. .bpp = 2,
  113. },
  114. {
  115. .desc = "Raw RGB Bayer",
  116. .pixelformat = V4L2_PIX_FMT_SBGGR8,
  117. .mbus_code = V4L2_MBUS_FMT_SBGGR8_1X8,
  118. .bpp = 1
  119. },
  120. };
  121. #define N_MCAM_FMTS ARRAY_SIZE(mcam_formats)
  122. static struct mcam_format_struct *mcam_find_format(u32 pixelformat)
  123. {
  124. unsigned i;
  125. for (i = 0; i < N_MCAM_FMTS; i++)
  126. if (mcam_formats[i].pixelformat == pixelformat)
  127. return mcam_formats + i;
  128. /* Not found? Then return the first format. */
  129. return mcam_formats;
  130. }
  131. /*
  132. * The default format we use until somebody says otherwise.
  133. */
  134. static const struct v4l2_pix_format mcam_def_pix_format = {
  135. .width = VGA_WIDTH,
  136. .height = VGA_HEIGHT,
  137. .pixelformat = V4L2_PIX_FMT_YUYV,
  138. .field = V4L2_FIELD_NONE,
  139. .bytesperline = VGA_WIDTH*2,
  140. .sizeimage = VGA_WIDTH*VGA_HEIGHT*2,
  141. };
  142. static const enum v4l2_mbus_pixelcode mcam_def_mbus_code =
  143. V4L2_MBUS_FMT_YUYV8_2X8;
  144. /*
  145. * The two-word DMA descriptor format used by the Armada 610 and the like. There
  146. * is a three-word format as well (set C1_DESC_3WORD) where the third
  147. * word is a pointer to the next descriptor, but we don't use it. Two-word
  148. * descriptors have to be contiguous in memory.
  149. */
  150. struct mcam_dma_desc {
  151. u32 dma_addr;
  152. u32 segment_len;
  153. };
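/*
 * These descriptor arrays are filled from the DMA scatterlist in
 * mcam_vb_sg_buf_prepare() and handed to the controller through
 * REG_DMA_DESC_Y in mcam_sg_next_buffer().
 */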
  154. /*
  155. * Our buffer type for working with videobuf2. Note that the vb2
  156. * developers have decreed that struct vb2_buffer must be at the
  157. * beginning of this structure.
  158. */
  159. struct mcam_vb_buffer {
  160. struct vb2_buffer vb_buf;
  161. struct list_head queue;
  162. struct mcam_dma_desc *dma_desc; /* Descriptor virtual address */
  163. dma_addr_t dma_desc_pa; /* Descriptor physical address */
  164. int dma_desc_nent; /* Number of mapped descriptors */
  165. };
  166. static inline struct mcam_vb_buffer *vb_to_mvb(struct vb2_buffer *vb)
  167. {
  168. return container_of(vb, struct mcam_vb_buffer, vb_buf);
  169. }
  170. /*
  171. * Hand a completed buffer back to user space.
  172. */
  173. static void mcam_buffer_done(struct mcam_camera *cam, int frame,
  174. struct vb2_buffer *vbuf)
  175. {
  176. vbuf->v4l2_buf.bytesused = cam->pix_format.sizeimage;
  177. vbuf->v4l2_buf.sequence = cam->buf_seq[frame];
  178. vb2_set_plane_payload(vbuf, 0, cam->pix_format.sizeimage);
  179. vb2_buffer_done(vbuf, VB2_BUF_STATE_DONE);
  180. }
  181. /*
  182. * Debugging and related.
  183. */
  184. #define cam_err(cam, fmt, arg...) \
  185. dev_err((cam)->dev, fmt, ##arg)
  186. #define cam_warn(cam, fmt, arg...) \
  187. dev_warn((cam)->dev, fmt, ##arg)
  188. #define cam_dbg(cam, fmt, arg...) \
  189. dev_dbg((cam)->dev, fmt, ##arg)
  190. /*
  191. * Flag manipulation helpers
  192. */
  193. static void mcam_reset_buffers(struct mcam_camera *cam)
  194. {
  195. int i;
  196. cam->next_buf = -1;
  197. for (i = 0; i < cam->nbufs; i++)
  198. clear_bit(i, &cam->flags);
  199. }
  200. static inline int mcam_needs_config(struct mcam_camera *cam)
  201. {
  202. return test_bit(CF_CONFIG_NEEDED, &cam->flags);
  203. }
  204. static void mcam_set_config_needed(struct mcam_camera *cam, int needed)
  205. {
  206. if (needed)
  207. set_bit(CF_CONFIG_NEEDED, &cam->flags);
  208. else
  209. clear_bit(CF_CONFIG_NEEDED, &cam->flags);
  210. }
  211. /* ------------------------------------------------------------------- */
  212. /*
  213. * Make the controller start grabbing images. Everything must
  214. * be set up before doing this.
  215. */
  216. static void mcam_ctlr_start(struct mcam_camera *cam)
  217. {
  218. /* set_bit performs a read, so no other barrier should be
  219. needed here */
  220. mcam_reg_set_bit(cam, REG_CTRL0, C0_ENABLE);
  221. }
  222. static void mcam_ctlr_stop(struct mcam_camera *cam)
  223. {
  224. mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
  225. }
  226. /* ------------------------------------------------------------------- */
  227. #ifdef MCAM_MODE_VMALLOC
  228. /*
  229. * Code specific to the vmalloc buffer mode.
  230. */
  231. /*
  232. * Allocate in-kernel DMA buffers for vmalloc mode.
  233. */
  234. static int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
  235. {
  236. int i;
  237. mcam_set_config_needed(cam, 1);
  238. if (loadtime)
  239. cam->dma_buf_size = dma_buf_size;
  240. else
  241. cam->dma_buf_size = cam->pix_format.sizeimage;
  242. if (n_dma_bufs > 3)
  243. n_dma_bufs = 3;
  244. cam->nbufs = 0;
  245. for (i = 0; i < n_dma_bufs; i++) {
  246. cam->dma_bufs[i] = dma_alloc_coherent(cam->dev,
  247. cam->dma_buf_size, cam->dma_handles + i,
  248. GFP_KERNEL);
  249. if (cam->dma_bufs[i] == NULL) {
  250. cam_warn(cam, "Failed to allocate DMA buffer\n");
  251. break;
  252. }
  253. (cam->nbufs)++;
  254. }
  255. switch (cam->nbufs) {
  256. case 1:
  257. dma_free_coherent(cam->dev, cam->dma_buf_size,
  258. cam->dma_bufs[0], cam->dma_handles[0]);
  259. cam->nbufs = 0;
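/* Fall through: a single buffer is no more usable than none. */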
  260. case 0:
  261. cam_err(cam, "Insufficient DMA buffers, cannot operate\n");
  262. return -ENOMEM;
  263. case 2:
  264. if (n_dma_bufs > 2)
  265. cam_warn(cam, "Will limp along with only 2 buffers\n");
  266. break;
  267. }
  268. return 0;
  269. }
  270. static void mcam_free_dma_bufs(struct mcam_camera *cam)
  271. {
  272. int i;
  273. for (i = 0; i < cam->nbufs; i++) {
  274. dma_free_coherent(cam->dev, cam->dma_buf_size,
  275. cam->dma_bufs[i], cam->dma_handles[i]);
  276. cam->dma_bufs[i] = NULL;
  277. }
  278. cam->nbufs = 0;
  279. }
  280. /*
  281. * Set up DMA buffers when operating in vmalloc mode
  282. */
  283. static void mcam_ctlr_dma_vmalloc(struct mcam_camera *cam)
  284. {
  285. /*
  286. * Store the first two Y buffers (we aren't supporting
  287. * planar formats for now, so no UV bufs). Then either
  288. * set the third if it exists, or tell the controller
  289. * to just use two.
  290. */
  291. mcam_reg_write(cam, REG_Y0BAR, cam->dma_handles[0]);
  292. mcam_reg_write(cam, REG_Y1BAR, cam->dma_handles[1]);
  293. if (cam->nbufs > 2) {
  294. mcam_reg_write(cam, REG_Y2BAR, cam->dma_handles[2]);
  295. mcam_reg_clear_bit(cam, REG_CTRL1, C1_TWOBUFS);
  296. } else
  297. mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
  298. if (cam->chip_id == MCAM_CAFE)
  299. mcam_reg_write(cam, REG_UBAR, 0); /* 32 bits only */
  300. }
  301. /*
  302. * Copy data out to user space in the vmalloc case
  303. */
  304. static void mcam_frame_tasklet(unsigned long data)
  305. {
  306. struct mcam_camera *cam = (struct mcam_camera *) data;
  307. int i;
  308. unsigned long flags;
  309. struct mcam_vb_buffer *buf;
  310. spin_lock_irqsave(&cam->dev_lock, flags);
  311. for (i = 0; i < cam->nbufs; i++) {
  312. int bufno = cam->next_buf;
  313. if (cam->state != S_STREAMING || bufno < 0)
  314. break; /* I/O got stopped */
  315. if (++(cam->next_buf) >= cam->nbufs)
  316. cam->next_buf = 0;
  317. if (!test_bit(bufno, &cam->flags))
  318. continue;
  319. if (list_empty(&cam->buffers)) {
  320. cam->frame_state.singles++;
  321. break; /* Leave it valid, hope for better later */
  322. }
  323. cam->frame_state.delivered++;
  324. clear_bit(bufno, &cam->flags);
  325. buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
  326. queue);
  327. list_del_init(&buf->queue);
  328. /*
  329. * Drop the lock during the big copy. This *should* be safe...
  330. */
  331. spin_unlock_irqrestore(&cam->dev_lock, flags);
  332. memcpy(vb2_plane_vaddr(&buf->vb_buf, 0), cam->dma_bufs[bufno],
  333. cam->pix_format.sizeimage);
  334. mcam_buffer_done(cam, bufno, &buf->vb_buf);
  335. spin_lock_irqsave(&cam->dev_lock, flags);
  336. }
  337. spin_unlock_irqrestore(&cam->dev_lock, flags);
  338. }
  339. /*
  340. * Make sure our allocated buffers are up to the task.
  341. */
  342. static int mcam_check_dma_buffers(struct mcam_camera *cam)
  343. {
  344. if (cam->nbufs > 0 && cam->dma_buf_size < cam->pix_format.sizeimage)
  345. mcam_free_dma_bufs(cam);
  346. if (cam->nbufs == 0)
  347. return mcam_alloc_dma_bufs(cam, 0);
  348. return 0;
  349. }
  350. static void mcam_vmalloc_done(struct mcam_camera *cam, int frame)
  351. {
  352. tasklet_schedule(&cam->s_tasklet);
  353. }
  354. #else /* MCAM_MODE_VMALLOC */
  355. static inline int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
  356. {
  357. return 0;
  358. }
  359. static inline void mcam_free_dma_bufs(struct mcam_camera *cam)
  360. {
  361. return;
  362. }
  363. static inline int mcam_check_dma_buffers(struct mcam_camera *cam)
  364. {
  365. return 0;
  366. }
  367. #endif /* MCAM_MODE_VMALLOC */
  368. #ifdef MCAM_MODE_DMA_CONTIG
  369. /* ---------------------------------------------------------------------- */
  370. /*
  371. * DMA-contiguous code.
  372. */
  373. /*
  374. * Set up a contiguous buffer for the given frame. Here also is where
  375. * the underrun strategy is set: if there is no buffer available, reuse
  376. * the buffer from the other BAR and set the CF_SINGLE_BUFFER flag to
  377. * keep the interrupt handler from giving that buffer back to user
  378. * space. In this way, we always have a buffer to DMA to and don't
  379. * have to try to play games stopping and restarting the controller.
  380. */
  381. static void mcam_set_contig_buffer(struct mcam_camera *cam, int frame)
  382. {
  383. struct mcam_vb_buffer *buf;
  384. /*
  385. * If there are no available buffers, go into single mode
  386. */
  387. if (list_empty(&cam->buffers)) {
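/* Reuse whichever buffer the other BAR (frame ^ 1) is currently using. */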
  388. buf = cam->vb_bufs[frame ^ 0x1];
  389. cam->vb_bufs[frame] = buf;
  390. mcam_reg_write(cam, frame == 0 ? REG_Y0BAR : REG_Y1BAR,
  391. vb2_dma_contig_plane_dma_addr(&buf->vb_buf, 0));
  392. set_bit(CF_SINGLE_BUFFER, &cam->flags);
  393. cam->frame_state.singles++;
  394. return;
  395. }
  396. /*
  397. * OK, we have a buffer we can use.
  398. */
  399. buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);
  400. list_del_init(&buf->queue);
  401. mcam_reg_write(cam, frame == 0 ? REG_Y0BAR : REG_Y1BAR,
  402. vb2_dma_contig_plane_dma_addr(&buf->vb_buf, 0));
  403. cam->vb_bufs[frame] = buf;
  404. clear_bit(CF_SINGLE_BUFFER, &cam->flags);
  405. }
  406. /*
  407. * Initial B_DMA_contig setup.
  408. */
  409. static void mcam_ctlr_dma_contig(struct mcam_camera *cam)
  410. {
  411. mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
  412. cam->nbufs = 2;
  413. mcam_set_contig_buffer(cam, 0);
  414. mcam_set_contig_buffer(cam, 1);
  415. }
  416. /*
  417. * Frame completion handling.
  418. */
  419. static void mcam_dma_contig_done(struct mcam_camera *cam, int frame)
  420. {
  421. struct mcam_vb_buffer *buf = cam->vb_bufs[frame];
  422. if (!test_bit(CF_SINGLE_BUFFER, &cam->flags)) {
  423. cam->frame_state.delivered++;
  424. mcam_buffer_done(cam, frame, &buf->vb_buf);
  425. }
  426. mcam_set_contig_buffer(cam, frame);
  427. }
  428. #endif /* MCAM_MODE_DMA_CONTIG */
  429. #ifdef MCAM_MODE_DMA_SG
  430. /* ---------------------------------------------------------------------- */
  431. /*
  432. * Scatter/gather-specific code.
  433. */
  434. /*
  435. * Set up the next buffer for S/G I/O; caller should be sure that
  436. * the controller is stopped and a buffer is available.
  437. */
  438. static void mcam_sg_next_buffer(struct mcam_camera *cam)
  439. {
  440. struct mcam_vb_buffer *buf;
  441. buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);
  442. list_del_init(&buf->queue);
  443. /*
  444. * Very Bad Not Good Things happen if you don't clear
  445. * C1_DESC_ENA before making any descriptor changes.
  446. */
  447. mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_ENA);
  448. mcam_reg_write(cam, REG_DMA_DESC_Y, buf->dma_desc_pa);
  449. mcam_reg_write(cam, REG_DESC_LEN_Y,
  450. buf->dma_desc_nent*sizeof(struct mcam_dma_desc));
  451. mcam_reg_write(cam, REG_DESC_LEN_U, 0);
  452. mcam_reg_write(cam, REG_DESC_LEN_V, 0);
  453. mcam_reg_set_bit(cam, REG_CTRL1, C1_DESC_ENA);
  454. cam->vb_bufs[0] = buf;
  455. }
  456. /*
  457. * Initial B_DMA_sg setup
  458. */
  459. static void mcam_ctlr_dma_sg(struct mcam_camera *cam)
  460. {
  461. /*
  462. * The list-empty condition can hit us at resume time
  463. * if the buffer list was empty when the system was suspended.
  464. */
  465. if (list_empty(&cam->buffers)) {
  466. set_bit(CF_SG_RESTART, &cam->flags);
  467. return;
  468. }
  469. mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_3WORD);
  470. mcam_sg_next_buffer(cam);
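/*
 * Only one descriptor chain is in use here, but the controller still
 * signals completion on its three frame interrupts in turn; nbufs = 3
 * keeps the EOF scan in mccic_irq() looking at all of them.
 */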
  471. cam->nbufs = 3;
  472. }
  473. /*
  474. * Frame completion with S/G is trickier. We can't muck with
  475. * a descriptor chain on the fly, since the controller buffers it
  476. * internally. So we have to actually stop and restart; Marvell
  477. * says this is the way to do it.
  478. *
  479. * Of course, stopping is easier said than done; experience shows
  480. * that the controller can start a frame *after* C0_ENABLE has been
  481. * cleared. So when running in S/G mode, the controller is "stopped"
  482. * on receipt of the start-of-frame interrupt. That means we can
  483. * safely change the DMA descriptor array here and restart things
  484. * (assuming there's another buffer waiting to go).
  485. */
  486. static void mcam_dma_sg_done(struct mcam_camera *cam, int frame)
  487. {
  488. struct mcam_vb_buffer *buf = cam->vb_bufs[0];
  489. /*
  490. * If we're no longer supposed to be streaming, don't do anything.
  491. */
  492. if (cam->state != S_STREAMING)
  493. return;
  494. /*
  495. * If we have another buffer available, put it in and
  496. * restart the engine.
  497. */
  498. if (!list_empty(&cam->buffers)) {
  499. mcam_sg_next_buffer(cam);
  500. mcam_ctlr_start(cam);
  501. /*
  502. * Otherwise set CF_SG_RESTART and the controller will
  503. * be restarted once another buffer shows up.
  504. */
  505. } else {
  506. set_bit(CF_SG_RESTART, &cam->flags);
  507. cam->frame_state.singles++;
  508. cam->vb_bufs[0] = NULL;
  509. }
  510. /*
  511. * Now we can give the completed frame back to user space.
  512. */
  513. cam->frame_state.delivered++;
  514. mcam_buffer_done(cam, frame, &buf->vb_buf);
  515. }
  516. /*
  517. * Scatter/gather mode requires stopping the controller between
  518. * frames so we can put in a new DMA descriptor array. If no new
  519. * buffer exists at frame completion, the controller is left stopped;
  520. * this function is charged with getting things going again.
  521. */
  522. static void mcam_sg_restart(struct mcam_camera *cam)
  523. {
  524. mcam_ctlr_dma_sg(cam);
  525. mcam_ctlr_start(cam);
  526. clear_bit(CF_SG_RESTART, &cam->flags);
  527. }
  528. #else /* MCAM_MODE_DMA_SG */
  529. static inline void mcam_sg_restart(struct mcam_camera *cam)
  530. {
  531. return;
  532. }
  533. #endif /* MCAM_MODE_DMA_SG */
  534. /* ---------------------------------------------------------------------- */
  535. /*
  536. * Buffer-mode-independent controller code.
  537. */
  538. /*
  539. * Image format setup
  540. */
  541. static void mcam_ctlr_image(struct mcam_camera *cam)
  542. {
  543. int imgsz;
  544. struct v4l2_pix_format *fmt = &cam->pix_format;
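/*
 * IMGSIZE packs the frame height (shifted by IMGSZ_V_SHIFT) and the
 * line length in bytes into a single register.
 */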
  545. imgsz = ((fmt->height << IMGSZ_V_SHIFT) & IMGSZ_V_MASK) |
  546. (fmt->bytesperline & IMGSZ_H_MASK);
  547. mcam_reg_write(cam, REG_IMGSIZE, imgsz);
  548. mcam_reg_write(cam, REG_IMGOFFSET, 0);
  549. /* YPITCH just drops the last two bits */
  550. mcam_reg_write_mask(cam, REG_IMGPITCH, fmt->bytesperline,
  551. IMGP_YP_MASK);
  552. /*
  553. * Tell the controller about the image format we are using.
  554. */
  555. switch (cam->pix_format.pixelformat) {
  556. case V4L2_PIX_FMT_YUYV:
  557. mcam_reg_write_mask(cam, REG_CTRL0,
  558. C0_DF_YUV|C0_YUV_PACKED|C0_YUVE_YUYV,
  559. C0_DF_MASK);
  560. break;
  561. case V4L2_PIX_FMT_RGB444:
  562. mcam_reg_write_mask(cam, REG_CTRL0,
  563. C0_DF_RGB|C0_RGBF_444|C0_RGB4_XRGB,
  564. C0_DF_MASK);
  565. /* Alpha value? */
  566. break;
  567. case V4L2_PIX_FMT_RGB565:
  568. mcam_reg_write_mask(cam, REG_CTRL0,
  569. C0_DF_RGB|C0_RGBF_565|C0_RGB5_BGGR,
  570. C0_DF_MASK);
  571. break;
  572. default:
  573. cam_err(cam, "Unknown format %x\n", cam->pix_format.pixelformat);
  574. break;
  575. }
  576. /*
  577. * Make sure it knows we want to use hsync/vsync.
  578. */
  579. mcam_reg_write_mask(cam, REG_CTRL0, C0_SIF_HVSYNC,
  580. C0_SIFM_MASK);
  581. }
  582. /*
  583. * Configure the controller for operation; caller holds the
  584. * device mutex.
  585. */
  586. static int mcam_ctlr_configure(struct mcam_camera *cam)
  587. {
  588. unsigned long flags;
  589. spin_lock_irqsave(&cam->dev_lock, flags);
  590. clear_bit(CF_SG_RESTART, &cam->flags);
  591. cam->dma_setup(cam);
  592. mcam_ctlr_image(cam);
  593. mcam_set_config_needed(cam, 0);
  594. spin_unlock_irqrestore(&cam->dev_lock, flags);
  595. return 0;
  596. }
  597. static void mcam_ctlr_irq_enable(struct mcam_camera *cam)
  598. {
  599. /*
  600. * Clear any pending interrupts, since we do not
  601. * expect to have I/O active prior to enabling.
  602. */
  603. mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS);
  604. mcam_reg_set_bit(cam, REG_IRQMASK, FRAMEIRQS);
  605. }
  606. static void mcam_ctlr_irq_disable(struct mcam_camera *cam)
  607. {
  608. mcam_reg_clear_bit(cam, REG_IRQMASK, FRAMEIRQS);
  609. }
  610. static void mcam_ctlr_init(struct mcam_camera *cam)
  611. {
  612. unsigned long flags;
  613. spin_lock_irqsave(&cam->dev_lock, flags);
  614. /*
  615. * Make sure it's not powered down.
  616. */
  617. mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
  618. /*
  619. * Turn off the enable bit. It sure should be off anyway,
  620. * but it's good to be sure.
  621. */
  622. mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
  623. /*
  624. * Clock the sensor appropriately. Controller clock should
  625. * be 48 MHz; the sensor's "typical" value is half that.
  626. */
  627. mcam_reg_write_mask(cam, REG_CLKCTRL, 2, CLK_DIV_MASK);
  628. spin_unlock_irqrestore(&cam->dev_lock, flags);
  629. }
  630. /*
  631. * Stop the controller, and don't return until we're really sure that no
  632. * further DMA is going on.
  633. */
  634. static void mcam_ctlr_stop_dma(struct mcam_camera *cam)
  635. {
  636. unsigned long flags;
  637. /*
  638. * Theory: stop the camera controller (whether it is operating
  639. * or not). Delay briefly just in case we race with the SOF
  640. * interrupt, then wait until no DMA is active.
  641. */
  642. spin_lock_irqsave(&cam->dev_lock, flags);
  643. clear_bit(CF_SG_RESTART, &cam->flags);
  644. mcam_ctlr_stop(cam);
  645. cam->state = S_IDLE;
  646. spin_unlock_irqrestore(&cam->dev_lock, flags);
  647. /*
  648. * This is a brutally long sleep, but experience shows that
  649. * it can take the controller a while to get the message that
  650. * it needs to stop grabbing frames. In particular, we can
  651. * sometimes (on mmp) get a frame at the end WITHOUT the
  652. * start-of-frame indication.
  653. */
  654. msleep(150);
  655. if (test_bit(CF_DMA_ACTIVE, &cam->flags))
  656. cam_err(cam, "Timeout waiting for DMA to end\n");
  657. /* This would be bad news - what now? */
  658. spin_lock_irqsave(&cam->dev_lock, flags);
  659. mcam_ctlr_irq_disable(cam);
  660. spin_unlock_irqrestore(&cam->dev_lock, flags);
  661. }
  662. /*
  663. * Power up and down.
  664. */
  665. static void mcam_ctlr_power_up(struct mcam_camera *cam)
  666. {
  667. unsigned long flags;
  668. spin_lock_irqsave(&cam->dev_lock, flags);
  669. cam->plat_power_up(cam);
  670. mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
  671. spin_unlock_irqrestore(&cam->dev_lock, flags);
  672. msleep(5); /* Just to be sure */
  673. }
  674. static void mcam_ctlr_power_down(struct mcam_camera *cam)
  675. {
  676. unsigned long flags;
  677. spin_lock_irqsave(&cam->dev_lock, flags);
  678. /*
  679. * School of hard knocks department: be sure we do any register
  680. * twiddling on the controller *before* calling the platform
  681. * power down routine.
  682. */
  683. mcam_reg_set_bit(cam, REG_CTRL1, C1_PWRDWN);
  684. cam->plat_power_down(cam);
  685. spin_unlock_irqrestore(&cam->dev_lock, flags);
  686. }
  687. /* -------------------------------------------------------------------- */
  688. /*
  689. * Communications with the sensor.
  690. */
  691. static int __mcam_cam_reset(struct mcam_camera *cam)
  692. {
  693. return sensor_call(cam, core, reset, 0);
  694. }
  695. /*
  696. * We have found the sensor on the I2C bus. Let's try to have a
  697. * conversation.
  698. */
  699. static int mcam_cam_init(struct mcam_camera *cam)
  700. {
  701. int ret;
  702. mutex_lock(&cam->s_mutex);
  703. if (cam->state != S_NOTREADY)
  704. cam_warn(cam, "Cam init with device in funky state %d\n",
  705. cam->state);
  706. ret = __mcam_cam_reset(cam);
  707. /* Get/set parameters? */
  708. cam->state = S_IDLE;
  709. mcam_ctlr_power_down(cam);
  710. mutex_unlock(&cam->s_mutex);
  711. return ret;
  712. }
  713. /*
  714. * Configure the sensor to match the parameters we have. Caller should
  715. * hold s_mutex
  716. */
  717. static int mcam_cam_set_flip(struct mcam_camera *cam)
  718. {
  719. struct v4l2_control ctrl;
  720. memset(&ctrl, 0, sizeof(ctrl));
  721. ctrl.id = V4L2_CID_VFLIP;
  722. ctrl.value = flip;
  723. return sensor_call(cam, core, s_ctrl, &ctrl);
  724. }
  725. static int mcam_cam_configure(struct mcam_camera *cam)
  726. {
  727. struct v4l2_mbus_framefmt mbus_fmt;
  728. int ret;
  729. v4l2_fill_mbus_format(&mbus_fmt, &cam->pix_format, cam->mbus_code);
  730. ret = sensor_call(cam, core, init, 0);
  731. if (ret == 0)
  732. ret = sensor_call(cam, video, s_mbus_fmt, &mbus_fmt);
  733. /*
  734. * OV7670 does weird things if flip is set *before* format...
  735. */
  736. ret += mcam_cam_set_flip(cam);
  737. return ret;
  738. }
  739. /*
  740. * Get everything ready, and start grabbing frames.
  741. */
  742. static int mcam_read_setup(struct mcam_camera *cam)
  743. {
  744. int ret;
  745. unsigned long flags;
  746. /*
  747. * Configuration. If we still don't have DMA buffers,
  748. * make one last, desperate attempt.
  749. */
  750. if (cam->buffer_mode == B_vmalloc && cam->nbufs == 0 &&
  751. mcam_alloc_dma_bufs(cam, 0))
  752. return -ENOMEM;
  753. if (mcam_needs_config(cam)) {
  754. mcam_cam_configure(cam);
  755. ret = mcam_ctlr_configure(cam);
  756. if (ret)
  757. return ret;
  758. }
  759. /*
  760. * Turn it loose.
  761. */
  762. spin_lock_irqsave(&cam->dev_lock, flags);
  763. clear_bit(CF_DMA_ACTIVE, &cam->flags);
  764. mcam_reset_buffers(cam);
  765. mcam_ctlr_irq_enable(cam);
  766. cam->state = S_STREAMING;
  767. if (!test_bit(CF_SG_RESTART, &cam->flags))
  768. mcam_ctlr_start(cam);
  769. spin_unlock_irqrestore(&cam->dev_lock, flags);
  770. return 0;
  771. }
  772. /* ----------------------------------------------------------------------- */
  773. /*
  774. * Videobuf2 interface code.
  775. */
  776. static int mcam_vb_queue_setup(struct vb2_queue *vq,
  777. const struct v4l2_format *fmt, unsigned int *nbufs,
  778. unsigned int *num_planes, unsigned int sizes[],
  779. void *alloc_ctxs[])
  780. {
  781. struct mcam_camera *cam = vb2_get_drv_priv(vq);
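/*
 * DMA-contig parks a buffer in each of the two BARs, so presumably three
 * buffers are the minimum needed to keep user space supplied; the other
 * modes can manage with two.
 */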
  782. int minbufs = (cam->buffer_mode == B_DMA_contig) ? 3 : 2;
  783. sizes[0] = cam->pix_format.sizeimage;
  784. *num_planes = 1; /* Someday we will have to support planar formats... */
  785. if (*nbufs < minbufs)
  786. *nbufs = minbufs;
  787. if (cam->buffer_mode == B_DMA_contig)
  788. alloc_ctxs[0] = cam->vb_alloc_ctx;
  789. return 0;
  790. }
  791. static void mcam_vb_buf_queue(struct vb2_buffer *vb)
  792. {
  793. struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
  794. struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
  795. unsigned long flags;
  796. int start;
  797. spin_lock_irqsave(&cam->dev_lock, flags);
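/*
 * If we have been waiting in S_BUFWAIT, the arrival of another buffer is
 * the cue to start streaming; that happens below, outside the lock.
 */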
  798. start = (cam->state == S_BUFWAIT) && !list_empty(&cam->buffers);
  799. list_add(&mvb->queue, &cam->buffers);
  800. if (cam->state == S_STREAMING && test_bit(CF_SG_RESTART, &cam->flags))
  801. mcam_sg_restart(cam);
  802. spin_unlock_irqrestore(&cam->dev_lock, flags);
  803. if (start)
  804. mcam_read_setup(cam);
  805. }
  806. /*
  807. * vb2 uses these to release the mutex when waiting in dqbuf. I'm
  808. * not actually sure we need to do this (I'm not sure that vb2_dqbuf() needs
  809. * to be called with the mutex held), but better safe than sorry.
  810. */
  811. static void mcam_vb_wait_prepare(struct vb2_queue *vq)
  812. {
  813. struct mcam_camera *cam = vb2_get_drv_priv(vq);
  814. mutex_unlock(&cam->s_mutex);
  815. }
  816. static void mcam_vb_wait_finish(struct vb2_queue *vq)
  817. {
  818. struct mcam_camera *cam = vb2_get_drv_priv(vq);
  819. mutex_lock(&cam->s_mutex);
  820. }
  821. /*
  822. * These need to be called with the mutex held from vb2
  823. */
  824. static int mcam_vb_start_streaming(struct vb2_queue *vq, unsigned int count)
  825. {
  826. struct mcam_camera *cam = vb2_get_drv_priv(vq);
  827. if (cam->state != S_IDLE) {
  828. INIT_LIST_HEAD(&cam->buffers);
  829. return -EINVAL;
  830. }
  831. cam->sequence = 0;
  832. /*
  833. * Videobuf2 sneakily hoards all the buffers and won't
  834. * give them to us until *after* streaming starts. But
  835. * we can't actually start streaming until we have a
  836. * destination. So go into a wait state and hope they
  837. * give us buffers soon.
  838. */
  839. if (cam->buffer_mode != B_vmalloc && list_empty(&cam->buffers)) {
  840. cam->state = S_BUFWAIT;
  841. return 0;
  842. }
  843. return mcam_read_setup(cam);
  844. }
  845. static int mcam_vb_stop_streaming(struct vb2_queue *vq)
  846. {
  847. struct mcam_camera *cam = vb2_get_drv_priv(vq);
  848. unsigned long flags;
  849. if (cam->state == S_BUFWAIT) {
  850. /* They never gave us buffers */
  851. cam->state = S_IDLE;
  852. return 0;
  853. }
  854. if (cam->state != S_STREAMING)
  855. return -EINVAL;
  856. mcam_ctlr_stop_dma(cam);
  857. /*
  858. * VB2 reclaims the buffers, so we need to forget
  859. * about them.
  860. */
  861. spin_lock_irqsave(&cam->dev_lock, flags);
  862. INIT_LIST_HEAD(&cam->buffers);
  863. spin_unlock_irqrestore(&cam->dev_lock, flags);
  864. return 0;
  865. }
  866. static const struct vb2_ops mcam_vb2_ops = {
  867. .queue_setup = mcam_vb_queue_setup,
  868. .buf_queue = mcam_vb_buf_queue,
  869. .start_streaming = mcam_vb_start_streaming,
  870. .stop_streaming = mcam_vb_stop_streaming,
  871. .wait_prepare = mcam_vb_wait_prepare,
  872. .wait_finish = mcam_vb_wait_finish,
  873. };
  874. #ifdef MCAM_MODE_DMA_SG
  875. /*
  876. * Scatter/gather mode uses all of the above functions plus a
  877. * few extras to deal with DMA mapping.
  878. */
  879. static int mcam_vb_sg_buf_init(struct vb2_buffer *vb)
  880. {
  881. struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
  882. struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
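/* Worst case: one descriptor per page of image data, plus one for any partial page. */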
  883. int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;
  884. mvb->dma_desc = dma_alloc_coherent(cam->dev,
  885. ndesc * sizeof(struct mcam_dma_desc),
  886. &mvb->dma_desc_pa, GFP_KERNEL);
  887. if (mvb->dma_desc == NULL) {
  888. cam_err(cam, "Unable to get DMA descriptor array\n");
  889. return -ENOMEM;
  890. }
  891. return 0;
  892. }
  893. static int mcam_vb_sg_buf_prepare(struct vb2_buffer *vb)
  894. {
  895. struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
  896. struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
  897. struct vb2_dma_sg_desc *sgd = vb2_dma_sg_plane_desc(vb, 0);
  898. struct mcam_dma_desc *desc = mvb->dma_desc;
  899. struct scatterlist *sg;
  900. int i;
  901. mvb->dma_desc_nent = dma_map_sg(cam->dev, sgd->sglist, sgd->num_pages,
  902. DMA_FROM_DEVICE);
  903. if (mvb->dma_desc_nent <= 0)
  904. return -EIO; /* Not sure what's right here */
  905. for_each_sg(sgd->sglist, sg, mvb->dma_desc_nent, i) {
  906. desc->dma_addr = sg_dma_address(sg);
  907. desc->segment_len = sg_dma_len(sg);
  908. desc++;
  909. }
  910. return 0;
  911. }
  912. static int mcam_vb_sg_buf_finish(struct vb2_buffer *vb)
  913. {
  914. struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
  915. struct vb2_dma_sg_desc *sgd = vb2_dma_sg_plane_desc(vb, 0);
  916. dma_unmap_sg(cam->dev, sgd->sglist, sgd->num_pages, DMA_FROM_DEVICE);
  917. return 0;
  918. }
  919. static void mcam_vb_sg_buf_cleanup(struct vb2_buffer *vb)
  920. {
  921. struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
  922. struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
  923. int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;
  924. dma_free_coherent(cam->dev, ndesc * sizeof(struct mcam_dma_desc),
  925. mvb->dma_desc, mvb->dma_desc_pa);
  926. }
  927. static const struct vb2_ops mcam_vb2_sg_ops = {
  928. .queue_setup = mcam_vb_queue_setup,
  929. .buf_init = mcam_vb_sg_buf_init,
  930. .buf_prepare = mcam_vb_sg_buf_prepare,
  931. .buf_queue = mcam_vb_buf_queue,
  932. .buf_finish = mcam_vb_sg_buf_finish,
  933. .buf_cleanup = mcam_vb_sg_buf_cleanup,
  934. .start_streaming = mcam_vb_start_streaming,
  935. .stop_streaming = mcam_vb_stop_streaming,
  936. .wait_prepare = mcam_vb_wait_prepare,
  937. .wait_finish = mcam_vb_wait_finish,
  938. };
  939. #endif /* MCAM_MODE_DMA_SG */
  940. static int mcam_setup_vb2(struct mcam_camera *cam)
  941. {
  942. struct vb2_queue *vq = &cam->vb_queue;
  943. memset(vq, 0, sizeof(*vq));
  944. vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  945. vq->drv_priv = cam;
vq->buf_struct_size = sizeof(struct mcam_vb_buffer); /* so vb2 allocates our buffer type, not a bare vb2_buffer */
  946. INIT_LIST_HEAD(&cam->buffers);
  947. switch (cam->buffer_mode) {
  948. case B_DMA_contig:
  949. #ifdef MCAM_MODE_DMA_CONTIG
  950. vq->ops = &mcam_vb2_ops;
  951. vq->mem_ops = &vb2_dma_contig_memops;
  952. cam->vb_alloc_ctx = vb2_dma_contig_init_ctx(cam->dev);
  953. vq->io_modes = VB2_MMAP | VB2_USERPTR;
  954. cam->dma_setup = mcam_ctlr_dma_contig;
  955. cam->frame_complete = mcam_dma_contig_done;
  956. #endif
  957. break;
  958. case B_DMA_sg:
  959. #ifdef MCAM_MODE_DMA_SG
  960. vq->ops = &mcam_vb2_sg_ops;
  961. vq->mem_ops = &vb2_dma_sg_memops;
  962. vq->io_modes = VB2_MMAP | VB2_USERPTR;
  963. cam->dma_setup = mcam_ctlr_dma_sg;
  964. cam->frame_complete = mcam_dma_sg_done;
  965. #endif
  966. break;
  967. case B_vmalloc:
  968. #ifdef MCAM_MODE_VMALLOC
  969. tasklet_init(&cam->s_tasklet, mcam_frame_tasklet,
  970. (unsigned long) cam);
  971. vq->ops = &mcam_vb2_ops;
  972. vq->mem_ops = &vb2_vmalloc_memops;
  973. vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
  974. vq->io_modes = VB2_MMAP;
  975. cam->dma_setup = mcam_ctlr_dma_vmalloc;
  976. cam->frame_complete = mcam_vmalloc_done;
  977. #endif
  978. break;
  979. }
  980. return vb2_queue_init(vq);
  981. }
  982. static void mcam_cleanup_vb2(struct mcam_camera *cam)
  983. {
  984. vb2_queue_release(&cam->vb_queue);
  985. #ifdef MCAM_MODE_DMA_CONTIG
  986. if (cam->buffer_mode == B_DMA_contig)
  987. vb2_dma_contig_cleanup_ctx(cam->vb_alloc_ctx);
  988. #endif
  989. }
  990. /* ---------------------------------------------------------------------- */
  991. /*
  992. * The long list of V4L2 ioctl() operations.
  993. */
  994. static int mcam_vidioc_streamon(struct file *filp, void *priv,
  995. enum v4l2_buf_type type)
  996. {
  997. struct mcam_camera *cam = filp->private_data;
  998. int ret;
  999. mutex_lock(&cam->s_mutex);
  1000. ret = vb2_streamon(&cam->vb_queue, type);
  1001. mutex_unlock(&cam->s_mutex);
  1002. return ret;
  1003. }
  1004. static int mcam_vidioc_streamoff(struct file *filp, void *priv,
  1005. enum v4l2_buf_type type)
  1006. {
  1007. struct mcam_camera *cam = filp->private_data;
  1008. int ret;
  1009. mutex_lock(&cam->s_mutex);
  1010. ret = vb2_streamoff(&cam->vb_queue, type);
  1011. mutex_unlock(&cam->s_mutex);
  1012. return ret;
  1013. }
  1014. static int mcam_vidioc_reqbufs(struct file *filp, void *priv,
  1015. struct v4l2_requestbuffers *req)
  1016. {
  1017. struct mcam_camera *cam = filp->private_data;
  1018. int ret;
  1019. mutex_lock(&cam->s_mutex);
  1020. ret = vb2_reqbufs(&cam->vb_queue, req);
  1021. mutex_unlock(&cam->s_mutex);
  1022. return ret;
  1023. }
  1024. static int mcam_vidioc_querybuf(struct file *filp, void *priv,
  1025. struct v4l2_buffer *buf)
  1026. {
  1027. struct mcam_camera *cam = filp->private_data;
  1028. int ret;
  1029. mutex_lock(&cam->s_mutex);
  1030. ret = vb2_querybuf(&cam->vb_queue, buf);
  1031. mutex_unlock(&cam->s_mutex);
  1032. return ret;
  1033. }
  1034. static int mcam_vidioc_qbuf(struct file *filp, void *priv,
  1035. struct v4l2_buffer *buf)
  1036. {
  1037. struct mcam_camera *cam = filp->private_data;
  1038. int ret;
  1039. mutex_lock(&cam->s_mutex);
  1040. ret = vb2_qbuf(&cam->vb_queue, buf);
  1041. mutex_unlock(&cam->s_mutex);
  1042. return ret;
  1043. }
  1044. static int mcam_vidioc_dqbuf(struct file *filp, void *priv,
  1045. struct v4l2_buffer *buf)
  1046. {
  1047. struct mcam_camera *cam = filp->private_data;
  1048. int ret;
  1049. mutex_lock(&cam->s_mutex);
  1050. ret = vb2_dqbuf(&cam->vb_queue, buf, filp->f_flags & O_NONBLOCK);
  1051. mutex_unlock(&cam->s_mutex);
  1052. return ret;
  1053. }
  1054. static int mcam_vidioc_querycap(struct file *file, void *priv,
  1055. struct v4l2_capability *cap)
  1056. {
  1057. strcpy(cap->driver, "marvell_ccic");
  1058. strcpy(cap->card, "marvell_ccic");
  1059. cap->version = 1;
  1060. cap->capabilities = V4L2_CAP_VIDEO_CAPTURE |
  1061. V4L2_CAP_READWRITE | V4L2_CAP_STREAMING;
  1062. return 0;
  1063. }
  1064. static int mcam_vidioc_enum_fmt_vid_cap(struct file *filp,
  1065. void *priv, struct v4l2_fmtdesc *fmt)
  1066. {
  1067. if (fmt->index >= N_MCAM_FMTS)
  1068. return -EINVAL;
  1069. strlcpy(fmt->description, mcam_formats[fmt->index].desc,
  1070. sizeof(fmt->description));
  1071. fmt->pixelformat = mcam_formats[fmt->index].pixelformat;
  1072. return 0;
  1073. }
  1074. static int mcam_vidioc_try_fmt_vid_cap(struct file *filp, void *priv,
  1075. struct v4l2_format *fmt)
  1076. {
  1077. struct mcam_camera *cam = priv;
  1078. struct mcam_format_struct *f;
  1079. struct v4l2_pix_format *pix = &fmt->fmt.pix;
  1080. struct v4l2_mbus_framefmt mbus_fmt;
  1081. int ret;
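/*
 * Clamp the request to a format we support, let the sensor adjust the
 * mbus geometry, then derive the packed line and image sizes from the result.
 */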
  1082. f = mcam_find_format(pix->pixelformat);
  1083. pix->pixelformat = f->pixelformat;
  1084. v4l2_fill_mbus_format(&mbus_fmt, pix, f->mbus_code);
  1085. mutex_lock(&cam->s_mutex);
  1086. ret = sensor_call(cam, video, try_mbus_fmt, &mbus_fmt);
  1087. mutex_unlock(&cam->s_mutex);
  1088. v4l2_fill_pix_format(pix, &mbus_fmt);
  1089. pix->bytesperline = pix->width * f->bpp;
  1090. pix->sizeimage = pix->height * pix->bytesperline;
  1091. return ret;
  1092. }
  1093. static int mcam_vidioc_s_fmt_vid_cap(struct file *filp, void *priv,
  1094. struct v4l2_format *fmt)
  1095. {
  1096. struct mcam_camera *cam = priv;
  1097. struct mcam_format_struct *f;
  1098. int ret;
  1099. /*
  1100. * Can't do anything if the device is not idle
  1101. * Also can't if there are streaming buffers in place.
  1102. */
  1103. if (cam->state != S_IDLE || cam->vb_queue.num_buffers > 0)
  1104. return -EBUSY;
  1105. f = mcam_find_format(fmt->fmt.pix.pixelformat);
  1106. /*
  1107. * See if the formatting works in principle.
  1108. */
  1109. ret = mcam_vidioc_try_fmt_vid_cap(filp, priv, fmt);
  1110. if (ret)
  1111. return ret;
  1112. /*
  1113. * Now we start to change things for real, so let's do it
  1114. * under lock.
  1115. */
  1116. mutex_lock(&cam->s_mutex);
  1117. cam->pix_format = fmt->fmt.pix;
  1118. cam->mbus_code = f->mbus_code;
  1119. /*
  1120. * Make sure we have appropriate DMA buffers.
  1121. */
  1122. if (cam->buffer_mode == B_vmalloc) {
  1123. ret = mcam_check_dma_buffers(cam);
  1124. if (ret)
  1125. goto out;
  1126. }
  1127. mcam_set_config_needed(cam, 1);
  1128. out:
  1129. mutex_unlock(&cam->s_mutex);
  1130. return ret;
  1131. }
  1132. /*
  1133. * Return our stored notion of how the camera is/should be configured.
  1134. * The V4l2 spec wants us to be smarter, and actually get this from
  1135. * the camera (and not mess with it at open time). Someday.
  1136. */
  1137. static int mcam_vidioc_g_fmt_vid_cap(struct file *filp, void *priv,
  1138. struct v4l2_format *f)
  1139. {
  1140. struct mcam_camera *cam = priv;
  1141. f->fmt.pix = cam->pix_format;
  1142. return 0;
  1143. }
  1144. /*
  1145. * We only have one input - the sensor - so minimize the nonsense here.
  1146. */
  1147. static int mcam_vidioc_enum_input(struct file *filp, void *priv,
  1148. struct v4l2_input *input)
  1149. {
  1150. if (input->index != 0)
  1151. return -EINVAL;
  1152. input->type = V4L2_INPUT_TYPE_CAMERA;
  1153. input->std = V4L2_STD_ALL; /* Not sure what should go here */
  1154. strcpy(input->name, "Camera");
  1155. return 0;
  1156. }
  1157. static int mcam_vidioc_g_input(struct file *filp, void *priv, unsigned int *i)
  1158. {
  1159. *i = 0;
  1160. return 0;
  1161. }
  1162. static int mcam_vidioc_s_input(struct file *filp, void *priv, unsigned int i)
  1163. {
  1164. if (i != 0)
  1165. return -EINVAL;
  1166. return 0;
  1167. }
  1168. /* from vivi.c */
  1169. static int mcam_vidioc_s_std(struct file *filp, void *priv, v4l2_std_id a)
  1170. {
  1171. return 0;
  1172. }
  1173. static int mcam_vidioc_g_std(struct file *filp, void *priv, v4l2_std_id *a)
  1174. {
  1175. *a = V4L2_STD_NTSC_M;
  1176. return 0;
  1177. }
  1178. /*
  1179. * G/S_PARM. Most of this is done by the sensor, but we are
  1180. * the level which controls the number of read buffers.
  1181. */
  1182. static int mcam_vidioc_g_parm(struct file *filp, void *priv,
  1183. struct v4l2_streamparm *parms)
  1184. {
  1185. struct mcam_camera *cam = priv;
  1186. int ret;
  1187. mutex_lock(&cam->s_mutex);
  1188. ret = sensor_call(cam, video, g_parm, parms);
  1189. mutex_unlock(&cam->s_mutex);
  1190. parms->parm.capture.readbuffers = n_dma_bufs;
  1191. return ret;
  1192. }
  1193. static int mcam_vidioc_s_parm(struct file *filp, void *priv,
  1194. struct v4l2_streamparm *parms)
  1195. {
  1196. struct mcam_camera *cam = priv;
  1197. int ret;
  1198. mutex_lock(&cam->s_mutex);
  1199. ret = sensor_call(cam, video, s_parm, parms);
  1200. mutex_unlock(&cam->s_mutex);
  1201. parms->parm.capture.readbuffers = n_dma_bufs;
  1202. return ret;
  1203. }
  1204. static int mcam_vidioc_enum_framesizes(struct file *filp, void *priv,
  1205. struct v4l2_frmsizeenum *sizes)
  1206. {
  1207. struct mcam_camera *cam = priv;
  1208. int ret;
  1209. mutex_lock(&cam->s_mutex);
  1210. ret = sensor_call(cam, video, enum_framesizes, sizes);
  1211. mutex_unlock(&cam->s_mutex);
  1212. return ret;
  1213. }
  1214. static int mcam_vidioc_enum_frameintervals(struct file *filp, void *priv,
  1215. struct v4l2_frmivalenum *interval)
  1216. {
  1217. struct mcam_camera *cam = priv;
  1218. int ret;
  1219. mutex_lock(&cam->s_mutex);
  1220. ret = sensor_call(cam, video, enum_frameintervals, interval);
  1221. mutex_unlock(&cam->s_mutex);
  1222. return ret;
  1223. }
  1224. #ifdef CONFIG_VIDEO_ADV_DEBUG
  1225. static int mcam_vidioc_g_register(struct file *file, void *priv,
  1226. struct v4l2_dbg_register *reg)
  1227. {
  1228. struct mcam_camera *cam = priv;
  1229. if (reg->reg > cam->regs_size - 4)
  1230. return -EINVAL;
  1231. reg->val = mcam_reg_read(cam, reg->reg);
  1232. reg->size = 4;
  1233. return 0;
  1234. }
  1235. static int mcam_vidioc_s_register(struct file *file, void *priv,
  1236. const struct v4l2_dbg_register *reg)
  1237. {
  1238. struct mcam_camera *cam = priv;
  1239. if (reg->reg > cam->regs_size - 4)
  1240. return -EINVAL;
  1241. mcam_reg_write(cam, reg->reg, reg->val);
  1242. return 0;
  1243. }
  1244. #endif
  1245. static const struct v4l2_ioctl_ops mcam_v4l_ioctl_ops = {
  1246. .vidioc_querycap = mcam_vidioc_querycap,
  1247. .vidioc_enum_fmt_vid_cap = mcam_vidioc_enum_fmt_vid_cap,
  1248. .vidioc_try_fmt_vid_cap = mcam_vidioc_try_fmt_vid_cap,
  1249. .vidioc_s_fmt_vid_cap = mcam_vidioc_s_fmt_vid_cap,
  1250. .vidioc_g_fmt_vid_cap = mcam_vidioc_g_fmt_vid_cap,
  1251. .vidioc_enum_input = mcam_vidioc_enum_input,
  1252. .vidioc_g_input = mcam_vidioc_g_input,
  1253. .vidioc_s_input = mcam_vidioc_s_input,
  1254. .vidioc_s_std = mcam_vidioc_s_std,
  1255. .vidioc_g_std = mcam_vidioc_g_std,
  1256. .vidioc_reqbufs = mcam_vidioc_reqbufs,
  1257. .vidioc_querybuf = mcam_vidioc_querybuf,
  1258. .vidioc_qbuf = mcam_vidioc_qbuf,
  1259. .vidioc_dqbuf = mcam_vidioc_dqbuf,
  1260. .vidioc_streamon = mcam_vidioc_streamon,
  1261. .vidioc_streamoff = mcam_vidioc_streamoff,
  1262. .vidioc_g_parm = mcam_vidioc_g_parm,
  1263. .vidioc_s_parm = mcam_vidioc_s_parm,
  1264. .vidioc_enum_framesizes = mcam_vidioc_enum_framesizes,
  1265. .vidioc_enum_frameintervals = mcam_vidioc_enum_frameintervals,
  1266. #ifdef CONFIG_VIDEO_ADV_DEBUG
  1267. .vidioc_g_register = mcam_vidioc_g_register,
  1268. .vidioc_s_register = mcam_vidioc_s_register,
  1269. #endif
  1270. };
  1271. /* ---------------------------------------------------------------------- */
  1272. /*
  1273. * Our various file operations.
  1274. */
  1275. static int mcam_v4l_open(struct file *filp)
  1276. {
  1277. struct mcam_camera *cam = video_drvdata(filp);
  1278. int ret = 0;
  1279. filp->private_data = cam;
  1280. cam->frame_state.frames = 0;
  1281. cam->frame_state.singles = 0;
  1282. cam->frame_state.delivered = 0;
  1283. mutex_lock(&cam->s_mutex);
  1284. if (cam->users == 0) {
  1285. ret = mcam_setup_vb2(cam);
  1286. if (ret)
  1287. goto out;
  1288. mcam_ctlr_power_up(cam);
  1289. __mcam_cam_reset(cam);
  1290. mcam_set_config_needed(cam, 1);
  1291. }
  1292. (cam->users)++;
  1293. out:
  1294. mutex_unlock(&cam->s_mutex);
  1295. return ret;
  1296. }
  1297. static int mcam_v4l_release(struct file *filp)
  1298. {
  1299. struct mcam_camera *cam = filp->private_data;
  1300. cam_dbg(cam, "Release, %d frames, %d singles, %d delivered\n",
  1301. cam->frame_state.frames, cam->frame_state.singles,
  1302. cam->frame_state.delivered);
  1303. mutex_lock(&cam->s_mutex);
  1304. (cam->users)--;
  1305. if (cam->users == 0) {
  1306. mcam_ctlr_stop_dma(cam);
  1307. mcam_cleanup_vb2(cam);
  1308. mcam_ctlr_power_down(cam);
  1309. if (cam->buffer_mode == B_vmalloc && alloc_bufs_at_read)
  1310. mcam_free_dma_bufs(cam);
  1311. }
  1312. mutex_unlock(&cam->s_mutex);
  1313. return 0;
  1314. }
  1315. static ssize_t mcam_v4l_read(struct file *filp,
  1316. char __user *buffer, size_t len, loff_t *pos)
  1317. {
  1318. struct mcam_camera *cam = filp->private_data;
  1319. int ret;
  1320. mutex_lock(&cam->s_mutex);
  1321. ret = vb2_read(&cam->vb_queue, buffer, len, pos,
  1322. filp->f_flags & O_NONBLOCK);
  1323. mutex_unlock(&cam->s_mutex);
  1324. return ret;
  1325. }
  1326. static unsigned int mcam_v4l_poll(struct file *filp,
  1327. struct poll_table_struct *pt)
  1328. {
  1329. struct mcam_camera *cam = filp->private_data;
  1330. int ret;
  1331. mutex_lock(&cam->s_mutex);
  1332. ret = vb2_poll(&cam->vb_queue, filp, pt);
  1333. mutex_unlock(&cam->s_mutex);
  1334. return ret;
  1335. }
  1336. static int mcam_v4l_mmap(struct file *filp, struct vm_area_struct *vma)
  1337. {
  1338. struct mcam_camera *cam = filp->private_data;
  1339. int ret;
  1340. mutex_lock(&cam->s_mutex);
  1341. ret = vb2_mmap(&cam->vb_queue, vma);
  1342. mutex_unlock(&cam->s_mutex);
  1343. return ret;
  1344. }
  1345. static const struct v4l2_file_operations mcam_v4l_fops = {
  1346. .owner = THIS_MODULE,
  1347. .open = mcam_v4l_open,
  1348. .release = mcam_v4l_release,
  1349. .read = mcam_v4l_read,
  1350. .poll = mcam_v4l_poll,
  1351. .mmap = mcam_v4l_mmap,
  1352. .unlocked_ioctl = video_ioctl2,
  1353. };
  1354. /*
  1355. * This template device holds all of those v4l2 methods; we
  1356. * clone it for specific real devices.
  1357. */
  1358. static struct video_device mcam_v4l_template = {
  1359. .name = "mcam",
  1360. .tvnorms = V4L2_STD_NTSC_M,
  1361. .fops = &mcam_v4l_fops,
  1362. .ioctl_ops = &mcam_v4l_ioctl_ops,
  1363. .release = video_device_release_empty,
  1364. };
  1365. /* ---------------------------------------------------------------------- */
  1366. /*
  1367. * Interrupt handler stuff
  1368. */
  1369. static void mcam_frame_complete(struct mcam_camera *cam, int frame)
  1370. {
  1371. /*
  1372. * Basic frame housekeeping.
  1373. */
  1374. set_bit(frame, &cam->flags);
  1375. clear_bit(CF_DMA_ACTIVE, &cam->flags);
  1376. cam->next_buf = frame;
  1377. cam->buf_seq[frame] = ++(cam->sequence);
  1378. cam->frame_state.frames++;
  1379. /*
  1380. * "This should never happen"
  1381. */
  1382. if (cam->state != S_STREAMING)
  1383. return;
  1384. /*
  1385. * Process the frame and set up the next one.
  1386. */
  1387. cam->frame_complete(cam, frame);
  1388. }
  1389. /*
  1390. * The interrupt handler; this needs to be called from the
  1391. * platform irq handler with the lock held.
  1392. */
  1393. int mccic_irq(struct mcam_camera *cam, unsigned int irqs)
  1394. {
  1395. unsigned int frame, handled = 0;
  1396. mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS); /* Clear'em all */
  1397. /*
  1398. * Handle any frame completions. There really should
  1399. * not be more than one of these, or we have fallen
  1400. * far behind.
  1401. *
  1402. * When running in S/G mode, the frame number lacks any
  1403. * real meaning - there's only one descriptor array - but
  1404. * the controller still picks a different one to signal
  1405. * each time.
  1406. */
  1407. for (frame = 0; frame < cam->nbufs; frame++)
  1408. if (irqs & (IRQ_EOF0 << frame)) {
  1409. mcam_frame_complete(cam, frame);
  1410. handled = 1;
  1411. if (cam->buffer_mode == B_DMA_sg)
  1412. break;
  1413. }
  1414. /*
  1415. * If a frame starts, note that we have DMA active. This
  1416. * code assumes that we won't get multiple frame interrupts
  1417. * at once; may want to rethink that.
  1418. */
  1419. if (irqs & (IRQ_SOF0 | IRQ_SOF1 | IRQ_SOF2)) {
  1420. set_bit(CF_DMA_ACTIVE, &cam->flags);
  1421. handled = 1;
  1422. if (cam->buffer_mode == B_DMA_sg)
  1423. mcam_ctlr_stop(cam);
  1424. }
  1425. return handled;
  1426. }
  1427. /* ---------------------------------------------------------------------- */
  1428. /*
  1429. * Registration and such.
  1430. */
  1431. static struct ov7670_config sensor_cfg = {
  1432. /*
  1433. * Exclude QCIF mode, because it only captures a tiny portion
  1434. * of the sensor FOV
  1435. */
  1436. .min_width = 320,
  1437. .min_height = 240,
  1438. };
  1439. int mccic_register(struct mcam_camera *cam)
  1440. {
  1441. struct i2c_board_info ov7670_info = {
  1442. .type = "ov7670",
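/* 0x42 is the OV7670's 8-bit write address; the I2C core wants the 7-bit form. */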
  1443. .addr = 0x42 >> 1,
  1444. .platform_data = &sensor_cfg,
  1445. };
  1446. int ret;
  1447. /*
  1448. * Validate the requested buffer mode.
  1449. */
  1450. if (buffer_mode >= 0)
  1451. cam->buffer_mode = buffer_mode;
  1452. if (cam->buffer_mode == B_DMA_sg &&
  1453. cam->chip_id == MCAM_CAFE) {
  1454. printk(KERN_ERR "marvell-cam: Cafe can't do S/G I/O, "
  1455. "attempting vmalloc mode instead\n");
  1456. cam->buffer_mode = B_vmalloc;
  1457. }
  1458. if (!mcam_buffer_mode_supported(cam->buffer_mode)) {
  1459. printk(KERN_ERR "marvell-cam: buffer mode %d unsupported\n",
  1460. cam->buffer_mode);
  1461. return -EINVAL;
  1462. }
  1463. /*
  1464. * Register with V4L
  1465. */
  1466. ret = v4l2_device_register(cam->dev, &cam->v4l2_dev);
  1467. if (ret)
  1468. return ret;
  1469. mutex_init(&cam->s_mutex);
  1470. cam->state = S_NOTREADY;
  1471. mcam_set_config_needed(cam, 1);
  1472. cam->pix_format = mcam_def_pix_format;
  1473. cam->mbus_code = mcam_def_mbus_code;
  1474. INIT_LIST_HEAD(&cam->buffers);
  1475. mcam_ctlr_init(cam);
  1476. /*
  1477. * Try to find the sensor.
  1478. */
  1479. sensor_cfg.clock_speed = cam->clock_speed;
  1480. sensor_cfg.use_smbus = cam->use_smbus;
  1481. cam->sensor_addr = ov7670_info.addr;
  1482. cam->sensor = v4l2_i2c_new_subdev_board(&cam->v4l2_dev,
  1483. cam->i2c_adapter, &ov7670_info, NULL);
  1484. if (cam->sensor == NULL) {
  1485. ret = -ENODEV;
  1486. goto out_unregister;
  1487. }
  1488. ret = mcam_cam_init(cam);
  1489. if (ret)
  1490. goto out_unregister;
  1491. /*
  1492. * Get the v4l2 setup done.
  1493. */
  1494. ret = v4l2_ctrl_handler_init(&cam->ctrl_handler, 10);
  1495. if (ret)
  1496. goto out_unregister;
  1497. cam->v4l2_dev.ctrl_handler = &cam->ctrl_handler;
  1498. mutex_lock(&cam->s_mutex);
  1499. cam->vdev = mcam_v4l_template;
  1500. cam->vdev.debug = 0;
  1501. cam->vdev.v4l2_dev = &cam->v4l2_dev;
  1502. video_set_drvdata(&cam->vdev, cam);
  1503. ret = video_register_device(&cam->vdev, VFL_TYPE_GRABBER, -1);
  1504. if (ret)
  1505. goto out;
  1506. /*
  1507. * If so requested, try to get our DMA buffers now.
  1508. */
  1509. if (cam->buffer_mode == B_vmalloc && !alloc_bufs_at_read) {
  1510. if (mcam_alloc_dma_bufs(cam, 1))
  1511. cam_warn(cam, "Unable to alloc DMA buffers at load; "
  1512. "will try again later\n");
  1513. }
  1514. out:
  1515. v4l2_ctrl_handler_free(&cam->ctrl_handler);
  1516. mutex_unlock(&cam->s_mutex);
  1517. return ret;
  1518. out_unregister:
  1519. v4l2_device_unregister(&cam->v4l2_dev);
  1520. return ret;
  1521. }
  1522. void mccic_shutdown(struct mcam_camera *cam)
  1523. {
  1524. /*
  1525. * If we have no users (and we really, really should have no
  1526. * users) the device will already be powered down. Trying to
  1527. * take it down again will wedge the machine, which is frowned
  1528. * upon.
  1529. */
  1530. if (cam->users > 0) {
  1531. cam_warn(cam, "Removing a device with users!\n");
  1532. mcam_ctlr_power_down(cam);
  1533. }
  1534. vb2_queue_release(&cam->vb_queue);
  1535. if (cam->buffer_mode == B_vmalloc)
  1536. mcam_free_dma_bufs(cam);
  1537. video_unregister_device(&cam->vdev);
  1538. v4l2_ctrl_handler_free(&cam->ctrl_handler);
  1539. v4l2_device_unregister(&cam->v4l2_dev);
  1540. }
  1541. /*
  1542. * Power management
  1543. */
  1544. #ifdef CONFIG_PM
  1545. void mccic_suspend(struct mcam_camera *cam)
  1546. {
  1547. mutex_lock(&cam->s_mutex);
  1548. if (cam->users > 0) {
  1549. enum mcam_state cstate = cam->state;
  1550. mcam_ctlr_stop_dma(cam);
  1551. mcam_ctlr_power_down(cam);
  1552. cam->state = cstate;
  1553. }
  1554. mutex_unlock(&cam->s_mutex);
  1555. }
  1556. int mccic_resume(struct mcam_camera *cam)
  1557. {
  1558. int ret = 0;
  1559. mutex_lock(&cam->s_mutex);
  1560. if (cam->users > 0) {
  1561. mcam_ctlr_power_up(cam);
  1562. __mcam_cam_reset(cam);
  1563. } else {
  1564. mcam_ctlr_power_down(cam);
  1565. }
  1566. mutex_unlock(&cam->s_mutex);
  1567. set_bit(CF_CONFIG_NEEDED, &cam->flags);
  1568. if (cam->state == S_STREAMING) {
  1569. /*
  1570. * If there was a buffer in the DMA engine at suspend
  1571. * time, put it back on the queue or we'll forget about it.
  1572. */
  1573. if (cam->buffer_mode == B_DMA_sg && cam->vb_bufs[0])
  1574. list_add(&cam->vb_bufs[0]->queue, &cam->buffers);
  1575. ret = mcam_read_setup(cam);
  1576. }
  1577. return ret;
  1578. }
  1579. #endif /* CONFIG_PM */