/*
 * The Marvell camera core.  This device appears in a number of settings,
 * so it needs platform-specific support outside of the core.
 *
 * Copyright 2011 Jonathan Corbet <corbet@lwn.net>
 */
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/fs.h>
#include <linux/mm.h>
#include <linux/i2c.h>
#include <linux/interrupt.h>
#include <linux/spinlock.h>
#include <linux/slab.h>
#include <linux/device.h>
#include <linux/wait.h>
#include <linux/list.h>
#include <linux/dma-mapping.h>
#include <linux/delay.h>
#include <linux/vmalloc.h>
#include <linux/io.h>
#include <linux/videodev2.h>
#include <media/v4l2-device.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-chip-ident.h>
#include <media/ov7670.h>
#include <media/videobuf2-vmalloc.h>
#include <media/videobuf2-dma-contig.h>
#include <media/videobuf2-dma-sg.h>

#include "mcam-core.h"

/*
 * Basic frame stats - to be deleted shortly
 */
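/*
 * frames counts every frame the controller completes, singles counts
 * frames that arrived with no queued buffer available, and delivered
 * counts frames actually handed back to user space.
 */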
static int frames;
static int singles;
static int delivered;

/*
 * Internal DMA buffer management.  Since the controller cannot do S/G I/O,
 * we must have physically contiguous buffers to bring frames into.
 * These parameters control how many buffers we use, whether we
 * allocate them at load time (better chance of success, but nails down
 * memory) or when somebody tries to use the camera (riskier), and,
 * for load-time allocation, how big they should be.
 *
 * The controller can cycle through three buffers.  We could use
 * more by flipping pointers around, but it probably makes little
 * sense.
 */
static bool alloc_bufs_at_read;
module_param(alloc_bufs_at_read, bool, 0444);
MODULE_PARM_DESC(alloc_bufs_at_read,
                "Non-zero value causes DMA buffers to be allocated when the "
                "video capture device is read, rather than at module load "
                "time.  This saves memory, but decreases the chances of "
                "successfully getting those buffers.  This parameter is "
                "only used in the vmalloc buffer mode");
static uint n_dma_bufs = 3;
module_param(n_dma_bufs, uint, 0644);
MODULE_PARM_DESC(n_dma_bufs,
                "The number of DMA buffers to allocate.  Can be either two "
                "(saves memory, makes timing tighter) or three.");
static uint dma_buf_size = VGA_WIDTH * VGA_HEIGHT * 2;  /* Worst case */
module_param(dma_buf_size, uint, 0444);
MODULE_PARM_DESC(dma_buf_size,
                "The size of the allocated DMA buffers.  If actual operating "
                "parameters require larger buffers, an attempt to reallocate "
                "will be made.");
static bool flip;
module_param(flip, bool, 0444);
MODULE_PARM_DESC(flip,
                "If set, the sensor will be instructed to flip the image "
                "vertically.");
static int buffer_mode = -1;
module_param(buffer_mode, int, 0444);
MODULE_PARM_DESC(buffer_mode,
                "Set the buffer mode to be used; default is to go with what "
                "the platform driver asks for.  Set to 0 for vmalloc, 1 for "
                "DMA contiguous, 2 for DMA scatter/gather (Armada 610 only).");

/*
 * Status flags.  Always manipulated with bit operations.
 */
#define CF_BUF0_VALID    0      /* Buffers valid - first three */
#define CF_BUF1_VALID    1
#define CF_BUF2_VALID    2
#define CF_DMA_ACTIVE    3      /* A frame is incoming */
#define CF_CONFIG_NEEDED 4      /* Must configure hardware */
#define CF_SINGLE_BUFFER 5      /* Running with a single buffer */
#define CF_SG_RESTART    6      /* SG restart needed */
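
/*
 * sensor_call() forwards an operation to the attached sensor subdevice
 * by way of v4l2_subdev_call().
 */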
#define sensor_call(cam, o, f, args...) \
        v4l2_subdev_call(cam->sensor, o, f, ##args)

static struct mcam_format_struct {
        __u8 *desc;
        __u32 pixelformat;
        int bpp;   /* Bytes per pixel */
        enum v4l2_mbus_pixelcode mbus_code;
} mcam_formats[] = {
        {
                .desc = "YUYV 4:2:2",
                .pixelformat = V4L2_PIX_FMT_YUYV,
                .mbus_code = V4L2_MBUS_FMT_YUYV8_2X8,
                .bpp = 2,
        },
        {
                .desc = "RGB 444",
                .pixelformat = V4L2_PIX_FMT_RGB444,
                .mbus_code = V4L2_MBUS_FMT_RGB444_2X8_PADHI_LE,
                .bpp = 2,
        },
        {
                .desc = "RGB 565",
                .pixelformat = V4L2_PIX_FMT_RGB565,
                .mbus_code = V4L2_MBUS_FMT_RGB565_2X8_LE,
                .bpp = 2,
        },
        {
                .desc = "Raw RGB Bayer",
                .pixelformat = V4L2_PIX_FMT_SBGGR8,
                .mbus_code = V4L2_MBUS_FMT_SBGGR8_1X8,
                .bpp = 1
        },
};
#define N_MCAM_FMTS ARRAY_SIZE(mcam_formats)

static struct mcam_format_struct *mcam_find_format(u32 pixelformat)
{
        unsigned i;

        for (i = 0; i < N_MCAM_FMTS; i++)
                if (mcam_formats[i].pixelformat == pixelformat)
                        return mcam_formats + i;
        /* Not found? Then return the first format. */
        return mcam_formats;
}

/*
 * The default format we use until somebody says otherwise.
 */
static const struct v4l2_pix_format mcam_def_pix_format = {
        .width = VGA_WIDTH,
        .height = VGA_HEIGHT,
        .pixelformat = V4L2_PIX_FMT_YUYV,
        .field = V4L2_FIELD_NONE,
        .bytesperline = VGA_WIDTH*2,
        .sizeimage = VGA_WIDTH*VGA_HEIGHT*2,
};

static const enum v4l2_mbus_pixelcode mcam_def_mbus_code =
                                        V4L2_MBUS_FMT_YUYV8_2X8;

/*
 * The two-word DMA descriptor format used by the Armada 610 and like.  There
 * is a three-word format as well (set C1_DESC_3WORD) where the third
 * word is a pointer to the next descriptor, but we don't use it.  Two-word
 * descriptors have to be contiguous in memory.
 */
struct mcam_dma_desc {
        u32 dma_addr;
        u32 segment_len;
};

/*
 * Our buffer type for working with videobuf2.  Note that the vb2
 * developers have decreed that struct vb2_buffer must be at the
 * beginning of this structure.
 */
struct mcam_vb_buffer {
        struct vb2_buffer vb_buf;
        struct list_head queue;
        struct mcam_dma_desc *dma_desc; /* Descriptor virtual address */
        dma_addr_t dma_desc_pa;         /* Descriptor physical address */
        int dma_desc_nent;              /* Number of mapped descriptors */
};

static inline struct mcam_vb_buffer *vb_to_mvb(struct vb2_buffer *vb)
{
        return container_of(vb, struct mcam_vb_buffer, vb_buf);
}

/*
 * Hand a completed buffer back to user space.
 */
static void mcam_buffer_done(struct mcam_camera *cam, int frame,
                struct vb2_buffer *vbuf)
{
        vbuf->v4l2_buf.bytesused = cam->pix_format.sizeimage;
        vbuf->v4l2_buf.sequence = cam->buf_seq[frame];
        vb2_set_plane_payload(vbuf, 0, cam->pix_format.sizeimage);
        vb2_buffer_done(vbuf, VB2_BUF_STATE_DONE);
}

/*
 * Debugging and related.
 */
#define cam_err(cam, fmt, arg...) \
        dev_err((cam)->dev, fmt, ##arg)
#define cam_warn(cam, fmt, arg...) \
        dev_warn((cam)->dev, fmt, ##arg)
#define cam_dbg(cam, fmt, arg...) \
        dev_dbg((cam)->dev, fmt, ##arg)

/*
 * Flag manipulation helpers
 */
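/*
 * Mark every DMA buffer invalid and forget which buffer comes next.
 */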
static void mcam_reset_buffers(struct mcam_camera *cam)
{
        int i;

        cam->next_buf = -1;
        for (i = 0; i < cam->nbufs; i++)
                clear_bit(i, &cam->flags);
}

static inline int mcam_needs_config(struct mcam_camera *cam)
{
        return test_bit(CF_CONFIG_NEEDED, &cam->flags);
}

static void mcam_set_config_needed(struct mcam_camera *cam, int needed)
{
        if (needed)
                set_bit(CF_CONFIG_NEEDED, &cam->flags);
        else
                clear_bit(CF_CONFIG_NEEDED, &cam->flags);
}

/* ------------------------------------------------------------------- */
/*
 * Make the controller start grabbing images.  Everything must
 * be set up before doing this.
 */
static void mcam_ctlr_start(struct mcam_camera *cam)
{
        /* set_bit performs a read, so no other barrier should be
           needed here */
        mcam_reg_set_bit(cam, REG_CTRL0, C0_ENABLE);
}

static void mcam_ctlr_stop(struct mcam_camera *cam)
{
        mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
}

/* ------------------------------------------------------------------- */
/*
 * Code specific to the vmalloc buffer mode.
 */

/*
 * Allocate in-kernel DMA buffers for vmalloc mode.
 */
static int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
{
        int i;

        mcam_set_config_needed(cam, 1);
        if (loadtime)
                cam->dma_buf_size = dma_buf_size;
        else
                cam->dma_buf_size = cam->pix_format.sizeimage;
        if (n_dma_bufs > 3)
                n_dma_bufs = 3;

        cam->nbufs = 0;
        for (i = 0; i < n_dma_bufs; i++) {
                cam->dma_bufs[i] = dma_alloc_coherent(cam->dev,
                                cam->dma_buf_size, cam->dma_handles + i,
                                GFP_KERNEL);
                if (cam->dma_bufs[i] == NULL) {
                        cam_warn(cam, "Failed to allocate DMA buffer\n");
                        break;
                }
                (cam->nbufs)++;
        }

        switch (cam->nbufs) {
        case 1:
                dma_free_coherent(cam->dev, cam->dma_buf_size,
                                cam->dma_bufs[0], cam->dma_handles[0]);
                cam->nbufs = 0;
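                /* No break: a single buffer is useless, so fall into the failure case. */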
        case 0:
                cam_err(cam, "Insufficient DMA buffers, cannot operate\n");
                return -ENOMEM;

        case 2:
                if (n_dma_bufs > 2)
                        cam_warn(cam, "Will limp along with only 2 buffers\n");
                break;
        }
        return 0;
}

static void mcam_free_dma_bufs(struct mcam_camera *cam)
{
        int i;

        for (i = 0; i < cam->nbufs; i++) {
                dma_free_coherent(cam->dev, cam->dma_buf_size,
                                cam->dma_bufs[i], cam->dma_handles[i]);
                cam->dma_bufs[i] = NULL;
        }
        cam->nbufs = 0;
}

/*
 * Set up DMA buffers when operating in vmalloc mode
 */
static void mcam_ctlr_dma_vmalloc(struct mcam_camera *cam)
{
        /*
         * Store the first two Y buffers (we aren't supporting
         * planar formats for now, so no UV bufs).  Then either
         * set the third if it exists, or tell the controller
         * to just use two.
         */
        mcam_reg_write(cam, REG_Y0BAR, cam->dma_handles[0]);
        mcam_reg_write(cam, REG_Y1BAR, cam->dma_handles[1]);
        if (cam->nbufs > 2) {
                mcam_reg_write(cam, REG_Y2BAR, cam->dma_handles[2]);
                mcam_reg_clear_bit(cam, REG_CTRL1, C1_TWOBUFS);
        } else
                mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
        if (cam->chip_id == V4L2_IDENT_CAFE)
                mcam_reg_write(cam, REG_UBAR, 0); /* 32 bits only */
}

/*
 * Copy data out to user space in the vmalloc case
 */
static void mcam_frame_tasklet(unsigned long data)
{
        struct mcam_camera *cam = (struct mcam_camera *) data;
        int i;
        unsigned long flags;
        struct mcam_vb_buffer *buf;

        spin_lock_irqsave(&cam->dev_lock, flags);
        for (i = 0; i < cam->nbufs; i++) {
                int bufno = cam->next_buf;

                if (cam->state != S_STREAMING || bufno < 0)
                        break;  /* I/O got stopped */
                if (++(cam->next_buf) >= cam->nbufs)
                        cam->next_buf = 0;
                if (!test_bit(bufno, &cam->flags))
                        continue;
                if (list_empty(&cam->buffers)) {
                        singles++;
                        break;  /* Leave it valid, hope for better later */
                }
                delivered++;
                clear_bit(bufno, &cam->flags);
                buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
                                queue);
                list_del_init(&buf->queue);
                /*
                 * Drop the lock during the big copy.  This *should* be safe...
                 */
                spin_unlock_irqrestore(&cam->dev_lock, flags);
                memcpy(vb2_plane_vaddr(&buf->vb_buf, 0), cam->dma_bufs[bufno],
                                cam->pix_format.sizeimage);
                mcam_buffer_done(cam, bufno, &buf->vb_buf);
                spin_lock_irqsave(&cam->dev_lock, flags);
        }
        spin_unlock_irqrestore(&cam->dev_lock, flags);
}

/* ---------------------------------------------------------------------- */
/*
 * DMA-contiguous code.
 */
/*
 * Set up a contiguous buffer for the given frame.  Here also is where
 * the underrun strategy is set: if there is no buffer available, reuse
 * the buffer from the other BAR and set the CF_SINGLE_BUFFER flag to
 * keep the interrupt handler from giving that buffer back to user
 * space.  In this way, we always have a buffer to DMA to and don't
 * have to try to play games stopping and restarting the controller.
 */
static void mcam_set_contig_buffer(struct mcam_camera *cam, int frame)
{
        struct mcam_vb_buffer *buf;

        /*
         * If there are no available buffers, go into single mode
         */
        if (list_empty(&cam->buffers)) {
                buf = cam->vb_bufs[frame ^ 0x1];
                cam->vb_bufs[frame] = buf;
                mcam_reg_write(cam, frame == 0 ? REG_Y0BAR : REG_Y1BAR,
                                vb2_dma_contig_plane_paddr(&buf->vb_buf, 0));
                set_bit(CF_SINGLE_BUFFER, &cam->flags);
                singles++;
                return;
        }
        /*
         * OK, we have a buffer we can use.
         */
        buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);
        list_del_init(&buf->queue);
        mcam_reg_write(cam, frame == 0 ? REG_Y0BAR : REG_Y1BAR,
                        vb2_dma_contig_plane_paddr(&buf->vb_buf, 0));
        cam->vb_bufs[frame] = buf;
        clear_bit(CF_SINGLE_BUFFER, &cam->flags);
}

/*
 * Initial B_DMA_contig setup.
 */
static void mcam_ctlr_dma_contig(struct mcam_camera *cam)
{
        mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
        cam->nbufs = 2;
        mcam_set_contig_buffer(cam, 0);
        mcam_set_contig_buffer(cam, 1);
}

/*
 * Frame completion handling.
 */
static void mcam_dma_contig_done(struct mcam_camera *cam, int frame)
{
        struct mcam_vb_buffer *buf = cam->vb_bufs[frame];

        if (!test_bit(CF_SINGLE_BUFFER, &cam->flags)) {
                delivered++;
                mcam_buffer_done(cam, frame, &buf->vb_buf);
        }
        mcam_set_contig_buffer(cam, frame);
}

/* ---------------------------------------------------------------------- */
/*
 * Scatter/gather-specific code.
 */
/*
 * Set up the next buffer for S/G I/O; caller should be sure that
 * the controller is stopped and a buffer is available.
 */
static void mcam_sg_next_buffer(struct mcam_camera *cam)
{
        struct mcam_vb_buffer *buf;

        buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);
        list_del_init(&buf->queue);
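        /*
         * Point the controller at this buffer's descriptor chain.  The U
         * and V lengths stay zero because only packed (single-plane)
         * formats are supported here.
         */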
        mcam_reg_write(cam, REG_DMA_DESC_Y, buf->dma_desc_pa);
        mcam_reg_write(cam, REG_DESC_LEN_Y,
                        buf->dma_desc_nent*sizeof(struct mcam_dma_desc));
        mcam_reg_write(cam, REG_DESC_LEN_U, 0);
        mcam_reg_write(cam, REG_DESC_LEN_V, 0);
        cam->vb_bufs[0] = buf;
}

/*
 * Initial B_DMA_sg setup
 */
static void mcam_ctlr_dma_sg(struct mcam_camera *cam)
{
        mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_3WORD);
        mcam_sg_next_buffer(cam);
        mcam_reg_set_bit(cam, REG_CTRL1, C1_DESC_ENA);
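        /*
         * There is only one descriptor chain, but the controller can signal
         * end-of-frame on any of its three frame interrupts, so let the IRQ
         * handler check all three slots.
         */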
        cam->nbufs = 3;
}

/*
 * Frame completion with S/G is trickier.  We can't muck with
 * a descriptor chain on the fly, since the controller buffers it
 * internally.  So we have to actually stop and restart; Marvell
 * says this is the way to do it.
 *
 * Of course, stopping is easier said than done; experience shows
 * that the controller can start a frame *after* C0_ENABLE has been
 * cleared.  So when running in S/G mode, the controller is "stopped"
 * on receipt of the start-of-frame interrupt.  That means we can
 * safely change the DMA descriptor array here and restart things
 * (assuming there's another buffer waiting to go).
 */
static void mcam_dma_sg_done(struct mcam_camera *cam, int frame)
{
        struct mcam_vb_buffer *buf = cam->vb_bufs[0];

        /*
         * Very Bad Not Good Things happen if you don't clear
         * C1_DESC_ENA before making any descriptor changes.
         */
        mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_ENA);
        /*
         * If we have another buffer available, put it in and
         * restart the engine.
         */
        if (!list_empty(&cam->buffers)) {
                mcam_sg_next_buffer(cam);
                mcam_reg_set_bit(cam, REG_CTRL1, C1_DESC_ENA);
                mcam_ctlr_start(cam);
        /*
         * Otherwise set CF_SG_RESTART and the controller will
         * be restarted once another buffer shows up.
         */
        } else {
                set_bit(CF_SG_RESTART, &cam->flags);
                singles++;
        }
        /*
         * Now we can give the completed frame back to user space.
         */
        delivered++;
        mcam_buffer_done(cam, frame, &buf->vb_buf);
}

/*
 * Scatter/gather mode requires stopping the controller between
 * frames so we can put in a new DMA descriptor array.  If no new
 * buffer exists at frame completion, the controller is left stopped;
 * this function is charged with getting things going again.
 */
static void mcam_sg_restart(struct mcam_camera *cam)
{
        mcam_ctlr_dma_sg(cam);
        mcam_ctlr_start(cam);
        clear_bit(CF_SG_RESTART, &cam->flags);
}

/* ---------------------------------------------------------------------- */
/*
 * Buffer-mode-independent controller code.
 */

/*
 * Image format setup
 */
static void mcam_ctlr_image(struct mcam_camera *cam)
{
        int imgsz;
        struct v4l2_pix_format *fmt = &cam->pix_format;

        imgsz = ((fmt->height << IMGSZ_V_SHIFT) & IMGSZ_V_MASK) |
                (fmt->bytesperline & IMGSZ_H_MASK);
        mcam_reg_write(cam, REG_IMGSIZE, imgsz);
        mcam_reg_write(cam, REG_IMGOFFSET, 0);
        /* YPITCH just drops the last two bits */
        mcam_reg_write_mask(cam, REG_IMGPITCH, fmt->bytesperline,
                        IMGP_YP_MASK);
        /*
         * Tell the controller about the image format we are using.
         */
        switch (cam->pix_format.pixelformat) {
        case V4L2_PIX_FMT_YUYV:
                mcam_reg_write_mask(cam, REG_CTRL0,
                                C0_DF_YUV|C0_YUV_PACKED|C0_YUVE_YUYV,
                                C0_DF_MASK);
                break;

        case V4L2_PIX_FMT_RGB444:
                mcam_reg_write_mask(cam, REG_CTRL0,
                                C0_DF_RGB|C0_RGBF_444|C0_RGB4_XRGB,
                                C0_DF_MASK);
                /* Alpha value? */
                break;

        case V4L2_PIX_FMT_RGB565:
                mcam_reg_write_mask(cam, REG_CTRL0,
                                C0_DF_RGB|C0_RGBF_565|C0_RGB5_BGGR,
                                C0_DF_MASK);
                break;

        default:
                cam_err(cam, "Unknown format %x\n", cam->pix_format.pixelformat);
                break;
        }
        /*
         * Make sure it knows we want to use hsync/vsync.
         */
        mcam_reg_write_mask(cam, REG_CTRL0, C0_SIF_HVSYNC,
                        C0_SIFM_MASK);
}

/*
 * Configure the controller for operation; caller holds the
 * device mutex.
 */
static int mcam_ctlr_configure(struct mcam_camera *cam)
{
        unsigned long flags;

        spin_lock_irqsave(&cam->dev_lock, flags);
        switch (cam->buffer_mode) {
        case B_vmalloc:
                mcam_ctlr_dma_vmalloc(cam);
                break;
        case B_DMA_contig:
                mcam_ctlr_dma_contig(cam);
                break;
        case B_DMA_sg:
                mcam_ctlr_dma_sg(cam);
                break;
        }
        mcam_ctlr_image(cam);
        mcam_set_config_needed(cam, 0);
        clear_bit(CF_SG_RESTART, &cam->flags);
        spin_unlock_irqrestore(&cam->dev_lock, flags);
        return 0;
}

static void mcam_ctlr_irq_enable(struct mcam_camera *cam)
{
        /*
         * Clear any pending interrupts, since we do not
         * expect to have I/O active prior to enabling.
         */
        mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS);
        mcam_reg_set_bit(cam, REG_IRQMASK, FRAMEIRQS);
}

static void mcam_ctlr_irq_disable(struct mcam_camera *cam)
{
        mcam_reg_clear_bit(cam, REG_IRQMASK, FRAMEIRQS);
}

static void mcam_ctlr_init(struct mcam_camera *cam)
{
        unsigned long flags;

        spin_lock_irqsave(&cam->dev_lock, flags);
        /*
         * Make sure it's not powered down.
         */
        mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
        /*
         * Turn off the enable bit.  It sure should be off anyway,
         * but it's good to be sure.
         */
        mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
        /*
         * Clock the sensor appropriately.  Controller clock should
         * be 48MHz, sensor "typical" value is half that.
         */
        mcam_reg_write_mask(cam, REG_CLKCTRL, 2, CLK_DIV_MASK);
        spin_unlock_irqrestore(&cam->dev_lock, flags);
}

/*
 * Stop the controller, and don't return until we're really sure that no
 * further DMA is going on.
 */
static void mcam_ctlr_stop_dma(struct mcam_camera *cam)
{
        unsigned long flags;

        /*
         * Theory: stop the camera controller (whether it is operating
         * or not).  Delay briefly just in case we race with the SOF
         * interrupt, then wait until no DMA is active.
         */
        spin_lock_irqsave(&cam->dev_lock, flags);
        clear_bit(CF_SG_RESTART, &cam->flags);
        mcam_ctlr_stop(cam);
        cam->state = S_IDLE;
        spin_unlock_irqrestore(&cam->dev_lock, flags);
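        /*
         * 40 ms comfortably exceeds one frame time at typical rates, so any
         * frame that had already started should be finished by now.
         */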
        msleep(40);
        if (test_bit(CF_DMA_ACTIVE, &cam->flags))
                cam_err(cam, "Timeout waiting for DMA to end\n");
                /* This would be bad news - what now? */
        spin_lock_irqsave(&cam->dev_lock, flags);
        mcam_ctlr_irq_disable(cam);
        spin_unlock_irqrestore(&cam->dev_lock, flags);
}

/*
 * Power up and down.
 */
static void mcam_ctlr_power_up(struct mcam_camera *cam)
{
        unsigned long flags;

        spin_lock_irqsave(&cam->dev_lock, flags);
        cam->plat_power_up(cam);
        mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
        spin_unlock_irqrestore(&cam->dev_lock, flags);
        msleep(5); /* Just to be sure */
}

static void mcam_ctlr_power_down(struct mcam_camera *cam)
{
        unsigned long flags;

        spin_lock_irqsave(&cam->dev_lock, flags);
        /*
         * School of hard knocks department: be sure we do any register
         * twiddling on the controller *before* calling the platform
         * power down routine.
         */
        mcam_reg_set_bit(cam, REG_CTRL1, C1_PWRDWN);
        cam->plat_power_down(cam);
        spin_unlock_irqrestore(&cam->dev_lock, flags);
}

/* -------------------------------------------------------------------- */
/*
 * Communications with the sensor.
 */
static int __mcam_cam_reset(struct mcam_camera *cam)
{
        return sensor_call(cam, core, reset, 0);
}

/*
 * We have found the sensor on the i2c.  Let's try to have a
 * conversation.
 */
static int mcam_cam_init(struct mcam_camera *cam)
{
        struct v4l2_dbg_chip_ident chip;
        int ret;

        mutex_lock(&cam->s_mutex);
        if (cam->state != S_NOTREADY)
                cam_warn(cam, "Cam init with device in funky state %d",
                                cam->state);
        ret = __mcam_cam_reset(cam);
        if (ret)
                goto out;
        chip.ident = V4L2_IDENT_NONE;
        chip.match.type = V4L2_CHIP_MATCH_I2C_ADDR;
        chip.match.addr = cam->sensor_addr;
        ret = sensor_call(cam, core, g_chip_ident, &chip);
        if (ret)
                goto out;
        cam->sensor_type = chip.ident;
        if (cam->sensor_type != V4L2_IDENT_OV7670) {
                cam_err(cam, "Unsupported sensor type 0x%x", cam->sensor_type);
                ret = -EINVAL;
                goto out;
        }
        /* Get/set parameters? */
        ret = 0;
        cam->state = S_IDLE;
out:
        mcam_ctlr_power_down(cam);
        mutex_unlock(&cam->s_mutex);
        return ret;
}

/*
 * Configure the sensor to match the parameters we have.  Caller should
 * hold s_mutex
 */
static int mcam_cam_set_flip(struct mcam_camera *cam)
{
        struct v4l2_control ctrl;

        memset(&ctrl, 0, sizeof(ctrl));
        ctrl.id = V4L2_CID_VFLIP;
        ctrl.value = flip;
        return sensor_call(cam, core, s_ctrl, &ctrl);
}

static int mcam_cam_configure(struct mcam_camera *cam)
{
        struct v4l2_mbus_framefmt mbus_fmt;
        int ret;

        v4l2_fill_mbus_format(&mbus_fmt, &cam->pix_format, cam->mbus_code);
        ret = sensor_call(cam, core, init, 0);
        if (ret == 0)
                ret = sensor_call(cam, video, s_mbus_fmt, &mbus_fmt);
        /*
         * OV7670 does weird things if flip is set *before* format...
         */
        ret += mcam_cam_set_flip(cam);
        return ret;
}

/*
 * Get everything ready, and start grabbing frames.
 */
static int mcam_read_setup(struct mcam_camera *cam)
{
        int ret;
        unsigned long flags;

        /*
         * Configuration.  If we still don't have DMA buffers,
         * make one last, desperate attempt.
         */
        if (cam->buffer_mode == B_vmalloc && cam->nbufs == 0 &&
                        mcam_alloc_dma_bufs(cam, 0))
                return -ENOMEM;

        if (mcam_needs_config(cam)) {
                mcam_cam_configure(cam);
                ret = mcam_ctlr_configure(cam);
                if (ret)
                        return ret;
        }

        /*
         * Turn it loose.
         */
        spin_lock_irqsave(&cam->dev_lock, flags);
        mcam_reset_buffers(cam);
        mcam_ctlr_irq_enable(cam);
        cam->state = S_STREAMING;
        mcam_ctlr_start(cam);
        spin_unlock_irqrestore(&cam->dev_lock, flags);
        return 0;
}

/* ----------------------------------------------------------------------- */
/*
 * Videobuf2 interface code.
 */

static int mcam_vb_queue_setup(struct vb2_queue *vq, unsigned int *nbufs,
                unsigned int *num_planes, unsigned long sizes[],
                void *alloc_ctxs[])
{
        struct mcam_camera *cam = vb2_get_drv_priv(vq);
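        /*
         * Contiguous mode keeps two buffers loaded into the hardware BARs,
         * so ask for at least three to keep one queued in reserve.
         */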
        int minbufs = (cam->buffer_mode == B_DMA_contig) ? 3 : 2;

        sizes[0] = cam->pix_format.sizeimage;
        *num_planes = 1; /* Someday we have to support planar formats... */
        if (*nbufs < minbufs)
                *nbufs = minbufs;
        if (cam->buffer_mode == B_DMA_contig)
                alloc_ctxs[0] = cam->vb_alloc_ctx;
        return 0;
}

static void mcam_vb_buf_queue(struct vb2_buffer *vb)
{
        struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
        struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
        unsigned long flags;
        int start;

        spin_lock_irqsave(&cam->dev_lock, flags);
        start = (cam->state == S_BUFWAIT) && !list_empty(&cam->buffers);
        list_add(&mvb->queue, &cam->buffers);
        if (test_bit(CF_SG_RESTART, &cam->flags))
                mcam_sg_restart(cam);
        spin_unlock_irqrestore(&cam->dev_lock, flags);
        if (start)
                mcam_read_setup(cam);
}

/*
 * vb2 uses these to release the mutex when waiting in dqbuf.  I'm
 * not actually sure we need to do this (I'm not sure that vb2_dqbuf() needs
 * to be called with the mutex held), but better safe than sorry.
 */
static void mcam_vb_wait_prepare(struct vb2_queue *vq)
{
        struct mcam_camera *cam = vb2_get_drv_priv(vq);

        mutex_unlock(&cam->s_mutex);
}

static void mcam_vb_wait_finish(struct vb2_queue *vq)
{
        struct mcam_camera *cam = vb2_get_drv_priv(vq);

        mutex_lock(&cam->s_mutex);
}

/*
 * These need to be called with the mutex held from vb2
 */
static int mcam_vb_start_streaming(struct vb2_queue *vq)
{
        struct mcam_camera *cam = vb2_get_drv_priv(vq);

        if (cam->state != S_IDLE)
                return -EINVAL;
        cam->sequence = 0;
        /*
         * Videobuf2 sneakily hoards all the buffers and won't
         * give them to us until *after* streaming starts.  But
         * we can't actually start streaming until we have a
         * destination.  So go into a wait state and hope they
         * give us buffers soon.
         */
        if (cam->buffer_mode != B_vmalloc && list_empty(&cam->buffers)) {
                cam->state = S_BUFWAIT;
                return 0;
        }
        return mcam_read_setup(cam);
}

static int mcam_vb_stop_streaming(struct vb2_queue *vq)
{
        struct mcam_camera *cam = vb2_get_drv_priv(vq);
        unsigned long flags;

        if (cam->state == S_BUFWAIT) {
                /* They never gave us buffers */
                cam->state = S_IDLE;
                return 0;
        }
        if (cam->state != S_STREAMING)
                return -EINVAL;
        mcam_ctlr_stop_dma(cam);
        /*
         * VB2 reclaims the buffers, so we need to forget
         * about them.
         */
        spin_lock_irqsave(&cam->dev_lock, flags);
        INIT_LIST_HEAD(&cam->buffers);
        spin_unlock_irqrestore(&cam->dev_lock, flags);
        return 0;
}

static const struct vb2_ops mcam_vb2_ops = {
        .queue_setup = mcam_vb_queue_setup,
        .buf_queue = mcam_vb_buf_queue,
        .start_streaming = mcam_vb_start_streaming,
        .stop_streaming = mcam_vb_stop_streaming,
        .wait_prepare = mcam_vb_wait_prepare,
        .wait_finish = mcam_vb_wait_finish,
};

/*
 * Scatter/gather mode uses all of the above functions plus a
 * few extras to deal with DMA mapping.
 */
static int mcam_vb_sg_buf_init(struct vb2_buffer *vb)
{
        struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
        struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
        int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;

        mvb->dma_desc = dma_alloc_coherent(cam->dev,
                        ndesc * sizeof(struct mcam_dma_desc),
                        &mvb->dma_desc_pa, GFP_KERNEL);
        if (mvb->dma_desc == NULL) {
                cam_err(cam, "Unable to get DMA descriptor array\n");
                return -ENOMEM;
        }
        return 0;
}

static int mcam_vb_sg_buf_prepare(struct vb2_buffer *vb)
{
        struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
        struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
        struct vb2_dma_sg_desc *sgd = vb2_dma_sg_plane_desc(vb, 0);
        struct mcam_dma_desc *desc = mvb->dma_desc;
        struct scatterlist *sg;
        int i;

        mvb->dma_desc_nent = dma_map_sg(cam->dev, sgd->sglist, sgd->num_pages,
                        DMA_FROM_DEVICE);
        if (mvb->dma_desc_nent <= 0)
                return -EIO;  /* Not sure what's right here */
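        /*
         * Translate the mapped scatterlist into the controller's two-word
         * (address, length) descriptor format.
         */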
        for_each_sg(sgd->sglist, sg, mvb->dma_desc_nent, i) {
                desc->dma_addr = sg_dma_address(sg);
                desc->segment_len = sg_dma_len(sg);
                desc++;
        }
        return 0;
}

static int mcam_vb_sg_buf_finish(struct vb2_buffer *vb)
{
        struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
        struct vb2_dma_sg_desc *sgd = vb2_dma_sg_plane_desc(vb, 0);

        dma_unmap_sg(cam->dev, sgd->sglist, sgd->num_pages, DMA_FROM_DEVICE);
        return 0;
}

static void mcam_vb_sg_buf_cleanup(struct vb2_buffer *vb)
{
        struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
        struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
        int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;

        dma_free_coherent(cam->dev, ndesc * sizeof(struct mcam_dma_desc),
                        mvb->dma_desc, mvb->dma_desc_pa);
}

static const struct vb2_ops mcam_vb2_sg_ops = {
        .queue_setup = mcam_vb_queue_setup,
        .buf_init = mcam_vb_sg_buf_init,
        .buf_prepare = mcam_vb_sg_buf_prepare,
        .buf_queue = mcam_vb_buf_queue,
        .buf_finish = mcam_vb_sg_buf_finish,
        .buf_cleanup = mcam_vb_sg_buf_cleanup,
        .start_streaming = mcam_vb_start_streaming,
        .stop_streaming = mcam_vb_stop_streaming,
        .wait_prepare = mcam_vb_wait_prepare,
        .wait_finish = mcam_vb_wait_finish,
};

static int mcam_setup_vb2(struct mcam_camera *cam)
{
        struct vb2_queue *vq = &cam->vb_queue;

        memset(vq, 0, sizeof(*vq));
        vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vq->drv_priv = cam;
        INIT_LIST_HEAD(&cam->buffers);
        switch (cam->buffer_mode) {
        case B_DMA_contig:
                vq->ops = &mcam_vb2_ops;
                vq->mem_ops = &vb2_dma_contig_memops;
                vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
                cam->vb_alloc_ctx = vb2_dma_contig_init_ctx(cam->dev);
                vq->io_modes = VB2_MMAP | VB2_USERPTR;
                break;
        case B_DMA_sg:
                vq->ops = &mcam_vb2_sg_ops;
                vq->mem_ops = &vb2_dma_sg_memops;
                vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
                vq->io_modes = VB2_MMAP | VB2_USERPTR;
                break;
        case B_vmalloc:
                vq->ops = &mcam_vb2_ops;
                vq->mem_ops = &vb2_vmalloc_memops;
                vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
                vq->io_modes = VB2_MMAP;
                break;
        }
        return vb2_queue_init(vq);
}

static void mcam_cleanup_vb2(struct mcam_camera *cam)
{
        vb2_queue_release(&cam->vb_queue);
        if (cam->buffer_mode == B_DMA_contig)
                vb2_dma_contig_cleanup_ctx(cam->vb_alloc_ctx);
}

/* ---------------------------------------------------------------------- */
/*
 * The long list of V4L2 ioctl() operations.
 */

static int mcam_vidioc_streamon(struct file *filp, void *priv,
                enum v4l2_buf_type type)
{
        struct mcam_camera *cam = filp->private_data;
        int ret;

        mutex_lock(&cam->s_mutex);
        ret = vb2_streamon(&cam->vb_queue, type);
        mutex_unlock(&cam->s_mutex);
        return ret;
}

static int mcam_vidioc_streamoff(struct file *filp, void *priv,
                enum v4l2_buf_type type)
{
        struct mcam_camera *cam = filp->private_data;
        int ret;

        mutex_lock(&cam->s_mutex);
        ret = vb2_streamoff(&cam->vb_queue, type);
        mutex_unlock(&cam->s_mutex);
        return ret;
}

static int mcam_vidioc_reqbufs(struct file *filp, void *priv,
                struct v4l2_requestbuffers *req)
{
        struct mcam_camera *cam = filp->private_data;
        int ret;

        mutex_lock(&cam->s_mutex);
        ret = vb2_reqbufs(&cam->vb_queue, req);
        mutex_unlock(&cam->s_mutex);
        return ret;
}

static int mcam_vidioc_querybuf(struct file *filp, void *priv,
                struct v4l2_buffer *buf)
{
        struct mcam_camera *cam = filp->private_data;
        int ret;

        mutex_lock(&cam->s_mutex);
        ret = vb2_querybuf(&cam->vb_queue, buf);
        mutex_unlock(&cam->s_mutex);
        return ret;
}

static int mcam_vidioc_qbuf(struct file *filp, void *priv,
                struct v4l2_buffer *buf)
{
        struct mcam_camera *cam = filp->private_data;
        int ret;

        mutex_lock(&cam->s_mutex);
        ret = vb2_qbuf(&cam->vb_queue, buf);
        mutex_unlock(&cam->s_mutex);
        return ret;
}

static int mcam_vidioc_dqbuf(struct file *filp, void *priv,
                struct v4l2_buffer *buf)
{
        struct mcam_camera *cam = filp->private_data;
        int ret;

        mutex_lock(&cam->s_mutex);
        ret = vb2_dqbuf(&cam->vb_queue, buf, filp->f_flags & O_NONBLOCK);
        mutex_unlock(&cam->s_mutex);
        return ret;
}

static int mcam_vidioc_queryctrl(struct file *filp, void *priv,
                struct v4l2_queryctrl *qc)
{
        struct mcam_camera *cam = priv;
        int ret;

        mutex_lock(&cam->s_mutex);
        ret = sensor_call(cam, core, queryctrl, qc);
        mutex_unlock(&cam->s_mutex);
        return ret;
}

static int mcam_vidioc_g_ctrl(struct file *filp, void *priv,
                struct v4l2_control *ctrl)
{
        struct mcam_camera *cam = priv;
        int ret;

        mutex_lock(&cam->s_mutex);
        ret = sensor_call(cam, core, g_ctrl, ctrl);
        mutex_unlock(&cam->s_mutex);
        return ret;
}

static int mcam_vidioc_s_ctrl(struct file *filp, void *priv,
                struct v4l2_control *ctrl)
{
        struct mcam_camera *cam = priv;
        int ret;

        mutex_lock(&cam->s_mutex);
        ret = sensor_call(cam, core, s_ctrl, ctrl);
        mutex_unlock(&cam->s_mutex);
        return ret;
}

static int mcam_vidioc_querycap(struct file *file, void *priv,
                struct v4l2_capability *cap)
{
        strcpy(cap->driver, "marvell_ccic");
        strcpy(cap->card, "marvell_ccic");
        cap->version = 1;
        cap->capabilities = V4L2_CAP_VIDEO_CAPTURE |
                V4L2_CAP_READWRITE | V4L2_CAP_STREAMING;
        return 0;
}

static int mcam_vidioc_enum_fmt_vid_cap(struct file *filp,
                void *priv, struct v4l2_fmtdesc *fmt)
{
        if (fmt->index >= N_MCAM_FMTS)
                return -EINVAL;
        strlcpy(fmt->description, mcam_formats[fmt->index].desc,
                        sizeof(fmt->description));
        fmt->pixelformat = mcam_formats[fmt->index].pixelformat;
        return 0;
}

static int mcam_vidioc_try_fmt_vid_cap(struct file *filp, void *priv,
                struct v4l2_format *fmt)
{
        struct mcam_camera *cam = priv;
        struct mcam_format_struct *f;
        struct v4l2_pix_format *pix = &fmt->fmt.pix;
        struct v4l2_mbus_framefmt mbus_fmt;
        int ret;

        f = mcam_find_format(pix->pixelformat);
        pix->pixelformat = f->pixelformat;
        v4l2_fill_mbus_format(&mbus_fmt, pix, f->mbus_code);
        mutex_lock(&cam->s_mutex);
        ret = sensor_call(cam, video, try_mbus_fmt, &mbus_fmt);
        mutex_unlock(&cam->s_mutex);
        v4l2_fill_pix_format(pix, &mbus_fmt);
        pix->bytesperline = pix->width * f->bpp;
        pix->sizeimage = pix->height * pix->bytesperline;
        return ret;
}

static int mcam_vidioc_s_fmt_vid_cap(struct file *filp, void *priv,
                struct v4l2_format *fmt)
{
        struct mcam_camera *cam = priv;
        struct mcam_format_struct *f;
        int ret;

        /*
         * Can't do anything if the device is not idle
         * Also can't if there are streaming buffers in place.
         */
        if (cam->state != S_IDLE || cam->vb_queue.num_buffers > 0)
                return -EBUSY;

        f = mcam_find_format(fmt->fmt.pix.pixelformat);

        /*
         * See if the formatting works in principle.
         */
        ret = mcam_vidioc_try_fmt_vid_cap(filp, priv, fmt);
        if (ret)
                return ret;
        /*
         * Now we start to change things for real, so let's do it
         * under lock.
         */
        mutex_lock(&cam->s_mutex);
        cam->pix_format = fmt->fmt.pix;
        cam->mbus_code = f->mbus_code;

        /*
         * Make sure we have appropriate DMA buffers.
         */
        ret = -ENOMEM;
        if (cam->buffer_mode == B_vmalloc) {
                if (cam->nbufs > 0 &&
                                cam->dma_buf_size < cam->pix_format.sizeimage)
                        mcam_free_dma_bufs(cam);
                if (cam->nbufs == 0) {
                        if (mcam_alloc_dma_bufs(cam, 0))
                                goto out;
                }
        }
        mcam_set_config_needed(cam, 1);
        ret = 0;
out:
        mutex_unlock(&cam->s_mutex);
        return ret;
}

/*
 * Return our stored notion of how the camera is/should be configured.
 * The V4l2 spec wants us to be smarter, and actually get this from
 * the camera (and not mess with it at open time).  Someday.
 */
static int mcam_vidioc_g_fmt_vid_cap(struct file *filp, void *priv,
                struct v4l2_format *f)
{
        struct mcam_camera *cam = priv;

        f->fmt.pix = cam->pix_format;
        return 0;
}

/*
 * We only have one input - the sensor - so minimize the nonsense here.
 */
static int mcam_vidioc_enum_input(struct file *filp, void *priv,
                struct v4l2_input *input)
{
        if (input->index != 0)
                return -EINVAL;

        input->type = V4L2_INPUT_TYPE_CAMERA;
        input->std = V4L2_STD_ALL; /* Not sure what should go here */
        strcpy(input->name, "Camera");
        return 0;
}

static int mcam_vidioc_g_input(struct file *filp, void *priv, unsigned int *i)
{
        *i = 0;
        return 0;
}

static int mcam_vidioc_s_input(struct file *filp, void *priv, unsigned int i)
{
        if (i != 0)
                return -EINVAL;
        return 0;
}

/* from vivi.c */
static int mcam_vidioc_s_std(struct file *filp, void *priv, v4l2_std_id *a)
{
        return 0;
}

/*
 * G/S_PARM.  Most of this is done by the sensor, but we are
 * the level which controls the number of read buffers.
 */
static int mcam_vidioc_g_parm(struct file *filp, void *priv,
                struct v4l2_streamparm *parms)
{
        struct mcam_camera *cam = priv;
        int ret;

        mutex_lock(&cam->s_mutex);
        ret = sensor_call(cam, video, g_parm, parms);
        mutex_unlock(&cam->s_mutex);
        parms->parm.capture.readbuffers = n_dma_bufs;
        return ret;
}

static int mcam_vidioc_s_parm(struct file *filp, void *priv,
                struct v4l2_streamparm *parms)
{
        struct mcam_camera *cam = priv;
        int ret;

        mutex_lock(&cam->s_mutex);
        ret = sensor_call(cam, video, s_parm, parms);
        mutex_unlock(&cam->s_mutex);
        parms->parm.capture.readbuffers = n_dma_bufs;
        return ret;
}

static int mcam_vidioc_g_chip_ident(struct file *file, void *priv,
                struct v4l2_dbg_chip_ident *chip)
{
        struct mcam_camera *cam = priv;

        chip->ident = V4L2_IDENT_NONE;
        chip->revision = 0;
        if (v4l2_chip_match_host(&chip->match)) {
                chip->ident = cam->chip_id;
                return 0;
        }
        return sensor_call(cam, core, g_chip_ident, chip);
}

static int mcam_vidioc_enum_framesizes(struct file *filp, void *priv,
                struct v4l2_frmsizeenum *sizes)
{
        struct mcam_camera *cam = priv;
        int ret;

        mutex_lock(&cam->s_mutex);
        ret = sensor_call(cam, video, enum_framesizes, sizes);
        mutex_unlock(&cam->s_mutex);
        return ret;
}

static int mcam_vidioc_enum_frameintervals(struct file *filp, void *priv,
                struct v4l2_frmivalenum *interval)
{
        struct mcam_camera *cam = priv;
        int ret;

        mutex_lock(&cam->s_mutex);
        ret = sensor_call(cam, video, enum_frameintervals, interval);
        mutex_unlock(&cam->s_mutex);
        return ret;
}

#ifdef CONFIG_VIDEO_ADV_DEBUG
static int mcam_vidioc_g_register(struct file *file, void *priv,
                struct v4l2_dbg_register *reg)
{
        struct mcam_camera *cam = priv;

        if (v4l2_chip_match_host(&reg->match)) {
                reg->val = mcam_reg_read(cam, reg->reg);
                reg->size = 4;
                return 0;
        }
        return sensor_call(cam, core, g_register, reg);
}

static int mcam_vidioc_s_register(struct file *file, void *priv,
                struct v4l2_dbg_register *reg)
{
        struct mcam_camera *cam = priv;

        if (v4l2_chip_match_host(&reg->match)) {
                mcam_reg_write(cam, reg->reg, reg->val);
                return 0;
        }
        return sensor_call(cam, core, s_register, reg);
}
#endif

static const struct v4l2_ioctl_ops mcam_v4l_ioctl_ops = {
        .vidioc_querycap = mcam_vidioc_querycap,
        .vidioc_enum_fmt_vid_cap = mcam_vidioc_enum_fmt_vid_cap,
        .vidioc_try_fmt_vid_cap = mcam_vidioc_try_fmt_vid_cap,
        .vidioc_s_fmt_vid_cap = mcam_vidioc_s_fmt_vid_cap,
        .vidioc_g_fmt_vid_cap = mcam_vidioc_g_fmt_vid_cap,
        .vidioc_enum_input = mcam_vidioc_enum_input,
        .vidioc_g_input = mcam_vidioc_g_input,
        .vidioc_s_input = mcam_vidioc_s_input,
        .vidioc_s_std = mcam_vidioc_s_std,
        .vidioc_reqbufs = mcam_vidioc_reqbufs,
        .vidioc_querybuf = mcam_vidioc_querybuf,
        .vidioc_qbuf = mcam_vidioc_qbuf,
        .vidioc_dqbuf = mcam_vidioc_dqbuf,
        .vidioc_streamon = mcam_vidioc_streamon,
        .vidioc_streamoff = mcam_vidioc_streamoff,
        .vidioc_queryctrl = mcam_vidioc_queryctrl,
        .vidioc_g_ctrl = mcam_vidioc_g_ctrl,
        .vidioc_s_ctrl = mcam_vidioc_s_ctrl,
        .vidioc_g_parm = mcam_vidioc_g_parm,
        .vidioc_s_parm = mcam_vidioc_s_parm,
        .vidioc_enum_framesizes = mcam_vidioc_enum_framesizes,
        .vidioc_enum_frameintervals = mcam_vidioc_enum_frameintervals,
        .vidioc_g_chip_ident = mcam_vidioc_g_chip_ident,
#ifdef CONFIG_VIDEO_ADV_DEBUG
        .vidioc_g_register = mcam_vidioc_g_register,
        .vidioc_s_register = mcam_vidioc_s_register,
#endif
};

/* ---------------------------------------------------------------------- */
/*
 * Our various file operations.
 */
static int mcam_v4l_open(struct file *filp)
{
        struct mcam_camera *cam = video_drvdata(filp);
        int ret = 0;

        filp->private_data = cam;

        frames = singles = delivered = 0;
        mutex_lock(&cam->s_mutex);
        if (cam->users == 0) {
                ret = mcam_setup_vb2(cam);
                if (ret)
                        goto out;
                mcam_ctlr_power_up(cam);
                __mcam_cam_reset(cam);
                mcam_set_config_needed(cam, 1);
        }
        (cam->users)++;
out:
        mutex_unlock(&cam->s_mutex);
        return ret;
}

static int mcam_v4l_release(struct file *filp)
{
        struct mcam_camera *cam = filp->private_data;

        cam_err(cam, "Release, %d frames, %d singles, %d delivered\n", frames,
                        singles, delivered);
        mutex_lock(&cam->s_mutex);
        (cam->users)--;
        if (filp == cam->owner) {
                mcam_ctlr_stop_dma(cam);
                cam->owner = NULL;
        }
        if (cam->users == 0) {
                mcam_cleanup_vb2(cam);
                mcam_ctlr_power_down(cam);
                if (cam->buffer_mode == B_vmalloc && alloc_bufs_at_read)
                        mcam_free_dma_bufs(cam);
        }
        mutex_unlock(&cam->s_mutex);
        return 0;
}

static ssize_t mcam_v4l_read(struct file *filp,
                char __user *buffer, size_t len, loff_t *pos)
{
        struct mcam_camera *cam = filp->private_data;
        int ret;

        mutex_lock(&cam->s_mutex);
        ret = vb2_read(&cam->vb_queue, buffer, len, pos,
                        filp->f_flags & O_NONBLOCK);
        mutex_unlock(&cam->s_mutex);
        return ret;
}

static unsigned int mcam_v4l_poll(struct file *filp,
                struct poll_table_struct *pt)
{
        struct mcam_camera *cam = filp->private_data;
        int ret;

        mutex_lock(&cam->s_mutex);
        ret = vb2_poll(&cam->vb_queue, filp, pt);
        mutex_unlock(&cam->s_mutex);
        return ret;
}

static int mcam_v4l_mmap(struct file *filp, struct vm_area_struct *vma)
{
        struct mcam_camera *cam = filp->private_data;
        int ret;

        mutex_lock(&cam->s_mutex);
        ret = vb2_mmap(&cam->vb_queue, vma);
        mutex_unlock(&cam->s_mutex);
        return ret;
}

static const struct v4l2_file_operations mcam_v4l_fops = {
        .owner = THIS_MODULE,
        .open = mcam_v4l_open,
        .release = mcam_v4l_release,
        .read = mcam_v4l_read,
        .poll = mcam_v4l_poll,
        .mmap = mcam_v4l_mmap,
        .unlocked_ioctl = video_ioctl2,
};

/*
 * This template device holds all of those v4l2 methods; we
 * clone it for specific real devices.
 */
static struct video_device mcam_v4l_template = {
        .name = "mcam",
        .tvnorms = V4L2_STD_NTSC_M,
        .current_norm = V4L2_STD_NTSC_M,  /* make mplayer happy */

        .fops = &mcam_v4l_fops,
        .ioctl_ops = &mcam_v4l_ioctl_ops,
        .release = video_device_release_empty,
};

/* ---------------------------------------------------------------------- */
/*
 * Interrupt handler stuff
 */
static void mcam_frame_complete(struct mcam_camera *cam, int frame)
{
        /*
         * Basic frame housekeeping.
         */
        set_bit(frame, &cam->flags);
        clear_bit(CF_DMA_ACTIVE, &cam->flags);
        cam->next_buf = frame;
        cam->buf_seq[frame] = ++(cam->sequence);
        cam->last_delivered = frame;
        frames++;
        /*
         * "This should never happen"
         */
        if (cam->state != S_STREAMING)
                return;
        /*
         * Process the frame and set up the next one.
         */
        switch (cam->buffer_mode) {
        case B_vmalloc:
                tasklet_schedule(&cam->s_tasklet);
                break;
        case B_DMA_contig:
                mcam_dma_contig_done(cam, frame);
                break;
        case B_DMA_sg:
                mcam_dma_sg_done(cam, frame);
                break;
        }
}

/*
 * The interrupt handler; this needs to be called from the
 * platform irq handler with the lock held.
 */
int mccic_irq(struct mcam_camera *cam, unsigned int irqs)
{
        unsigned int frame, handled = 0;

        mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS); /* Clear'em all */
        /*
         * Handle any frame completions.  There really should
         * not be more than one of these, or we have fallen
         * far behind.
         *
         * When running in S/G mode, the frame number lacks any
         * real meaning - there's only one descriptor array - but
         * the controller still picks a different one to signal
         * each time.
         */
        for (frame = 0; frame < cam->nbufs; frame++)
                if (irqs & (IRQ_EOF0 << frame)) {
                        mcam_frame_complete(cam, frame);
                        handled = 1;
                }
        /*
         * If a frame starts, note that we have DMA active.  This
         * code assumes that we won't get multiple frame interrupts
         * at once; may want to rethink that.
         */
        if (irqs & (IRQ_SOF0 | IRQ_SOF1 | IRQ_SOF2)) {
                set_bit(CF_DMA_ACTIVE, &cam->flags);
                handled = 1;
                if (cam->buffer_mode == B_DMA_sg)
                        mcam_ctlr_stop(cam);
        }
        return handled;
}

/* ---------------------------------------------------------------------- */
/*
 * Registration and such.
 */
static struct ov7670_config sensor_cfg = {
        /*
         * Exclude QCIF mode, because it only captures a tiny portion
         * of the sensor FOV
         */
        .min_width = 320,
        .min_height = 240,
};

int mccic_register(struct mcam_camera *cam)
{
        struct i2c_board_info ov7670_info = {
                .type = "ov7670",
                .addr = 0x42 >> 1,
                .platform_data = &sensor_cfg,
        };
        int ret;

        /*
         * Register with V4L
         */
        ret = v4l2_device_register(cam->dev, &cam->v4l2_dev);
        if (ret)
                return ret;

        mutex_init(&cam->s_mutex);
        cam->state = S_NOTREADY;
        mcam_set_config_needed(cam, 1);
        cam->pix_format = mcam_def_pix_format;
        cam->mbus_code = mcam_def_mbus_code;
        INIT_LIST_HEAD(&cam->dev_list);
        INIT_LIST_HEAD(&cam->buffers);
        tasklet_init(&cam->s_tasklet, mcam_frame_tasklet, (unsigned long) cam);

        /*
         * User space may want to override the asked-for buffer mode;
         * here's hoping they know what they're doing.
         */
        if (buffer_mode == 0)
                cam->buffer_mode = B_vmalloc;
        else if (buffer_mode == 1)
                cam->buffer_mode = B_DMA_contig;
        else if (buffer_mode == 2) {
                if (cam->chip_id == V4L2_IDENT_ARMADA610)
                        cam->buffer_mode = B_DMA_sg;
                else {
                        printk(KERN_ERR "marvell-cam: Cafe can't do S/G I/O\n");
                        cam->buffer_mode = B_vmalloc;
                }
        } else if (buffer_mode != -1)
                printk(KERN_ERR "marvell-cam: "
                        "Strange module buffer mode %d - ignoring\n",
                        buffer_mode);
        mcam_ctlr_init(cam);

        /*
         * Try to find the sensor.
         */
        sensor_cfg.clock_speed = cam->clock_speed;
        sensor_cfg.use_smbus = cam->use_smbus;
        cam->sensor_addr = ov7670_info.addr;
        cam->sensor = v4l2_i2c_new_subdev_board(&cam->v4l2_dev,
                        cam->i2c_adapter, &ov7670_info, NULL);
        if (cam->sensor == NULL) {
                ret = -ENODEV;
                goto out_unregister;
        }

        ret = mcam_cam_init(cam);
        if (ret)
                goto out_unregister;
        /*
         * Get the v4l2 setup done.
         */
        mutex_lock(&cam->s_mutex);
        cam->vdev = mcam_v4l_template;
        cam->vdev.debug = 0;
        cam->vdev.v4l2_dev = &cam->v4l2_dev;
        ret = video_register_device(&cam->vdev, VFL_TYPE_GRABBER, -1);
        if (ret)
                goto out;
        video_set_drvdata(&cam->vdev, cam);

        /*
         * If so requested, try to get our DMA buffers now.
         */
        if (cam->buffer_mode == B_vmalloc && !alloc_bufs_at_read) {
                if (mcam_alloc_dma_bufs(cam, 1))
                        cam_warn(cam, "Unable to alloc DMA buffers at load; "
                                        "will try again later.");
        }

out:
        mutex_unlock(&cam->s_mutex);
        return ret;
out_unregister:
        v4l2_device_unregister(&cam->v4l2_dev);
        return ret;
}

void mccic_shutdown(struct mcam_camera *cam)
{
        /*
         * If we have no users (and we really, really should have no
         * users) the device will already be powered down.  Trying to
         * take it down again will wedge the machine, which is frowned
         * upon.
         */
        if (cam->users > 0) {
                cam_warn(cam, "Removing a device with users!\n");
                mcam_ctlr_power_down(cam);
        }
        vb2_queue_release(&cam->vb_queue);
        if (cam->buffer_mode == B_vmalloc)
                mcam_free_dma_bufs(cam);
        video_unregister_device(&cam->vdev);
        v4l2_device_unregister(&cam->v4l2_dev);
}

/*
 * Power management
 */
#ifdef CONFIG_PM
void mccic_suspend(struct mcam_camera *cam)
{
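        /*
         * Stopping DMA below forces the state to S_IDLE, so remember the
         * previous state; mccic_resume() uses it to decide whether to
         * restart streaming.
         */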
        enum mcam_state cstate = cam->state;

        mcam_ctlr_stop_dma(cam);
        mcam_ctlr_power_down(cam);
        cam->state = cstate;
}

int mccic_resume(struct mcam_camera *cam)
{
        int ret = 0;

        mutex_lock(&cam->s_mutex);
        if (cam->users > 0) {
                mcam_ctlr_power_up(cam);
                __mcam_cam_reset(cam);
        } else {
                mcam_ctlr_power_down(cam);
        }
        mutex_unlock(&cam->s_mutex);

        set_bit(CF_CONFIG_NEEDED, &cam->flags);
        if (cam->state == S_STREAMING)
                ret = mcam_read_setup(cam);
        return ret;
}
#endif /* CONFIG_PM */