mcam-core.c

  1. /*
  2. * The Marvell camera core. This device appears in a number of settings,
  3. * so it needs platform-specific support outside of the core.
  4. *
  5. * Copyright 2011 Jonathan Corbet corbet@lwn.net
  6. */
  7. #include <linux/kernel.h>
  8. #include <linux/module.h>
  9. #include <linux/fs.h>
  10. #include <linux/mm.h>
  11. #include <linux/i2c.h>
  12. #include <linux/interrupt.h>
  13. #include <linux/spinlock.h>
  14. #include <linux/slab.h>
  15. #include <linux/device.h>
  16. #include <linux/wait.h>
  17. #include <linux/list.h>
  18. #include <linux/dma-mapping.h>
  19. #include <linux/delay.h>
  20. #include <linux/vmalloc.h>
  21. #include <linux/io.h>
  22. #include <linux/videodev2.h>
  23. #include <media/v4l2-device.h>
  24. #include <media/v4l2-ioctl.h>
  25. #include <media/v4l2-chip-ident.h>
  26. #include <media/ov7670.h>
  27. #include <media/videobuf2-vmalloc.h>
  28. #include <media/videobuf2-dma-contig.h>
  29. #include <media/videobuf2-dma-sg.h>
  30. #include "mcam-core.h"
  31. /*
  32. * Basic frame stats - to be deleted shortly
  33. */
  34. static int frames;
  35. static int singles;
  36. static int delivered;
  37. /*
  38. * Internal DMA buffer management. Since the controller cannot do S/G I/O,
  39. * we must have physically contiguous buffers to bring frames into.
  40. * These parameters control how many buffers we use, whether we
  41. * allocate them at load time (better chance of success, but nails down
  42. * memory) or when somebody tries to use the camera (riskier), and,
  43. * for load-time allocation, how big they should be.
  44. *
  45. * The controller can cycle through three buffers. We could use
  46. * more by flipping pointers around, but it probably makes little
  47. * sense.
  48. */
  49. static bool alloc_bufs_at_read;
  50. module_param(alloc_bufs_at_read, bool, 0444);
  51. MODULE_PARM_DESC(alloc_bufs_at_read,
  52. "Non-zero value causes DMA buffers to be allocated when the "
  53. "video capture device is read, rather than at module load "
  54. "time. This saves memory, but decreases the chances of "
  55. "successfully getting those buffers. This parameter is "
  56. "only used in the vmalloc buffer mode");
  57. static int n_dma_bufs = 3;
  58. module_param(n_dma_bufs, uint, 0644);
  59. MODULE_PARM_DESC(n_dma_bufs,
  60. "The number of DMA buffers to allocate. Can be either two "
  61. "(saves memory, makes timing tighter) or three.");
  62. static int dma_buf_size = VGA_WIDTH * VGA_HEIGHT * 2; /* Worst case */
  63. module_param(dma_buf_size, uint, 0444);
  64. MODULE_PARM_DESC(dma_buf_size,
  65. "The size of the allocated DMA buffers. If actual operating "
  66. "parameters require larger buffers, an attempt to reallocate "
  67. "will be made.");
  68. static int min_buffers = 1;
  69. module_param(min_buffers, uint, 0644);
  70. MODULE_PARM_DESC(min_buffers,
  71. "The minimum number of streaming I/O buffers we are willing "
  72. "to work with.");
  73. static int max_buffers = 10;
  74. module_param(max_buffers, uint, 0644);
  75. MODULE_PARM_DESC(max_buffers,
  76. "The maximum number of streaming I/O buffers an application "
  77. "will be allowed to allocate. These buffers are big and live "
  78. "in vmalloc space.");
  79. static bool flip;
  80. module_param(flip, bool, 0444);
  81. MODULE_PARM_DESC(flip,
  82. "If set, the sensor will be instructed to flip the image "
  83. "vertically.");
  84. static int buffer_mode = -1;
  85. module_param(buffer_mode, int, 0444);
  86. MODULE_PARM_DESC(buffer_mode,
  87. "Set the buffer mode to be used; default is to go with what "
  88. "the platform driver asks for. Set to 0 for vmalloc, 1 for "
  89. "DMA contiguous.");
  90. /*
  91. * Status flags. Always manipulated with bit operations.
  92. */
  93. #define CF_BUF0_VALID 0 /* Buffers valid - first three */
  94. #define CF_BUF1_VALID 1
  95. #define CF_BUF2_VALID 2
  96. #define CF_DMA_ACTIVE 3 /* A frame is incoming */
  97. #define CF_CONFIG_NEEDED 4 /* Must configure hardware */
  98. #define CF_SINGLE_BUFFER 5 /* Running with a single buffer */
  99. #define CF_SG_RESTART 6 /* SG restart needed */
  100. #define sensor_call(cam, o, f, args...) \
  101. v4l2_subdev_call(cam->sensor, o, f, ##args)
  102. static struct mcam_format_struct {
  103. __u8 *desc;
  104. __u32 pixelformat;
  105. int bpp; /* Bytes per pixel */
  106. enum v4l2_mbus_pixelcode mbus_code;
  107. } mcam_formats[] = {
  108. {
  109. .desc = "YUYV 4:2:2",
  110. .pixelformat = V4L2_PIX_FMT_YUYV,
  111. .mbus_code = V4L2_MBUS_FMT_YUYV8_2X8,
  112. .bpp = 2,
  113. },
  114. {
  115. .desc = "RGB 444",
  116. .pixelformat = V4L2_PIX_FMT_RGB444,
  117. .mbus_code = V4L2_MBUS_FMT_RGB444_2X8_PADHI_LE,
  118. .bpp = 2,
  119. },
  120. {
  121. .desc = "RGB 565",
  122. .pixelformat = V4L2_PIX_FMT_RGB565,
  123. .mbus_code = V4L2_MBUS_FMT_RGB565_2X8_LE,
  124. .bpp = 2,
  125. },
  126. {
  127. .desc = "Raw RGB Bayer",
  128. .pixelformat = V4L2_PIX_FMT_SBGGR8,
  129. .mbus_code = V4L2_MBUS_FMT_SBGGR8_1X8,
  130. .bpp = 1
  131. },
  132. };
  133. #define N_MCAM_FMTS ARRAY_SIZE(mcam_formats)
  134. static struct mcam_format_struct *mcam_find_format(u32 pixelformat)
  135. {
  136. unsigned i;
  137. for (i = 0; i < N_MCAM_FMTS; i++)
  138. if (mcam_formats[i].pixelformat == pixelformat)
  139. return mcam_formats + i;
  140. /* Not found? Then return the first format. */
  141. return mcam_formats;
  142. }
  143. /*
  144. * Start over with DMA buffers - dev_lock needed.
  145. */
  146. static void mcam_reset_buffers(struct mcam_camera *cam)
  147. {
  148. int i;
  149. cam->next_buf = -1;
  150. for (i = 0; i < cam->nbufs; i++)
  151. clear_bit(i, &cam->flags);
  152. }
  153. static inline int mcam_needs_config(struct mcam_camera *cam)
  154. {
  155. return test_bit(CF_CONFIG_NEEDED, &cam->flags);
  156. }
  157. static void mcam_set_config_needed(struct mcam_camera *cam, int needed)
  158. {
  159. if (needed)
  160. set_bit(CF_CONFIG_NEEDED, &cam->flags);
  161. else
  162. clear_bit(CF_CONFIG_NEEDED, &cam->flags);
  163. }
  164. /*
  165. * The two-word DMA descriptor format used by the Armada 610 and the like. There
  166. * is a three-word format as well (set C1_DESC_3WORD) where the third
  167. * word is a pointer to the next descriptor, but we don't use it. Two-word
  168. * descriptors have to be contiguous in memory.
  169. */
  170. struct mcam_dma_desc {
  171. u32 dma_addr;
  172. u32 segment_len;
  173. };
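/*
 * Each descriptor simply pairs a DMA address with a segment length;
 * mcam_vb_sg_buf_prepare() below fills in one such pair per mapped
 * scatterlist segment, and the controller presumably walks the array in
 * order to assemble the frame.
 */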
  174. /*
  175. * Our buffer type for working with videobuf2. Note that the vb2
  176. * developers have decreed that struct vb2_buffer must be at the
  177. * beginning of this structure.
  178. */
  179. struct mcam_vb_buffer {
  180. struct vb2_buffer vb_buf;
  181. struct list_head queue;
  182. struct mcam_dma_desc *dma_desc; /* Descriptor virtual address */
  183. dma_addr_t dma_desc_pa; /* Descriptor physical address */
  184. int dma_desc_nent; /* Number of mapped descriptors */
  185. };
  186. static inline struct mcam_vb_buffer *vb_to_mvb(struct vb2_buffer *vb)
  187. {
  188. return container_of(vb, struct mcam_vb_buffer, vb_buf);
  189. }
  190. /*
  191. * Debugging and related.
  192. */
  193. #define cam_err(cam, fmt, arg...) \
  194. dev_err((cam)->dev, fmt, ##arg)
  195. #define cam_warn(cam, fmt, arg...) \
  196. dev_warn((cam)->dev, fmt, ##arg)
  197. #define cam_dbg(cam, fmt, arg...) \
  198. dev_dbg((cam)->dev, fmt, ##arg)
  199. /* ------------------------------------------------------------------- */
  200. /*
  201. * Deal with the controller.
  202. */
  203. /*
  204. * Set up DMA buffers when operating in vmalloc mode
  205. */
  206. static void mcam_ctlr_dma_vmalloc(struct mcam_camera *cam)
  207. {
  208. /*
  209. * Store the first two Y buffers (we aren't supporting
  210. * planar formats for now, so no UV bufs). Then either
  211. * set the third if it exists, or tell the controller
  212. * to just use two.
  213. */
  214. mcam_reg_write(cam, REG_Y0BAR, cam->dma_handles[0]);
  215. mcam_reg_write(cam, REG_Y1BAR, cam->dma_handles[1]);
  216. if (cam->nbufs > 2) {
  217. mcam_reg_write(cam, REG_Y2BAR, cam->dma_handles[2]);
  218. mcam_reg_clear_bit(cam, REG_CTRL1, C1_TWOBUFS);
  219. } else
  220. mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
  221. if (cam->chip_id == V4L2_IDENT_CAFE)
  222. mcam_reg_write(cam, REG_UBAR, 0); /* 32 bits only */
  223. }
  224. /*
  225. * Set up a contiguous buffer for the given frame. Here also is where
  226. * the underrun strategy is set: if there is no buffer available, reuse
  227. * the buffer from the other BAR and set the CF_SINGLE_BUFFER flag to
  228. * keep the interrupt handler from giving that buffer back to user
  229. * space. In this way, we always have a buffer to DMA to and don't
  230. * have to try to play games stopping and restarting the controller.
  231. */
  232. static void mcam_set_contig_buffer(struct mcam_camera *cam, int frame)
  233. {
  234. struct mcam_vb_buffer *buf;
  235. /*
  236. * If there are no available buffers, go into single mode
  237. */
  238. if (list_empty(&cam->buffers)) {
  239. buf = cam->vb_bufs[frame ^ 0x1];
  240. cam->vb_bufs[frame] = buf;
  241. mcam_reg_write(cam, frame == 0 ? REG_Y0BAR : REG_Y1BAR,
  242. vb2_dma_contig_plane_paddr(&buf->vb_buf, 0));
  243. set_bit(CF_SINGLE_BUFFER, &cam->flags);
  244. singles++;
  245. return;
  246. }
  247. /*
  248. * OK, we have a buffer we can use.
  249. */
  250. buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);
  251. list_del_init(&buf->queue);
  252. mcam_reg_write(cam, frame == 0 ? REG_Y0BAR : REG_Y1BAR,
  253. vb2_dma_contig_plane_paddr(&buf->vb_buf, 0));
  254. cam->vb_bufs[frame] = buf;
  255. clear_bit(CF_SINGLE_BUFFER, &cam->flags);
  256. }
  257. /*
  258. * Initial B_DMA_contig setup.
  259. */
  260. static void mcam_ctlr_dma_contig(struct mcam_camera *cam)
  261. {
  262. mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
  263. cam->nbufs = 2;
  264. mcam_set_contig_buffer(cam, 0);
  265. mcam_set_contig_buffer(cam, 1);
  266. }
  267. /*
  268. * Set up the next buffer for S/G I/O; caller should be sure that
  269. * the controller is stopped and a buffer is available.
  270. */
  271. static void mcam_sg_next_buffer(struct mcam_camera *cam)
  272. {
  273. struct mcam_vb_buffer *buf;
  274. buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);
  275. list_del_init(&buf->queue);
  276. mcam_reg_write(cam, REG_DMA_DESC_Y, buf->dma_desc_pa);
  277. mcam_reg_write(cam, REG_DESC_LEN_Y,
  278. buf->dma_desc_nent*sizeof(struct mcam_dma_desc));
  279. mcam_reg_write(cam, REG_DESC_LEN_U, 0);
  280. mcam_reg_write(cam, REG_DESC_LEN_V, 0);
  281. cam->vb_bufs[0] = buf;
  282. }
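/*
 * Only the Y descriptor chain is used; the U and V lengths are zeroed
 * above because, as noted earlier, planar formats are not supported yet.
 */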
  283. /*
  284. * Initial B_DMA_sg setup
  285. */
  286. static void mcam_ctlr_dma_sg(struct mcam_camera *cam)
  287. {
  288. mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_3WORD);
  289. mcam_sg_next_buffer(cam);
  290. mcam_reg_set_bit(cam, REG_CTRL1, C1_DESC_ENA);
  291. cam->nbufs = 3;
  292. }
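/*
 * nbufs = 3 is nominal here: S/G mode has a single active descriptor
 * array, but keeping nbufs at 3 makes the EOF loop in mccic_irq() below
 * check all three frame-interrupt bits, which matters because (as noted
 * there) the controller signals completion with an arbitrary frame number.
 */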
  293. /*
  294. * Image format setup, independent of DMA scheme.
  295. */
  296. static void mcam_ctlr_image(struct mcam_camera *cam)
  297. {
  298. int imgsz;
  299. struct v4l2_pix_format *fmt = &cam->pix_format;
  300. imgsz = ((fmt->height << IMGSZ_V_SHIFT) & IMGSZ_V_MASK) |
  301. (fmt->bytesperline & IMGSZ_H_MASK);
  302. mcam_reg_write(cam, REG_IMGSIZE, imgsz);
  303. mcam_reg_write(cam, REG_IMGOFFSET, 0);
  304. /* YPITCH just drops the last two bits */
  305. mcam_reg_write_mask(cam, REG_IMGPITCH, fmt->bytesperline,
  306. IMGP_YP_MASK);
  307. /*
  308. * Tell the controller about the image format we are using.
  309. */
  310. switch (cam->pix_format.pixelformat) {
  311. case V4L2_PIX_FMT_YUYV:
  312. mcam_reg_write_mask(cam, REG_CTRL0,
  313. C0_DF_YUV|C0_YUV_PACKED|C0_YUVE_YUYV,
  314. C0_DF_MASK);
  315. break;
  316. case V4L2_PIX_FMT_RGB444:
  317. mcam_reg_write_mask(cam, REG_CTRL0,
  318. C0_DF_RGB|C0_RGBF_444|C0_RGB4_XRGB,
  319. C0_DF_MASK);
  320. /* Alpha value? */
  321. break;
  322. case V4L2_PIX_FMT_RGB565:
  323. mcam_reg_write_mask(cam, REG_CTRL0,
  324. C0_DF_RGB|C0_RGBF_565|C0_RGB5_BGGR,
  325. C0_DF_MASK);
  326. break;
  327. default:
  328. cam_err(cam, "Unknown format %x\n", cam->pix_format.pixelformat);
  329. break;
  330. }
  331. /*
  332. * Make sure it knows we want to use hsync/vsync.
  333. */
  334. mcam_reg_write_mask(cam, REG_CTRL0, C0_SIF_HVSYNC,
  335. C0_SIFM_MASK);
  336. }
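/*
 * REG_IMGSIZE thus packs the frame height (shifted by IMGSZ_V_SHIFT) and
 * the line length in bytes into a single register, while the pitch
 * register gets bytesperline with, per the comment above, its low two
 * bits dropped.
 */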
  337. /*
  338. * Configure the controller for operation; caller holds the
  339. * device mutex.
  340. */
  341. static int mcam_ctlr_configure(struct mcam_camera *cam)
  342. {
  343. unsigned long flags;
  344. spin_lock_irqsave(&cam->dev_lock, flags);
  345. switch (cam->buffer_mode) {
  346. case B_vmalloc:
  347. mcam_ctlr_dma_vmalloc(cam);
  348. break;
  349. case B_DMA_contig:
  350. mcam_ctlr_dma_contig(cam);
  351. break;
  352. case B_DMA_sg:
  353. mcam_ctlr_dma_sg(cam);
  354. break;
  355. }
  356. mcam_ctlr_image(cam);
  357. mcam_set_config_needed(cam, 0);
  358. clear_bit(CF_SG_RESTART, &cam->flags);
  359. spin_unlock_irqrestore(&cam->dev_lock, flags);
  360. return 0;
  361. }
  362. static void mcam_ctlr_irq_enable(struct mcam_camera *cam)
  363. {
  364. /*
  365. * Clear any pending interrupts, since we do not
  366. * expect to have I/O active prior to enabling.
  367. */
  368. mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS);
  369. mcam_reg_set_bit(cam, REG_IRQMASK, FRAMEIRQS);
  370. }
  371. static void mcam_ctlr_irq_disable(struct mcam_camera *cam)
  372. {
  373. mcam_reg_clear_bit(cam, REG_IRQMASK, FRAMEIRQS);
  374. }
  375. /*
  376. * Make the controller start grabbing images. Everything must
  377. * be set up before doing this.
  378. */
  379. static void mcam_ctlr_start(struct mcam_camera *cam)
  380. {
  381. /* set_bit performs a read, so no other barrier should be
  382. needed here */
  383. mcam_reg_set_bit(cam, REG_CTRL0, C0_ENABLE);
  384. }
  385. static void mcam_ctlr_stop(struct mcam_camera *cam)
  386. {
  387. mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
  388. }
  389. /*
  390. * Scatter/gather mode requires stopping the controller between
  391. * frames so we can put in a new DMA descriptor array. If no new
  392. * buffer exists at frame completion, the controller is left stopped;
  393. * this function is charged with getting things going again.
  394. */
  395. static void mcam_sg_restart(struct mcam_camera *cam)
  396. {
  397. mcam_ctlr_dma_sg(cam);
  398. mcam_ctlr_start(cam);
  399. clear_bit(CF_SG_RESTART, &cam->flags);
  400. }
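/*
 * This is called from mcam_vb_buf_queue() with dev_lock held, once user
 * space queues a new buffer after the controller was left stopped at
 * frame completion.
 */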
  401. static void mcam_ctlr_init(struct mcam_camera *cam)
  402. {
  403. unsigned long flags;
  404. spin_lock_irqsave(&cam->dev_lock, flags);
  405. /*
  406. * Make sure it's not powered down.
  407. */
  408. mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
  409. /*
  410. * Turn off the enable bit. It sure should be off anyway,
  411. * but it's good to be sure.
  412. */
  413. mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
  414. /*
  415. * Clock the sensor appropriately. Controller clock should
  416. * be 48MHz, sensor "typical" value is half that.
  417. */
  418. mcam_reg_write_mask(cam, REG_CLKCTRL, 2, CLK_DIV_MASK);
  419. spin_unlock_irqrestore(&cam->dev_lock, flags);
  420. }
  421. /*
  422. * Stop the controller, and don't return until we're really sure that no
  423. * further DMA is going on.
  424. */
  425. static void mcam_ctlr_stop_dma(struct mcam_camera *cam)
  426. {
  427. unsigned long flags;
  428. /*
  429. * Theory: stop the camera controller (whether it is operating
  430. * or not). Delay briefly just in case we race with the SOF
  431. * interrupt, then wait until no DMA is active.
  432. */
  433. spin_lock_irqsave(&cam->dev_lock, flags);
  434. clear_bit(CF_SG_RESTART, &cam->flags);
  435. mcam_ctlr_stop(cam);
  436. cam->state = S_IDLE;
  437. spin_unlock_irqrestore(&cam->dev_lock, flags);
  438. msleep(40);
  439. if (test_bit(CF_DMA_ACTIVE, &cam->flags))
  440. cam_err(cam, "Timeout waiting for DMA to end\n");
  441. /* This would be bad news - what now? */
  442. spin_lock_irqsave(&cam->dev_lock, flags);
  443. mcam_ctlr_irq_disable(cam);
  444. spin_unlock_irqrestore(&cam->dev_lock, flags);
  445. }
  446. /*
  447. * Power up and down.
  448. */
  449. static void mcam_ctlr_power_up(struct mcam_camera *cam)
  450. {
  451. unsigned long flags;
  452. spin_lock_irqsave(&cam->dev_lock, flags);
  453. cam->plat_power_up(cam);
  454. mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
  455. spin_unlock_irqrestore(&cam->dev_lock, flags);
  456. msleep(5); /* Just to be sure */
  457. }
  458. static void mcam_ctlr_power_down(struct mcam_camera *cam)
  459. {
  460. unsigned long flags;
  461. spin_lock_irqsave(&cam->dev_lock, flags);
  462. /*
  463. * School of hard knocks department: be sure we do any register
  464. * twiddling on the controller *before* calling the platform
  465. * power down routine.
  466. */
  467. mcam_reg_set_bit(cam, REG_CTRL1, C1_PWRDWN);
  468. cam->plat_power_down(cam);
  469. spin_unlock_irqrestore(&cam->dev_lock, flags);
  470. }
  471. /* -------------------------------------------------------------------- */
  472. /*
  473. * Communications with the sensor.
  474. */
  475. static int __mcam_cam_reset(struct mcam_camera *cam)
  476. {
  477. return sensor_call(cam, core, reset, 0);
  478. }
  479. /*
  480. * We have found the sensor on the i2c. Let's try to have a
  481. * conversation.
  482. */
  483. static int mcam_cam_init(struct mcam_camera *cam)
  484. {
  485. struct v4l2_dbg_chip_ident chip;
  486. int ret;
  487. mutex_lock(&cam->s_mutex);
  488. if (cam->state != S_NOTREADY)
  489. cam_warn(cam, "Cam init with device in funky state %d",
  490. cam->state);
  491. ret = __mcam_cam_reset(cam);
  492. if (ret)
  493. goto out;
  494. chip.ident = V4L2_IDENT_NONE;
  495. chip.match.type = V4L2_CHIP_MATCH_I2C_ADDR;
  496. chip.match.addr = cam->sensor_addr;
  497. ret = sensor_call(cam, core, g_chip_ident, &chip);
  498. if (ret)
  499. goto out;
  500. cam->sensor_type = chip.ident;
  501. if (cam->sensor_type != V4L2_IDENT_OV7670) {
  502. cam_err(cam, "Unsupported sensor type 0x%x", cam->sensor_type);
  503. ret = -EINVAL;
  504. goto out;
  505. }
  506. /* Get/set parameters? */
  507. ret = 0;
  508. cam->state = S_IDLE;
  509. out:
  510. mcam_ctlr_power_down(cam);
  511. mutex_unlock(&cam->s_mutex);
  512. return ret;
  513. }
  514. /*
  515. * Configure the sensor to match the parameters we have. Caller should
  516. * hold s_mutex
  517. */
  518. static int mcam_cam_set_flip(struct mcam_camera *cam)
  519. {
  520. struct v4l2_control ctrl;
  521. memset(&ctrl, 0, sizeof(ctrl));
  522. ctrl.id = V4L2_CID_VFLIP;
  523. ctrl.value = flip;
  524. return sensor_call(cam, core, s_ctrl, &ctrl);
  525. }
  526. static int mcam_cam_configure(struct mcam_camera *cam)
  527. {
  528. struct v4l2_mbus_framefmt mbus_fmt;
  529. int ret;
  530. v4l2_fill_mbus_format(&mbus_fmt, &cam->pix_format, cam->mbus_code);
  531. ret = sensor_call(cam, core, init, 0);
  532. if (ret == 0)
  533. ret = sensor_call(cam, video, s_mbus_fmt, &mbus_fmt);
  534. /*
  535. * OV7670 does weird things if flip is set *before* format...
  536. */
  537. ret += mcam_cam_set_flip(cam);
  538. return ret;
  539. }
  540. /* -------------------------------------------------------------------- */
  541. /*
  542. * DMA buffer management. These functions need s_mutex held.
  543. */
  544. /*
  545. * Allocate in-kernel DMA buffers for vmalloc mode.
  546. */
  547. static int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
  548. {
  549. int i;
  550. mcam_set_config_needed(cam, 1);
  551. if (loadtime)
  552. cam->dma_buf_size = dma_buf_size;
  553. else
  554. cam->dma_buf_size = cam->pix_format.sizeimage;
  555. if (n_dma_bufs > 3)
  556. n_dma_bufs = 3;
  557. cam->nbufs = 0;
  558. for (i = 0; i < n_dma_bufs; i++) {
  559. cam->dma_bufs[i] = dma_alloc_coherent(cam->dev,
  560. cam->dma_buf_size, cam->dma_handles + i,
  561. GFP_KERNEL);
  562. if (cam->dma_bufs[i] == NULL) {
  563. cam_warn(cam, "Failed to allocate DMA buffer\n");
  564. break;
  565. }
  566. (cam->nbufs)++;
  567. }
  568. switch (cam->nbufs) {
  569. case 1:
  570. dma_free_coherent(cam->dev, cam->dma_buf_size,
  571. cam->dma_bufs[0], cam->dma_handles[0]);
  572. cam->nbufs = 0;
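/* Fall through - a single buffer is not enough to operate with either */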
  573. case 0:
  574. cam_err(cam, "Insufficient DMA buffers, cannot operate\n");
  575. return -ENOMEM;
  576. case 2:
  577. if (n_dma_bufs > 2)
  578. cam_warn(cam, "Will limp along with only 2 buffers\n");
  579. break;
  580. }
  581. return 0;
  582. }
  583. static void mcam_free_dma_bufs(struct mcam_camera *cam)
  584. {
  585. int i;
  586. for (i = 0; i < cam->nbufs; i++) {
  587. dma_free_coherent(cam->dev, cam->dma_buf_size,
  588. cam->dma_bufs[i], cam->dma_handles[i]);
  589. cam->dma_bufs[i] = NULL;
  590. }
  591. cam->nbufs = 0;
  592. }
  593. /* ----------------------------------------------------------------------- */
  594. /*
  595. * Here starts the V4L2 interface code.
  596. */
  597. /*
  598. * Get everything ready, and start grabbing frames.
  599. */
  600. static int mcam_read_setup(struct mcam_camera *cam)
  601. {
  602. int ret;
  603. unsigned long flags;
  604. /*
  605. * Configuration. If we still don't have DMA buffers,
  606. * make one last, desperate attempt.
  607. */
  608. if (cam->buffer_mode == B_vmalloc && cam->nbufs == 0 &&
  609. mcam_alloc_dma_bufs(cam, 0))
  610. return -ENOMEM;
  611. if (mcam_needs_config(cam)) {
  612. mcam_cam_configure(cam);
  613. ret = mcam_ctlr_configure(cam);
  614. if (ret)
  615. return ret;
  616. }
  617. /*
  618. * Turn it loose.
  619. */
  620. spin_lock_irqsave(&cam->dev_lock, flags);
  621. mcam_reset_buffers(cam);
  622. mcam_ctlr_irq_enable(cam);
  623. cam->state = S_STREAMING;
  624. mcam_ctlr_start(cam);
  625. spin_unlock_irqrestore(&cam->dev_lock, flags);
  626. return 0;
  627. }
  628. /* ----------------------------------------------------------------------- */
  629. /*
  630. * Videobuf2 interface code.
  631. */
  632. static int mcam_vb_queue_setup(struct vb2_queue *vq, unsigned int *nbufs,
  633. unsigned int *num_planes, unsigned long sizes[],
  634. void *alloc_ctxs[])
  635. {
  636. struct mcam_camera *cam = vb2_get_drv_priv(vq);
  637. int minbufs = (cam->buffer_mode == B_DMA_contig) ? 3 : 2;
  638. sizes[0] = cam->pix_format.sizeimage;
  639. *num_planes = 1; /* Someday we have to support planar formats... */
  640. if (*nbufs < minbufs)
  641. *nbufs = minbufs;
  642. if (cam->buffer_mode == B_DMA_contig)
  643. alloc_ctxs[0] = cam->vb_alloc_ctx;
  644. return 0;
  645. }
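/*
 * The three-buffer minimum for the contiguous case is presumably so that
 * both BARs can hold frames in flight while a third buffer sits with user
 * space; the other modes get by with two.
 */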
  646. /* DMA_sg only */
  647. static int mcam_vb_sg_buf_init(struct vb2_buffer *vb)
  648. {
  649. struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
  650. struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
  651. int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;
  652. mvb->dma_desc = dma_alloc_coherent(cam->dev,
  653. ndesc * sizeof(struct mcam_dma_desc),
  654. &mvb->dma_desc_pa, GFP_KERNEL);
  655. if (mvb->dma_desc == NULL) {
  656. cam_err(cam, "Unable to get DMA descriptor array\n");
  657. return -ENOMEM;
  658. }
  659. return 0;
  660. }
  661. static int mcam_vb_sg_buf_prepare(struct vb2_buffer *vb)
  662. {
  663. struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
  664. struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
  665. struct vb2_dma_sg_desc *sgd = vb2_dma_sg_plane_desc(vb, 0);
  666. struct mcam_dma_desc *desc = mvb->dma_desc;
  667. struct scatterlist *sg;
  668. int i;
  669. mvb->dma_desc_nent = dma_map_sg(cam->dev, sgd->sglist, sgd->num_pages,
  670. DMA_FROM_DEVICE);
  671. if (mvb->dma_desc_nent <= 0)
  672. return -EIO; /* Not sure what's right here */
  673. for_each_sg(sgd->sglist, sg, mvb->dma_desc_nent, i) {
  674. desc->dma_addr = sg_dma_address(sg);
  675. desc->segment_len = sg_dma_len(sg);
  676. desc++;
  677. }
  678. return 0;
  679. }
  680. static void mcam_vb_buf_queue(struct vb2_buffer *vb)
  681. {
  682. struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
  683. struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
  684. unsigned long flags;
  685. int start;
  686. spin_lock_irqsave(&cam->dev_lock, flags);
  687. start = (cam->state == S_BUFWAIT) && !list_empty(&cam->buffers);
  688. list_add(&mvb->queue, &cam->buffers);
  689. if (test_bit(CF_SG_RESTART, &cam->flags))
  690. mcam_sg_restart(cam);
  691. spin_unlock_irqrestore(&cam->dev_lock, flags);
  692. if (start)
  693. mcam_read_setup(cam);
  694. }
  695. static int mcam_vb_sg_buf_finish(struct vb2_buffer *vb)
  696. {
  697. struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
  698. struct vb2_dma_sg_desc *sgd = vb2_dma_sg_plane_desc(vb, 0);
  699. dma_unmap_sg(cam->dev, sgd->sglist, sgd->num_pages, DMA_FROM_DEVICE);
  700. return 0;
  701. }
  702. static void mcam_vb_sg_buf_cleanup(struct vb2_buffer *vb)
  703. {
  704. struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
  705. struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
  706. int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;
  707. dma_free_coherent(cam->dev, ndesc * sizeof(struct mcam_dma_desc),
  708. mvb->dma_desc, mvb->dma_desc_pa);
  709. }
  710. /*
  711. * vb2 uses these to release the mutex when waiting in dqbuf. I'm
  712. * not actually sure we need to do this (I'm not sure that vb2_dqbuf() needs
  713. * to be called with the mutex held), but better safe than sorry.
  714. */
  715. static void mcam_vb_wait_prepare(struct vb2_queue *vq)
  716. {
  717. struct mcam_camera *cam = vb2_get_drv_priv(vq);
  718. mutex_unlock(&cam->s_mutex);
  719. }
  720. static void mcam_vb_wait_finish(struct vb2_queue *vq)
  721. {
  722. struct mcam_camera *cam = vb2_get_drv_priv(vq);
  723. mutex_lock(&cam->s_mutex);
  724. }
  725. /*
  726. * These need to be called with the mutex held from vb2
  727. */
  728. static int mcam_vb_start_streaming(struct vb2_queue *vq)
  729. {
  730. struct mcam_camera *cam = vb2_get_drv_priv(vq);
  731. if (cam->state != S_IDLE)
  732. return -EINVAL;
  733. cam->sequence = 0;
  734. /*
  735. * Videobuf2 sneakily hoards all the buffers and won't
  736. * give them to us until *after* streaming starts. But
  737. * we can't actually start streaming until we have a
  738. * destination. So go into a wait state and hope they
  739. * give us buffers soon.
  740. */
  741. if (cam->buffer_mode != B_vmalloc && list_empty(&cam->buffers)) {
  742. cam->state = S_BUFWAIT;
  743. return 0;
  744. }
  745. return mcam_read_setup(cam);
  746. }
  747. static int mcam_vb_stop_streaming(struct vb2_queue *vq)
  748. {
  749. struct mcam_camera *cam = vb2_get_drv_priv(vq);
  750. unsigned long flags;
  751. if (cam->state == S_BUFWAIT) {
  752. /* They never gave us buffers */
  753. cam->state = S_IDLE;
  754. return 0;
  755. }
  756. if (cam->state != S_STREAMING)
  757. return -EINVAL;
  758. mcam_ctlr_stop_dma(cam);
  759. /*
  760. * VB2 reclaims the buffers, so we need to forget
  761. * about them.
  762. */
  763. spin_lock_irqsave(&cam->dev_lock, flags);
  764. INIT_LIST_HEAD(&cam->buffers);
  765. spin_unlock_irqrestore(&cam->dev_lock, flags);
  766. return 0;
  767. }
  768. static const struct vb2_ops mcam_vb2_ops = {
  769. .queue_setup = mcam_vb_queue_setup,
  770. .buf_queue = mcam_vb_buf_queue,
  771. .start_streaming = mcam_vb_start_streaming,
  772. .stop_streaming = mcam_vb_stop_streaming,
  773. .wait_prepare = mcam_vb_wait_prepare,
  774. .wait_finish = mcam_vb_wait_finish,
  775. };
  776. /*
  777. * Scatter/gather mode complicates things somewhat.
  778. */
  779. static const struct vb2_ops mcam_vb2_sg_ops = {
  780. .queue_setup = mcam_vb_queue_setup,
  781. .buf_init = mcam_vb_sg_buf_init,
  782. .buf_prepare = mcam_vb_sg_buf_prepare,
  783. .buf_queue = mcam_vb_buf_queue,
  784. .buf_finish = mcam_vb_sg_buf_finish,
  785. .buf_cleanup = mcam_vb_sg_buf_cleanup,
  786. .start_streaming = mcam_vb_start_streaming,
  787. .stop_streaming = mcam_vb_stop_streaming,
  788. .wait_prepare = mcam_vb_wait_prepare,
  789. .wait_finish = mcam_vb_wait_finish,
  790. };
  791. static int mcam_setup_vb2(struct mcam_camera *cam)
  792. {
  793. struct vb2_queue *vq = &cam->vb_queue;
  794. memset(vq, 0, sizeof(*vq));
  795. vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  796. vq->drv_priv = cam;
  797. INIT_LIST_HEAD(&cam->buffers);
  798. switch (cam->buffer_mode) {
  799. case B_DMA_contig:
  800. vq->ops = &mcam_vb2_ops;
  801. vq->mem_ops = &vb2_dma_contig_memops;
  802. cam->vb_alloc_ctx = vb2_dma_contig_init_ctx(cam->dev);
  803. vq->io_modes = VB2_MMAP | VB2_USERPTR;
  804. break;
  805. case B_DMA_sg:
  806. vq->ops = &mcam_vb2_sg_ops;
  807. vq->mem_ops = &vb2_dma_sg_memops;
  808. vq->io_modes = VB2_MMAP | VB2_USERPTR;
  809. break;
  810. case B_vmalloc:
  811. vq->ops = &mcam_vb2_ops;
  812. vq->mem_ops = &vb2_vmalloc_memops;
  813. vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
  814. vq->io_modes = VB2_MMAP;
  815. break;
  816. }
  817. return vb2_queue_init(vq);
  818. }
  819. static void mcam_cleanup_vb2(struct mcam_camera *cam)
  820. {
  821. vb2_queue_release(&cam->vb_queue);
  822. if (cam->buffer_mode == B_DMA_contig)
  823. vb2_dma_contig_cleanup_ctx(cam->vb_alloc_ctx);
  824. }
  825. static ssize_t mcam_v4l_read(struct file *filp,
  826. char __user *buffer, size_t len, loff_t *pos)
  827. {
  828. struct mcam_camera *cam = filp->private_data;
  829. int ret;
  830. mutex_lock(&cam->s_mutex);
  831. ret = vb2_read(&cam->vb_queue, buffer, len, pos,
  832. filp->f_flags & O_NONBLOCK);
  833. mutex_unlock(&cam->s_mutex);
  834. return ret;
  835. }
  836. /*
  837. * Streaming I/O support.
  838. */
  839. static int mcam_vidioc_streamon(struct file *filp, void *priv,
  840. enum v4l2_buf_type type)
  841. {
  842. struct mcam_camera *cam = filp->private_data;
  843. int ret;
  844. mutex_lock(&cam->s_mutex);
  845. ret = vb2_streamon(&cam->vb_queue, type);
  846. mutex_unlock(&cam->s_mutex);
  847. return ret;
  848. }
  849. static int mcam_vidioc_streamoff(struct file *filp, void *priv,
  850. enum v4l2_buf_type type)
  851. {
  852. struct mcam_camera *cam = filp->private_data;
  853. int ret;
  854. mutex_lock(&cam->s_mutex);
  855. ret = vb2_streamoff(&cam->vb_queue, type);
  856. mutex_unlock(&cam->s_mutex);
  857. return ret;
  858. }
  859. static int mcam_vidioc_reqbufs(struct file *filp, void *priv,
  860. struct v4l2_requestbuffers *req)
  861. {
  862. struct mcam_camera *cam = filp->private_data;
  863. int ret;
  864. mutex_lock(&cam->s_mutex);
  865. ret = vb2_reqbufs(&cam->vb_queue, req);
  866. mutex_unlock(&cam->s_mutex);
  867. return ret;
  868. }
  869. static int mcam_vidioc_querybuf(struct file *filp, void *priv,
  870. struct v4l2_buffer *buf)
  871. {
  872. struct mcam_camera *cam = filp->private_data;
  873. int ret;
  874. mutex_lock(&cam->s_mutex);
  875. ret = vb2_querybuf(&cam->vb_queue, buf);
  876. mutex_unlock(&cam->s_mutex);
  877. return ret;
  878. }
  879. static int mcam_vidioc_qbuf(struct file *filp, void *priv,
  880. struct v4l2_buffer *buf)
  881. {
  882. struct mcam_camera *cam = filp->private_data;
  883. int ret;
  884. mutex_lock(&cam->s_mutex);
  885. ret = vb2_qbuf(&cam->vb_queue, buf);
  886. mutex_unlock(&cam->s_mutex);
  887. return ret;
  888. }
  889. static int mcam_vidioc_dqbuf(struct file *filp, void *priv,
  890. struct v4l2_buffer *buf)
  891. {
  892. struct mcam_camera *cam = filp->private_data;
  893. int ret;
  894. mutex_lock(&cam->s_mutex);
  895. ret = vb2_dqbuf(&cam->vb_queue, buf, filp->f_flags & O_NONBLOCK);
  896. mutex_unlock(&cam->s_mutex);
  897. return ret;
  898. }
  899. static int mcam_v4l_mmap(struct file *filp, struct vm_area_struct *vma)
  900. {
  901. struct mcam_camera *cam = filp->private_data;
  902. int ret;
  903. mutex_lock(&cam->s_mutex);
  904. ret = vb2_mmap(&cam->vb_queue, vma);
  905. mutex_unlock(&cam->s_mutex);
  906. return ret;
  907. }
  908. static int mcam_v4l_open(struct file *filp)
  909. {
  910. struct mcam_camera *cam = video_drvdata(filp);
  911. int ret = 0;
  912. filp->private_data = cam;
  913. frames = singles = delivered = 0;
  914. mutex_lock(&cam->s_mutex);
  915. if (cam->users == 0) {
  916. ret = mcam_setup_vb2(cam);
  917. if (ret)
  918. goto out;
  919. mcam_ctlr_power_up(cam);
  920. __mcam_cam_reset(cam);
  921. mcam_set_config_needed(cam, 1);
  922. }
  923. (cam->users)++;
  924. out:
  925. mutex_unlock(&cam->s_mutex);
  926. return ret;
  927. }
  928. static int mcam_v4l_release(struct file *filp)
  929. {
  930. struct mcam_camera *cam = filp->private_data;
  931. cam_err(cam, "Release, %d frames, %d singles, %d delivered\n", frames,
  932. singles, delivered);
  933. mutex_lock(&cam->s_mutex);
  934. (cam->users)--;
  935. if (filp == cam->owner) {
  936. mcam_ctlr_stop_dma(cam);
  937. cam->owner = NULL;
  938. }
  939. if (cam->users == 0) {
  940. mcam_cleanup_vb2(cam);
  941. mcam_ctlr_power_down(cam);
  942. if (cam->buffer_mode == B_vmalloc && alloc_bufs_at_read)
  943. mcam_free_dma_bufs(cam);
  944. }
  945. mutex_unlock(&cam->s_mutex);
  946. return 0;
  947. }
  948. static unsigned int mcam_v4l_poll(struct file *filp,
  949. struct poll_table_struct *pt)
  950. {
  951. struct mcam_camera *cam = filp->private_data;
  952. int ret;
  953. mutex_lock(&cam->s_mutex);
  954. ret = vb2_poll(&cam->vb_queue, filp, pt);
  955. mutex_unlock(&cam->s_mutex);
  956. return ret;
  957. }
  958. static int mcam_vidioc_queryctrl(struct file *filp, void *priv,
  959. struct v4l2_queryctrl *qc)
  960. {
  961. struct mcam_camera *cam = priv;
  962. int ret;
  963. mutex_lock(&cam->s_mutex);
  964. ret = sensor_call(cam, core, queryctrl, qc);
  965. mutex_unlock(&cam->s_mutex);
  966. return ret;
  967. }
  968. static int mcam_vidioc_g_ctrl(struct file *filp, void *priv,
  969. struct v4l2_control *ctrl)
  970. {
  971. struct mcam_camera *cam = priv;
  972. int ret;
  973. mutex_lock(&cam->s_mutex);
  974. ret = sensor_call(cam, core, g_ctrl, ctrl);
  975. mutex_unlock(&cam->s_mutex);
  976. return ret;
  977. }
  978. static int mcam_vidioc_s_ctrl(struct file *filp, void *priv,
  979. struct v4l2_control *ctrl)
  980. {
  981. struct mcam_camera *cam = priv;
  982. int ret;
  983. mutex_lock(&cam->s_mutex);
  984. ret = sensor_call(cam, core, s_ctrl, ctrl);
  985. mutex_unlock(&cam->s_mutex);
  986. return ret;
  987. }
  988. static int mcam_vidioc_querycap(struct file *file, void *priv,
  989. struct v4l2_capability *cap)
  990. {
  991. strcpy(cap->driver, "marvell_ccic");
  992. strcpy(cap->card, "marvell_ccic");
  993. cap->version = 1;
  994. cap->capabilities = V4L2_CAP_VIDEO_CAPTURE |
  995. V4L2_CAP_READWRITE | V4L2_CAP_STREAMING;
  996. return 0;
  997. }
  998. /*
  999. * The default format we use until somebody says otherwise.
  1000. */
  1001. static const struct v4l2_pix_format mcam_def_pix_format = {
  1002. .width = VGA_WIDTH,
  1003. .height = VGA_HEIGHT,
  1004. .pixelformat = V4L2_PIX_FMT_YUYV,
  1005. .field = V4L2_FIELD_NONE,
  1006. .bytesperline = VGA_WIDTH*2,
  1007. .sizeimage = VGA_WIDTH*VGA_HEIGHT*2,
  1008. };
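/*
 * VGA YUYV at 2 bytes per pixel - the same worst case that the default
 * dma_buf_size above is sized for.
 */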
  1009. static const enum v4l2_mbus_pixelcode mcam_def_mbus_code =
  1010. V4L2_MBUS_FMT_YUYV8_2X8;
  1011. static int mcam_vidioc_enum_fmt_vid_cap(struct file *filp,
  1012. void *priv, struct v4l2_fmtdesc *fmt)
  1013. {
  1014. if (fmt->index >= N_MCAM_FMTS)
  1015. return -EINVAL;
  1016. strlcpy(fmt->description, mcam_formats[fmt->index].desc,
  1017. sizeof(fmt->description));
  1018. fmt->pixelformat = mcam_formats[fmt->index].pixelformat;
  1019. return 0;
  1020. }
  1021. static int mcam_vidioc_try_fmt_vid_cap(struct file *filp, void *priv,
  1022. struct v4l2_format *fmt)
  1023. {
  1024. struct mcam_camera *cam = priv;
  1025. struct mcam_format_struct *f;
  1026. struct v4l2_pix_format *pix = &fmt->fmt.pix;
  1027. struct v4l2_mbus_framefmt mbus_fmt;
  1028. int ret;
  1029. f = mcam_find_format(pix->pixelformat);
  1030. pix->pixelformat = f->pixelformat;
  1031. v4l2_fill_mbus_format(&mbus_fmt, pix, f->mbus_code);
  1032. mutex_lock(&cam->s_mutex);
  1033. ret = sensor_call(cam, video, try_mbus_fmt, &mbus_fmt);
  1034. mutex_unlock(&cam->s_mutex);
  1035. v4l2_fill_pix_format(pix, &mbus_fmt);
  1036. pix->bytesperline = pix->width * f->bpp;
  1037. pix->sizeimage = pix->height * pix->bytesperline;
  1038. return ret;
  1039. }
  1040. static int mcam_vidioc_s_fmt_vid_cap(struct file *filp, void *priv,
  1041. struct v4l2_format *fmt)
  1042. {
  1043. struct mcam_camera *cam = priv;
  1044. struct mcam_format_struct *f;
  1045. int ret;
  1046. /*
  1047. * Can't do anything if the device is not idle
  1048. * Also can't if there are streaming buffers in place.
  1049. */
  1050. if (cam->state != S_IDLE || cam->vb_queue.num_buffers > 0)
  1051. return -EBUSY;
  1052. f = mcam_find_format(fmt->fmt.pix.pixelformat);
  1053. /*
  1054. * See if the formatting works in principle.
  1055. */
  1056. ret = mcam_vidioc_try_fmt_vid_cap(filp, priv, fmt);
  1057. if (ret)
  1058. return ret;
  1059. /*
  1060. * Now we start to change things for real, so let's do it
  1061. * under lock.
  1062. */
  1063. mutex_lock(&cam->s_mutex);
  1064. cam->pix_format = fmt->fmt.pix;
  1065. cam->mbus_code = f->mbus_code;
  1066. /*
  1067. * Make sure we have appropriate DMA buffers.
  1068. */
  1069. ret = -ENOMEM;
  1070. if (cam->buffer_mode == B_vmalloc) {
  1071. if (cam->nbufs > 0 &&
  1072. cam->dma_buf_size < cam->pix_format.sizeimage)
  1073. mcam_free_dma_bufs(cam);
  1074. if (cam->nbufs == 0) {
  1075. if (mcam_alloc_dma_bufs(cam, 0))
  1076. goto out;
  1077. }
  1078. }
  1079. mcam_set_config_needed(cam, 1);
  1080. ret = 0;
  1081. out:
  1082. mutex_unlock(&cam->s_mutex);
  1083. return ret;
  1084. }
  1085. /*
  1086. * Return our stored notion of how the camera is/should be configured.
  1087. * The V4L2 spec wants us to be smarter, and actually get this from
  1088. * the camera (and not mess with it at open time). Someday.
  1089. */
  1090. static int mcam_vidioc_g_fmt_vid_cap(struct file *filp, void *priv,
  1091. struct v4l2_format *f)
  1092. {
  1093. struct mcam_camera *cam = priv;
  1094. f->fmt.pix = cam->pix_format;
  1095. return 0;
  1096. }
  1097. /*
  1098. * We only have one input - the sensor - so minimize the nonsense here.
  1099. */
  1100. static int mcam_vidioc_enum_input(struct file *filp, void *priv,
  1101. struct v4l2_input *input)
  1102. {
  1103. if (input->index != 0)
  1104. return -EINVAL;
  1105. input->type = V4L2_INPUT_TYPE_CAMERA;
  1106. input->std = V4L2_STD_ALL; /* Not sure what should go here */
  1107. strcpy(input->name, "Camera");
  1108. return 0;
  1109. }
  1110. static int mcam_vidioc_g_input(struct file *filp, void *priv, unsigned int *i)
  1111. {
  1112. *i = 0;
  1113. return 0;
  1114. }
  1115. static int mcam_vidioc_s_input(struct file *filp, void *priv, unsigned int i)
  1116. {
  1117. if (i != 0)
  1118. return -EINVAL;
  1119. return 0;
  1120. }
  1121. /* from vivi.c */
  1122. static int mcam_vidioc_s_std(struct file *filp, void *priv, v4l2_std_id *a)
  1123. {
  1124. return 0;
  1125. }
  1126. /*
  1127. * G/S_PARM. Most of this is done by the sensor, but we are
  1128. * the level which controls the number of read buffers.
  1129. */
  1130. static int mcam_vidioc_g_parm(struct file *filp, void *priv,
  1131. struct v4l2_streamparm *parms)
  1132. {
  1133. struct mcam_camera *cam = priv;
  1134. int ret;
  1135. mutex_lock(&cam->s_mutex);
  1136. ret = sensor_call(cam, video, g_parm, parms);
  1137. mutex_unlock(&cam->s_mutex);
  1138. parms->parm.capture.readbuffers = n_dma_bufs;
  1139. return ret;
  1140. }
  1141. static int mcam_vidioc_s_parm(struct file *filp, void *priv,
  1142. struct v4l2_streamparm *parms)
  1143. {
  1144. struct mcam_camera *cam = priv;
  1145. int ret;
  1146. mutex_lock(&cam->s_mutex);
  1147. ret = sensor_call(cam, video, s_parm, parms);
  1148. mutex_unlock(&cam->s_mutex);
  1149. parms->parm.capture.readbuffers = n_dma_bufs;
  1150. return ret;
  1151. }
  1152. static int mcam_vidioc_g_chip_ident(struct file *file, void *priv,
  1153. struct v4l2_dbg_chip_ident *chip)
  1154. {
  1155. struct mcam_camera *cam = priv;
  1156. chip->ident = V4L2_IDENT_NONE;
  1157. chip->revision = 0;
  1158. if (v4l2_chip_match_host(&chip->match)) {
  1159. chip->ident = cam->chip_id;
  1160. return 0;
  1161. }
  1162. return sensor_call(cam, core, g_chip_ident, chip);
  1163. }
  1164. static int mcam_vidioc_enum_framesizes(struct file *filp, void *priv,
  1165. struct v4l2_frmsizeenum *sizes)
  1166. {
  1167. struct mcam_camera *cam = priv;
  1168. int ret;
  1169. mutex_lock(&cam->s_mutex);
  1170. ret = sensor_call(cam, video, enum_framesizes, sizes);
  1171. mutex_unlock(&cam->s_mutex);
  1172. return ret;
  1173. }
  1174. static int mcam_vidioc_enum_frameintervals(struct file *filp, void *priv,
  1175. struct v4l2_frmivalenum *interval)
  1176. {
  1177. struct mcam_camera *cam = priv;
  1178. int ret;
  1179. mutex_lock(&cam->s_mutex);
  1180. ret = sensor_call(cam, video, enum_frameintervals, interval);
  1181. mutex_unlock(&cam->s_mutex);
  1182. return ret;
  1183. }
  1184. #ifdef CONFIG_VIDEO_ADV_DEBUG
  1185. static int mcam_vidioc_g_register(struct file *file, void *priv,
  1186. struct v4l2_dbg_register *reg)
  1187. {
  1188. struct mcam_camera *cam = priv;
  1189. if (v4l2_chip_match_host(&reg->match)) {
  1190. reg->val = mcam_reg_read(cam, reg->reg);
  1191. reg->size = 4;
  1192. return 0;
  1193. }
  1194. return sensor_call(cam, core, g_register, reg);
  1195. }
  1196. static int mcam_vidioc_s_register(struct file *file, void *priv,
  1197. struct v4l2_dbg_register *reg)
  1198. {
  1199. struct mcam_camera *cam = priv;
  1200. if (v4l2_chip_match_host(&reg->match)) {
  1201. mcam_reg_write(cam, reg->reg, reg->val);
  1202. return 0;
  1203. }
  1204. return sensor_call(cam, core, s_register, reg);
  1205. }
  1206. #endif
  1207. /*
  1208. * This template device holds all of those v4l2 methods; we
  1209. * clone it for specific real devices.
  1210. */
  1211. static const struct v4l2_file_operations mcam_v4l_fops = {
  1212. .owner = THIS_MODULE,
  1213. .open = mcam_v4l_open,
  1214. .release = mcam_v4l_release,
  1215. .read = mcam_v4l_read,
  1216. .poll = mcam_v4l_poll,
  1217. .mmap = mcam_v4l_mmap,
  1218. .unlocked_ioctl = video_ioctl2,
  1219. };
  1220. static const struct v4l2_ioctl_ops mcam_v4l_ioctl_ops = {
  1221. .vidioc_querycap = mcam_vidioc_querycap,
  1222. .vidioc_enum_fmt_vid_cap = mcam_vidioc_enum_fmt_vid_cap,
  1223. .vidioc_try_fmt_vid_cap = mcam_vidioc_try_fmt_vid_cap,
  1224. .vidioc_s_fmt_vid_cap = mcam_vidioc_s_fmt_vid_cap,
  1225. .vidioc_g_fmt_vid_cap = mcam_vidioc_g_fmt_vid_cap,
  1226. .vidioc_enum_input = mcam_vidioc_enum_input,
  1227. .vidioc_g_input = mcam_vidioc_g_input,
  1228. .vidioc_s_input = mcam_vidioc_s_input,
  1229. .vidioc_s_std = mcam_vidioc_s_std,
  1230. .vidioc_reqbufs = mcam_vidioc_reqbufs,
  1231. .vidioc_querybuf = mcam_vidioc_querybuf,
  1232. .vidioc_qbuf = mcam_vidioc_qbuf,
  1233. .vidioc_dqbuf = mcam_vidioc_dqbuf,
  1234. .vidioc_streamon = mcam_vidioc_streamon,
  1235. .vidioc_streamoff = mcam_vidioc_streamoff,
  1236. .vidioc_queryctrl = mcam_vidioc_queryctrl,
  1237. .vidioc_g_ctrl = mcam_vidioc_g_ctrl,
  1238. .vidioc_s_ctrl = mcam_vidioc_s_ctrl,
  1239. .vidioc_g_parm = mcam_vidioc_g_parm,
  1240. .vidioc_s_parm = mcam_vidioc_s_parm,
  1241. .vidioc_enum_framesizes = mcam_vidioc_enum_framesizes,
  1242. .vidioc_enum_frameintervals = mcam_vidioc_enum_frameintervals,
  1243. .vidioc_g_chip_ident = mcam_vidioc_g_chip_ident,
  1244. #ifdef CONFIG_VIDEO_ADV_DEBUG
  1245. .vidioc_g_register = mcam_vidioc_g_register,
  1246. .vidioc_s_register = mcam_vidioc_s_register,
  1247. #endif
  1248. };
  1249. static struct video_device mcam_v4l_template = {
  1250. .name = "mcam",
  1251. .tvnorms = V4L2_STD_NTSC_M,
  1252. .current_norm = V4L2_STD_NTSC_M, /* make mplayer happy */
  1253. .fops = &mcam_v4l_fops,
  1254. .ioctl_ops = &mcam_v4l_ioctl_ops,
  1255. .release = video_device_release_empty,
  1256. };
  1257. /* ---------------------------------------------------------------------- */
  1258. /*
  1259. * Interrupt handler stuff
  1260. */
  1261. static void mcam_buffer_done(struct mcam_camera *cam, int frame,
  1262. struct vb2_buffer *vbuf)
  1263. {
  1264. vbuf->v4l2_buf.bytesused = cam->pix_format.sizeimage;
  1265. vbuf->v4l2_buf.sequence = cam->buf_seq[frame];
  1266. vb2_set_plane_payload(vbuf, 0, cam->pix_format.sizeimage);
  1267. vb2_buffer_done(vbuf, VB2_BUF_STATE_DONE);
  1268. }
  1269. /*
  1270. * Copy data out to user space in the vmalloc case
  1271. */
  1272. static void mcam_frame_tasklet(unsigned long data)
  1273. {
  1274. struct mcam_camera *cam = (struct mcam_camera *) data;
  1275. int i;
  1276. unsigned long flags;
  1277. struct mcam_vb_buffer *buf;
  1278. spin_lock_irqsave(&cam->dev_lock, flags);
  1279. for (i = 0; i < cam->nbufs; i++) {
  1280. int bufno = cam->next_buf;
  1281. if (cam->state != S_STREAMING || bufno < 0)
  1282. break; /* I/O got stopped */
  1283. if (++(cam->next_buf) >= cam->nbufs)
  1284. cam->next_buf = 0;
  1285. if (!test_bit(bufno, &cam->flags))
  1286. continue;
  1287. if (list_empty(&cam->buffers)) {
  1288. singles++;
  1289. break; /* Leave it valid, hope for better later */
  1290. }
  1291. delivered++;
  1292. clear_bit(bufno, &cam->flags);
  1293. buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
  1294. queue);
  1295. list_del_init(&buf->queue);
  1296. /*
  1297. * Drop the lock during the big copy. This *should* be safe...
  1298. */
  1299. spin_unlock_irqrestore(&cam->dev_lock, flags);
  1300. memcpy(vb2_plane_vaddr(&buf->vb_buf, 0), cam->dma_bufs[bufno],
  1301. cam->pix_format.sizeimage);
  1302. mcam_buffer_done(cam, bufno, &buf->vb_buf);
  1303. spin_lock_irqsave(&cam->dev_lock, flags);
  1304. }
  1305. spin_unlock_irqrestore(&cam->dev_lock, flags);
  1306. }
  1307. /*
  1308. * For direct DMA, mark the buffer ready and set up another one.
  1309. */
  1310. static void mcam_dma_contig_done(struct mcam_camera *cam, int frame)
  1311. {
  1312. struct mcam_vb_buffer *buf = cam->vb_bufs[frame];
  1313. if (!test_bit(CF_SINGLE_BUFFER, &cam->flags)) {
  1314. delivered++;
  1315. mcam_buffer_done(cam, frame, &buf->vb_buf);
  1316. }
  1317. mcam_set_contig_buffer(cam, frame);
  1318. }
  1319. /*
  1320. * Frame completion with S/G is trickier. We can't muck with
  1321. * a descriptor chain on the fly, since the controller buffers it
  1322. * internally. So we have to actually stop and restart; Marvell
  1323. * says this is the way to do it.
  1324. *
  1325. * Of course, stopping is easier said than done; experience shows
  1326. * that the controller can start a frame *after* C0_ENABLE has been
  1327. * cleared. So when running in S/G mode, the controller is "stopped"
  1328. * on receipt of the start-of-frame interrupt. That means we can
  1329. * safely change the DMA descriptor array here and restart things
  1330. * (assuming there's another buffer waiting to go).
  1331. */
  1332. static void mcam_dma_sg_done(struct mcam_camera *cam, int frame)
  1333. {
  1334. struct mcam_vb_buffer *buf = cam->vb_bufs[0];
  1335. /*
  1336. * Very Bad Not Good Things happen if you don't clear
  1337. * C1_DESC_ENA before making any descriptor changes.
  1338. */
  1339. mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_ENA);
  1340. /*
  1341. * If we have another buffer available, put it in and
  1342. * restart the engine.
  1343. */
  1344. if (!list_empty(&cam->buffers)) {
  1345. mcam_sg_next_buffer(cam);
  1346. mcam_reg_set_bit(cam, REG_CTRL1, C1_DESC_ENA);
  1347. mcam_ctlr_start(cam);
  1348. /*
  1349. * Otherwise set CF_SG_RESTART and the controller will
  1350. * be restarted once another buffer shows up.
  1351. */
  1352. } else {
  1353. set_bit(CF_SG_RESTART, &cam->flags);
  1354. singles++;
  1355. }
  1356. /*
  1357. * Now we can give the completed frame back to user space.
  1358. */
  1359. delivered++;
  1360. mcam_buffer_done(cam, frame, &buf->vb_buf);
  1361. }
  1362. static void mcam_frame_complete(struct mcam_camera *cam, int frame)
  1363. {
  1364. /*
  1365. * Basic frame housekeeping.
  1366. */
  1367. set_bit(frame, &cam->flags);
  1368. clear_bit(CF_DMA_ACTIVE, &cam->flags);
  1369. cam->next_buf = frame;
  1370. cam->buf_seq[frame] = ++(cam->sequence);
  1371. cam->last_delivered = frame;
  1372. frames++;
  1373. /*
  1374. * "This should never happen"
  1375. */
  1376. if (cam->state != S_STREAMING)
  1377. return;
  1378. /*
  1379. * Process the frame and set up the next one.
  1380. */
  1381. switch (cam->buffer_mode) {
  1382. case B_vmalloc:
  1383. tasklet_schedule(&cam->s_tasklet);
  1384. break;
  1385. case B_DMA_contig:
  1386. mcam_dma_contig_done(cam, frame);
  1387. break;
  1388. case B_DMA_sg:
  1389. mcam_dma_sg_done(cam, frame);
  1390. break;
  1391. }
  1392. }
  1393. int mccic_irq(struct mcam_camera *cam, unsigned int irqs)
  1394. {
  1395. unsigned int frame, handled = 0;
  1396. mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS); /* Clear'em all */
  1397. /*
  1398. * Handle any frame completions. There really should
  1399. * not be more than one of these, or we have fallen
  1400. * far behind.
  1401. *
  1402. * When running in S/G mode, the frame number lacks any
  1403. * real meaning - there's only one descriptor array - but
  1404. * the controller still picks a different one to signal
  1405. * each time.
  1406. */
  1407. for (frame = 0; frame < cam->nbufs; frame++)
  1408. if (irqs & (IRQ_EOF0 << frame)) {
  1409. mcam_frame_complete(cam, frame);
  1410. handled = 1;
  1411. }
  1412. /*
  1413. * If a frame starts, note that we have DMA active. This
  1414. * code assumes that we won't get multiple frame interrupts
  1415. * at once; may want to rethink that.
  1416. */
  1417. if (irqs & (IRQ_SOF0 | IRQ_SOF1 | IRQ_SOF2)) {
  1418. set_bit(CF_DMA_ACTIVE, &cam->flags);
  1419. handled = 1;
  1420. if (cam->buffer_mode == B_DMA_sg)
  1421. mcam_ctlr_stop(cam);
  1422. }
  1423. return handled;
  1424. }
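/*
 * A minimal sketch of the expected caller, for illustration only; the
 * names below are hypothetical and not part of this file. The platform
 * layer's interrupt handler reads REG_IRQSTAT itself and hands the bits
 * to mccic_irq(), using the return value to decide whether the interrupt
 * was ours:
 *
 *	static irqreturn_t my_platform_irq(int irq, void *data)
 *	{
 *		struct mcam_camera *cam = data;
 *		unsigned int irqs;
 *		int handled;
 *
 *		spin_lock(&cam->dev_lock);
 *		irqs = mcam_reg_read(cam, REG_IRQSTAT);
 *		handled = mccic_irq(cam, irqs);
 *		spin_unlock(&cam->dev_lock);
 *		return IRQ_RETVAL(handled);
 *	}
 */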
  1425. /*
  1426. * Registration and such.
  1427. */
  1428. static struct ov7670_config sensor_cfg = {
  1429. /*
  1430. * Exclude QCIF mode, because it only captures a tiny portion
  1431. * of the sensor FOV
  1432. */
  1433. .min_width = 320,
  1434. .min_height = 240,
  1435. };
  1436. int mccic_register(struct mcam_camera *cam)
  1437. {
  1438. struct i2c_board_info ov7670_info = {
  1439. .type = "ov7670",
  1440. .addr = 0x42 >> 1,
  1441. .platform_data = &sensor_cfg,
  1442. };
  1443. int ret;
  1444. /*
  1445. * Register with V4L
  1446. */
  1447. ret = v4l2_device_register(cam->dev, &cam->v4l2_dev);
  1448. if (ret)
  1449. return ret;
  1450. mutex_init(&cam->s_mutex);
  1451. cam->state = S_NOTREADY;
  1452. mcam_set_config_needed(cam, 1);
  1453. cam->pix_format = mcam_def_pix_format;
  1454. cam->mbus_code = mcam_def_mbus_code;
  1455. INIT_LIST_HEAD(&cam->dev_list);
  1456. INIT_LIST_HEAD(&cam->buffers);
  1457. tasklet_init(&cam->s_tasklet, mcam_frame_tasklet, (unsigned long) cam);
  1458. /*
  1459. * User space may want to override the asked-for buffer mode;
  1460. * here's hoping they know what they're doing.
  1461. */
  1462. if (buffer_mode == 0)
  1463. cam->buffer_mode = B_vmalloc;
  1464. else if (buffer_mode == 1)
  1465. cam->buffer_mode = B_DMA_contig;
  1466. else if (buffer_mode == 2) {
  1467. if (cam->chip_id == V4L2_IDENT_ARMADA610)
  1468. cam->buffer_mode = B_DMA_sg;
  1469. else {
  1470. printk(KERN_ERR "marvell-cam: Cafe can't do S/G I/O\n");
  1471. cam->buffer_mode = B_vmalloc;
  1472. }
  1473. } else if (buffer_mode != -1)
  1474. printk(KERN_ERR "marvell-cam: "
  1475. "Strange module buffer mode %d - ignoring\n",
  1476. buffer_mode);
  1477. mcam_ctlr_init(cam);
  1478. /*
  1479. * Try to find the sensor.
  1480. */
  1481. sensor_cfg.clock_speed = cam->clock_speed;
  1482. sensor_cfg.use_smbus = cam->use_smbus;
  1483. cam->sensor_addr = ov7670_info.addr;
  1484. cam->sensor = v4l2_i2c_new_subdev_board(&cam->v4l2_dev,
  1485. cam->i2c_adapter, &ov7670_info, NULL);
  1486. if (cam->sensor == NULL) {
  1487. ret = -ENODEV;
  1488. goto out_unregister;
  1489. }
  1490. ret = mcam_cam_init(cam);
  1491. if (ret)
  1492. goto out_unregister;
  1493. /*
  1494. * Get the v4l2 setup done.
  1495. */
  1496. mutex_lock(&cam->s_mutex);
  1497. cam->vdev = mcam_v4l_template;
  1498. cam->vdev.debug = 0;
  1499. cam->vdev.v4l2_dev = &cam->v4l2_dev;
  1500. ret = video_register_device(&cam->vdev, VFL_TYPE_GRABBER, -1);
  1501. if (ret)
  1502. goto out;
  1503. video_set_drvdata(&cam->vdev, cam);
  1504. /*
  1505. * If so requested, try to get our DMA buffers now.
  1506. */
  1507. if (cam->buffer_mode == B_vmalloc && !alloc_bufs_at_read) {
  1508. if (mcam_alloc_dma_bufs(cam, 1))
  1509. cam_warn(cam, "Unable to alloc DMA buffers at load time; "
  1510. "will try again later.");
  1511. }
  1512. out:
  1513. mutex_unlock(&cam->s_mutex);
  1514. return ret;
  1515. out_unregister:
  1516. v4l2_device_unregister(&cam->v4l2_dev);
  1517. return ret;
  1518. }
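/*
 * Usage note (an assumption about the platform glue, not enforced here):
 * before calling mccic_register(), the platform driver is expected to
 * have filled in the struct mcam_camera fields this file relies on -
 * dev, the register accessors behind mcam_reg_read()/mcam_reg_write(),
 * i2c_adapter, chip_id, clock_speed, use_smbus, plat_power_up() and
 * plat_power_down() - and to route its interrupt handler to mccic_irq().
 * mccic_shutdown() below is the matching teardown path.
 */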
  1519. void mccic_shutdown(struct mcam_camera *cam)
  1520. {
  1521. /*
  1522. * If we have no users (and we really, really should have no
  1523. * users) the device will already be powered down. Trying to
  1524. * take it down again will wedge the machine, which is frowned
  1525. * upon.
  1526. */
  1527. if (cam->users > 0) {
  1528. cam_warn(cam, "Removing a device with users!\n");
  1529. mcam_ctlr_power_down(cam);
  1530. }
  1531. vb2_queue_release(&cam->vb_queue);
  1532. if (cam->buffer_mode == B_vmalloc)
  1533. mcam_free_dma_bufs(cam);
  1534. video_unregister_device(&cam->vdev);
  1535. v4l2_device_unregister(&cam->v4l2_dev);
  1536. }
  1537. /*
  1538. * Power management
  1539. */
  1540. #ifdef CONFIG_PM
  1541. void mccic_suspend(struct mcam_camera *cam)
  1542. {
  1543. enum mcam_state cstate = cam->state;
  1544. mcam_ctlr_stop_dma(cam);
  1545. mcam_ctlr_power_down(cam);
  1546. cam->state = cstate;
  1547. }
  1548. int mccic_resume(struct mcam_camera *cam)
  1549. {
  1550. int ret = 0;
  1551. mutex_lock(&cam->s_mutex);
  1552. if (cam->users > 0) {
  1553. mcam_ctlr_power_up(cam);
  1554. __mcam_cam_reset(cam);
  1555. } else {
  1556. mcam_ctlr_power_down(cam);
  1557. }
  1558. mutex_unlock(&cam->s_mutex);
  1559. set_bit(CF_CONFIG_NEEDED, &cam->flags);
  1560. if (cam->state == S_STREAMING)
  1561. ret = mcam_read_setup(cam);
  1562. return ret;
  1563. }
  1564. #endif /* CONFIG_PM */