mcam-core.c

/*
 * The Marvell camera core. This device appears in a number of settings,
 * so it needs platform-specific support outside of the core.
 *
 * Copyright 2011 Jonathan Corbet corbet@lwn.net
 */
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/fs.h>
#include <linux/mm.h>
#include <linux/i2c.h>
#include <linux/interrupt.h>
#include <linux/spinlock.h>
#include <linux/slab.h>
#include <linux/device.h>
#include <linux/wait.h>
#include <linux/list.h>
#include <linux/dma-mapping.h>
#include <linux/delay.h>
#include <linux/vmalloc.h>
#include <linux/io.h>
#include <linux/clk.h>
#include <linux/videodev2.h>
#include <media/v4l2-device.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-ctrls.h>
#include <media/ov7670.h>
#include <media/videobuf2-vmalloc.h>
#include <media/videobuf2-dma-contig.h>
#include <media/videobuf2-dma-sg.h>

#include "mcam-core.h"

#ifdef MCAM_MODE_VMALLOC
/*
 * Internal DMA buffer management. Since the controller cannot do S/G I/O,
 * we must have physically contiguous buffers to bring frames into.
 * These parameters control how many buffers we use, whether we
 * allocate them at load time (better chance of success, but nails down
 * memory) or when somebody tries to use the camera (riskier), and,
 * for load-time allocation, how big they should be.
 *
 * The controller can cycle through three buffers. We could use
 * more by flipping pointers around, but it probably makes little
 * sense.
 */
static bool alloc_bufs_at_read;
module_param(alloc_bufs_at_read, bool, 0444);
MODULE_PARM_DESC(alloc_bufs_at_read,
		"Non-zero value causes DMA buffers to be allocated when the "
		"video capture device is read, rather than at module load "
		"time. This saves memory, but decreases the chances of "
		"successfully getting those buffers. This parameter is "
		"only used in the vmalloc buffer mode");

static int n_dma_bufs = 3;
module_param(n_dma_bufs, uint, 0644);
MODULE_PARM_DESC(n_dma_bufs,
		"The number of DMA buffers to allocate. Can be either two "
		"(saves memory, makes timing tighter) or three.");

static int dma_buf_size = VGA_WIDTH * VGA_HEIGHT * 2;	/* Worst case */
module_param(dma_buf_size, uint, 0444);
MODULE_PARM_DESC(dma_buf_size,
		"The size of the allocated DMA buffers. If actual operating "
		"parameters require larger buffers, an attempt to reallocate "
		"will be made.");
#else /* MCAM_MODE_VMALLOC */
static const bool alloc_bufs_at_read = 0;
static const int n_dma_bufs = 3;	/* Used by S/G_PARM */
#endif /* MCAM_MODE_VMALLOC */

static bool flip;
module_param(flip, bool, 0444);
MODULE_PARM_DESC(flip,
		"If set, the sensor will be instructed to flip the image "
		"vertically.");

static int buffer_mode = -1;
module_param(buffer_mode, int, 0444);
MODULE_PARM_DESC(buffer_mode,
		"Set the buffer mode to be used; default is to go with what "
		"the platform driver asks for. Set to 0 for vmalloc, 1 for "
		"DMA contiguous.");
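
/*
 * Usage sketch (an editorial addition, not from the original file): when
 * the core is linked into a loadable platform module, these knobs would
 * be given on the module command line, for example
 *
 *	modprobe cafe_ccic n_dma_bufs=2 buffer_mode=0
 *
 * The module name and the values above are illustrative assumptions;
 * which module actually carries mcam-core depends on the platform glue.
 */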

/*
 * Status flags. Always manipulated with bit operations.
 */
#define CF_BUF0_VALID	 0	/* Buffers valid - first three */
#define CF_BUF1_VALID	 1
#define CF_BUF2_VALID	 2
#define CF_DMA_ACTIVE	 3	/* A frame is incoming */
#define CF_CONFIG_NEEDED 4	/* Must configure hardware */
#define CF_SINGLE_BUFFER 5	/* Running with a single buffer */
#define CF_SG_RESTART	 6	/* SG restart needed */

#define sensor_call(cam, o, f, args...) \
	v4l2_subdev_call(cam->sensor, o, f, ##args)

static struct mcam_format_struct {
	__u8 *desc;
	__u32 pixelformat;
	int bpp;	/* Bytes per pixel */
	enum v4l2_mbus_pixelcode mbus_code;
} mcam_formats[] = {
	{
		.desc		= "YUYV 4:2:2",
		.pixelformat	= V4L2_PIX_FMT_YUYV,
		.mbus_code	= V4L2_MBUS_FMT_YUYV8_2X8,
		.bpp		= 2,
	},
	{
		.desc		= "RGB 444",
		.pixelformat	= V4L2_PIX_FMT_RGB444,
		.mbus_code	= V4L2_MBUS_FMT_RGB444_2X8_PADHI_LE,
		.bpp		= 2,
	},
	{
		.desc		= "RGB 565",
		.pixelformat	= V4L2_PIX_FMT_RGB565,
		.mbus_code	= V4L2_MBUS_FMT_RGB565_2X8_LE,
		.bpp		= 2,
	},
	{
		.desc		= "Raw RGB Bayer",
		.pixelformat	= V4L2_PIX_FMT_SBGGR8,
		.mbus_code	= V4L2_MBUS_FMT_SBGGR8_1X8,
		.bpp		= 1
	},
};
#define N_MCAM_FMTS ARRAY_SIZE(mcam_formats)

static struct mcam_format_struct *mcam_find_format(u32 pixelformat)
{
	unsigned i;

	for (i = 0; i < N_MCAM_FMTS; i++)
		if (mcam_formats[i].pixelformat == pixelformat)
			return mcam_formats + i;
	/* Not found? Then return the first format. */
	return mcam_formats;
}

/*
 * The default format we use until somebody says otherwise.
 */
static const struct v4l2_pix_format mcam_def_pix_format = {
	.width		= VGA_WIDTH,
	.height		= VGA_HEIGHT,
	.pixelformat	= V4L2_PIX_FMT_YUYV,
	.field		= V4L2_FIELD_NONE,
	.bytesperline	= VGA_WIDTH*2,
	.sizeimage	= VGA_WIDTH*VGA_HEIGHT*2,
};

static const enum v4l2_mbus_pixelcode mcam_def_mbus_code =
					V4L2_MBUS_FMT_YUYV8_2X8;

/*
 * The two-word DMA descriptor format used by the Armada 610 and like. There
 * is a three-word format as well (set C1_DESC_3WORD) where the third
 * word is a pointer to the next descriptor, but we don't use it. Two-word
 * descriptors have to be contiguous in memory.
 */
struct mcam_dma_desc {
	u32 dma_addr;
	u32 segment_len;
};
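
/*
 * Illustrative sketch (an editorial addition, not part of the original
 * source): each mapped scatterlist segment becomes one two-word
 * descriptor, exactly as the loop in mcam_vb_sg_buf_prepare() does
 * further down.
 */
#if 0
static inline void mcam_fill_one_desc(struct mcam_dma_desc *desc,
				      struct scatterlist *sg)
{
	desc->dma_addr = sg_dma_address(sg);	/* bus address of the chunk */
	desc->segment_len = sg_dma_len(sg);	/* its length in bytes */
}
#endif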

/*
 * Our buffer type for working with videobuf2. Note that the vb2
 * developers have decreed that struct vb2_buffer must be at the
 * beginning of this structure.
 */
struct mcam_vb_buffer {
	struct vb2_buffer vb_buf;
	struct list_head queue;
	struct mcam_dma_desc *dma_desc;	/* Descriptor virtual address */
	dma_addr_t dma_desc_pa;		/* Descriptor physical address */
	int dma_desc_nent;		/* Number of mapped descriptors */
};

static inline struct mcam_vb_buffer *vb_to_mvb(struct vb2_buffer *vb)
{
	return container_of(vb, struct mcam_vb_buffer, vb_buf);
}

/*
 * Hand a completed buffer back to user space.
 */
static void mcam_buffer_done(struct mcam_camera *cam, int frame,
		struct vb2_buffer *vbuf)
{
	vbuf->v4l2_buf.bytesused = cam->pix_format.sizeimage;
	vbuf->v4l2_buf.sequence = cam->buf_seq[frame];
	vb2_set_plane_payload(vbuf, 0, cam->pix_format.sizeimage);
	vb2_buffer_done(vbuf, VB2_BUF_STATE_DONE);
}

/*
 * Debugging and related.
 */
#define cam_err(cam, fmt, arg...) \
	dev_err((cam)->dev, fmt, ##arg);
#define cam_warn(cam, fmt, arg...) \
	dev_warn((cam)->dev, fmt, ##arg);
#define cam_dbg(cam, fmt, arg...) \
	dev_dbg((cam)->dev, fmt, ##arg);

/*
 * Flag manipulation helpers
 */
static void mcam_reset_buffers(struct mcam_camera *cam)
{
	int i;

	cam->next_buf = -1;
	for (i = 0; i < cam->nbufs; i++)
		clear_bit(i, &cam->flags);
}

static inline int mcam_needs_config(struct mcam_camera *cam)
{
	return test_bit(CF_CONFIG_NEEDED, &cam->flags);
}

static void mcam_set_config_needed(struct mcam_camera *cam, int needed)
{
	if (needed)
		set_bit(CF_CONFIG_NEEDED, &cam->flags);
	else
		clear_bit(CF_CONFIG_NEEDED, &cam->flags);
}

/* ------------------------------------------------------------------- */
/*
 * Make the controller start grabbing images. Everything must
 * be set up before doing this.
 */
static void mcam_ctlr_start(struct mcam_camera *cam)
{
	/* set_bit performs a read, so no other barrier should be
	   needed here */
	mcam_reg_set_bit(cam, REG_CTRL0, C0_ENABLE);
}

static void mcam_ctlr_stop(struct mcam_camera *cam)
{
	mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
}

static void mcam_enable_mipi(struct mcam_camera *mcam)
{
	/* We are using MIPI mode; program the DPHY and enable it */
	cam_dbg(mcam, "camera: DPHY3=0x%x, DPHY5=0x%x, DPHY6=0x%x\n",
			mcam->dphy[0], mcam->dphy[1], mcam->dphy[2]);
	mcam_reg_write(mcam, REG_CSI2_DPHY3, mcam->dphy[0]);
	mcam_reg_write(mcam, REG_CSI2_DPHY5, mcam->dphy[1]);
	mcam_reg_write(mcam, REG_CSI2_DPHY6, mcam->dphy[2]);

	if (!mcam->mipi_enabled) {
		if (mcam->lane > 4 || mcam->lane <= 0) {
			cam_warn(mcam, "lane number error\n");
			mcam->lane = 1;	/* set the default value */
		}
		/*
		 * 0x41 activates 1 lane
		 * 0x43 activates 2 lanes
		 * 0x45 activates 3 lanes (never happens)
		 * 0x47 activates 4 lanes
		 */
		mcam_reg_write(mcam, REG_CSI2_CTRL0,
			CSI2_C0_MIPI_EN | CSI2_C0_ACT_LANE(mcam->lane));
		mcam_reg_write(mcam, REG_CLKCTRL,
			(mcam->mclk_src << 29) | mcam->mclk_div);

		mcam->mipi_enabled = true;
	}
}

static void mcam_disable_mipi(struct mcam_camera *mcam)
{
	/* Parallel mode, or MIPI is being turned off */
	mcam_reg_write(mcam, REG_CSI2_CTRL0, 0x0);
	mcam_reg_write(mcam, REG_CSI2_DPHY3, 0x0);
	mcam_reg_write(mcam, REG_CSI2_DPHY5, 0x0);
	mcam_reg_write(mcam, REG_CSI2_DPHY6, 0x0);
	mcam->mipi_enabled = false;
}

/* ------------------------------------------------------------------- */

#ifdef MCAM_MODE_VMALLOC
/*
 * Code specific to the vmalloc buffer mode.
 */

/*
 * Allocate in-kernel DMA buffers for vmalloc mode.
 */
static int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
{
	int i;

	mcam_set_config_needed(cam, 1);
	if (loadtime)
		cam->dma_buf_size = dma_buf_size;
	else
		cam->dma_buf_size = cam->pix_format.sizeimage;
	if (n_dma_bufs > 3)
		n_dma_bufs = 3;

	cam->nbufs = 0;
	for (i = 0; i < n_dma_bufs; i++) {
		cam->dma_bufs[i] = dma_alloc_coherent(cam->dev,
				cam->dma_buf_size, cam->dma_handles + i,
				GFP_KERNEL);
		if (cam->dma_bufs[i] == NULL) {
			cam_warn(cam, "Failed to allocate DMA buffer\n");
			break;
		}
		(cam->nbufs)++;
	}

	switch (cam->nbufs) {
	case 1:
		dma_free_coherent(cam->dev, cam->dma_buf_size,
				cam->dma_bufs[0], cam->dma_handles[0]);
		cam->nbufs = 0;
		/* Fall through */
	case 0:
		cam_err(cam, "Insufficient DMA buffers, cannot operate\n");
		return -ENOMEM;

	case 2:
		if (n_dma_bufs > 2)
			cam_warn(cam, "Will limp along with only 2 buffers\n");
		break;
	}
	return 0;
}

static void mcam_free_dma_bufs(struct mcam_camera *cam)
{
	int i;

	for (i = 0; i < cam->nbufs; i++) {
		dma_free_coherent(cam->dev, cam->dma_buf_size,
				cam->dma_bufs[i], cam->dma_handles[i]);
		cam->dma_bufs[i] = NULL;
	}
	cam->nbufs = 0;
}

/*
 * Set up DMA buffers when operating in vmalloc mode
 */
static void mcam_ctlr_dma_vmalloc(struct mcam_camera *cam)
{
	/*
	 * Store the first two Y buffers (we aren't supporting
	 * planar formats for now, so no UV bufs). Then either
	 * set the third if it exists, or tell the controller
	 * to just use two.
	 */
	mcam_reg_write(cam, REG_Y0BAR, cam->dma_handles[0]);
	mcam_reg_write(cam, REG_Y1BAR, cam->dma_handles[1]);
	if (cam->nbufs > 2) {
		mcam_reg_write(cam, REG_Y2BAR, cam->dma_handles[2]);
		mcam_reg_clear_bit(cam, REG_CTRL1, C1_TWOBUFS);
	} else
		mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
	if (cam->chip_id == MCAM_CAFE)
		mcam_reg_write(cam, REG_UBAR, 0); /* 32 bits only */
}

/*
 * Copy data out to user space in the vmalloc case
 */
static void mcam_frame_tasklet(unsigned long data)
{
	struct mcam_camera *cam = (struct mcam_camera *) data;
	int i;
	unsigned long flags;
	struct mcam_vb_buffer *buf;

	spin_lock_irqsave(&cam->dev_lock, flags);
	for (i = 0; i < cam->nbufs; i++) {
		int bufno = cam->next_buf;

		if (cam->state != S_STREAMING || bufno < 0)
			break;	/* I/O got stopped */
		if (++(cam->next_buf) >= cam->nbufs)
			cam->next_buf = 0;
		if (!test_bit(bufno, &cam->flags))
			continue;
		if (list_empty(&cam->buffers)) {
			cam->frame_state.singles++;
			break;	/* Leave it valid, hope for better later */
		}
		cam->frame_state.delivered++;
		clear_bit(bufno, &cam->flags);
		buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
				queue);
		list_del_init(&buf->queue);
		/*
		 * Drop the lock during the big copy. This *should* be safe...
		 */
		spin_unlock_irqrestore(&cam->dev_lock, flags);
		memcpy(vb2_plane_vaddr(&buf->vb_buf, 0), cam->dma_bufs[bufno],
				cam->pix_format.sizeimage);
		mcam_buffer_done(cam, bufno, &buf->vb_buf);
		spin_lock_irqsave(&cam->dev_lock, flags);
	}
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}

/*
 * Make sure our allocated buffers are up to the task.
 */
static int mcam_check_dma_buffers(struct mcam_camera *cam)
{
	if (cam->nbufs > 0 && cam->dma_buf_size < cam->pix_format.sizeimage)
		mcam_free_dma_bufs(cam);
	if (cam->nbufs == 0)
		return mcam_alloc_dma_bufs(cam, 0);
	return 0;
}

static void mcam_vmalloc_done(struct mcam_camera *cam, int frame)
{
	tasklet_schedule(&cam->s_tasklet);
}

#else /* MCAM_MODE_VMALLOC */

static inline int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
{
	return 0;
}

static inline void mcam_free_dma_bufs(struct mcam_camera *cam)
{
	return;
}

static inline int mcam_check_dma_buffers(struct mcam_camera *cam)
{
	return 0;
}

#endif /* MCAM_MODE_VMALLOC */

#ifdef MCAM_MODE_DMA_CONTIG
/* ---------------------------------------------------------------------- */
/*
 * DMA-contiguous code.
 */

/*
 * Set up a contiguous buffer for the given frame. Here also is where
 * the underrun strategy is set: if there is no buffer available, reuse
 * the buffer from the other BAR and set the CF_SINGLE_BUFFER flag to
 * keep the interrupt handler from giving that buffer back to user
 * space. In this way, we always have a buffer to DMA to and don't
 * have to try to play games stopping and restarting the controller.
 */
static void mcam_set_contig_buffer(struct mcam_camera *cam, int frame)
{
	struct mcam_vb_buffer *buf;

	/*
	 * If there are no available buffers, go into single mode
	 */
	if (list_empty(&cam->buffers)) {
		buf = cam->vb_bufs[frame ^ 0x1];
		cam->vb_bufs[frame] = buf;
		mcam_reg_write(cam, frame == 0 ? REG_Y0BAR : REG_Y1BAR,
				vb2_dma_contig_plane_dma_addr(&buf->vb_buf, 0));
		set_bit(CF_SINGLE_BUFFER, &cam->flags);
		cam->frame_state.singles++;
		return;
	}
	/*
	 * OK, we have a buffer we can use.
	 */
	buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);
	list_del_init(&buf->queue);
	mcam_reg_write(cam, frame == 0 ? REG_Y0BAR : REG_Y1BAR,
			vb2_dma_contig_plane_dma_addr(&buf->vb_buf, 0));
	cam->vb_bufs[frame] = buf;
	clear_bit(CF_SINGLE_BUFFER, &cam->flags);
}
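
/*
 * Editorial note, not in the original source: with only two BARs in
 * play, "frame ^ 0x1" above simply selects the other slot (0 <-> 1),
 * so an underrun quietly reuses whatever buffer the opposite BAR is
 * already pointing at.
 */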

/*
 * Initial B_DMA_contig setup.
 */
static void mcam_ctlr_dma_contig(struct mcam_camera *cam)
{
	mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
	cam->nbufs = 2;
	mcam_set_contig_buffer(cam, 0);
	mcam_set_contig_buffer(cam, 1);
}

/*
 * Frame completion handling.
 */
static void mcam_dma_contig_done(struct mcam_camera *cam, int frame)
{
	struct mcam_vb_buffer *buf = cam->vb_bufs[frame];

	if (!test_bit(CF_SINGLE_BUFFER, &cam->flags)) {
		cam->frame_state.delivered++;
		mcam_buffer_done(cam, frame, &buf->vb_buf);
	}
	mcam_set_contig_buffer(cam, frame);
}

#endif /* MCAM_MODE_DMA_CONTIG */

#ifdef MCAM_MODE_DMA_SG
/* ---------------------------------------------------------------------- */
/*
 * Scatter/gather-specific code.
 */

/*
 * Set up the next buffer for S/G I/O; caller should be sure that
 * the controller is stopped and a buffer is available.
 */
static void mcam_sg_next_buffer(struct mcam_camera *cam)
{
	struct mcam_vb_buffer *buf;

	buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);
	list_del_init(&buf->queue);
	/*
	 * Very Bad Not Good Things happen if you don't clear
	 * C1_DESC_ENA before making any descriptor changes.
	 */
	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_ENA);
	mcam_reg_write(cam, REG_DMA_DESC_Y, buf->dma_desc_pa);
	mcam_reg_write(cam, REG_DESC_LEN_Y,
			buf->dma_desc_nent*sizeof(struct mcam_dma_desc));
	mcam_reg_write(cam, REG_DESC_LEN_U, 0);
	mcam_reg_write(cam, REG_DESC_LEN_V, 0);
	mcam_reg_set_bit(cam, REG_CTRL1, C1_DESC_ENA);
	cam->vb_bufs[0] = buf;
}

/*
 * Initial B_DMA_sg setup
 */
static void mcam_ctlr_dma_sg(struct mcam_camera *cam)
{
	/*
	 * The list-empty condition can hit us at resume time
	 * if the buffer list was empty when the system was suspended.
	 */
	if (list_empty(&cam->buffers)) {
		set_bit(CF_SG_RESTART, &cam->flags);
		return;
	}

	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_3WORD);
	mcam_sg_next_buffer(cam);
	cam->nbufs = 3;
}

/*
 * Frame completion with S/G is trickier. We can't muck with
 * a descriptor chain on the fly, since the controller buffers it
 * internally. So we have to actually stop and restart; Marvell
 * says this is the way to do it.
 *
 * Of course, stopping is easier said than done; experience shows
 * that the controller can start a frame *after* C0_ENABLE has been
 * cleared. So when running in S/G mode, the controller is "stopped"
 * on receipt of the start-of-frame interrupt. That means we can
 * safely change the DMA descriptor array here and restart things
 * (assuming there's another buffer waiting to go).
 */
static void mcam_dma_sg_done(struct mcam_camera *cam, int frame)
{
	struct mcam_vb_buffer *buf = cam->vb_bufs[0];

	/*
	 * If we're no longer supposed to be streaming, don't do anything.
	 */
	if (cam->state != S_STREAMING)
		return;
	/*
	 * If we have another buffer available, put it in and
	 * restart the engine.
	 */
	if (!list_empty(&cam->buffers)) {
		mcam_sg_next_buffer(cam);
		mcam_ctlr_start(cam);
	/*
	 * Otherwise set CF_SG_RESTART and the controller will
	 * be restarted once another buffer shows up.
	 */
	} else {
		set_bit(CF_SG_RESTART, &cam->flags);
		cam->frame_state.singles++;
		cam->vb_bufs[0] = NULL;
	}
	/*
	 * Now we can give the completed frame back to user space.
	 */
	cam->frame_state.delivered++;
	mcam_buffer_done(cam, frame, &buf->vb_buf);
}

/*
 * Scatter/gather mode requires stopping the controller between
 * frames so we can put in a new DMA descriptor array. If no new
 * buffer exists at frame completion, the controller is left stopped;
 * this function is charged with getting things going again.
 */
static void mcam_sg_restart(struct mcam_camera *cam)
{
	mcam_ctlr_dma_sg(cam);
	mcam_ctlr_start(cam);
	clear_bit(CF_SG_RESTART, &cam->flags);
}

#else /* MCAM_MODE_DMA_SG */

static inline void mcam_sg_restart(struct mcam_camera *cam)
{
	return;
}

#endif /* MCAM_MODE_DMA_SG */

/* ---------------------------------------------------------------------- */
/*
 * Buffer-mode-independent controller code.
 */

/*
 * Image format setup
 */
static void mcam_ctlr_image(struct mcam_camera *cam)
{
	int imgsz;
	struct v4l2_pix_format *fmt = &cam->pix_format;

	imgsz = ((fmt->height << IMGSZ_V_SHIFT) & IMGSZ_V_MASK) |
		(fmt->bytesperline & IMGSZ_H_MASK);
	mcam_reg_write(cam, REG_IMGSIZE, imgsz);
	mcam_reg_write(cam, REG_IMGOFFSET, 0);
	/* YPITCH just drops the last two bits */
	mcam_reg_write_mask(cam, REG_IMGPITCH, fmt->bytesperline,
			IMGP_YP_MASK);
	/*
	 * Tell the controller about the image format we are using.
	 */
	switch (cam->pix_format.pixelformat) {
	case V4L2_PIX_FMT_YUYV:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_YUV|C0_YUV_PACKED|C0_YUVE_YUYV,
			C0_DF_MASK);
		break;

	case V4L2_PIX_FMT_RGB444:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_RGB|C0_RGBF_444|C0_RGB4_XRGB,
			C0_DF_MASK);
		/* Alpha value? */
		break;

	case V4L2_PIX_FMT_RGB565:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_RGB|C0_RGBF_565|C0_RGB5_BGGR,
			C0_DF_MASK);
		break;

	default:
		cam_err(cam, "Unknown format %x\n", cam->pix_format.pixelformat);
		break;
	}
	/*
	 * Make sure it knows we want to use hsync/vsync.
	 */
	mcam_reg_write_mask(cam, REG_CTRL0, C0_SIF_HVSYNC,
			C0_SIFM_MASK);
	/*
	 * This field controls the generation of EOF (DVP only)
	 */
	if (cam->bus_type != V4L2_MBUS_CSI2)
		mcam_reg_set_bit(cam, REG_CTRL0,
				C0_EOF_VSYNC | C0_VEDGE_CTRL);
}
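
/*
 * Worked example (an editorial addition, assuming the usual register
 * layout from mcam-core.h, with the vertical size in the upper half of
 * REG_IMGSIZE and the line length in the lower): for the default VGA
 * YUYV format, height = 480 and bytesperline = 640 * 2 = 1280, so the
 * controller is programmed with roughly
 *
 *	imgsz = (480 << IMGSZ_V_SHIFT) | 1280;
 *
 * and REG_IMGPITCH gets 1280 with its two low bits masked off (a no-op
 * for this value).
 */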

/*
 * Configure the controller for operation; caller holds the
 * device mutex.
 */
static int mcam_ctlr_configure(struct mcam_camera *cam)
{
	unsigned long flags;

	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_SG_RESTART, &cam->flags);
	cam->dma_setup(cam);
	mcam_ctlr_image(cam);
	mcam_set_config_needed(cam, 0);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}

static void mcam_ctlr_irq_enable(struct mcam_camera *cam)
{
	/*
	 * Clear any pending interrupts, since we do not
	 * expect to have I/O active prior to enabling.
	 */
	mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS);
	mcam_reg_set_bit(cam, REG_IRQMASK, FRAMEIRQS);
}

static void mcam_ctlr_irq_disable(struct mcam_camera *cam)
{
	mcam_reg_clear_bit(cam, REG_IRQMASK, FRAMEIRQS);
}

static void mcam_ctlr_init(struct mcam_camera *cam)
{
	unsigned long flags;

	spin_lock_irqsave(&cam->dev_lock, flags);
	/*
	 * Make sure it's not powered down.
	 */
	mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
	/*
	 * Turn off the enable bit. It sure should be off anyway,
	 * but it's good to be sure.
	 */
	mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
	/*
	 * Clock the sensor appropriately. Controller clock should
	 * be 48MHz, sensor "typical" value is half that.
	 */
	mcam_reg_write_mask(cam, REG_CLKCTRL, 2, CLK_DIV_MASK);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}
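
/*
 * Editorial note: assuming the CLK_DIV field is a straight divide-by-N,
 * the value 2 written above turns a 48 MHz controller clock into the
 * 24 MHz sensor clock that matches the OV7670's "typical" input
 * frequency mentioned in the comment.
 */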

/*
 * Stop the controller, and don't return until we're really sure that no
 * further DMA is going on.
 */
static void mcam_ctlr_stop_dma(struct mcam_camera *cam)
{
	unsigned long flags;

	/*
	 * Theory: stop the camera controller (whether it is operating
	 * or not). Delay briefly just in case we race with the SOF
	 * interrupt, then wait until no DMA is active.
	 */
	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_SG_RESTART, &cam->flags);
	mcam_ctlr_stop(cam);
	cam->state = S_IDLE;
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	/*
	 * This is a brutally long sleep, but experience shows that
	 * it can take the controller a while to get the message that
	 * it needs to stop grabbing frames. In particular, we can
	 * sometimes (on mmp) get a frame at the end WITHOUT the
	 * start-of-frame indication.
	 */
	msleep(150);
	if (test_bit(CF_DMA_ACTIVE, &cam->flags))
		cam_err(cam, "Timeout waiting for DMA to end\n");
		/* This would be bad news - what now? */
	spin_lock_irqsave(&cam->dev_lock, flags);
	mcam_ctlr_irq_disable(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}

/*
 * Power up and down.
 */
static int mcam_ctlr_power_up(struct mcam_camera *cam)
{
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&cam->dev_lock, flags);
	ret = cam->plat_power_up(cam);
	if (ret) {
		spin_unlock_irqrestore(&cam->dev_lock, flags);
		return ret;
	}
	mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	msleep(5); /* Just to be sure */
	return 0;
}

static void mcam_ctlr_power_down(struct mcam_camera *cam)
{
	unsigned long flags;

	spin_lock_irqsave(&cam->dev_lock, flags);
	/*
	 * School of hard knocks department: be sure we do any register
	 * twiddling on the controller *before* calling the platform
	 * power down routine.
	 */
	mcam_reg_set_bit(cam, REG_CTRL1, C1_PWRDWN);
	cam->plat_power_down(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}

/* -------------------------------------------------------------------- */
/*
 * Communications with the sensor.
 */

static int __mcam_cam_reset(struct mcam_camera *cam)
{
	return sensor_call(cam, core, reset, 0);
}

/*
 * We have found the sensor on the i2c. Let's try to have a
 * conversation.
 */
static int mcam_cam_init(struct mcam_camera *cam)
{
	int ret;

	mutex_lock(&cam->s_mutex);
	if (cam->state != S_NOTREADY)
		cam_warn(cam, "Cam init with device in funky state %d",
				cam->state);
	ret = __mcam_cam_reset(cam);
	/* Get/set parameters? */
	cam->state = S_IDLE;
	mcam_ctlr_power_down(cam);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

/*
 * Configure the sensor to match the parameters we have. Caller should
 * hold s_mutex
 */
static int mcam_cam_set_flip(struct mcam_camera *cam)
{
	struct v4l2_control ctrl;

	memset(&ctrl, 0, sizeof(ctrl));
	ctrl.id = V4L2_CID_VFLIP;
	ctrl.value = flip;
	return sensor_call(cam, core, s_ctrl, &ctrl);
}

static int mcam_cam_configure(struct mcam_camera *cam)
{
	struct v4l2_mbus_framefmt mbus_fmt;
	int ret;

	v4l2_fill_mbus_format(&mbus_fmt, &cam->pix_format, cam->mbus_code);
	ret = sensor_call(cam, core, init, 0);
	if (ret == 0)
		ret = sensor_call(cam, video, s_mbus_fmt, &mbus_fmt);
	/*
	 * OV7670 does weird things if flip is set *before* format...
	 */
	ret += mcam_cam_set_flip(cam);
	return ret;
}

/*
 * Get everything ready, and start grabbing frames.
 */
static int mcam_read_setup(struct mcam_camera *cam)
{
	int ret;
	unsigned long flags;

	/*
	 * Configuration. If we still don't have DMA buffers,
	 * make one last, desperate attempt.
	 */
	if (cam->buffer_mode == B_vmalloc && cam->nbufs == 0 &&
			mcam_alloc_dma_bufs(cam, 0))
		return -ENOMEM;

	if (mcam_needs_config(cam)) {
		mcam_cam_configure(cam);
		ret = mcam_ctlr_configure(cam);
		if (ret)
			return ret;
	}

	/*
	 * Turn it loose.
	 */
	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_DMA_ACTIVE, &cam->flags);
	mcam_reset_buffers(cam);
	/*
	 * Update CSI2_DPHY value
	 */
	if (cam->calc_dphy)
		cam->calc_dphy(cam);
	cam_dbg(cam, "camera: DPHY sets: dphy3=0x%x, dphy5=0x%x, dphy6=0x%x\n",
			cam->dphy[0], cam->dphy[1], cam->dphy[2]);
	if (cam->bus_type == V4L2_MBUS_CSI2)
		mcam_enable_mipi(cam);
	else
		mcam_disable_mipi(cam);
	mcam_ctlr_irq_enable(cam);
	cam->state = S_STREAMING;
	if (!test_bit(CF_SG_RESTART, &cam->flags))
		mcam_ctlr_start(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}

/* ----------------------------------------------------------------------- */
/*
 * Videobuf2 interface code.
 */

static int mcam_vb_queue_setup(struct vb2_queue *vq,
		const struct v4l2_format *fmt, unsigned int *nbufs,
		unsigned int *num_planes, unsigned int sizes[],
		void *alloc_ctxs[])
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);
	int minbufs = (cam->buffer_mode == B_DMA_contig) ? 3 : 2;

	sizes[0] = cam->pix_format.sizeimage;
	*num_planes = 1; /* Someday we have to support planar formats... */
	if (*nbufs < minbufs)
		*nbufs = minbufs;
	if (cam->buffer_mode == B_DMA_contig)
		alloc_ctxs[0] = cam->vb_alloc_ctx;
	return 0;
}

static void mcam_vb_buf_queue(struct vb2_buffer *vb)
{
	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	unsigned long flags;
	int start;

	spin_lock_irqsave(&cam->dev_lock, flags);
	start = (cam->state == S_BUFWAIT) && !list_empty(&cam->buffers);
	list_add(&mvb->queue, &cam->buffers);
	if (cam->state == S_STREAMING && test_bit(CF_SG_RESTART, &cam->flags))
		mcam_sg_restart(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	if (start)
		mcam_read_setup(cam);
}

/*
 * vb2 uses these to release the mutex when waiting in dqbuf. I'm
 * not actually sure we need to do this (I'm not sure that vb2_dqbuf() needs
 * to be called with the mutex held), but better safe than sorry.
 */
static void mcam_vb_wait_prepare(struct vb2_queue *vq)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);

	mutex_unlock(&cam->s_mutex);
}

static void mcam_vb_wait_finish(struct vb2_queue *vq)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);

	mutex_lock(&cam->s_mutex);
}

/*
 * These need to be called with the mutex held from vb2
 */
static int mcam_vb_start_streaming(struct vb2_queue *vq, unsigned int count)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);

	if (cam->state != S_IDLE) {
		INIT_LIST_HEAD(&cam->buffers);
		return -EINVAL;
	}
	cam->sequence = 0;
	/*
	 * Videobuf2 sneakily hoards all the buffers and won't
	 * give them to us until *after* streaming starts. But
	 * we can't actually start streaming until we have a
	 * destination. So go into a wait state and hope they
	 * give us buffers soon.
	 */
	if (cam->buffer_mode != B_vmalloc && list_empty(&cam->buffers)) {
		cam->state = S_BUFWAIT;
		return 0;
	}
	return mcam_read_setup(cam);
}

static int mcam_vb_stop_streaming(struct vb2_queue *vq)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);
	unsigned long flags;

	if (cam->state == S_BUFWAIT) {
		/* They never gave us buffers */
		cam->state = S_IDLE;
		return 0;
	}
	if (cam->state != S_STREAMING)
		return -EINVAL;
	mcam_ctlr_stop_dma(cam);
	/*
	 * Reset the CCIC PHY after stopping streaming,
	 * otherwise, the CCIC may be unstable.
	 */
	if (cam->ctlr_reset)
		cam->ctlr_reset(cam);
	/*
	 * VB2 reclaims the buffers, so we need to forget
	 * about them.
	 */
	spin_lock_irqsave(&cam->dev_lock, flags);
	INIT_LIST_HEAD(&cam->buffers);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}

static const struct vb2_ops mcam_vb2_ops = {
	.queue_setup		= mcam_vb_queue_setup,
	.buf_queue		= mcam_vb_buf_queue,
	.start_streaming	= mcam_vb_start_streaming,
	.stop_streaming		= mcam_vb_stop_streaming,
	.wait_prepare		= mcam_vb_wait_prepare,
	.wait_finish		= mcam_vb_wait_finish,
};

#ifdef MCAM_MODE_DMA_SG
/*
 * Scatter/gather mode uses all of the above functions plus a
 * few extras to deal with DMA mapping.
 */
static int mcam_vb_sg_buf_init(struct vb2_buffer *vb)
{
	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;

	mvb->dma_desc = dma_alloc_coherent(cam->dev,
			ndesc * sizeof(struct mcam_dma_desc),
			&mvb->dma_desc_pa, GFP_KERNEL);
	if (mvb->dma_desc == NULL) {
		cam_err(cam, "Unable to get DMA descriptor array\n");
		return -ENOMEM;
	}
	return 0;
}
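
/*
 * Rough sizing example (editorial, assuming 4 KB pages): a VGA YUYV
 * frame is 640 * 480 * 2 = 614400 bytes, so the allocation above comes
 * to 614400 / 4096 + 1 = 151 descriptors of 8 bytes each, about 1.2 KB.
 * REG_DESC_LEN_Y is later programmed with nent * 8, where nent is
 * whatever dma_map_sg() returns (it may merge adjacent pages).
 */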

static int mcam_vb_sg_buf_prepare(struct vb2_buffer *vb)
{
	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	struct vb2_dma_sg_desc *sgd = vb2_dma_sg_plane_desc(vb, 0);
	struct mcam_dma_desc *desc = mvb->dma_desc;
	struct scatterlist *sg;
	int i;

	mvb->dma_desc_nent = dma_map_sg(cam->dev, sgd->sglist, sgd->num_pages,
			DMA_FROM_DEVICE);
	if (mvb->dma_desc_nent <= 0)
		return -EIO;  /* Not sure what's right here */
	for_each_sg(sgd->sglist, sg, mvb->dma_desc_nent, i) {
		desc->dma_addr = sg_dma_address(sg);
		desc->segment_len = sg_dma_len(sg);
		desc++;
	}
	return 0;
}

static int mcam_vb_sg_buf_finish(struct vb2_buffer *vb)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	struct vb2_dma_sg_desc *sgd = vb2_dma_sg_plane_desc(vb, 0);

	dma_unmap_sg(cam->dev, sgd->sglist, sgd->num_pages, DMA_FROM_DEVICE);
	return 0;
}

static void mcam_vb_sg_buf_cleanup(struct vb2_buffer *vb)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
	int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;

	dma_free_coherent(cam->dev, ndesc * sizeof(struct mcam_dma_desc),
			mvb->dma_desc, mvb->dma_desc_pa);
}

static const struct vb2_ops mcam_vb2_sg_ops = {
	.queue_setup		= mcam_vb_queue_setup,
	.buf_init		= mcam_vb_sg_buf_init,
	.buf_prepare		= mcam_vb_sg_buf_prepare,
	.buf_queue		= mcam_vb_buf_queue,
	.buf_finish		= mcam_vb_sg_buf_finish,
	.buf_cleanup		= mcam_vb_sg_buf_cleanup,
	.start_streaming	= mcam_vb_start_streaming,
	.stop_streaming		= mcam_vb_stop_streaming,
	.wait_prepare		= mcam_vb_wait_prepare,
	.wait_finish		= mcam_vb_wait_finish,
};

#endif /* MCAM_MODE_DMA_SG */

static int mcam_setup_vb2(struct mcam_camera *cam)
{
	struct vb2_queue *vq = &cam->vb_queue;

	memset(vq, 0, sizeof(*vq));
	vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	vq->drv_priv = cam;
	INIT_LIST_HEAD(&cam->buffers);
	switch (cam->buffer_mode) {
	case B_DMA_contig:
#ifdef MCAM_MODE_DMA_CONTIG
		vq->ops = &mcam_vb2_ops;
		vq->mem_ops = &vb2_dma_contig_memops;
		cam->vb_alloc_ctx = vb2_dma_contig_init_ctx(cam->dev);
		vq->io_modes = VB2_MMAP | VB2_USERPTR;
		cam->dma_setup = mcam_ctlr_dma_contig;
		cam->frame_complete = mcam_dma_contig_done;
#endif
		break;
	case B_DMA_sg:
#ifdef MCAM_MODE_DMA_SG
		vq->ops = &mcam_vb2_sg_ops;
		vq->mem_ops = &vb2_dma_sg_memops;
		vq->io_modes = VB2_MMAP | VB2_USERPTR;
		cam->dma_setup = mcam_ctlr_dma_sg;
		cam->frame_complete = mcam_dma_sg_done;
#endif
		break;
	case B_vmalloc:
#ifdef MCAM_MODE_VMALLOC
		tasklet_init(&cam->s_tasklet, mcam_frame_tasklet,
				(unsigned long) cam);
		vq->ops = &mcam_vb2_ops;
		vq->mem_ops = &vb2_vmalloc_memops;
		vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
		vq->io_modes = VB2_MMAP;
		cam->dma_setup = mcam_ctlr_dma_vmalloc;
		cam->frame_complete = mcam_vmalloc_done;
#endif
		break;
	}
	return vb2_queue_init(vq);
}

static void mcam_cleanup_vb2(struct mcam_camera *cam)
{
	vb2_queue_release(&cam->vb_queue);
#ifdef MCAM_MODE_DMA_CONTIG
	if (cam->buffer_mode == B_DMA_contig)
		vb2_dma_contig_cleanup_ctx(cam->vb_alloc_ctx);
#endif
}

/* ---------------------------------------------------------------------- */
/*
 * The long list of V4L2 ioctl() operations.
 */

static int mcam_vidioc_streamon(struct file *filp, void *priv,
		enum v4l2_buf_type type)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_streamon(&cam->vb_queue, type);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_streamoff(struct file *filp, void *priv,
		enum v4l2_buf_type type)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_streamoff(&cam->vb_queue, type);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_reqbufs(struct file *filp, void *priv,
		struct v4l2_requestbuffers *req)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_reqbufs(&cam->vb_queue, req);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_querybuf(struct file *filp, void *priv,
		struct v4l2_buffer *buf)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_querybuf(&cam->vb_queue, buf);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_qbuf(struct file *filp, void *priv,
		struct v4l2_buffer *buf)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_qbuf(&cam->vb_queue, buf);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_dqbuf(struct file *filp, void *priv,
		struct v4l2_buffer *buf)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_dqbuf(&cam->vb_queue, buf, filp->f_flags & O_NONBLOCK);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_querycap(struct file *file, void *priv,
		struct v4l2_capability *cap)
{
	strcpy(cap->driver, "marvell_ccic");
	strcpy(cap->card, "marvell_ccic");
	cap->version = 1;
	cap->capabilities = V4L2_CAP_VIDEO_CAPTURE |
		V4L2_CAP_READWRITE | V4L2_CAP_STREAMING;
	return 0;
}

static int mcam_vidioc_enum_fmt_vid_cap(struct file *filp,
		void *priv, struct v4l2_fmtdesc *fmt)
{
	if (fmt->index >= N_MCAM_FMTS)
		return -EINVAL;
	strlcpy(fmt->description, mcam_formats[fmt->index].desc,
			sizeof(fmt->description));
	fmt->pixelformat = mcam_formats[fmt->index].pixelformat;
	return 0;
}

static int mcam_vidioc_try_fmt_vid_cap(struct file *filp, void *priv,
		struct v4l2_format *fmt)
{
	struct mcam_camera *cam = priv;
	struct mcam_format_struct *f;
	struct v4l2_pix_format *pix = &fmt->fmt.pix;
	struct v4l2_mbus_framefmt mbus_fmt;
	int ret;

	f = mcam_find_format(pix->pixelformat);
	pix->pixelformat = f->pixelformat;
	v4l2_fill_mbus_format(&mbus_fmt, pix, f->mbus_code);
	mutex_lock(&cam->s_mutex);
	ret = sensor_call(cam, video, try_mbus_fmt, &mbus_fmt);
	mutex_unlock(&cam->s_mutex);
	v4l2_fill_pix_format(pix, &mbus_fmt);
	pix->bytesperline = pix->width * f->bpp;
	pix->sizeimage = pix->height * pix->bytesperline;
	return ret;
}

static int mcam_vidioc_s_fmt_vid_cap(struct file *filp, void *priv,
		struct v4l2_format *fmt)
{
	struct mcam_camera *cam = priv;
	struct mcam_format_struct *f;
	int ret;

	/*
	 * Can't do anything if the device is not idle.
	 * Also can't if there are streaming buffers in place.
	 */
	if (cam->state != S_IDLE || cam->vb_queue.num_buffers > 0)
		return -EBUSY;

	f = mcam_find_format(fmt->fmt.pix.pixelformat);

	/*
	 * See if the formatting works in principle.
	 */
	ret = mcam_vidioc_try_fmt_vid_cap(filp, priv, fmt);
	if (ret)
		return ret;
	/*
	 * Now we start to change things for real, so let's do it
	 * under lock.
	 */
	mutex_lock(&cam->s_mutex);
	cam->pix_format = fmt->fmt.pix;
	cam->mbus_code = f->mbus_code;

	/*
	 * Make sure we have appropriate DMA buffers.
	 */
	if (cam->buffer_mode == B_vmalloc) {
		ret = mcam_check_dma_buffers(cam);
		if (ret)
			goto out;
	}
	mcam_set_config_needed(cam, 1);
out:
	mutex_unlock(&cam->s_mutex);
	return ret;
}

/*
 * Return our stored notion of how the camera is/should be configured.
 * The V4L2 spec wants us to be smarter, and actually get this from
 * the camera (and not mess with it at open time). Someday.
 */
static int mcam_vidioc_g_fmt_vid_cap(struct file *filp, void *priv,
		struct v4l2_format *f)
{
	struct mcam_camera *cam = priv;

	f->fmt.pix = cam->pix_format;
	return 0;
}

/*
 * We only have one input - the sensor - so minimize the nonsense here.
 */
static int mcam_vidioc_enum_input(struct file *filp, void *priv,
		struct v4l2_input *input)
{
	if (input->index != 0)
		return -EINVAL;

	input->type = V4L2_INPUT_TYPE_CAMERA;
	input->std = V4L2_STD_ALL; /* Not sure what should go here */
	strcpy(input->name, "Camera");
	return 0;
}

static int mcam_vidioc_g_input(struct file *filp, void *priv, unsigned int *i)
{
	*i = 0;
	return 0;
}

static int mcam_vidioc_s_input(struct file *filp, void *priv, unsigned int i)
{
	if (i != 0)
		return -EINVAL;
	return 0;
}

/* from vivi.c */
static int mcam_vidioc_s_std(struct file *filp, void *priv, v4l2_std_id a)
{
	return 0;
}

static int mcam_vidioc_g_std(struct file *filp, void *priv, v4l2_std_id *a)
{
	*a = V4L2_STD_NTSC_M;
	return 0;
}

/*
 * G/S_PARM. Most of this is done by the sensor, but we are
 * the level which controls the number of read buffers.
 */
static int mcam_vidioc_g_parm(struct file *filp, void *priv,
		struct v4l2_streamparm *parms)
{
	struct mcam_camera *cam = priv;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = sensor_call(cam, video, g_parm, parms);
	mutex_unlock(&cam->s_mutex);
	parms->parm.capture.readbuffers = n_dma_bufs;
	return ret;
}

static int mcam_vidioc_s_parm(struct file *filp, void *priv,
		struct v4l2_streamparm *parms)
{
	struct mcam_camera *cam = priv;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = sensor_call(cam, video, s_parm, parms);
	mutex_unlock(&cam->s_mutex);
	parms->parm.capture.readbuffers = n_dma_bufs;
	return ret;
}

static int mcam_vidioc_enum_framesizes(struct file *filp, void *priv,
		struct v4l2_frmsizeenum *sizes)
{
	struct mcam_camera *cam = priv;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = sensor_call(cam, video, enum_framesizes, sizes);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_enum_frameintervals(struct file *filp, void *priv,
		struct v4l2_frmivalenum *interval)
{
	struct mcam_camera *cam = priv;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = sensor_call(cam, video, enum_frameintervals, interval);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

#ifdef CONFIG_VIDEO_ADV_DEBUG
static int mcam_vidioc_g_register(struct file *file, void *priv,
		struct v4l2_dbg_register *reg)
{
	struct mcam_camera *cam = priv;

	if (reg->reg > cam->regs_size - 4)
		return -EINVAL;
	reg->val = mcam_reg_read(cam, reg->reg);
	reg->size = 4;
	return 0;
}

static int mcam_vidioc_s_register(struct file *file, void *priv,
		const struct v4l2_dbg_register *reg)
{
	struct mcam_camera *cam = priv;

	if (reg->reg > cam->regs_size - 4)
		return -EINVAL;
	mcam_reg_write(cam, reg->reg, reg->val);
	return 0;
}
#endif

static const struct v4l2_ioctl_ops mcam_v4l_ioctl_ops = {
	.vidioc_querycap	= mcam_vidioc_querycap,
	.vidioc_enum_fmt_vid_cap = mcam_vidioc_enum_fmt_vid_cap,
	.vidioc_try_fmt_vid_cap	= mcam_vidioc_try_fmt_vid_cap,
	.vidioc_s_fmt_vid_cap	= mcam_vidioc_s_fmt_vid_cap,
	.vidioc_g_fmt_vid_cap	= mcam_vidioc_g_fmt_vid_cap,
	.vidioc_enum_input	= mcam_vidioc_enum_input,
	.vidioc_g_input		= mcam_vidioc_g_input,
	.vidioc_s_input		= mcam_vidioc_s_input,
	.vidioc_s_std		= mcam_vidioc_s_std,
	.vidioc_g_std		= mcam_vidioc_g_std,
	.vidioc_reqbufs		= mcam_vidioc_reqbufs,
	.vidioc_querybuf	= mcam_vidioc_querybuf,
	.vidioc_qbuf		= mcam_vidioc_qbuf,
	.vidioc_dqbuf		= mcam_vidioc_dqbuf,
	.vidioc_streamon	= mcam_vidioc_streamon,
	.vidioc_streamoff	= mcam_vidioc_streamoff,
	.vidioc_g_parm		= mcam_vidioc_g_parm,
	.vidioc_s_parm		= mcam_vidioc_s_parm,
	.vidioc_enum_framesizes = mcam_vidioc_enum_framesizes,
	.vidioc_enum_frameintervals = mcam_vidioc_enum_frameintervals,
#ifdef CONFIG_VIDEO_ADV_DEBUG
	.vidioc_g_register	= mcam_vidioc_g_register,
	.vidioc_s_register	= mcam_vidioc_s_register,
#endif
};

/* ---------------------------------------------------------------------- */
/*
 * Our various file operations.
 */
static int mcam_v4l_open(struct file *filp)
{
	struct mcam_camera *cam = video_drvdata(filp);
	int ret = 0;

	filp->private_data = cam;

	cam->frame_state.frames = 0;
	cam->frame_state.singles = 0;
	cam->frame_state.delivered = 0;
	mutex_lock(&cam->s_mutex);
	if (cam->users == 0) {
		ret = mcam_setup_vb2(cam);
		if (ret)
			goto out;
		ret = mcam_ctlr_power_up(cam);
		if (ret)
			goto out;
		__mcam_cam_reset(cam);
		mcam_set_config_needed(cam, 1);
	}
	(cam->users)++;
out:
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_v4l_release(struct file *filp)
{
	struct mcam_camera *cam = filp->private_data;

	cam_dbg(cam, "Release, %d frames, %d singles, %d delivered\n",
			cam->frame_state.frames, cam->frame_state.singles,
			cam->frame_state.delivered);
	mutex_lock(&cam->s_mutex);
	(cam->users)--;
	if (cam->users == 0) {
		mcam_ctlr_stop_dma(cam);
		mcam_cleanup_vb2(cam);
		mcam_disable_mipi(cam);
		mcam_ctlr_power_down(cam);
		if (cam->buffer_mode == B_vmalloc && alloc_bufs_at_read)
			mcam_free_dma_bufs(cam);
	}
	mutex_unlock(&cam->s_mutex);
	return 0;
}
  1383. static ssize_t mcam_v4l_read(struct file *filp,
  1384. char __user *buffer, size_t len, loff_t *pos)
  1385. {
  1386. struct mcam_camera *cam = filp->private_data;
  1387. int ret;
  1388. mutex_lock(&cam->s_mutex);
  1389. ret = vb2_read(&cam->vb_queue, buffer, len, pos,
  1390. filp->f_flags & O_NONBLOCK);
  1391. mutex_unlock(&cam->s_mutex);
  1392. return ret;
  1393. }
  1394. static unsigned int mcam_v4l_poll(struct file *filp,
  1395. struct poll_table_struct *pt)
  1396. {
  1397. struct mcam_camera *cam = filp->private_data;
  1398. int ret;
  1399. mutex_lock(&cam->s_mutex);
  1400. ret = vb2_poll(&cam->vb_queue, filp, pt);
  1401. mutex_unlock(&cam->s_mutex);
  1402. return ret;
  1403. }

static int mcam_v4l_mmap(struct file *filp, struct vm_area_struct *vma)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_mmap(&cam->vb_queue, vma);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static const struct v4l2_file_operations mcam_v4l_fops = {
	.owner = THIS_MODULE,
	.open = mcam_v4l_open,
	.release = mcam_v4l_release,
	.read = mcam_v4l_read,
	.poll = mcam_v4l_poll,
	.mmap = mcam_v4l_mmap,
	.unlocked_ioctl = video_ioctl2,
};

/*
 * This template device holds all of those v4l2 methods; we
 * clone it for specific real devices.
 */
static struct video_device mcam_v4l_template = {
	.name = "mcam",
	.tvnorms = V4L2_STD_NTSC_M,
	.fops = &mcam_v4l_fops,
	.ioctl_ops = &mcam_v4l_ioctl_ops,
	.release = video_device_release_empty,
};
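
/*
 * Illustration (not part of the driver logic): the "clone it for
 * specific real devices" step the comment above refers to is the
 * per-device setup done in mccic_register() further down, roughly:
 *
 *	cam->vdev = mcam_v4l_template;
 *	cam->vdev.v4l2_dev = &cam->v4l2_dev;
 *	video_set_drvdata(&cam->vdev, cam);
 *	ret = video_register_device(&cam->vdev, VFL_TYPE_GRABBER, -1);
 *
 * Each registered video node gets its own copy of the template while
 * sharing the fops and ioctl_ops tables defined here.
 */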

/* ---------------------------------------------------------------------- */
/*
 * Interrupt handler stuff
 */

static void mcam_frame_complete(struct mcam_camera *cam, int frame)
{
	/*
	 * Basic frame housekeeping.
	 */
	set_bit(frame, &cam->flags);
	clear_bit(CF_DMA_ACTIVE, &cam->flags);
	cam->next_buf = frame;
	cam->buf_seq[frame] = ++(cam->sequence);
	cam->frame_state.frames++;
	/*
	 * "This should never happen"
	 */
	if (cam->state != S_STREAMING)
		return;
	/*
	 * Process the frame and set up the next one.
	 */
	cam->frame_complete(cam, frame);
}

/*
 * The interrupt handler; this needs to be called from the
 * platform irq handler with the lock held.
 */
int mccic_irq(struct mcam_camera *cam, unsigned int irqs)
{
	unsigned int frame, handled = 0;

	mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS); /* Clear'em all */
	/*
	 * Handle any frame completions. There really should
	 * not be more than one of these, or we have fallen
	 * far behind.
	 *
	 * When running in S/G mode, the frame number lacks any
	 * real meaning - there's only one descriptor array - but
	 * the controller still picks a different one to signal
	 * each time.
	 */
	for (frame = 0; frame < cam->nbufs; frame++)
		if (irqs & (IRQ_EOF0 << frame)) {
			mcam_frame_complete(cam, frame);
			handled = 1;
			if (cam->buffer_mode == B_DMA_sg)
				break;
		}
	/*
	 * If a frame starts, note that we have DMA active. This
	 * code assumes that we won't get multiple frame interrupts
	 * at once; may want to rethink that.
	 */
	if (irqs & (IRQ_SOF0 | IRQ_SOF1 | IRQ_SOF2)) {
		set_bit(CF_DMA_ACTIVE, &cam->flags);
		handled = 1;
		if (cam->buffer_mode == B_DMA_sg)
			mcam_ctlr_stop(cam);
	}
	return handled;
}
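
/*
 * Sketch (not part of this file): what a caller of mccic_irq() might
 * look like in the platform glue, given the rule above that it must be
 * invoked from the platform irq handler with the controller lock held.
 * The dev_lock spinlock name and the mcam_reg_read() helper are assumed
 * to be provided by the platform layer / mcam-core.h, not defined here.
 *
 *	static irqreturn_t my_platform_ccic_irq(int irq, void *data)
 *	{
 *		struct mcam_camera *cam = data;
 *		unsigned int irqs, handled;
 *
 *		spin_lock(&cam->dev_lock);
 *		irqs = mcam_reg_read(cam, REG_IRQSTAT);
 *		handled = mccic_irq(cam, irqs);
 *		spin_unlock(&cam->dev_lock);
 *		return IRQ_RETVAL(handled);
 *	}
 */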

/* ---------------------------------------------------------------------- */
/*
 * Registration and such.
 */
static struct ov7670_config sensor_cfg = {
	/*
	 * Exclude QCIF mode, because it only captures a tiny portion
	 * of the sensor FOV
	 */
	.min_width = 320,
	.min_height = 240,
};

int mccic_register(struct mcam_camera *cam)
{
	struct i2c_board_info ov7670_info = {
		.type = "ov7670",
		.addr = 0x42 >> 1,
		.platform_data = &sensor_cfg,
	};
	int ret;

	/*
	 * Validate the requested buffer mode.
	 */
	if (buffer_mode >= 0)
		cam->buffer_mode = buffer_mode;
	if (cam->buffer_mode == B_DMA_sg &&
			cam->chip_id == MCAM_CAFE) {
		printk(KERN_ERR "marvell-cam: Cafe can't do S/G I/O, "
				"attempting vmalloc mode instead\n");
		cam->buffer_mode = B_vmalloc;
	}
	if (!mcam_buffer_mode_supported(cam->buffer_mode)) {
		printk(KERN_ERR "marvell-cam: buffer mode %d unsupported\n",
				cam->buffer_mode);
		return -EINVAL;
	}
	/*
	 * Register with V4L
	 */
	ret = v4l2_device_register(cam->dev, &cam->v4l2_dev);
	if (ret)
		return ret;

	mutex_init(&cam->s_mutex);
	cam->state = S_NOTREADY;
	mcam_set_config_needed(cam, 1);
	cam->pix_format = mcam_def_pix_format;
	cam->mbus_code = mcam_def_mbus_code;
	INIT_LIST_HEAD(&cam->buffers);
	mcam_ctlr_init(cam);

	/*
	 * Try to find the sensor.
	 */
	sensor_cfg.clock_speed = cam->clock_speed;
	sensor_cfg.use_smbus = cam->use_smbus;
	cam->sensor_addr = ov7670_info.addr;
	cam->sensor = v4l2_i2c_new_subdev_board(&cam->v4l2_dev,
			cam->i2c_adapter, &ov7670_info, NULL);
	if (cam->sensor == NULL) {
		ret = -ENODEV;
		goto out_unregister;
	}

	ret = mcam_cam_init(cam);
	if (ret)
		goto out_unregister;

	/*
	 * Get the v4l2 setup done.
	 */
	ret = v4l2_ctrl_handler_init(&cam->ctrl_handler, 10);
	if (ret)
		goto out_unregister;
	cam->v4l2_dev.ctrl_handler = &cam->ctrl_handler;

	mutex_lock(&cam->s_mutex);
	cam->vdev = mcam_v4l_template;
	cam->vdev.debug = 0;
	cam->vdev.v4l2_dev = &cam->v4l2_dev;
	video_set_drvdata(&cam->vdev, cam);
	ret = video_register_device(&cam->vdev, VFL_TYPE_GRABBER, -1);
	if (ret)
		goto out;
	/*
	 * If so requested, try to get our DMA buffers now.
	 */
	if (cam->buffer_mode == B_vmalloc && !alloc_bufs_at_read) {
		if (mcam_alloc_dma_bufs(cam, 1))
			cam_warn(cam, "Unable to alloc DMA buffers at load; "
					"will try again later\n");
	}
	mutex_unlock(&cam->s_mutex);
	return 0;

out:
	/*
	 * Error path only: on success the control handler stays attached
	 * to cam->v4l2_dev and is freed in mccic_shutdown().
	 */
	v4l2_ctrl_handler_free(&cam->ctrl_handler);
	mutex_unlock(&cam->s_mutex);
out_unregister:
	v4l2_device_unregister(&cam->v4l2_dev);
	return ret;
}
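
/*
 * Usage sketch (hypothetical, not part of this file): a platform driver
 * is expected to fill in the struct mcam_camera fields mccic_register()
 * relies on (dev, i2c_adapter, chip_id, clock_speed, use_smbus,
 * buffer_mode, plus register mapping and the irq hookup shown above)
 * before calling it, and to tear down with mccic_shutdown().  Names such
 * as my_cam_probe() and my_get_i2c_adapter() are illustrative only.
 *
 *	static int my_cam_probe(struct platform_device *pdev)
 *	{
 *		struct mcam_camera *cam;
 *
 *		cam = devm_kzalloc(&pdev->dev, sizeof(*cam), GFP_KERNEL);
 *		if (cam == NULL)
 *			return -ENOMEM;
 *		cam->dev = &pdev->dev;
 *		cam->chip_id = MCAM_CAFE;	/* or the platform's id */
 *		cam->buffer_mode = B_vmalloc;
 *		cam->i2c_adapter = my_get_i2c_adapter(pdev);
 *		/* ... map registers, request the irq, set clock_speed ... */
 *		return mccic_register(cam);
 *	}
 */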

void mccic_shutdown(struct mcam_camera *cam)
{
	/*
	 * If we have no users (and we really, really should have no
	 * users) the device will already be powered down. Trying to
	 * take it down again will wedge the machine, which is frowned
	 * upon.
	 */
	if (cam->users > 0) {
		cam_warn(cam, "Removing a device with users!\n");
		mcam_ctlr_power_down(cam);
	}
	vb2_queue_release(&cam->vb_queue);
	if (cam->buffer_mode == B_vmalloc)
		mcam_free_dma_bufs(cam);
	video_unregister_device(&cam->vdev);
	v4l2_ctrl_handler_free(&cam->ctrl_handler);
	v4l2_device_unregister(&cam->v4l2_dev);
}

/*
 * Power management
 */
#ifdef CONFIG_PM

void mccic_suspend(struct mcam_camera *cam)
{
	mutex_lock(&cam->s_mutex);
	if (cam->users > 0) {
		enum mcam_state cstate = cam->state;

		mcam_ctlr_stop_dma(cam);
		mcam_ctlr_power_down(cam);
		cam->state = cstate;
	}
	mutex_unlock(&cam->s_mutex);
}

int mccic_resume(struct mcam_camera *cam)
{
	int ret = 0;

	mutex_lock(&cam->s_mutex);
	if (cam->users > 0) {
		ret = mcam_ctlr_power_up(cam);
		if (ret) {
			mutex_unlock(&cam->s_mutex);
			return ret;
		}
		__mcam_cam_reset(cam);
	} else {
		mcam_ctlr_power_down(cam);
	}
	mutex_unlock(&cam->s_mutex);

	set_bit(CF_CONFIG_NEEDED, &cam->flags);
	if (cam->state == S_STREAMING) {
		/*
		 * If there was a buffer in the DMA engine at suspend
		 * time, put it back on the queue or we'll forget about it.
		 */
		if (cam->buffer_mode == B_DMA_sg && cam->vb_bufs[0])
			list_add(&cam->vb_bufs[0]->queue, &cam->buffers);
		ret = mcam_read_setup(cam);
	}
	return ret;
}
#endif /* CONFIG_PM */
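
/*
 * Wiring sketch (hypothetical): the platform driver's own PM callbacks
 * are expected to call mccic_suspend()/mccic_resume() around whatever
 * clock or regulator handling it does itself.  my_cam_suspend() and
 * my_cam_resume() below are illustrative names only.
 *
 *	#ifdef CONFIG_PM
 *	static int my_cam_suspend(struct platform_device *pdev,
 *			pm_message_t state)
 *	{
 *		struct mcam_camera *cam = platform_get_drvdata(pdev);
 *
 *		mccic_suspend(cam);
 *		return 0;
 *	}
 *
 *	static int my_cam_resume(struct platform_device *pdev)
 *	{
 *		struct mcam_camera *cam = platform_get_drvdata(pdev);
 *
 *		return mccic_resume(cam);
 *	}
 *	#endif
 */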