/*
 * The Marvell camera core. This device appears in a number of settings,
 * so it needs platform-specific support outside of the core.
 *
 * Copyright 2011 Jonathan Corbet corbet@lwn.net
 */
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/fs.h>
#include <linux/mm.h>
#include <linux/i2c.h>
#include <linux/interrupt.h>
#include <linux/spinlock.h>
#include <linux/slab.h>
#include <linux/device.h>
#include <linux/wait.h>
#include <linux/list.h>
#include <linux/dma-mapping.h>
#include <linux/delay.h>
#include <linux/vmalloc.h>
#include <linux/io.h>
#include <linux/videodev2.h>
#include <media/v4l2-device.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-chip-ident.h>
#include <media/ov7670.h>
#include <media/videobuf2-vmalloc.h>
#include <media/videobuf2-dma-contig.h>
#include <media/videobuf2-dma-sg.h>

#include "mcam-core.h"

#ifdef MCAM_MODE_VMALLOC
/*
 * Internal DMA buffer management. Since the controller cannot do S/G I/O,
 * we must have physically contiguous buffers to bring frames into.
 * These parameters control how many buffers we use, whether we
 * allocate them at load time (better chance of success, but nails down
 * memory) or when somebody tries to use the camera (riskier), and,
 * for load-time allocation, how big they should be.
 *
 * The controller can cycle through three buffers. We could use
 * more by flipping pointers around, but it probably makes little
 * sense.
 */

static bool alloc_bufs_at_read;
module_param(alloc_bufs_at_read, bool, 0444);
MODULE_PARM_DESC(alloc_bufs_at_read,
		"Non-zero value causes DMA buffers to be allocated when the "
		"video capture device is read, rather than at module load "
		"time. This saves memory, but decreases the chances of "
		"successfully getting those buffers. This parameter is "
		"only used in the vmalloc buffer mode");

static int n_dma_bufs = 3;
module_param(n_dma_bufs, uint, 0644);
MODULE_PARM_DESC(n_dma_bufs,
		"The number of DMA buffers to allocate. Can be either two "
		"(saves memory, makes timing tighter) or three.");

static int dma_buf_size = VGA_WIDTH * VGA_HEIGHT * 2;	/* Worst case */
module_param(dma_buf_size, uint, 0444);
MODULE_PARM_DESC(dma_buf_size,
		"The size of the allocated DMA buffers. If actual operating "
		"parameters require larger buffers, an attempt to reallocate "
		"will be made.");
#else /* MCAM_MODE_VMALLOC */
static const bool alloc_bufs_at_read = 0;
static const int n_dma_bufs = 3;	/* Used by S/G_PARM */
#endif /* MCAM_MODE_VMALLOC */

static bool flip;
module_param(flip, bool, 0444);
MODULE_PARM_DESC(flip,
		"If set, the sensor will be instructed to flip the image "
		"vertically.");

static int buffer_mode = -1;
module_param(buffer_mode, int, 0444);
MODULE_PARM_DESC(buffer_mode,
		"Set the buffer mode to be used; default is to go with what "
		"the platform driver asks for. Set to 0 for vmalloc, 1 for "
		"DMA contiguous.");

/*
 * Status flags. Always manipulated with bit operations.
 */
#define CF_BUF0_VALID	 0	/* Buffers valid - first three */
#define CF_BUF1_VALID	 1
#define CF_BUF2_VALID	 2
#define CF_DMA_ACTIVE	 3	/* A frame is incoming */
#define CF_CONFIG_NEEDED 4	/* Must configure hardware */
#define CF_SINGLE_BUFFER 5	/* Running with a single buffer */
#define CF_SG_RESTART	 6	/* SG restart needed */

#define sensor_call(cam, o, f, args...) \
	v4l2_subdev_call(cam->sensor, o, f, ##args)
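
/*
 * The pixel formats we support, each with the media-bus code it maps
 * onto and its bytes-per-pixel value.
 */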
static struct mcam_format_struct {
	__u8 *desc;
	__u32 pixelformat;
	int bpp;   /* Bytes per pixel */
	enum v4l2_mbus_pixelcode mbus_code;
} mcam_formats[] = {
	{
		.desc		= "YUYV 4:2:2",
		.pixelformat	= V4L2_PIX_FMT_YUYV,
		.mbus_code	= V4L2_MBUS_FMT_YUYV8_2X8,
		.bpp		= 2,
	},
	{
		.desc		= "RGB 444",
		.pixelformat	= V4L2_PIX_FMT_RGB444,
		.mbus_code	= V4L2_MBUS_FMT_RGB444_2X8_PADHI_LE,
		.bpp		= 2,
	},
	{
		.desc		= "RGB 565",
		.pixelformat	= V4L2_PIX_FMT_RGB565,
		.mbus_code	= V4L2_MBUS_FMT_RGB565_2X8_LE,
		.bpp		= 2,
	},
	{
		.desc		= "Raw RGB Bayer",
		.pixelformat	= V4L2_PIX_FMT_SBGGR8,
		.mbus_code	= V4L2_MBUS_FMT_SBGGR8_1X8,
		.bpp		= 1
	},
};
#define N_MCAM_FMTS ARRAY_SIZE(mcam_formats)
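
/*
 * Look up the format structure for a given fourcc, falling back to the
 * first (YUYV) entry if the requested format is not supported.
 */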
static struct mcam_format_struct *mcam_find_format(u32 pixelformat)
{
	unsigned i;

	for (i = 0; i < N_MCAM_FMTS; i++)
		if (mcam_formats[i].pixelformat == pixelformat)
			return mcam_formats + i;
	/* Not found? Then return the first format. */
	return mcam_formats;
}

/*
 * The default format we use until somebody says otherwise.
 */
static const struct v4l2_pix_format mcam_def_pix_format = {
	.width		= VGA_WIDTH,
	.height		= VGA_HEIGHT,
	.pixelformat	= V4L2_PIX_FMT_YUYV,
	.field		= V4L2_FIELD_NONE,
	.bytesperline	= VGA_WIDTH*2,
	.sizeimage	= VGA_WIDTH*VGA_HEIGHT*2,
};

static const enum v4l2_mbus_pixelcode mcam_def_mbus_code =
					V4L2_MBUS_FMT_YUYV8_2X8;

/*
 * The two-word DMA descriptor format used by the Armada 610 and the like.
 * There is a three-word format as well (set C1_DESC_3WORD) where the third
 * word is a pointer to the next descriptor, but we don't use it. Two-word
 * descriptors have to be contiguous in memory.
 */
struct mcam_dma_desc {
	u32 dma_addr;
	u32 segment_len;
};

/*
 * Our buffer type for working with videobuf2. Note that the vb2
 * developers have decreed that struct vb2_buffer must be at the
 * beginning of this structure.
 */
struct mcam_vb_buffer {
	struct vb2_buffer vb_buf;
	struct list_head queue;
	struct mcam_dma_desc *dma_desc;	/* Descriptor virtual address */
	dma_addr_t dma_desc_pa;		/* Descriptor physical address */
	int dma_desc_nent;		/* Number of mapped descriptors */
};

static inline struct mcam_vb_buffer *vb_to_mvb(struct vb2_buffer *vb)
{
	return container_of(vb, struct mcam_vb_buffer, vb_buf);
}

/*
 * Hand a completed buffer back to user space.
 */
static void mcam_buffer_done(struct mcam_camera *cam, int frame,
		struct vb2_buffer *vbuf)
{
	vbuf->v4l2_buf.bytesused = cam->pix_format.sizeimage;
	vbuf->v4l2_buf.sequence = cam->buf_seq[frame];
	vb2_set_plane_payload(vbuf, 0, cam->pix_format.sizeimage);
	vb2_buffer_done(vbuf, VB2_BUF_STATE_DONE);
}

/*
 * Debugging and related.
 */
#define cam_err(cam, fmt, arg...) \
	dev_err((cam)->dev, fmt, ##arg);
#define cam_warn(cam, fmt, arg...) \
	dev_warn((cam)->dev, fmt, ##arg);
#define cam_dbg(cam, fmt, arg...) \
	dev_dbg((cam)->dev, fmt, ##arg);

/*
 * Flag manipulation helpers
 */
static void mcam_reset_buffers(struct mcam_camera *cam)
{
	int i;

	cam->next_buf = -1;
	for (i = 0; i < cam->nbufs; i++)
		clear_bit(i, &cam->flags);
}

static inline int mcam_needs_config(struct mcam_camera *cam)
{
	return test_bit(CF_CONFIG_NEEDED, &cam->flags);
}

static void mcam_set_config_needed(struct mcam_camera *cam, int needed)
{
	if (needed)
		set_bit(CF_CONFIG_NEEDED, &cam->flags);
	else
		clear_bit(CF_CONFIG_NEEDED, &cam->flags);
}

/* ------------------------------------------------------------------- */
/*
 * Make the controller start grabbing images. Everything must
 * be set up before doing this.
 */
static void mcam_ctlr_start(struct mcam_camera *cam)
{
	/*
	 * set_bit performs a read, so no other barrier should be
	 * needed here.
	 */
	mcam_reg_set_bit(cam, REG_CTRL0, C0_ENABLE);
}
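
/*
 * Stop the controller; the complement of mcam_ctlr_start().
 */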
static void mcam_ctlr_stop(struct mcam_camera *cam)
{
	mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
}

/* ------------------------------------------------------------------- */

#ifdef MCAM_MODE_VMALLOC
/*
 * Code specific to the vmalloc buffer mode.
 */

/*
 * Allocate in-kernel DMA buffers for vmalloc mode.
 */
static int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
{
	int i;

	mcam_set_config_needed(cam, 1);
	if (loadtime)
		cam->dma_buf_size = dma_buf_size;
	else
		cam->dma_buf_size = cam->pix_format.sizeimage;
	if (n_dma_bufs > 3)
		n_dma_bufs = 3;

	cam->nbufs = 0;
	for (i = 0; i < n_dma_bufs; i++) {
		cam->dma_bufs[i] = dma_alloc_coherent(cam->dev,
				cam->dma_buf_size, cam->dma_handles + i,
				GFP_KERNEL);
		if (cam->dma_bufs[i] == NULL) {
			cam_warn(cam, "Failed to allocate DMA buffer\n");
			break;
		}
		(cam->nbufs)++;
	}

	switch (cam->nbufs) {
	case 1:
		dma_free_coherent(cam->dev, cam->dma_buf_size,
				cam->dma_bufs[0], cam->dma_handles[0]);
		cam->nbufs = 0;
	case 0:
		cam_err(cam, "Insufficient DMA buffers, cannot operate\n");
		return -ENOMEM;

	case 2:
		if (n_dma_bufs > 2)
			cam_warn(cam, "Will limp along with only 2 buffers\n");
		break;
	}
	return 0;
}

static void mcam_free_dma_bufs(struct mcam_camera *cam)
{
	int i;

	for (i = 0; i < cam->nbufs; i++) {
		dma_free_coherent(cam->dev, cam->dma_buf_size,
				cam->dma_bufs[i], cam->dma_handles[i]);
		cam->dma_bufs[i] = NULL;
	}
	cam->nbufs = 0;
}

/*
 * Set up DMA buffers when operating in vmalloc mode
 */
static void mcam_ctlr_dma_vmalloc(struct mcam_camera *cam)
{
	/*
	 * Store the first two Y buffers (we aren't supporting
	 * planar formats for now, so no UV bufs). Then either
	 * set the third if it exists, or tell the controller
	 * to just use two.
	 */
	mcam_reg_write(cam, REG_Y0BAR, cam->dma_handles[0]);
	mcam_reg_write(cam, REG_Y1BAR, cam->dma_handles[1]);
	if (cam->nbufs > 2) {
		mcam_reg_write(cam, REG_Y2BAR, cam->dma_handles[2]);
		mcam_reg_clear_bit(cam, REG_CTRL1, C1_TWOBUFS);
	} else
		mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
	if (cam->chip_id == V4L2_IDENT_CAFE)
		mcam_reg_write(cam, REG_UBAR, 0); /* 32 bits only */
}

/*
 * Copy data out to user space in the vmalloc case
 */
static void mcam_frame_tasklet(unsigned long data)
{
	struct mcam_camera *cam = (struct mcam_camera *) data;
	int i;
	unsigned long flags;
	struct mcam_vb_buffer *buf;

	spin_lock_irqsave(&cam->dev_lock, flags);
	for (i = 0; i < cam->nbufs; i++) {
		int bufno = cam->next_buf;

		if (cam->state != S_STREAMING || bufno < 0)
			break;	/* I/O got stopped */
		if (++(cam->next_buf) >= cam->nbufs)
			cam->next_buf = 0;
		if (!test_bit(bufno, &cam->flags))
			continue;
		if (list_empty(&cam->buffers)) {
			cam->frame_state.singles++;
			break;	/* Leave it valid, hope for better later */
		}
		cam->frame_state.delivered++;
		clear_bit(bufno, &cam->flags);
		buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
				queue);
		list_del_init(&buf->queue);
		/*
		 * Drop the lock during the big copy. This *should* be safe...
		 */
		spin_unlock_irqrestore(&cam->dev_lock, flags);
		memcpy(vb2_plane_vaddr(&buf->vb_buf, 0), cam->dma_bufs[bufno],
				cam->pix_format.sizeimage);
		mcam_buffer_done(cam, bufno, &buf->vb_buf);
		spin_lock_irqsave(&cam->dev_lock, flags);
	}
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}

/*
 * Make sure our allocated buffers are up to the task.
 */
static int mcam_check_dma_buffers(struct mcam_camera *cam)
{
	if (cam->nbufs > 0 && cam->dma_buf_size < cam->pix_format.sizeimage)
		mcam_free_dma_bufs(cam);
	if (cam->nbufs == 0)
		return mcam_alloc_dma_bufs(cam, 0);
	return 0;
}

static void mcam_vmalloc_done(struct mcam_camera *cam, int frame)
{
	tasklet_schedule(&cam->s_tasklet);
}

#else /* MCAM_MODE_VMALLOC */

static inline int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
{
	return 0;
}

static inline void mcam_free_dma_bufs(struct mcam_camera *cam)
{
	return;
}

static inline int mcam_check_dma_buffers(struct mcam_camera *cam)
{
	return 0;
}

#endif /* MCAM_MODE_VMALLOC */

#ifdef MCAM_MODE_DMA_CONTIG
/* ---------------------------------------------------------------------- */
/*
 * DMA-contiguous code.
 */
/*
 * Set up a contiguous buffer for the given frame. Here also is where
 * the underrun strategy is set: if there is no buffer available, reuse
 * the buffer from the other BAR and set the CF_SINGLE_BUFFER flag to
 * keep the interrupt handler from giving that buffer back to user
 * space. In this way, we always have a buffer to DMA to and don't
 * have to try to play games stopping and restarting the controller.
 */
static void mcam_set_contig_buffer(struct mcam_camera *cam, int frame)
{
	struct mcam_vb_buffer *buf;

	/*
	 * If there are no available buffers, go into single mode
	 */
	if (list_empty(&cam->buffers)) {
		buf = cam->vb_bufs[frame ^ 0x1];
		cam->vb_bufs[frame] = buf;
		mcam_reg_write(cam, frame == 0 ? REG_Y0BAR : REG_Y1BAR,
				vb2_dma_contig_plane_dma_addr(&buf->vb_buf, 0));
		set_bit(CF_SINGLE_BUFFER, &cam->flags);
		cam->frame_state.singles++;
		return;
	}
	/*
	 * OK, we have a buffer we can use.
	 */
	buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);
	list_del_init(&buf->queue);
	mcam_reg_write(cam, frame == 0 ? REG_Y0BAR : REG_Y1BAR,
			vb2_dma_contig_plane_dma_addr(&buf->vb_buf, 0));
	cam->vb_bufs[frame] = buf;
	clear_bit(CF_SINGLE_BUFFER, &cam->flags);
}

/*
 * Initial B_DMA_contig setup.
 */
static void mcam_ctlr_dma_contig(struct mcam_camera *cam)
{
	mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
	cam->nbufs = 2;
	mcam_set_contig_buffer(cam, 0);
	mcam_set_contig_buffer(cam, 1);
}

/*
 * Frame completion handling.
 */
static void mcam_dma_contig_done(struct mcam_camera *cam, int frame)
{
	struct mcam_vb_buffer *buf = cam->vb_bufs[frame];

	if (!test_bit(CF_SINGLE_BUFFER, &cam->flags)) {
		cam->frame_state.delivered++;
		mcam_buffer_done(cam, frame, &buf->vb_buf);
	}
	mcam_set_contig_buffer(cam, frame);
}

#endif /* MCAM_MODE_DMA_CONTIG */

#ifdef MCAM_MODE_DMA_SG
/* ---------------------------------------------------------------------- */
/*
 * Scatter/gather-specific code.
 */

/*
 * Set up the next buffer for S/G I/O; caller should be sure that
 * the controller is stopped and a buffer is available.
 */
static void mcam_sg_next_buffer(struct mcam_camera *cam)
{
	struct mcam_vb_buffer *buf;

	buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);
	list_del_init(&buf->queue);
	/*
	 * Very Bad Not Good Things happen if you don't clear
	 * C1_DESC_ENA before making any descriptor changes.
	 */
	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_ENA);
	mcam_reg_write(cam, REG_DMA_DESC_Y, buf->dma_desc_pa);
	mcam_reg_write(cam, REG_DESC_LEN_Y,
			buf->dma_desc_nent*sizeof(struct mcam_dma_desc));
	mcam_reg_write(cam, REG_DESC_LEN_U, 0);
	mcam_reg_write(cam, REG_DESC_LEN_V, 0);
	mcam_reg_set_bit(cam, REG_CTRL1, C1_DESC_ENA);
	cam->vb_bufs[0] = buf;
}

/*
 * Initial B_DMA_sg setup
 */
static void mcam_ctlr_dma_sg(struct mcam_camera *cam)
{
	/*
	 * The list-empty condition can hit us at resume time
	 * if the buffer list was empty when the system was suspended.
	 */
	if (list_empty(&cam->buffers)) {
		set_bit(CF_SG_RESTART, &cam->flags);
		return;
	}

	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_3WORD);
	mcam_sg_next_buffer(cam);
	cam->nbufs = 3;
}

/*
 * Frame completion with S/G is trickier. We can't muck with
 * a descriptor chain on the fly, since the controller buffers it
 * internally. So we have to actually stop and restart; Marvell
 * says this is the way to do it.
 *
 * Of course, stopping is easier said than done; experience shows
 * that the controller can start a frame *after* C0_ENABLE has been
 * cleared. So when running in S/G mode, the controller is "stopped"
 * on receipt of the start-of-frame interrupt. That means we can
 * safely change the DMA descriptor array here and restart things
 * (assuming there's another buffer waiting to go).
 */
static void mcam_dma_sg_done(struct mcam_camera *cam, int frame)
{
	struct mcam_vb_buffer *buf = cam->vb_bufs[0];

	/*
	 * If we're no longer supposed to be streaming, don't do anything.
	 */
	if (cam->state != S_STREAMING)
		return;
	/*
	 * If we have another buffer available, put it in and
	 * restart the engine.
	 */
	if (!list_empty(&cam->buffers)) {
		mcam_sg_next_buffer(cam);
		mcam_ctlr_start(cam);
	/*
	 * Otherwise set CF_SG_RESTART and the controller will
	 * be restarted once another buffer shows up.
	 */
	} else {
		set_bit(CF_SG_RESTART, &cam->flags);
		cam->frame_state.singles++;
		cam->vb_bufs[0] = NULL;
	}
	/*
	 * Now we can give the completed frame back to user space.
	 */
	cam->frame_state.delivered++;
	mcam_buffer_done(cam, frame, &buf->vb_buf);
}

/*
 * Scatter/gather mode requires stopping the controller between
 * frames so we can put in a new DMA descriptor array. If no new
 * buffer exists at frame completion, the controller is left stopped;
 * this function is charged with getting things going again.
 */
static void mcam_sg_restart(struct mcam_camera *cam)
{
	mcam_ctlr_dma_sg(cam);
	mcam_ctlr_start(cam);
	clear_bit(CF_SG_RESTART, &cam->flags);
}

#else /* MCAM_MODE_DMA_SG */

static inline void mcam_sg_restart(struct mcam_camera *cam)
{
	return;
}

#endif /* MCAM_MODE_DMA_SG */

/* ---------------------------------------------------------------------- */
/*
 * Buffer-mode-independent controller code.
 */

/*
 * Image format setup
 */
static void mcam_ctlr_image(struct mcam_camera *cam)
{
	int imgsz;
	struct v4l2_pix_format *fmt = &cam->pix_format;

	imgsz = ((fmt->height << IMGSZ_V_SHIFT) & IMGSZ_V_MASK) |
		(fmt->bytesperline & IMGSZ_H_MASK);
	mcam_reg_write(cam, REG_IMGSIZE, imgsz);
	mcam_reg_write(cam, REG_IMGOFFSET, 0);
	/* YPITCH just drops the last two bits */
	mcam_reg_write_mask(cam, REG_IMGPITCH, fmt->bytesperline,
			IMGP_YP_MASK);
	/*
	 * Tell the controller about the image format we are using.
	 */
	switch (cam->pix_format.pixelformat) {
	case V4L2_PIX_FMT_YUYV:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_YUV|C0_YUV_PACKED|C0_YUVE_YUYV,
			C0_DF_MASK);
		break;

	case V4L2_PIX_FMT_RGB444:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_RGB|C0_RGBF_444|C0_RGB4_XRGB,
			C0_DF_MASK);
		/* Alpha value? */
		break;

	case V4L2_PIX_FMT_RGB565:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_RGB|C0_RGBF_565|C0_RGB5_BGGR,
			C0_DF_MASK);
		break;

	default:
		cam_err(cam, "Unknown format %x\n", cam->pix_format.pixelformat);
		break;
	}
	/*
	 * Make sure it knows we want to use hsync/vsync.
	 */
	mcam_reg_write_mask(cam, REG_CTRL0, C0_SIF_HVSYNC,
			C0_SIFM_MASK);
}

/*
 * Configure the controller for operation; caller holds the
 * device mutex.
 */
static int mcam_ctlr_configure(struct mcam_camera *cam)
{
	unsigned long flags;

	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_SG_RESTART, &cam->flags);
	cam->dma_setup(cam);
	mcam_ctlr_image(cam);
	mcam_set_config_needed(cam, 0);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}

static void mcam_ctlr_irq_enable(struct mcam_camera *cam)
{
	/*
	 * Clear any pending interrupts, since we do not
	 * expect to have I/O active prior to enabling.
	 */
	mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS);
	mcam_reg_set_bit(cam, REG_IRQMASK, FRAMEIRQS);
}

static void mcam_ctlr_irq_disable(struct mcam_camera *cam)
{
	mcam_reg_clear_bit(cam, REG_IRQMASK, FRAMEIRQS);
}

static void mcam_ctlr_init(struct mcam_camera *cam)
{
	unsigned long flags;

	spin_lock_irqsave(&cam->dev_lock, flags);
	/*
	 * Make sure it's not powered down.
	 */
	mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
	/*
	 * Turn off the enable bit. It sure should be off anyway,
	 * but it's good to be sure.
	 */
	mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
	/*
	 * Clock the sensor appropriately. Controller clock should
	 * be 48MHz, sensor "typical" value is half that.
	 */
	mcam_reg_write_mask(cam, REG_CLKCTRL, 2, CLK_DIV_MASK);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}

/*
 * Stop the controller, and don't return until we're really sure that no
 * further DMA is going on.
 */
static void mcam_ctlr_stop_dma(struct mcam_camera *cam)
{
	unsigned long flags;

	/*
	 * Theory: stop the camera controller (whether it is operating
	 * or not). Delay briefly just in case we race with the SOF
	 * interrupt, then wait until no DMA is active.
	 */
	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_SG_RESTART, &cam->flags);
	mcam_ctlr_stop(cam);
	cam->state = S_IDLE;
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	/*
	 * This is a brutally long sleep, but experience shows that
	 * it can take the controller a while to get the message that
	 * it needs to stop grabbing frames. In particular, we can
	 * sometimes (on mmp) get a frame at the end WITHOUT the
	 * start-of-frame indication.
	 */
	msleep(150);
	if (test_bit(CF_DMA_ACTIVE, &cam->flags))
		cam_err(cam, "Timeout waiting for DMA to end\n");
		/* This would be bad news - what now? */
	spin_lock_irqsave(&cam->dev_lock, flags);
	mcam_ctlr_irq_disable(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}

/*
 * Power up and down.
 */
static void mcam_ctlr_power_up(struct mcam_camera *cam)
{
	unsigned long flags;

	spin_lock_irqsave(&cam->dev_lock, flags);
	cam->plat_power_up(cam);
	mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	msleep(5); /* Just to be sure */
}

static void mcam_ctlr_power_down(struct mcam_camera *cam)
{
	unsigned long flags;

	spin_lock_irqsave(&cam->dev_lock, flags);
	/*
	 * School of hard knocks department: be sure we do any register
	 * twiddling on the controller *before* calling the platform
	 * power down routine.
	 */
	mcam_reg_set_bit(cam, REG_CTRL1, C1_PWRDWN);
	cam->plat_power_down(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}

/* -------------------------------------------------------------------- */
/*
 * Communications with the sensor.
 */

static int __mcam_cam_reset(struct mcam_camera *cam)
{
	return sensor_call(cam, core, reset, 0);
}

/*
 * We have found the sensor on the i2c. Let's try to have a
 * conversation.
 */
static int mcam_cam_init(struct mcam_camera *cam)
{
	struct v4l2_dbg_chip_ident chip;
	int ret;

	mutex_lock(&cam->s_mutex);
	if (cam->state != S_NOTREADY)
		cam_warn(cam, "Cam init with device in funky state %d",
				cam->state);
	ret = __mcam_cam_reset(cam);
	if (ret)
		goto out;
	chip.ident = V4L2_IDENT_NONE;
	chip.match.type = V4L2_CHIP_MATCH_I2C_ADDR;
	chip.match.addr = cam->sensor_addr;
	ret = sensor_call(cam, core, g_chip_ident, &chip);
	if (ret)
		goto out;
	cam->sensor_type = chip.ident;
	if (cam->sensor_type != V4L2_IDENT_OV7670) {
		cam_err(cam, "Unsupported sensor type 0x%x", cam->sensor_type);
		ret = -EINVAL;
		goto out;
	}
	/* Get/set parameters? */
	ret = 0;
	cam->state = S_IDLE;
out:
	mcam_ctlr_power_down(cam);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

/*
 * Configure the sensor to match the parameters we have. Caller should
 * hold s_mutex.
 */
static int mcam_cam_set_flip(struct mcam_camera *cam)
{
	struct v4l2_control ctrl;

	memset(&ctrl, 0, sizeof(ctrl));
	ctrl.id = V4L2_CID_VFLIP;
	ctrl.value = flip;
	return sensor_call(cam, core, s_ctrl, &ctrl);
}

static int mcam_cam_configure(struct mcam_camera *cam)
{
	struct v4l2_mbus_framefmt mbus_fmt;
	int ret;

	v4l2_fill_mbus_format(&mbus_fmt, &cam->pix_format, cam->mbus_code);
	ret = sensor_call(cam, core, init, 0);
	if (ret == 0)
		ret = sensor_call(cam, video, s_mbus_fmt, &mbus_fmt);
	/*
	 * OV7670 does weird things if flip is set *before* format...
	 */
	ret += mcam_cam_set_flip(cam);
	return ret;
}

/*
 * Get everything ready, and start grabbing frames.
 */
static int mcam_read_setup(struct mcam_camera *cam)
{
	int ret;
	unsigned long flags;

	/*
	 * Configuration. If we still don't have DMA buffers,
	 * make one last, desperate attempt.
	 */
	if (cam->buffer_mode == B_vmalloc && cam->nbufs == 0 &&
			mcam_alloc_dma_bufs(cam, 0))
		return -ENOMEM;

	if (mcam_needs_config(cam)) {
		mcam_cam_configure(cam);
		ret = mcam_ctlr_configure(cam);
		if (ret)
			return ret;
	}

	/*
	 * Turn it loose.
	 */
	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_DMA_ACTIVE, &cam->flags);
	mcam_reset_buffers(cam);
	mcam_ctlr_irq_enable(cam);
	cam->state = S_STREAMING;
	if (!test_bit(CF_SG_RESTART, &cam->flags))
		mcam_ctlr_start(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}

/* ----------------------------------------------------------------------- */
/*
 * Videobuf2 interface code.
 */
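
/*
 * Tell videobuf2 what we need: at least two buffers (three in the
 * DMA-contiguous case), a single plane, sized for the current format.
 */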
static int mcam_vb_queue_setup(struct vb2_queue *vq,
		const struct v4l2_format *fmt, unsigned int *nbufs,
		unsigned int *num_planes, unsigned int sizes[],
		void *alloc_ctxs[])
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);
	int minbufs = (cam->buffer_mode == B_DMA_contig) ? 3 : 2;

	sizes[0] = cam->pix_format.sizeimage;
	*num_planes = 1; /* Someday we have to support planar formats... */
	if (*nbufs < minbufs)
		*nbufs = minbufs;
	if (cam->buffer_mode == B_DMA_contig)
		alloc_ctxs[0] = cam->vb_alloc_ctx;
	return 0;
}
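
/*
 * A buffer has been queued by user space: add it to our list, restart
 * S/G streaming if it was waiting for a descriptor array, and start
 * things up if we were stalled in S_BUFWAIT.
 */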
static void mcam_vb_buf_queue(struct vb2_buffer *vb)
{
	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	unsigned long flags;
	int start;

	spin_lock_irqsave(&cam->dev_lock, flags);
	start = (cam->state == S_BUFWAIT) && !list_empty(&cam->buffers);
	list_add(&mvb->queue, &cam->buffers);
	if (cam->state == S_STREAMING && test_bit(CF_SG_RESTART, &cam->flags))
		mcam_sg_restart(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	if (start)
		mcam_read_setup(cam);
}

/*
 * vb2 uses these to release the mutex when waiting in dqbuf. I'm
 * not actually sure we need to do this (I'm not sure that vb2_dqbuf() needs
 * to be called with the mutex held), but better safe than sorry.
 */
static void mcam_vb_wait_prepare(struct vb2_queue *vq)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);

	mutex_unlock(&cam->s_mutex);
}

static void mcam_vb_wait_finish(struct vb2_queue *vq)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);

	mutex_lock(&cam->s_mutex);
}

/*
 * These need to be called with the mutex held from vb2
 */
static int mcam_vb_start_streaming(struct vb2_queue *vq, unsigned int count)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);

	if (cam->state != S_IDLE) {
		INIT_LIST_HEAD(&cam->buffers);
		return -EINVAL;
	}
	cam->sequence = 0;
	/*
	 * Videobuf2 sneakily hoards all the buffers and won't
	 * give them to us until *after* streaming starts. But
	 * we can't actually start streaming until we have a
	 * destination. So go into a wait state and hope they
	 * give us buffers soon.
	 */
	if (cam->buffer_mode != B_vmalloc && list_empty(&cam->buffers)) {
		cam->state = S_BUFWAIT;
		return 0;
	}
	return mcam_read_setup(cam);
}

static int mcam_vb_stop_streaming(struct vb2_queue *vq)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);
	unsigned long flags;

	if (cam->state == S_BUFWAIT) {
		/* They never gave us buffers */
		cam->state = S_IDLE;
		return 0;
	}
	if (cam->state != S_STREAMING)
		return -EINVAL;
	mcam_ctlr_stop_dma(cam);
	/*
	 * VB2 reclaims the buffers, so we need to forget
	 * about them.
	 */
	spin_lock_irqsave(&cam->dev_lock, flags);
	INIT_LIST_HEAD(&cam->buffers);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}

static const struct vb2_ops mcam_vb2_ops = {
	.queue_setup		= mcam_vb_queue_setup,
	.buf_queue		= mcam_vb_buf_queue,
	.start_streaming	= mcam_vb_start_streaming,
	.stop_streaming		= mcam_vb_stop_streaming,
	.wait_prepare		= mcam_vb_wait_prepare,
	.wait_finish		= mcam_vb_wait_finish,
};

#ifdef MCAM_MODE_DMA_SG
/*
 * Scatter/gather mode uses all of the above functions plus a
 * few extras to deal with DMA mapping.
 */
static int mcam_vb_sg_buf_init(struct vb2_buffer *vb)
{
	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;

	mvb->dma_desc = dma_alloc_coherent(cam->dev,
			ndesc * sizeof(struct mcam_dma_desc),
			&mvb->dma_desc_pa, GFP_KERNEL);
	if (mvb->dma_desc == NULL) {
		cam_err(cam, "Unable to get DMA descriptor array\n");
		return -ENOMEM;
	}
	return 0;
}
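
/*
 * Map the buffer's scatterlist for DMA and translate each mapped
 * segment into one of the controller's two-word DMA descriptors.
 */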
static int mcam_vb_sg_buf_prepare(struct vb2_buffer *vb)
{
	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	struct vb2_dma_sg_desc *sgd = vb2_dma_sg_plane_desc(vb, 0);
	struct mcam_dma_desc *desc = mvb->dma_desc;
	struct scatterlist *sg;
	int i;

	mvb->dma_desc_nent = dma_map_sg(cam->dev, sgd->sglist, sgd->num_pages,
			DMA_FROM_DEVICE);
	if (mvb->dma_desc_nent <= 0)
		return -EIO;  /* Not sure what's right here */
	for_each_sg(sgd->sglist, sg, mvb->dma_desc_nent, i) {
		desc->dma_addr = sg_dma_address(sg);
		desc->segment_len = sg_dma_len(sg);
		desc++;
	}
	return 0;
}

static int mcam_vb_sg_buf_finish(struct vb2_buffer *vb)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	struct vb2_dma_sg_desc *sgd = vb2_dma_sg_plane_desc(vb, 0);

	dma_unmap_sg(cam->dev, sgd->sglist, sgd->num_pages, DMA_FROM_DEVICE);
	return 0;
}

static void mcam_vb_sg_buf_cleanup(struct vb2_buffer *vb)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
	int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;

	dma_free_coherent(cam->dev, ndesc * sizeof(struct mcam_dma_desc),
			mvb->dma_desc, mvb->dma_desc_pa);
}

static const struct vb2_ops mcam_vb2_sg_ops = {
	.queue_setup		= mcam_vb_queue_setup,
	.buf_init		= mcam_vb_sg_buf_init,
	.buf_prepare		= mcam_vb_sg_buf_prepare,
	.buf_queue		= mcam_vb_buf_queue,
	.buf_finish		= mcam_vb_sg_buf_finish,
	.buf_cleanup		= mcam_vb_sg_buf_cleanup,
	.start_streaming	= mcam_vb_start_streaming,
	.stop_streaming		= mcam_vb_stop_streaming,
	.wait_prepare		= mcam_vb_wait_prepare,
	.wait_finish		= mcam_vb_wait_finish,
};

#endif /* MCAM_MODE_DMA_SG */
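
/*
 * Initialize the vb2 queue for this device, selecting the queue and
 * memory operations plus our DMA setup/completion callbacks to match
 * the configured buffer mode.
 */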
static int mcam_setup_vb2(struct mcam_camera *cam)
{
	struct vb2_queue *vq = &cam->vb_queue;

	memset(vq, 0, sizeof(*vq));
	vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	vq->drv_priv = cam;
	INIT_LIST_HEAD(&cam->buffers);
	switch (cam->buffer_mode) {
	case B_DMA_contig:
#ifdef MCAM_MODE_DMA_CONTIG
		vq->ops = &mcam_vb2_ops;
		vq->mem_ops = &vb2_dma_contig_memops;
		cam->vb_alloc_ctx = vb2_dma_contig_init_ctx(cam->dev);
		vq->io_modes = VB2_MMAP | VB2_USERPTR;
		cam->dma_setup = mcam_ctlr_dma_contig;
		cam->frame_complete = mcam_dma_contig_done;
#endif
		break;
	case B_DMA_sg:
#ifdef MCAM_MODE_DMA_SG
		vq->ops = &mcam_vb2_sg_ops;
		vq->mem_ops = &vb2_dma_sg_memops;
		vq->io_modes = VB2_MMAP | VB2_USERPTR;
		cam->dma_setup = mcam_ctlr_dma_sg;
		cam->frame_complete = mcam_dma_sg_done;
#endif
		break;
	case B_vmalloc:
#ifdef MCAM_MODE_VMALLOC
		tasklet_init(&cam->s_tasklet, mcam_frame_tasklet,
				(unsigned long) cam);
		vq->ops = &mcam_vb2_ops;
		vq->mem_ops = &vb2_vmalloc_memops;
		vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
		vq->io_modes = VB2_MMAP;
		cam->dma_setup = mcam_ctlr_dma_vmalloc;
		cam->frame_complete = mcam_vmalloc_done;
#endif
		break;
	}
	return vb2_queue_init(vq);
}
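
/*
 * Undo mcam_setup_vb2(): release the queue and, in DMA-contig mode,
 * the allocation context as well.
 */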
static void mcam_cleanup_vb2(struct mcam_camera *cam)
{
	vb2_queue_release(&cam->vb_queue);
#ifdef MCAM_MODE_DMA_CONTIG
	if (cam->buffer_mode == B_DMA_contig)
		vb2_dma_contig_cleanup_ctx(cam->vb_alloc_ctx);
#endif
}

/* ---------------------------------------------------------------------- */
/*
 * The long list of V4L2 ioctl() operations.
 */
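
/*
 * Most of them simply take s_mutex and pass the call through to
 * videobuf2 or to the sensor subdevice.
 */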
static int mcam_vidioc_streamon(struct file *filp, void *priv,
		enum v4l2_buf_type type)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_streamon(&cam->vb_queue, type);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_streamoff(struct file *filp, void *priv,
		enum v4l2_buf_type type)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_streamoff(&cam->vb_queue, type);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_reqbufs(struct file *filp, void *priv,
		struct v4l2_requestbuffers *req)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_reqbufs(&cam->vb_queue, req);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_querybuf(struct file *filp, void *priv,
		struct v4l2_buffer *buf)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_querybuf(&cam->vb_queue, buf);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_qbuf(struct file *filp, void *priv,
		struct v4l2_buffer *buf)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_qbuf(&cam->vb_queue, buf);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_dqbuf(struct file *filp, void *priv,
		struct v4l2_buffer *buf)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_dqbuf(&cam->vb_queue, buf, filp->f_flags & O_NONBLOCK);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_queryctrl(struct file *filp, void *priv,
		struct v4l2_queryctrl *qc)
{
	struct mcam_camera *cam = priv;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = sensor_call(cam, core, queryctrl, qc);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_g_ctrl(struct file *filp, void *priv,
		struct v4l2_control *ctrl)
{
	struct mcam_camera *cam = priv;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = sensor_call(cam, core, g_ctrl, ctrl);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_s_ctrl(struct file *filp, void *priv,
		struct v4l2_control *ctrl)
{
	struct mcam_camera *cam = priv;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = sensor_call(cam, core, s_ctrl, ctrl);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_querycap(struct file *file, void *priv,
		struct v4l2_capability *cap)
{
	strcpy(cap->driver, "marvell_ccic");
	strcpy(cap->card, "marvell_ccic");
	cap->version = 1;
	cap->capabilities = V4L2_CAP_VIDEO_CAPTURE |
		V4L2_CAP_READWRITE | V4L2_CAP_STREAMING;
	return 0;
}

static int mcam_vidioc_enum_fmt_vid_cap(struct file *filp,
		void *priv, struct v4l2_fmtdesc *fmt)
{
	if (fmt->index >= N_MCAM_FMTS)
		return -EINVAL;
	strlcpy(fmt->description, mcam_formats[fmt->index].desc,
			sizeof(fmt->description));
	fmt->pixelformat = mcam_formats[fmt->index].pixelformat;
	return 0;
}

static int mcam_vidioc_try_fmt_vid_cap(struct file *filp, void *priv,
		struct v4l2_format *fmt)
{
	struct mcam_camera *cam = priv;
	struct mcam_format_struct *f;
	struct v4l2_pix_format *pix = &fmt->fmt.pix;
	struct v4l2_mbus_framefmt mbus_fmt;
	int ret;

	f = mcam_find_format(pix->pixelformat);
	pix->pixelformat = f->pixelformat;
	v4l2_fill_mbus_format(&mbus_fmt, pix, f->mbus_code);
	mutex_lock(&cam->s_mutex);
	ret = sensor_call(cam, video, try_mbus_fmt, &mbus_fmt);
	mutex_unlock(&cam->s_mutex);
	v4l2_fill_pix_format(pix, &mbus_fmt);
	pix->bytesperline = pix->width * f->bpp;
	pix->sizeimage = pix->height * pix->bytesperline;
	return ret;
}

static int mcam_vidioc_s_fmt_vid_cap(struct file *filp, void *priv,
		struct v4l2_format *fmt)
{
	struct mcam_camera *cam = priv;
	struct mcam_format_struct *f;
	int ret;

	/*
	 * Can't do anything if the device is not idle.
	 * Also can't if there are streaming buffers in place.
	 */
	if (cam->state != S_IDLE || cam->vb_queue.num_buffers > 0)
		return -EBUSY;

	f = mcam_find_format(fmt->fmt.pix.pixelformat);

	/*
	 * See if the formatting works in principle.
	 */
	ret = mcam_vidioc_try_fmt_vid_cap(filp, priv, fmt);
	if (ret)
		return ret;
	/*
	 * Now we start to change things for real, so let's do it
	 * under lock.
	 */
	mutex_lock(&cam->s_mutex);
	cam->pix_format = fmt->fmt.pix;
	cam->mbus_code = f->mbus_code;

	/*
	 * Make sure we have appropriate DMA buffers.
	 */
	if (cam->buffer_mode == B_vmalloc) {
		ret = mcam_check_dma_buffers(cam);
		if (ret)
			goto out;
	}
	mcam_set_config_needed(cam, 1);
out:
	mutex_unlock(&cam->s_mutex);
	return ret;
}

/*
 * Return our stored notion of how the camera is/should be configured.
 * The V4L2 spec wants us to be smarter, and actually get this from
 * the camera (and not mess with it at open time). Someday.
 */
static int mcam_vidioc_g_fmt_vid_cap(struct file *filp, void *priv,
		struct v4l2_format *f)
{
	struct mcam_camera *cam = priv;

	f->fmt.pix = cam->pix_format;
	return 0;
}

/*
 * We only have one input - the sensor - so minimize the nonsense here.
 */
static int mcam_vidioc_enum_input(struct file *filp, void *priv,
		struct v4l2_input *input)
{
	if (input->index != 0)
		return -EINVAL;

	input->type = V4L2_INPUT_TYPE_CAMERA;
	input->std = V4L2_STD_ALL; /* Not sure what should go here */
	strcpy(input->name, "Camera");
	return 0;
}

static int mcam_vidioc_g_input(struct file *filp, void *priv, unsigned int *i)
{
	*i = 0;
	return 0;
}

static int mcam_vidioc_s_input(struct file *filp, void *priv, unsigned int i)
{
	if (i != 0)
		return -EINVAL;
	return 0;
}

/* from vivi.c */
static int mcam_vidioc_s_std(struct file *filp, void *priv, v4l2_std_id *a)
{
	return 0;
}

/*
 * G/S_PARM. Most of this is done by the sensor, but we are
 * the level which controls the number of read buffers.
 */
static int mcam_vidioc_g_parm(struct file *filp, void *priv,
		struct v4l2_streamparm *parms)
{
	struct mcam_camera *cam = priv;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = sensor_call(cam, video, g_parm, parms);
	mutex_unlock(&cam->s_mutex);
	parms->parm.capture.readbuffers = n_dma_bufs;
	return ret;
}

static int mcam_vidioc_s_parm(struct file *filp, void *priv,
		struct v4l2_streamparm *parms)
{
	struct mcam_camera *cam = priv;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = sensor_call(cam, video, s_parm, parms);
	mutex_unlock(&cam->s_mutex);
	parms->parm.capture.readbuffers = n_dma_bufs;
	return ret;
}

static int mcam_vidioc_g_chip_ident(struct file *file, void *priv,
		struct v4l2_dbg_chip_ident *chip)
{
	struct mcam_camera *cam = priv;

	chip->ident = V4L2_IDENT_NONE;
	chip->revision = 0;
	if (v4l2_chip_match_host(&chip->match)) {
		chip->ident = cam->chip_id;
		return 0;
	}
	return sensor_call(cam, core, g_chip_ident, chip);
}

static int mcam_vidioc_enum_framesizes(struct file *filp, void *priv,
		struct v4l2_frmsizeenum *sizes)
{
	struct mcam_camera *cam = priv;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = sensor_call(cam, video, enum_framesizes, sizes);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_enum_frameintervals(struct file *filp, void *priv,
		struct v4l2_frmivalenum *interval)
{
	struct mcam_camera *cam = priv;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = sensor_call(cam, video, enum_frameintervals, interval);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

#ifdef CONFIG_VIDEO_ADV_DEBUG
static int mcam_vidioc_g_register(struct file *file, void *priv,
		struct v4l2_dbg_register *reg)
{
	struct mcam_camera *cam = priv;

	if (v4l2_chip_match_host(&reg->match)) {
		reg->val = mcam_reg_read(cam, reg->reg);
		reg->size = 4;
		return 0;
	}
	return sensor_call(cam, core, g_register, reg);
}

static int mcam_vidioc_s_register(struct file *file, void *priv,
		struct v4l2_dbg_register *reg)
{
	struct mcam_camera *cam = priv;

	if (v4l2_chip_match_host(&reg->match)) {
		mcam_reg_write(cam, reg->reg, reg->val);
		return 0;
	}
	return sensor_call(cam, core, s_register, reg);
}
#endif

static const struct v4l2_ioctl_ops mcam_v4l_ioctl_ops = {
	.vidioc_querycap	= mcam_vidioc_querycap,
	.vidioc_enum_fmt_vid_cap = mcam_vidioc_enum_fmt_vid_cap,
	.vidioc_try_fmt_vid_cap	= mcam_vidioc_try_fmt_vid_cap,
	.vidioc_s_fmt_vid_cap	= mcam_vidioc_s_fmt_vid_cap,
	.vidioc_g_fmt_vid_cap	= mcam_vidioc_g_fmt_vid_cap,
	.vidioc_enum_input	= mcam_vidioc_enum_input,
	.vidioc_g_input		= mcam_vidioc_g_input,
	.vidioc_s_input		= mcam_vidioc_s_input,
	.vidioc_s_std		= mcam_vidioc_s_std,
	.vidioc_reqbufs		= mcam_vidioc_reqbufs,
	.vidioc_querybuf	= mcam_vidioc_querybuf,
	.vidioc_qbuf		= mcam_vidioc_qbuf,
	.vidioc_dqbuf		= mcam_vidioc_dqbuf,
	.vidioc_streamon	= mcam_vidioc_streamon,
	.vidioc_streamoff	= mcam_vidioc_streamoff,
	.vidioc_queryctrl	= mcam_vidioc_queryctrl,
	.vidioc_g_ctrl		= mcam_vidioc_g_ctrl,
	.vidioc_s_ctrl		= mcam_vidioc_s_ctrl,
	.vidioc_g_parm		= mcam_vidioc_g_parm,
	.vidioc_s_parm		= mcam_vidioc_s_parm,
	.vidioc_enum_framesizes = mcam_vidioc_enum_framesizes,
	.vidioc_enum_frameintervals = mcam_vidioc_enum_frameintervals,
	.vidioc_g_chip_ident	= mcam_vidioc_g_chip_ident,
#ifdef CONFIG_VIDEO_ADV_DEBUG
	.vidioc_g_register	= mcam_vidioc_g_register,
	.vidioc_s_register	= mcam_vidioc_s_register,
#endif
};

/* ---------------------------------------------------------------------- */
/*
 * Our various file operations.
 */
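
/*
 * On first open, set up the vb2 queue, power up the controller and
 * reset the sensor; subsequent opens just bump the user count.
 */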
static int mcam_v4l_open(struct file *filp)
{
	struct mcam_camera *cam = video_drvdata(filp);
	int ret = 0;

	filp->private_data = cam;

	cam->frame_state.frames = 0;
	cam->frame_state.singles = 0;
	cam->frame_state.delivered = 0;
	mutex_lock(&cam->s_mutex);
	if (cam->users == 0) {
		ret = mcam_setup_vb2(cam);
		if (ret)
			goto out;
		mcam_ctlr_power_up(cam);
		__mcam_cam_reset(cam);
		mcam_set_config_needed(cam, 1);
	}
	(cam->users)++;
out:
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_v4l_release(struct file *filp)
{
	struct mcam_camera *cam = filp->private_data;

	cam_dbg(cam, "Release, %d frames, %d singles, %d delivered\n",
			cam->frame_state.frames, cam->frame_state.singles,
			cam->frame_state.delivered);
	mutex_lock(&cam->s_mutex);
	(cam->users)--;
	if (cam->users == 0) {
		mcam_ctlr_stop_dma(cam);
		mcam_cleanup_vb2(cam);
		mcam_ctlr_power_down(cam);
		if (cam->buffer_mode == B_vmalloc && alloc_bufs_at_read)
			mcam_free_dma_bufs(cam);
	}
	mutex_unlock(&cam->s_mutex);
	return 0;
}

static ssize_t mcam_v4l_read(struct file *filp,
		char __user *buffer, size_t len, loff_t *pos)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_read(&cam->vb_queue, buffer, len, pos,
			filp->f_flags & O_NONBLOCK);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static unsigned int mcam_v4l_poll(struct file *filp,
		struct poll_table_struct *pt)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_poll(&cam->vb_queue, filp, pt);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_v4l_mmap(struct file *filp, struct vm_area_struct *vma)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_mmap(&cam->vb_queue, vma);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static const struct v4l2_file_operations mcam_v4l_fops = {
	.owner = THIS_MODULE,
	.open = mcam_v4l_open,
	.release = mcam_v4l_release,
	.read = mcam_v4l_read,
	.poll = mcam_v4l_poll,
	.mmap = mcam_v4l_mmap,
	.unlocked_ioctl = video_ioctl2,
};

/*
 * This template device holds all of those v4l2 methods; we
 * clone it for specific real devices.
 */
static struct video_device mcam_v4l_template = {
	.name = "mcam",
	.tvnorms = V4L2_STD_NTSC_M,
	.current_norm = V4L2_STD_NTSC_M,  /* make mplayer happy */

	.fops = &mcam_v4l_fops,
	.ioctl_ops = &mcam_v4l_ioctl_ops,
	.release = video_device_release_empty,
};

/* ---------------------------------------------------------------------- */
/*
 * Interrupt handler stuff
 */
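/*
 * Note that a complete frame has landed in the given buffer, bump the
 * sequence number and the statistics, and, if we are still streaming,
 * hand the frame to the buffer-mode-specific completion handler.
 */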
static void mcam_frame_complete(struct mcam_camera *cam, int frame)
{
        /*
         * Basic frame housekeeping.
         */
        set_bit(frame, &cam->flags);
        clear_bit(CF_DMA_ACTIVE, &cam->flags);
        cam->next_buf = frame;
        cam->buf_seq[frame] = ++(cam->sequence);
        cam->frame_state.frames++;
        /*
         * "This should never happen"
         */
        if (cam->state != S_STREAMING)
                return;
        /*
         * Process the frame and set up the next one.
         */
        cam->frame_complete(cam, frame);
}

/*
 * The interrupt handler; this needs to be called from the
 * platform irq handler with the lock held.
 */
int mccic_irq(struct mcam_camera *cam, unsigned int irqs)
{
        unsigned int frame, handled = 0;

        mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS); /* Clear'em all */
        /*
         * Handle any frame completions. There really should
         * not be more than one of these, or we have fallen
         * far behind.
         *
         * When running in S/G mode, the frame number lacks any
         * real meaning - there's only one descriptor array - but
         * the controller still picks a different one to signal
         * each time.
         */
        for (frame = 0; frame < cam->nbufs; frame++)
                if (irqs & (IRQ_EOF0 << frame)) {
                        mcam_frame_complete(cam, frame);
                        handled = 1;
                        if (cam->buffer_mode == B_DMA_sg)
                                break;
                }
        /*
         * If a frame starts, note that we have DMA active. This
         * code assumes that we won't get multiple frame interrupts
         * at once; may want to rethink that.
         */
        if (irqs & (IRQ_SOF0 | IRQ_SOF1 | IRQ_SOF2)) {
                set_bit(CF_DMA_ACTIVE, &cam->flags);
                handled = 1;
                if (cam->buffer_mode == B_DMA_sg)
                        mcam_ctlr_stop(cam);
        }
        return handled;
}
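
/*
 * A minimal sketch of how a platform glue driver might invoke this
 * handler.  It assumes the camera spinlock is named dev_lock, that the
 * glue code reads the interrupt status itself, and that a register-read
 * helper mirroring mcam_reg_write() is available; all names below are
 * illustrative only, not part of this file:
 *
 *      static irqreturn_t my_platform_irq(int irq, void *data)
 *      {
 *              struct mcam_camera *cam = data;
 *              unsigned int irqs, handled;
 *
 *              spin_lock(&cam->dev_lock);
 *              irqs = mcam_reg_read(cam, REG_IRQSTAT);
 *              handled = mccic_irq(cam, irqs);
 *              spin_unlock(&cam->dev_lock);
 *              return IRQ_RETVAL(handled);
 *      }
 */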

/* ---------------------------------------------------------------------- */
/*
 * Registration and such.
 */
static struct ov7670_config sensor_cfg = {
        /*
         * Exclude QCIF mode, because it only captures a tiny portion
         * of the sensor FOV
         */
        .min_width = 320,
        .min_height = 240,
};
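
/*
 * Bring up one camera: validate the requested buffer mode, register
 * with the V4L2 core, initialize the controller, attach the ov7670
 * sensor over I2C, register the video device and, if so configured,
 * preallocate the vmalloc-mode DMA buffers.
 */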
int mccic_register(struct mcam_camera *cam)
{
        struct i2c_board_info ov7670_info = {
                .type = "ov7670",
                .addr = 0x42 >> 1,
                .platform_data = &sensor_cfg,
        };
        int ret;

        /*
         * Validate the requested buffer mode.
         */
        if (buffer_mode >= 0)
                cam->buffer_mode = buffer_mode;
        if (cam->buffer_mode == B_DMA_sg &&
                        cam->chip_id == V4L2_IDENT_CAFE) {
                printk(KERN_ERR "marvell-cam: Cafe can't do S/G I/O, "
                        "attempting vmalloc mode instead\n");
                cam->buffer_mode = B_vmalloc;
        }
        if (!mcam_buffer_mode_supported(cam->buffer_mode)) {
                printk(KERN_ERR "marvell-cam: buffer mode %d unsupported\n",
                                cam->buffer_mode);
                return -EINVAL;
        }
        /*
         * Register with V4L
         */
        ret = v4l2_device_register(cam->dev, &cam->v4l2_dev);
        if (ret)
                return ret;

        mutex_init(&cam->s_mutex);
        cam->state = S_NOTREADY;
        mcam_set_config_needed(cam, 1);
        cam->pix_format = mcam_def_pix_format;
        cam->mbus_code = mcam_def_mbus_code;
        INIT_LIST_HEAD(&cam->buffers);
        mcam_ctlr_init(cam);

        /*
         * Try to find the sensor.
         */
        sensor_cfg.clock_speed = cam->clock_speed;
        sensor_cfg.use_smbus = cam->use_smbus;
        cam->sensor_addr = ov7670_info.addr;
        cam->sensor = v4l2_i2c_new_subdev_board(&cam->v4l2_dev,
                        cam->i2c_adapter, &ov7670_info, NULL);
        if (cam->sensor == NULL) {
                ret = -ENODEV;
                goto out_unregister;
        }

        ret = mcam_cam_init(cam);
        if (ret)
                goto out_unregister;
        /*
         * Get the v4l2 setup done.
         */
        mutex_lock(&cam->s_mutex);
        cam->vdev = mcam_v4l_template;
        cam->vdev.debug = 0;
        cam->vdev.v4l2_dev = &cam->v4l2_dev;
        ret = video_register_device(&cam->vdev, VFL_TYPE_GRABBER, -1);
        if (ret)
                goto out;
        video_set_drvdata(&cam->vdev, cam);

        /*
         * If so requested, try to get our DMA buffers now.
         */
        if (cam->buffer_mode == B_vmalloc && !alloc_bufs_at_read) {
                if (mcam_alloc_dma_bufs(cam, 1))
                        cam_warn(cam, "Unable to alloc DMA buffers at load; "
                                        "will try again later.\n");
        }

out:
        mutex_unlock(&cam->s_mutex);
        return ret;
out_unregister:
        v4l2_device_unregister(&cam->v4l2_dev);
        return ret;
}
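
/*
 * Undo mccic_register(): release the vb2 queue, free any vmalloc-mode
 * DMA buffers, and unregister the video and V4L2 devices.
 */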
void mccic_shutdown(struct mcam_camera *cam)
{
        /*
         * If we have no users (and we really, really should have no
         * users) the device will already be powered down.  Trying to
         * take it down again will wedge the machine, which is frowned
         * upon.
         */
        if (cam->users > 0) {
                cam_warn(cam, "Removing a device with users!\n");
                mcam_ctlr_power_down(cam);
        }
        vb2_queue_release(&cam->vb_queue);
        if (cam->buffer_mode == B_vmalloc)
                mcam_free_dma_bufs(cam);
        video_unregister_device(&cam->vdev);
        v4l2_device_unregister(&cam->v4l2_dev);
}

/*
 * Power management
 */
#ifdef CONFIG_PM
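
/*
 * Suspend: if somebody has the device open, stop DMA and power the
 * controller down, but remember the state we were in so that resume
 * can restart streaming.
 */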
void mccic_suspend(struct mcam_camera *cam)
{
        mutex_lock(&cam->s_mutex);
        if (cam->users > 0) {
                enum mcam_state cstate = cam->state;

                mcam_ctlr_stop_dma(cam);
                mcam_ctlr_power_down(cam);
                cam->state = cstate;
        }
        mutex_unlock(&cam->s_mutex);
}
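
/*
 * Resume: power back up if the device is in use (or make sure it stays
 * powered down if not), flag that reconfiguration is needed, and
 * restart frame acquisition if we were streaming at suspend time.
 */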
int mccic_resume(struct mcam_camera *cam)
{
        int ret = 0;

        mutex_lock(&cam->s_mutex);
        if (cam->users > 0) {
                mcam_ctlr_power_up(cam);
                __mcam_cam_reset(cam);
        } else {
                mcam_ctlr_power_down(cam);
        }
        mutex_unlock(&cam->s_mutex);

        set_bit(CF_CONFIG_NEEDED, &cam->flags);
        if (cam->state == S_STREAMING) {
                /*
                 * If there was a buffer in the DMA engine at suspend
                 * time, put it back on the queue or we'll forget about it.
                 */
                if (cam->buffer_mode == B_DMA_sg && cam->vb_bufs[0])
                        list_add(&cam->vb_bufs[0]->queue, &cam->buffers);

                ret = mcam_read_setup(cam);
        }
        return ret;
}

#endif /* CONFIG_PM */