mcam-core.c

/*
 * The Marvell camera core. This device appears in a number of settings,
 * so it needs platform-specific support outside of the core.
 *
 * Copyright 2011 Jonathan Corbet corbet@lwn.net
 */
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/fs.h>
#include <linux/mm.h>
#include <linux/i2c.h>
#include <linux/interrupt.h>
#include <linux/spinlock.h>
#include <linux/slab.h>
#include <linux/device.h>
#include <linux/wait.h>
#include <linux/list.h>
#include <linux/dma-mapping.h>
#include <linux/delay.h>
#include <linux/vmalloc.h>
#include <linux/io.h>
#include <linux/videodev2.h>
#include <media/v4l2-device.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-ctrls.h>
#include <media/v4l2-chip-ident.h>
#include <media/ov7670.h>
#include <media/videobuf2-vmalloc.h>
#include <media/videobuf2-dma-contig.h>
#include <media/videobuf2-dma-sg.h>

#include "mcam-core.h"

#ifdef MCAM_MODE_VMALLOC
/*
 * Internal DMA buffer management. Since the controller cannot do S/G I/O,
 * we must have physically contiguous buffers to bring frames into.
 * These parameters control how many buffers we use, whether we
 * allocate them at load time (better chance of success, but nails down
 * memory) or when somebody tries to use the camera (riskier), and,
 * for load-time allocation, how big they should be.
 *
 * The controller can cycle through three buffers. We could use
 * more by flipping pointers around, but it probably makes little
 * sense.
 */
static bool alloc_bufs_at_read;
module_param(alloc_bufs_at_read, bool, 0444);
MODULE_PARM_DESC(alloc_bufs_at_read,
		"Non-zero value causes DMA buffers to be allocated when the "
		"video capture device is read, rather than at module load "
		"time. This saves memory, but decreases the chances of "
		"successfully getting those buffers. This parameter is "
		"only used in the vmalloc buffer mode");

static int n_dma_bufs = 3;
module_param(n_dma_bufs, uint, 0644);
MODULE_PARM_DESC(n_dma_bufs,
		"The number of DMA buffers to allocate. Can be either two "
		"(saves memory, makes timing tighter) or three.");

static int dma_buf_size = VGA_WIDTH * VGA_HEIGHT * 2;  /* Worst case */
module_param(dma_buf_size, uint, 0444);
MODULE_PARM_DESC(dma_buf_size,
		"The size of the allocated DMA buffers. If actual operating "
		"parameters require larger buffers, an attempt to reallocate "
		"will be made.");
#else /* MCAM_MODE_VMALLOC */
static const bool alloc_bufs_at_read = 0;
static const int n_dma_bufs = 3;  /* Used by S/G_PARM */
#endif /* MCAM_MODE_VMALLOC */

static bool flip;
module_param(flip, bool, 0444);
MODULE_PARM_DESC(flip,
		"If set, the sensor will be instructed to flip the image "
		"vertically.");

static int buffer_mode = -1;
module_param(buffer_mode, int, 0444);
MODULE_PARM_DESC(buffer_mode,
		"Set the buffer mode to be used; default is to go with what "
		"the platform driver asks for. Set to 0 for vmalloc, 1 for "
		"DMA contiguous.");

/*
 * Status flags. Always manipulated with bit operations.
 */
#define CF_BUF0_VALID	 0	/* Buffers valid - first three */
#define CF_BUF1_VALID	 1
#define CF_BUF2_VALID	 2
#define CF_DMA_ACTIVE	 3	/* A frame is incoming */
#define CF_CONFIG_NEEDED 4	/* Must configure hardware */
#define CF_SINGLE_BUFFER 5	/* Running with a single buffer */
#define CF_SG_RESTART	 6	/* SG restart needed */
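
/*
 * The sensor is attached as a v4l2 subdevice; sensor_call() forwards an
 * operation group/function pair (e.g. core/reset, video/s_mbus_fmt) to it
 * through v4l2_subdev_call().
 */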
#define sensor_call(cam, o, f, args...) \
	v4l2_subdev_call(cam->sensor, o, f, ##args)

static struct mcam_format_struct {
	__u8 *desc;
	__u32 pixelformat;
	int bpp;   /* Bytes per pixel */
	enum v4l2_mbus_pixelcode mbus_code;
} mcam_formats[] = {
	{
		.desc = "YUYV 4:2:2",
		.pixelformat = V4L2_PIX_FMT_YUYV,
		.mbus_code = V4L2_MBUS_FMT_YUYV8_2X8,
		.bpp = 2,
	},
	{
		.desc = "RGB 444",
		.pixelformat = V4L2_PIX_FMT_RGB444,
		.mbus_code = V4L2_MBUS_FMT_RGB444_2X8_PADHI_LE,
		.bpp = 2,
	},
	{
		.desc = "RGB 565",
		.pixelformat = V4L2_PIX_FMT_RGB565,
		.mbus_code = V4L2_MBUS_FMT_RGB565_2X8_LE,
		.bpp = 2,
	},
	{
		.desc = "Raw RGB Bayer",
		.pixelformat = V4L2_PIX_FMT_SBGGR8,
		.mbus_code = V4L2_MBUS_FMT_SBGGR8_1X8,
		.bpp = 1
	},
};
#define N_MCAM_FMTS ARRAY_SIZE(mcam_formats)
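
/*
 * Look up a format by its V4L2 pixelformat; an unknown format falls back
 * to the first (YUYV) table entry rather than failing.
 */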
static struct mcam_format_struct *mcam_find_format(u32 pixelformat)
{
	unsigned i;

	for (i = 0; i < N_MCAM_FMTS; i++)
		if (mcam_formats[i].pixelformat == pixelformat)
			return mcam_formats + i;
	/* Not found? Then return the first format. */
	return mcam_formats;
}

/*
 * The default format we use until somebody says otherwise.
 */
static const struct v4l2_pix_format mcam_def_pix_format = {
	.width = VGA_WIDTH,
	.height = VGA_HEIGHT,
	.pixelformat = V4L2_PIX_FMT_YUYV,
	.field = V4L2_FIELD_NONE,
	.bytesperline = VGA_WIDTH*2,
	.sizeimage = VGA_WIDTH*VGA_HEIGHT*2,
};

static const enum v4l2_mbus_pixelcode mcam_def_mbus_code =
		V4L2_MBUS_FMT_YUYV8_2X8;

/*
 * The two-word DMA descriptor format used by the Armada 610 and like. There
 * is a three-word format as well (set C1_DESC_3WORD) where the third
 * word is a pointer to the next descriptor, but we don't use it. Two-word
 * descriptors have to be contiguous in memory.
 */
struct mcam_dma_desc {
	u32 dma_addr;
	u32 segment_len;
};

/*
 * Our buffer type for working with videobuf2. Note that the vb2
 * developers have decreed that struct vb2_buffer must be at the
 * beginning of this structure.
 */
struct mcam_vb_buffer {
	struct vb2_buffer vb_buf;
	struct list_head queue;
	struct mcam_dma_desc *dma_desc;	/* Descriptor virtual address */
	dma_addr_t dma_desc_pa;		/* Descriptor physical address */
	int dma_desc_nent;		/* Number of mapped descriptors */
};

static inline struct mcam_vb_buffer *vb_to_mvb(struct vb2_buffer *vb)
{
	return container_of(vb, struct mcam_vb_buffer, vb_buf);
}

/*
 * Hand a completed buffer back to user space.
 */
static void mcam_buffer_done(struct mcam_camera *cam, int frame,
		struct vb2_buffer *vbuf)
{
	vbuf->v4l2_buf.bytesused = cam->pix_format.sizeimage;
	vbuf->v4l2_buf.sequence = cam->buf_seq[frame];
	vb2_set_plane_payload(vbuf, 0, cam->pix_format.sizeimage);
	vb2_buffer_done(vbuf, VB2_BUF_STATE_DONE);
}

/*
 * Debugging and related.
 */
#define cam_err(cam, fmt, arg...) \
	dev_err((cam)->dev, fmt, ##arg);
#define cam_warn(cam, fmt, arg...) \
	dev_warn((cam)->dev, fmt, ##arg);
#define cam_dbg(cam, fmt, arg...) \
	dev_dbg((cam)->dev, fmt, ##arg);

/*
 * Flag manipulation helpers
 */
static void mcam_reset_buffers(struct mcam_camera *cam)
{
	int i;

	cam->next_buf = -1;
	for (i = 0; i < cam->nbufs; i++)
		clear_bit(i, &cam->flags);
}

static inline int mcam_needs_config(struct mcam_camera *cam)
{
	return test_bit(CF_CONFIG_NEEDED, &cam->flags);
}

static void mcam_set_config_needed(struct mcam_camera *cam, int needed)
{
	if (needed)
		set_bit(CF_CONFIG_NEEDED, &cam->flags);
	else
		clear_bit(CF_CONFIG_NEEDED, &cam->flags);
}

/* ------------------------------------------------------------------- */
/*
 * Make the controller start grabbing images. Everything must
 * be set up before doing this.
 */
static void mcam_ctlr_start(struct mcam_camera *cam)
{
	/* set_bit performs a read, so no other barrier should be
	   needed here */
	mcam_reg_set_bit(cam, REG_CTRL0, C0_ENABLE);
}

static void mcam_ctlr_stop(struct mcam_camera *cam)
{
	mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
}

/* ------------------------------------------------------------------- */

#ifdef MCAM_MODE_VMALLOC
/*
 * Code specific to the vmalloc buffer mode.
 */

/*
 * Allocate in-kernel DMA buffers for vmalloc mode.
 */
static int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
{
	int i;

	mcam_set_config_needed(cam, 1);
	if (loadtime)
		cam->dma_buf_size = dma_buf_size;
	else
		cam->dma_buf_size = cam->pix_format.sizeimage;
	if (n_dma_bufs > 3)
		n_dma_bufs = 3;

	cam->nbufs = 0;
	for (i = 0; i < n_dma_bufs; i++) {
		cam->dma_bufs[i] = dma_alloc_coherent(cam->dev,
				cam->dma_buf_size, cam->dma_handles + i,
				GFP_KERNEL);
		if (cam->dma_bufs[i] == NULL) {
			cam_warn(cam, "Failed to allocate DMA buffer\n");
			break;
		}
		(cam->nbufs)++;
	}

	switch (cam->nbufs) {
	case 1:
		dma_free_coherent(cam->dev, cam->dma_buf_size,
				cam->dma_bufs[0], cam->dma_handles[0]);
		cam->nbufs = 0;
	case 0:
		cam_err(cam, "Insufficient DMA buffers, cannot operate\n");
		return -ENOMEM;
	case 2:
		if (n_dma_bufs > 2)
			cam_warn(cam, "Will limp along with only 2 buffers\n");
		break;
	}
	return 0;
}

static void mcam_free_dma_bufs(struct mcam_camera *cam)
{
	int i;

	for (i = 0; i < cam->nbufs; i++) {
		dma_free_coherent(cam->dev, cam->dma_buf_size,
				cam->dma_bufs[i], cam->dma_handles[i]);
		cam->dma_bufs[i] = NULL;
	}
	cam->nbufs = 0;
}

/*
 * Set up DMA buffers when operating in vmalloc mode
 */
static void mcam_ctlr_dma_vmalloc(struct mcam_camera *cam)
{
	/*
	 * Store the first two Y buffers (we aren't supporting
	 * planar formats for now, so no UV bufs). Then either
	 * set the third if it exists, or tell the controller
	 * to just use two.
	 */
	mcam_reg_write(cam, REG_Y0BAR, cam->dma_handles[0]);
	mcam_reg_write(cam, REG_Y1BAR, cam->dma_handles[1]);
	if (cam->nbufs > 2) {
		mcam_reg_write(cam, REG_Y2BAR, cam->dma_handles[2]);
		mcam_reg_clear_bit(cam, REG_CTRL1, C1_TWOBUFS);
	} else
		mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
	if (cam->chip_id == V4L2_IDENT_CAFE)
		mcam_reg_write(cam, REG_UBAR, 0); /* 32 bits only */
}

/*
 * Copy data out to user space in the vmalloc case
 */
static void mcam_frame_tasklet(unsigned long data)
{
	struct mcam_camera *cam = (struct mcam_camera *) data;
	int i;
	unsigned long flags;
	struct mcam_vb_buffer *buf;

	spin_lock_irqsave(&cam->dev_lock, flags);
	for (i = 0; i < cam->nbufs; i++) {
		int bufno = cam->next_buf;

		if (cam->state != S_STREAMING || bufno < 0)
			break;  /* I/O got stopped */
		if (++(cam->next_buf) >= cam->nbufs)
			cam->next_buf = 0;
		if (!test_bit(bufno, &cam->flags))
			continue;
		if (list_empty(&cam->buffers)) {
			cam->frame_state.singles++;
			break;  /* Leave it valid, hope for better later */
		}
		cam->frame_state.delivered++;
		clear_bit(bufno, &cam->flags);
		buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
				queue);
		list_del_init(&buf->queue);
		/*
		 * Drop the lock during the big copy. This *should* be safe...
		 */
		spin_unlock_irqrestore(&cam->dev_lock, flags);
		memcpy(vb2_plane_vaddr(&buf->vb_buf, 0), cam->dma_bufs[bufno],
				cam->pix_format.sizeimage);
		mcam_buffer_done(cam, bufno, &buf->vb_buf);
		spin_lock_irqsave(&cam->dev_lock, flags);
	}
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}

/*
 * Make sure our allocated buffers are up to the task.
 */
static int mcam_check_dma_buffers(struct mcam_camera *cam)
{
	if (cam->nbufs > 0 && cam->dma_buf_size < cam->pix_format.sizeimage)
		mcam_free_dma_bufs(cam);
	if (cam->nbufs == 0)
		return mcam_alloc_dma_bufs(cam, 0);
	return 0;
}

static void mcam_vmalloc_done(struct mcam_camera *cam, int frame)
{
	tasklet_schedule(&cam->s_tasklet);
}

#else /* MCAM_MODE_VMALLOC */

static inline int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
{
	return 0;
}

static inline void mcam_free_dma_bufs(struct mcam_camera *cam)
{
	return;
}

static inline int mcam_check_dma_buffers(struct mcam_camera *cam)
{
	return 0;
}

#endif /* MCAM_MODE_VMALLOC */

#ifdef MCAM_MODE_DMA_CONTIG
/* ---------------------------------------------------------------------- */
/*
 * DMA-contiguous code.
 */

/*
 * Set up a contiguous buffer for the given frame. Here also is where
 * the underrun strategy is set: if there is no buffer available, reuse
 * the buffer from the other BAR and set the CF_SINGLE_BUFFER flag to
 * keep the interrupt handler from giving that buffer back to user
 * space. In this way, we always have a buffer to DMA to and don't
 * have to try to play games stopping and restarting the controller.
 */
static void mcam_set_contig_buffer(struct mcam_camera *cam, int frame)
{
	struct mcam_vb_buffer *buf;

	/*
	 * If there are no available buffers, go into single mode
	 */
	if (list_empty(&cam->buffers)) {
		buf = cam->vb_bufs[frame ^ 0x1];
		cam->vb_bufs[frame] = buf;
		mcam_reg_write(cam, frame == 0 ? REG_Y0BAR : REG_Y1BAR,
				vb2_dma_contig_plane_dma_addr(&buf->vb_buf, 0));
		set_bit(CF_SINGLE_BUFFER, &cam->flags);
		cam->frame_state.singles++;
		return;
	}
	/*
	 * OK, we have a buffer we can use.
	 */
	buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);
	list_del_init(&buf->queue);
	mcam_reg_write(cam, frame == 0 ? REG_Y0BAR : REG_Y1BAR,
			vb2_dma_contig_plane_dma_addr(&buf->vb_buf, 0));
	cam->vb_bufs[frame] = buf;
	clear_bit(CF_SINGLE_BUFFER, &cam->flags);
}

/*
 * Initial B_DMA_contig setup.
 */
static void mcam_ctlr_dma_contig(struct mcam_camera *cam)
{
	mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
	cam->nbufs = 2;
	mcam_set_contig_buffer(cam, 0);
	mcam_set_contig_buffer(cam, 1);
}

/*
 * Frame completion handling.
 */
static void mcam_dma_contig_done(struct mcam_camera *cam, int frame)
{
	struct mcam_vb_buffer *buf = cam->vb_bufs[frame];

	if (!test_bit(CF_SINGLE_BUFFER, &cam->flags)) {
		cam->frame_state.delivered++;
		mcam_buffer_done(cam, frame, &buf->vb_buf);
	}
	mcam_set_contig_buffer(cam, frame);
}

#endif /* MCAM_MODE_DMA_CONTIG */

#ifdef MCAM_MODE_DMA_SG
/* ---------------------------------------------------------------------- */
/*
 * Scatter/gather-specific code.
 */

/*
 * Set up the next buffer for S/G I/O; caller should be sure that
 * the controller is stopped and a buffer is available.
 */
static void mcam_sg_next_buffer(struct mcam_camera *cam)
{
	struct mcam_vb_buffer *buf;

	buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);
	list_del_init(&buf->queue);
	/*
	 * Very Bad Not Good Things happen if you don't clear
	 * C1_DESC_ENA before making any descriptor changes.
	 */
	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_ENA);
	mcam_reg_write(cam, REG_DMA_DESC_Y, buf->dma_desc_pa);
	mcam_reg_write(cam, REG_DESC_LEN_Y,
			buf->dma_desc_nent*sizeof(struct mcam_dma_desc));
	mcam_reg_write(cam, REG_DESC_LEN_U, 0);
	mcam_reg_write(cam, REG_DESC_LEN_V, 0);
	mcam_reg_set_bit(cam, REG_CTRL1, C1_DESC_ENA);
	cam->vb_bufs[0] = buf;
}

/*
 * Initial B_DMA_sg setup
 */
static void mcam_ctlr_dma_sg(struct mcam_camera *cam)
{
	/*
	 * The list-empty condition can hit us at resume time
	 * if the buffer list was empty when the system was suspended.
	 */
	if (list_empty(&cam->buffers)) {
		set_bit(CF_SG_RESTART, &cam->flags);
		return;
	}

	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_3WORD);
	mcam_sg_next_buffer(cam);
	cam->nbufs = 3;
}

/*
 * Frame completion with S/G is trickier. We can't muck with
 * a descriptor chain on the fly, since the controller buffers it
 * internally. So we have to actually stop and restart; Marvell
 * says this is the way to do it.
 *
 * Of course, stopping is easier said than done; experience shows
 * that the controller can start a frame *after* C0_ENABLE has been
 * cleared. So when running in S/G mode, the controller is "stopped"
 * on receipt of the start-of-frame interrupt. That means we can
 * safely change the DMA descriptor array here and restart things
 * (assuming there's another buffer waiting to go).
 */
static void mcam_dma_sg_done(struct mcam_camera *cam, int frame)
{
	struct mcam_vb_buffer *buf = cam->vb_bufs[0];

	/*
	 * If we're no longer supposed to be streaming, don't do anything.
	 */
	if (cam->state != S_STREAMING)
		return;
	/*
	 * If we have another buffer available, put it in and
	 * restart the engine.
	 */
	if (!list_empty(&cam->buffers)) {
		mcam_sg_next_buffer(cam);
		mcam_ctlr_start(cam);
	/*
	 * Otherwise set CF_SG_RESTART and the controller will
	 * be restarted once another buffer shows up.
	 */
	} else {
		set_bit(CF_SG_RESTART, &cam->flags);
		cam->frame_state.singles++;
		cam->vb_bufs[0] = NULL;
	}
	/*
	 * Now we can give the completed frame back to user space.
	 */
	cam->frame_state.delivered++;
	mcam_buffer_done(cam, frame, &buf->vb_buf);
}

/*
 * Scatter/gather mode requires stopping the controller between
 * frames so we can put in a new DMA descriptor array. If no new
 * buffer exists at frame completion, the controller is left stopped;
 * this function is charged with getting things going again.
 */
static void mcam_sg_restart(struct mcam_camera *cam)
{
	mcam_ctlr_dma_sg(cam);
	mcam_ctlr_start(cam);
	clear_bit(CF_SG_RESTART, &cam->flags);
}

#else /* MCAM_MODE_DMA_SG */

static inline void mcam_sg_restart(struct mcam_camera *cam)
{
	return;
}

#endif /* MCAM_MODE_DMA_SG */

/* ---------------------------------------------------------------------- */
/*
 * Buffer-mode-independent controller code.
 */

/*
 * Image format setup
 */
static void mcam_ctlr_image(struct mcam_camera *cam)
{
	int imgsz;
	struct v4l2_pix_format *fmt = &cam->pix_format;

	imgsz = ((fmt->height << IMGSZ_V_SHIFT) & IMGSZ_V_MASK) |
		(fmt->bytesperline & IMGSZ_H_MASK);
	mcam_reg_write(cam, REG_IMGSIZE, imgsz);
	mcam_reg_write(cam, REG_IMGOFFSET, 0);
	/* YPITCH just drops the last two bits */
	mcam_reg_write_mask(cam, REG_IMGPITCH, fmt->bytesperline,
			IMGP_YP_MASK);
	/*
	 * Tell the controller about the image format we are using.
	 */
	switch (cam->pix_format.pixelformat) {
	case V4L2_PIX_FMT_YUYV:
		mcam_reg_write_mask(cam, REG_CTRL0,
				C0_DF_YUV|C0_YUV_PACKED|C0_YUVE_YUYV,
				C0_DF_MASK);
		break;

	case V4L2_PIX_FMT_RGB444:
		mcam_reg_write_mask(cam, REG_CTRL0,
				C0_DF_RGB|C0_RGBF_444|C0_RGB4_XRGB,
				C0_DF_MASK);
		/* Alpha value? */
		break;

	case V4L2_PIX_FMT_RGB565:
		mcam_reg_write_mask(cam, REG_CTRL0,
				C0_DF_RGB|C0_RGBF_565|C0_RGB5_BGGR,
				C0_DF_MASK);
		break;

	default:
		cam_err(cam, "Unknown format %x\n", cam->pix_format.pixelformat);
		break;
	}
	/*
	 * Make sure it knows we want to use hsync/vsync.
	 */
	mcam_reg_write_mask(cam, REG_CTRL0, C0_SIF_HVSYNC,
			C0_SIFM_MASK);
}

/*
 * Configure the controller for operation; caller holds the
 * device mutex.
 */
static int mcam_ctlr_configure(struct mcam_camera *cam)
{
	unsigned long flags;

	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_SG_RESTART, &cam->flags);
	cam->dma_setup(cam);
	mcam_ctlr_image(cam);
	mcam_set_config_needed(cam, 0);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}

static void mcam_ctlr_irq_enable(struct mcam_camera *cam)
{
	/*
	 * Clear any pending interrupts, since we do not
	 * expect to have I/O active prior to enabling.
	 */
	mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS);
	mcam_reg_set_bit(cam, REG_IRQMASK, FRAMEIRQS);
}

static void mcam_ctlr_irq_disable(struct mcam_camera *cam)
{
	mcam_reg_clear_bit(cam, REG_IRQMASK, FRAMEIRQS);
}

static void mcam_ctlr_init(struct mcam_camera *cam)
{
	unsigned long flags;

	spin_lock_irqsave(&cam->dev_lock, flags);
	/*
	 * Make sure it's not powered down.
	 */
	mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
	/*
	 * Turn off the enable bit. It sure should be off anyway,
	 * but it's good to be sure.
	 */
	mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
	/*
	 * Clock the sensor appropriately. Controller clock should
	 * be 48MHz, sensor "typical" value is half that.
	 */
	mcam_reg_write_mask(cam, REG_CLKCTRL, 2, CLK_DIV_MASK);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}

/*
 * Stop the controller, and don't return until we're really sure that no
 * further DMA is going on.
 */
static void mcam_ctlr_stop_dma(struct mcam_camera *cam)
{
	unsigned long flags;

	/*
	 * Theory: stop the camera controller (whether it is operating
	 * or not). Delay briefly just in case we race with the SOF
	 * interrupt, then wait until no DMA is active.
	 */
	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_SG_RESTART, &cam->flags);
	mcam_ctlr_stop(cam);
	cam->state = S_IDLE;
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	/*
	 * This is a brutally long sleep, but experience shows that
	 * it can take the controller a while to get the message that
	 * it needs to stop grabbing frames. In particular, we can
	 * sometimes (on mmp) get a frame at the end WITHOUT the
	 * start-of-frame indication.
	 */
	msleep(150);
	if (test_bit(CF_DMA_ACTIVE, &cam->flags))
		cam_err(cam, "Timeout waiting for DMA to end\n");
		/* This would be bad news - what now? */
	spin_lock_irqsave(&cam->dev_lock, flags);
	mcam_ctlr_irq_disable(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}

/*
 * Power up and down.
 */
static void mcam_ctlr_power_up(struct mcam_camera *cam)
{
	unsigned long flags;

	spin_lock_irqsave(&cam->dev_lock, flags);
	cam->plat_power_up(cam);
	mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	msleep(5); /* Just to be sure */
}

static void mcam_ctlr_power_down(struct mcam_camera *cam)
{
	unsigned long flags;

	spin_lock_irqsave(&cam->dev_lock, flags);
	/*
	 * School of hard knocks department: be sure we do any register
	 * twiddling on the controller *before* calling the platform
	 * power down routine.
	 */
	mcam_reg_set_bit(cam, REG_CTRL1, C1_PWRDWN);
	cam->plat_power_down(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}

/* -------------------------------------------------------------------- */
/*
 * Communications with the sensor.
 */
static int __mcam_cam_reset(struct mcam_camera *cam)
{
	return sensor_call(cam, core, reset, 0);
}

/*
 * We have found the sensor on the i2c. Let's try to have a
 * conversation.
 */
static int mcam_cam_init(struct mcam_camera *cam)
{
	struct v4l2_dbg_chip_ident chip;
	int ret;

	mutex_lock(&cam->s_mutex);
	if (cam->state != S_NOTREADY)
		cam_warn(cam, "Cam init with device in funky state %d",
				cam->state);
	ret = __mcam_cam_reset(cam);
	if (ret)
		goto out;
	chip.ident = V4L2_IDENT_NONE;
	chip.match.type = V4L2_CHIP_MATCH_I2C_ADDR;
	chip.match.addr = cam->sensor_addr;
	ret = sensor_call(cam, core, g_chip_ident, &chip);
	if (ret)
		goto out;
	cam->sensor_type = chip.ident;
	if (cam->sensor_type != V4L2_IDENT_OV7670) {
		cam_err(cam, "Unsupported sensor type 0x%x", cam->sensor_type);
		ret = -EINVAL;
		goto out;
	}
	/* Get/set parameters? */
	ret = 0;
	cam->state = S_IDLE;
out:
	mcam_ctlr_power_down(cam);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

/*
 * Configure the sensor to match the parameters we have. Caller should
 * hold s_mutex
 */
static int mcam_cam_set_flip(struct mcam_camera *cam)
{
	struct v4l2_control ctrl;

	memset(&ctrl, 0, sizeof(ctrl));
	ctrl.id = V4L2_CID_VFLIP;
	ctrl.value = flip;
	return sensor_call(cam, core, s_ctrl, &ctrl);
}

static int mcam_cam_configure(struct mcam_camera *cam)
{
	struct v4l2_mbus_framefmt mbus_fmt;
	int ret;

	v4l2_fill_mbus_format(&mbus_fmt, &cam->pix_format, cam->mbus_code);
	ret = sensor_call(cam, core, init, 0);
	if (ret == 0)
		ret = sensor_call(cam, video, s_mbus_fmt, &mbus_fmt);
	/*
	 * OV7670 does weird things if flip is set *before* format...
	 */
	ret += mcam_cam_set_flip(cam);
	return ret;
}

/*
 * Get everything ready, and start grabbing frames.
 */
static int mcam_read_setup(struct mcam_camera *cam)
{
	int ret;
	unsigned long flags;

	/*
	 * Configuration. If we still don't have DMA buffers,
	 * make one last, desperate attempt.
	 */
	if (cam->buffer_mode == B_vmalloc && cam->nbufs == 0 &&
			mcam_alloc_dma_bufs(cam, 0))
		return -ENOMEM;

	if (mcam_needs_config(cam)) {
		mcam_cam_configure(cam);
		ret = mcam_ctlr_configure(cam);
		if (ret)
			return ret;
	}

	/*
	 * Turn it loose.
	 */
	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_DMA_ACTIVE, &cam->flags);
	mcam_reset_buffers(cam);
	mcam_ctlr_irq_enable(cam);
	cam->state = S_STREAMING;
	if (!test_bit(CF_SG_RESTART, &cam->flags))
		mcam_ctlr_start(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}

/* ----------------------------------------------------------------------- */
/*
 * Videobuf2 interface code.
 */
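
/*
 * queue_setup is called by vb2 before buffer allocation; we tell it the
 * minimum number of buffers we need, the size of the single plane, and
 * (in DMA-contig mode) which allocation context to use.
 */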
static int mcam_vb_queue_setup(struct vb2_queue *vq,
		const struct v4l2_format *fmt, unsigned int *nbufs,
		unsigned int *num_planes, unsigned int sizes[],
		void *alloc_ctxs[])
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);
	int minbufs = (cam->buffer_mode == B_DMA_contig) ? 3 : 2;

	sizes[0] = cam->pix_format.sizeimage;
	*num_planes = 1; /* Someday we have to support planar formats... */
	if (*nbufs < minbufs)
		*nbufs = minbufs;
	if (cam->buffer_mode == B_DMA_contig)
		alloc_ctxs[0] = cam->vb_alloc_ctx;
	return 0;
}

static void mcam_vb_buf_queue(struct vb2_buffer *vb)
{
	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	unsigned long flags;
	int start;

	spin_lock_irqsave(&cam->dev_lock, flags);
	start = (cam->state == S_BUFWAIT) && !list_empty(&cam->buffers);
	list_add(&mvb->queue, &cam->buffers);
	if (cam->state == S_STREAMING && test_bit(CF_SG_RESTART, &cam->flags))
		mcam_sg_restart(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	if (start)
		mcam_read_setup(cam);
}

/*
 * vb2 uses these to release the mutex when waiting in dqbuf. I'm
 * not actually sure we need to do this (I'm not sure that vb2_dqbuf() needs
 * to be called with the mutex held), but better safe than sorry.
 */
static void mcam_vb_wait_prepare(struct vb2_queue *vq)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);

	mutex_unlock(&cam->s_mutex);
}

static void mcam_vb_wait_finish(struct vb2_queue *vq)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);

	mutex_lock(&cam->s_mutex);
}

/*
 * These need to be called with the mutex held from vb2
 */
static int mcam_vb_start_streaming(struct vb2_queue *vq, unsigned int count)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);

	if (cam->state != S_IDLE) {
		INIT_LIST_HEAD(&cam->buffers);
		return -EINVAL;
	}
	cam->sequence = 0;
	/*
	 * Videobuf2 sneakily hoards all the buffers and won't
	 * give them to us until *after* streaming starts. But
	 * we can't actually start streaming until we have a
	 * destination. So go into a wait state and hope they
	 * give us buffers soon.
	 */
	if (cam->buffer_mode != B_vmalloc && list_empty(&cam->buffers)) {
		cam->state = S_BUFWAIT;
		return 0;
	}
	return mcam_read_setup(cam);
}

static int mcam_vb_stop_streaming(struct vb2_queue *vq)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);
	unsigned long flags;

	if (cam->state == S_BUFWAIT) {
		/* They never gave us buffers */
		cam->state = S_IDLE;
		return 0;
	}
	if (cam->state != S_STREAMING)
		return -EINVAL;
	mcam_ctlr_stop_dma(cam);
	/*
	 * VB2 reclaims the buffers, so we need to forget
	 * about them.
	 */
	spin_lock_irqsave(&cam->dev_lock, flags);
	INIT_LIST_HEAD(&cam->buffers);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}

static const struct vb2_ops mcam_vb2_ops = {
	.queue_setup = mcam_vb_queue_setup,
	.buf_queue = mcam_vb_buf_queue,
	.start_streaming = mcam_vb_start_streaming,
	.stop_streaming = mcam_vb_stop_streaming,
	.wait_prepare = mcam_vb_wait_prepare,
	.wait_finish = mcam_vb_wait_finish,
};

#ifdef MCAM_MODE_DMA_SG
/*
 * Scatter/gather mode uses all of the above functions plus a
 * few extras to deal with DMA mapping.
 */
static int mcam_vb_sg_buf_init(struct vb2_buffer *vb)
{
	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;

	mvb->dma_desc = dma_alloc_coherent(cam->dev,
			ndesc * sizeof(struct mcam_dma_desc),
			&mvb->dma_desc_pa, GFP_KERNEL);
	if (mvb->dma_desc == NULL) {
		cam_err(cam, "Unable to get DMA descriptor array\n");
		return -ENOMEM;
	}
	return 0;
}

static int mcam_vb_sg_buf_prepare(struct vb2_buffer *vb)
{
	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	struct vb2_dma_sg_desc *sgd = vb2_dma_sg_plane_desc(vb, 0);
	struct mcam_dma_desc *desc = mvb->dma_desc;
	struct scatterlist *sg;
	int i;

	mvb->dma_desc_nent = dma_map_sg(cam->dev, sgd->sglist, sgd->num_pages,
			DMA_FROM_DEVICE);
	if (mvb->dma_desc_nent <= 0)
		return -EIO;  /* Not sure what's right here */
	for_each_sg(sgd->sglist, sg, mvb->dma_desc_nent, i) {
		desc->dma_addr = sg_dma_address(sg);
		desc->segment_len = sg_dma_len(sg);
		desc++;
	}
	return 0;
}

static int mcam_vb_sg_buf_finish(struct vb2_buffer *vb)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	struct vb2_dma_sg_desc *sgd = vb2_dma_sg_plane_desc(vb, 0);

	dma_unmap_sg(cam->dev, sgd->sglist, sgd->num_pages, DMA_FROM_DEVICE);
	return 0;
}

static void mcam_vb_sg_buf_cleanup(struct vb2_buffer *vb)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
	int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;

	dma_free_coherent(cam->dev, ndesc * sizeof(struct mcam_dma_desc),
			mvb->dma_desc, mvb->dma_desc_pa);
}

static const struct vb2_ops mcam_vb2_sg_ops = {
	.queue_setup = mcam_vb_queue_setup,
	.buf_init = mcam_vb_sg_buf_init,
	.buf_prepare = mcam_vb_sg_buf_prepare,
	.buf_queue = mcam_vb_buf_queue,
	.buf_finish = mcam_vb_sg_buf_finish,
	.buf_cleanup = mcam_vb_sg_buf_cleanup,
	.start_streaming = mcam_vb_start_streaming,
	.stop_streaming = mcam_vb_stop_streaming,
	.wait_prepare = mcam_vb_wait_prepare,
	.wait_finish = mcam_vb_wait_finish,
};

#endif /* MCAM_MODE_DMA_SG */
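
/*
 * vb2 queue initialization: pick the vb2 ops, memory ops and I/O modes
 * matching the configured buffer mode, and wire up the dma_setup and
 * frame_complete hooks used by the rest of the driver.
 */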
static int mcam_setup_vb2(struct mcam_camera *cam)
{
	struct vb2_queue *vq = &cam->vb_queue;

	memset(vq, 0, sizeof(*vq));
	vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	vq->drv_priv = cam;
	INIT_LIST_HEAD(&cam->buffers);
	switch (cam->buffer_mode) {
	case B_DMA_contig:
#ifdef MCAM_MODE_DMA_CONTIG
		vq->ops = &mcam_vb2_ops;
		vq->mem_ops = &vb2_dma_contig_memops;
		cam->vb_alloc_ctx = vb2_dma_contig_init_ctx(cam->dev);
		vq->io_modes = VB2_MMAP | VB2_USERPTR;
		cam->dma_setup = mcam_ctlr_dma_contig;
		cam->frame_complete = mcam_dma_contig_done;
#endif
		break;
	case B_DMA_sg:
#ifdef MCAM_MODE_DMA_SG
		vq->ops = &mcam_vb2_sg_ops;
		vq->mem_ops = &vb2_dma_sg_memops;
		vq->io_modes = VB2_MMAP | VB2_USERPTR;
		cam->dma_setup = mcam_ctlr_dma_sg;
		cam->frame_complete = mcam_dma_sg_done;
#endif
		break;
	case B_vmalloc:
#ifdef MCAM_MODE_VMALLOC
		tasklet_init(&cam->s_tasklet, mcam_frame_tasklet,
				(unsigned long) cam);
		vq->ops = &mcam_vb2_ops;
		vq->mem_ops = &vb2_vmalloc_memops;
		vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
		vq->io_modes = VB2_MMAP;
		cam->dma_setup = mcam_ctlr_dma_vmalloc;
		cam->frame_complete = mcam_vmalloc_done;
#endif
		break;
	}
	return vb2_queue_init(vq);
}

static void mcam_cleanup_vb2(struct mcam_camera *cam)
{
	vb2_queue_release(&cam->vb_queue);
#ifdef MCAM_MODE_DMA_CONTIG
	if (cam->buffer_mode == B_DMA_contig)
		vb2_dma_contig_cleanup_ctx(cam->vb_alloc_ctx);
#endif
}

/* ---------------------------------------------------------------------- */
/*
 * The long list of V4L2 ioctl() operations.
 */
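
/*
 * Most of these are thin wrappers: they take s_mutex and hand the request
 * straight to the videobuf2 layer or to the sensor subdevice.
 */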
static int mcam_vidioc_streamon(struct file *filp, void *priv,
		enum v4l2_buf_type type)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_streamon(&cam->vb_queue, type);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_streamoff(struct file *filp, void *priv,
		enum v4l2_buf_type type)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_streamoff(&cam->vb_queue, type);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_reqbufs(struct file *filp, void *priv,
		struct v4l2_requestbuffers *req)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_reqbufs(&cam->vb_queue, req);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_querybuf(struct file *filp, void *priv,
		struct v4l2_buffer *buf)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_querybuf(&cam->vb_queue, buf);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_qbuf(struct file *filp, void *priv,
		struct v4l2_buffer *buf)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_qbuf(&cam->vb_queue, buf);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_dqbuf(struct file *filp, void *priv,
		struct v4l2_buffer *buf)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_dqbuf(&cam->vb_queue, buf, filp->f_flags & O_NONBLOCK);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_querycap(struct file *file, void *priv,
		struct v4l2_capability *cap)
{
	strcpy(cap->driver, "marvell_ccic");
	strcpy(cap->card, "marvell_ccic");
	cap->version = 1;
	cap->capabilities = V4L2_CAP_VIDEO_CAPTURE |
		V4L2_CAP_READWRITE | V4L2_CAP_STREAMING;
	return 0;
}

static int mcam_vidioc_enum_fmt_vid_cap(struct file *filp,
		void *priv, struct v4l2_fmtdesc *fmt)
{
	if (fmt->index >= N_MCAM_FMTS)
		return -EINVAL;
	strlcpy(fmt->description, mcam_formats[fmt->index].desc,
			sizeof(fmt->description));
	fmt->pixelformat = mcam_formats[fmt->index].pixelformat;
	return 0;
}

static int mcam_vidioc_try_fmt_vid_cap(struct file *filp, void *priv,
		struct v4l2_format *fmt)
{
	struct mcam_camera *cam = priv;
	struct mcam_format_struct *f;
	struct v4l2_pix_format *pix = &fmt->fmt.pix;
	struct v4l2_mbus_framefmt mbus_fmt;
	int ret;

	f = mcam_find_format(pix->pixelformat);
	pix->pixelformat = f->pixelformat;
	v4l2_fill_mbus_format(&mbus_fmt, pix, f->mbus_code);
	mutex_lock(&cam->s_mutex);
	ret = sensor_call(cam, video, try_mbus_fmt, &mbus_fmt);
	mutex_unlock(&cam->s_mutex);
	v4l2_fill_pix_format(pix, &mbus_fmt);
	pix->bytesperline = pix->width * f->bpp;
	pix->sizeimage = pix->height * pix->bytesperline;
	return ret;
}

static int mcam_vidioc_s_fmt_vid_cap(struct file *filp, void *priv,
		struct v4l2_format *fmt)
{
	struct mcam_camera *cam = priv;
	struct mcam_format_struct *f;
	int ret;

	/*
	 * Can't do anything if the device is not idle
	 * Also can't if there are streaming buffers in place.
	 */
	if (cam->state != S_IDLE || cam->vb_queue.num_buffers > 0)
		return -EBUSY;

	f = mcam_find_format(fmt->fmt.pix.pixelformat);

	/*
	 * See if the formatting works in principle.
	 */
	ret = mcam_vidioc_try_fmt_vid_cap(filp, priv, fmt);
	if (ret)
		return ret;
	/*
	 * Now we start to change things for real, so let's do it
	 * under lock.
	 */
	mutex_lock(&cam->s_mutex);
	cam->pix_format = fmt->fmt.pix;
	cam->mbus_code = f->mbus_code;

	/*
	 * Make sure we have appropriate DMA buffers.
	 */
	if (cam->buffer_mode == B_vmalloc) {
		ret = mcam_check_dma_buffers(cam);
		if (ret)
			goto out;
	}
	mcam_set_config_needed(cam, 1);
out:
	mutex_unlock(&cam->s_mutex);
	return ret;
}

/*
 * Return our stored notion of how the camera is/should be configured.
 * The V4L2 spec wants us to be smarter, and actually get this from
 * the camera (and not mess with it at open time). Someday.
 */
static int mcam_vidioc_g_fmt_vid_cap(struct file *filp, void *priv,
		struct v4l2_format *f)
{
	struct mcam_camera *cam = priv;

	f->fmt.pix = cam->pix_format;
	return 0;
}

/*
 * We only have one input - the sensor - so minimize the nonsense here.
 */
static int mcam_vidioc_enum_input(struct file *filp, void *priv,
		struct v4l2_input *input)
{
	if (input->index != 0)
		return -EINVAL;

	input->type = V4L2_INPUT_TYPE_CAMERA;
	input->std = V4L2_STD_ALL; /* Not sure what should go here */
	strcpy(input->name, "Camera");
	return 0;
}

static int mcam_vidioc_g_input(struct file *filp, void *priv, unsigned int *i)
{
	*i = 0;
	return 0;
}

static int mcam_vidioc_s_input(struct file *filp, void *priv, unsigned int i)
{
	if (i != 0)
		return -EINVAL;
	return 0;
}

/* from vivi.c */
static int mcam_vidioc_s_std(struct file *filp, void *priv, v4l2_std_id *a)
{
	return 0;
}

/*
 * G/S_PARM. Most of this is done by the sensor, but we are
 * the level which controls the number of read buffers.
 */
static int mcam_vidioc_g_parm(struct file *filp, void *priv,
		struct v4l2_streamparm *parms)
{
	struct mcam_camera *cam = priv;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = sensor_call(cam, video, g_parm, parms);
	mutex_unlock(&cam->s_mutex);
	parms->parm.capture.readbuffers = n_dma_bufs;
	return ret;
}

static int mcam_vidioc_s_parm(struct file *filp, void *priv,
		struct v4l2_streamparm *parms)
{
	struct mcam_camera *cam = priv;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = sensor_call(cam, video, s_parm, parms);
	mutex_unlock(&cam->s_mutex);
	parms->parm.capture.readbuffers = n_dma_bufs;
	return ret;
}

static int mcam_vidioc_g_chip_ident(struct file *file, void *priv,
		struct v4l2_dbg_chip_ident *chip)
{
	struct mcam_camera *cam = priv;

	chip->ident = V4L2_IDENT_NONE;
	chip->revision = 0;
	if (v4l2_chip_match_host(&chip->match)) {
		chip->ident = cam->chip_id;
		return 0;
	}
	return sensor_call(cam, core, g_chip_ident, chip);
}

static int mcam_vidioc_enum_framesizes(struct file *filp, void *priv,
		struct v4l2_frmsizeenum *sizes)
{
	struct mcam_camera *cam = priv;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = sensor_call(cam, video, enum_framesizes, sizes);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_enum_frameintervals(struct file *filp, void *priv,
		struct v4l2_frmivalenum *interval)
{
	struct mcam_camera *cam = priv;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = sensor_call(cam, video, enum_frameintervals, interval);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

#ifdef CONFIG_VIDEO_ADV_DEBUG
static int mcam_vidioc_g_register(struct file *file, void *priv,
		struct v4l2_dbg_register *reg)
{
	struct mcam_camera *cam = priv;

	if (v4l2_chip_match_host(&reg->match)) {
		reg->val = mcam_reg_read(cam, reg->reg);
		reg->size = 4;
		return 0;
	}
	return sensor_call(cam, core, g_register, reg);
}

static int mcam_vidioc_s_register(struct file *file, void *priv,
		struct v4l2_dbg_register *reg)
{
	struct mcam_camera *cam = priv;

	if (v4l2_chip_match_host(&reg->match)) {
		mcam_reg_write(cam, reg->reg, reg->val);
		return 0;
	}
	return sensor_call(cam, core, s_register, reg);
}
#endif

static const struct v4l2_ioctl_ops mcam_v4l_ioctl_ops = {
	.vidioc_querycap = mcam_vidioc_querycap,
	.vidioc_enum_fmt_vid_cap = mcam_vidioc_enum_fmt_vid_cap,
	.vidioc_try_fmt_vid_cap = mcam_vidioc_try_fmt_vid_cap,
	.vidioc_s_fmt_vid_cap = mcam_vidioc_s_fmt_vid_cap,
	.vidioc_g_fmt_vid_cap = mcam_vidioc_g_fmt_vid_cap,
	.vidioc_enum_input = mcam_vidioc_enum_input,
	.vidioc_g_input = mcam_vidioc_g_input,
	.vidioc_s_input = mcam_vidioc_s_input,
	.vidioc_s_std = mcam_vidioc_s_std,
	.vidioc_reqbufs = mcam_vidioc_reqbufs,
	.vidioc_querybuf = mcam_vidioc_querybuf,
	.vidioc_qbuf = mcam_vidioc_qbuf,
	.vidioc_dqbuf = mcam_vidioc_dqbuf,
	.vidioc_streamon = mcam_vidioc_streamon,
	.vidioc_streamoff = mcam_vidioc_streamoff,
	.vidioc_g_parm = mcam_vidioc_g_parm,
	.vidioc_s_parm = mcam_vidioc_s_parm,
	.vidioc_enum_framesizes = mcam_vidioc_enum_framesizes,
	.vidioc_enum_frameintervals = mcam_vidioc_enum_frameintervals,
	.vidioc_g_chip_ident = mcam_vidioc_g_chip_ident,
#ifdef CONFIG_VIDEO_ADV_DEBUG
	.vidioc_g_register = mcam_vidioc_g_register,
	.vidioc_s_register = mcam_vidioc_s_register,
#endif
};

/* ---------------------------------------------------------------------- */
/*
 * Our various file operations.
 */
static int mcam_v4l_open(struct file *filp)
{
	struct mcam_camera *cam = video_drvdata(filp);
	int ret = 0;

	filp->private_data = cam;

	cam->frame_state.frames = 0;
	cam->frame_state.singles = 0;
	cam->frame_state.delivered = 0;
	mutex_lock(&cam->s_mutex);
	if (cam->users == 0) {
		ret = mcam_setup_vb2(cam);
		if (ret)
			goto out;
		mcam_ctlr_power_up(cam);
		__mcam_cam_reset(cam);
		mcam_set_config_needed(cam, 1);
	}
	(cam->users)++;
out:
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_v4l_release(struct file *filp)
{
	struct mcam_camera *cam = filp->private_data;

	cam_dbg(cam, "Release, %d frames, %d singles, %d delivered\n",
			cam->frame_state.frames, cam->frame_state.singles,
			cam->frame_state.delivered);
	mutex_lock(&cam->s_mutex);
	(cam->users)--;
	if (cam->users == 0) {
		mcam_ctlr_stop_dma(cam);
		mcam_cleanup_vb2(cam);
		mcam_ctlr_power_down(cam);
		if (cam->buffer_mode == B_vmalloc && alloc_bufs_at_read)
			mcam_free_dma_bufs(cam);
	}
	mutex_unlock(&cam->s_mutex);
	return 0;
}

static ssize_t mcam_v4l_read(struct file *filp,
		char __user *buffer, size_t len, loff_t *pos)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_read(&cam->vb_queue, buffer, len, pos,
			filp->f_flags & O_NONBLOCK);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static unsigned int mcam_v4l_poll(struct file *filp,
		struct poll_table_struct *pt)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_poll(&cam->vb_queue, filp, pt);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_v4l_mmap(struct file *filp, struct vm_area_struct *vma)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_mmap(&cam->vb_queue, vma);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static const struct v4l2_file_operations mcam_v4l_fops = {
	.owner = THIS_MODULE,
	.open = mcam_v4l_open,
	.release = mcam_v4l_release,
	.read = mcam_v4l_read,
	.poll = mcam_v4l_poll,
	.mmap = mcam_v4l_mmap,
	.unlocked_ioctl = video_ioctl2,
};

/*
 * This template device holds all of those v4l2 methods; we
 * clone it for specific real devices.
 */
static struct video_device mcam_v4l_template = {
	.name = "mcam",
	.tvnorms = V4L2_STD_NTSC_M,
	.current_norm = V4L2_STD_NTSC_M, /* make mplayer happy */
	.fops = &mcam_v4l_fops,
	.ioctl_ops = &mcam_v4l_ioctl_ops,
	.release = video_device_release_empty,
};

/* ---------------------------------------------------------------------- */
/*
 * Interrupt handler stuff
 */
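
/*
 * mcam_frame_complete() is called (with the device lock held) for each
 * end-of-frame interrupt: it marks the buffer valid, bumps the sequence
 * number, and hands off to the buffer-mode-specific completion hook.
 */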
static void mcam_frame_complete(struct mcam_camera *cam, int frame)
{
	/*
	 * Basic frame housekeeping.
	 */
	set_bit(frame, &cam->flags);
	clear_bit(CF_DMA_ACTIVE, &cam->flags);
	cam->next_buf = frame;
	cam->buf_seq[frame] = ++(cam->sequence);
	cam->frame_state.frames++;
	/*
	 * "This should never happen"
	 */
	if (cam->state != S_STREAMING)
		return;
	/*
	 * Process the frame and set up the next one.
	 */
	cam->frame_complete(cam, frame);
}

/*
 * The interrupt handler; this needs to be called from the
 * platform irq handler with the lock held.
 */
int mccic_irq(struct mcam_camera *cam, unsigned int irqs)
{
	unsigned int frame, handled = 0;

	mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS); /* Clear'em all */
	/*
	 * Handle any frame completions. There really should
	 * not be more than one of these, or we have fallen
	 * far behind.
	 *
	 * When running in S/G mode, the frame number lacks any
	 * real meaning - there's only one descriptor array - but
	 * the controller still picks a different one to signal
	 * each time.
	 */
	for (frame = 0; frame < cam->nbufs; frame++)
		if (irqs & (IRQ_EOF0 << frame)) {
			mcam_frame_complete(cam, frame);
			handled = 1;
			if (cam->buffer_mode == B_DMA_sg)
				break;
		}
	/*
	 * If a frame starts, note that we have DMA active. This
	 * code assumes that we won't get multiple frame interrupts
	 * at once; may want to rethink that.
	 */
	if (irqs & (IRQ_SOF0 | IRQ_SOF1 | IRQ_SOF2)) {
		set_bit(CF_DMA_ACTIVE, &cam->flags);
		handled = 1;
		if (cam->buffer_mode == B_DMA_sg)
			mcam_ctlr_stop(cam);
	}
	return handled;
}

/* ---------------------------------------------------------------------- */
/*
 * Registration and such.
 */
static struct ov7670_config sensor_cfg = {
	/*
	 * Exclude QCIF mode, because it only captures a tiny portion
	 * of the sensor FOV
	 */
	.min_width = 320,
	.min_height = 240,
};
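
/*
 * Core registration: validate the requested buffer mode, register the
 * v4l2_device, probe the OV7670 sensor over I2C, set up the control
 * handler, and create the video device node.
 */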
int mccic_register(struct mcam_camera *cam)
{
	struct i2c_board_info ov7670_info = {
		.type = "ov7670",
		.addr = 0x42 >> 1,
		.platform_data = &sensor_cfg,
	};
	int ret;

	/*
	 * Validate the requested buffer mode.
	 */
	if (buffer_mode >= 0)
		cam->buffer_mode = buffer_mode;
	if (cam->buffer_mode == B_DMA_sg &&
			cam->chip_id == V4L2_IDENT_CAFE) {
		printk(KERN_ERR "marvell-cam: Cafe can't do S/G I/O, "
			"attempting vmalloc mode instead\n");
		cam->buffer_mode = B_vmalloc;
	}
	if (!mcam_buffer_mode_supported(cam->buffer_mode)) {
		printk(KERN_ERR "marvell-cam: buffer mode %d unsupported\n",
				cam->buffer_mode);
		return -EINVAL;
	}
	/*
	 * Register with V4L
	 */
	ret = v4l2_device_register(cam->dev, &cam->v4l2_dev);
	if (ret)
		return ret;

	mutex_init(&cam->s_mutex);
	cam->state = S_NOTREADY;
	mcam_set_config_needed(cam, 1);
	cam->pix_format = mcam_def_pix_format;
	cam->mbus_code = mcam_def_mbus_code;
	INIT_LIST_HEAD(&cam->buffers);
	mcam_ctlr_init(cam);

	/*
	 * Try to find the sensor.
	 */
	sensor_cfg.clock_speed = cam->clock_speed;
	sensor_cfg.use_smbus = cam->use_smbus;
	cam->sensor_addr = ov7670_info.addr;
	cam->sensor = v4l2_i2c_new_subdev_board(&cam->v4l2_dev,
			cam->i2c_adapter, &ov7670_info, NULL);
	if (cam->sensor == NULL) {
		ret = -ENODEV;
		goto out_unregister;
	}

	ret = mcam_cam_init(cam);
	if (ret)
		goto out_unregister;
	/*
	 * Get the v4l2 setup done.
	 */
	ret = v4l2_ctrl_handler_init(&cam->ctrl_handler, 10);
	if (ret)
		goto out_unregister;
	cam->v4l2_dev.ctrl_handler = &cam->ctrl_handler;

	mutex_lock(&cam->s_mutex);
	cam->vdev = mcam_v4l_template;
	cam->vdev.debug = 0;
	cam->vdev.v4l2_dev = &cam->v4l2_dev;
	video_set_drvdata(&cam->vdev, cam);
	ret = video_register_device(&cam->vdev, VFL_TYPE_GRABBER, -1);
	if (ret)
		goto out;

	/*
	 * If so requested, try to get our DMA buffers now.
	 */
	if (cam->buffer_mode == B_vmalloc && !alloc_bufs_at_read) {
		if (mcam_alloc_dma_bufs(cam, 1))
			cam_warn(cam, "Unable to alloc DMA buffers at load; "
					"will try again later.");
	}

out:
	v4l2_ctrl_handler_free(&cam->ctrl_handler);
	mutex_unlock(&cam->s_mutex);
	return ret;
out_unregister:
	v4l2_device_unregister(&cam->v4l2_dev);
	return ret;
}
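
/*
 * Unwind the work done by mccic_register(): release the vb2 queue, free
 * any vmalloc-mode DMA buffers, and unregister from V4L2.
 */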
void mccic_shutdown(struct mcam_camera *cam)
{
	/*
	 * If we have no users (and we really, really should have no
	 * users) the device will already be powered down. Trying to
	 * take it down again will wedge the machine, which is frowned
	 * upon.
	 */
	if (cam->users > 0) {
		cam_warn(cam, "Removing a device with users!\n");
		mcam_ctlr_power_down(cam);
	}
	vb2_queue_release(&cam->vb_queue);
	if (cam->buffer_mode == B_vmalloc)
		mcam_free_dma_bufs(cam);
	video_unregister_device(&cam->vdev);
	v4l2_ctrl_handler_free(&cam->ctrl_handler);
	v4l2_device_unregister(&cam->v4l2_dev);
}

/*
 * Power management
 */
#ifdef CONFIG_PM

void mccic_suspend(struct mcam_camera *cam)
{
	mutex_lock(&cam->s_mutex);
	if (cam->users > 0) {
		enum mcam_state cstate = cam->state;

		mcam_ctlr_stop_dma(cam);
		mcam_ctlr_power_down(cam);
		cam->state = cstate;
	}
	mutex_unlock(&cam->s_mutex);
}

int mccic_resume(struct mcam_camera *cam)
{
	int ret = 0;

	mutex_lock(&cam->s_mutex);
	if (cam->users > 0) {
		mcam_ctlr_power_up(cam);
		__mcam_cam_reset(cam);
	} else {
		mcam_ctlr_power_down(cam);
	}
	mutex_unlock(&cam->s_mutex);

	set_bit(CF_CONFIG_NEEDED, &cam->flags);
	if (cam->state == S_STREAMING) {
		/*
		 * If there was a buffer in the DMA engine at suspend
		 * time, put it back on the queue or we'll forget about it.
		 */
		if (cam->buffer_mode == B_DMA_sg && cam->vb_bufs[0])
			list_add(&cam->vb_bufs[0]->queue, &cam->buffers);
		ret = mcam_read_setup(cam);
	}
	return ret;
}
#endif /* CONFIG_PM */