/*
 * The Marvell camera core. This device appears in a number of settings,
 * so it needs platform-specific support outside of the core.
 *
 * Copyright 2011 Jonathan Corbet corbet@lwn.net
 */
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/fs.h>
#include <linux/mm.h>
#include <linux/i2c.h>
#include <linux/interrupt.h>
#include <linux/spinlock.h>
#include <linux/slab.h>
#include <linux/device.h>
#include <linux/wait.h>
#include <linux/list.h>
#include <linux/dma-mapping.h>
#include <linux/delay.h>
#include <linux/vmalloc.h>
#include <linux/io.h>
#include <linux/clk.h>
#include <linux/videodev2.h>
#include <media/v4l2-device.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-ctrls.h>
#include <media/ov7670.h>
#include <media/videobuf2-vmalloc.h>
#include <media/videobuf2-dma-contig.h>
#include <media/videobuf2-dma-sg.h>

#include "mcam-core.h"

#ifdef MCAM_MODE_VMALLOC
/*
 * Internal DMA buffer management. Since the controller cannot do S/G I/O,
 * we must have physically contiguous buffers to bring frames into.
 * These parameters control how many buffers we use, whether we
 * allocate them at load time (better chance of success, but nails down
 * memory) or when somebody tries to use the camera (riskier), and,
 * for load-time allocation, how big they should be.
 *
 * The controller can cycle through three buffers. We could use
 * more by flipping pointers around, but it probably makes little
 * sense.
 */

static bool alloc_bufs_at_read;
module_param(alloc_bufs_at_read, bool, 0444);
MODULE_PARM_DESC(alloc_bufs_at_read,
		"Non-zero value causes DMA buffers to be allocated when the "
		"video capture device is read, rather than at module load "
		"time. This saves memory, but decreases the chances of "
		"successfully getting those buffers. This parameter is "
		"only used in the vmalloc buffer mode");

static unsigned int n_dma_bufs = 3;
module_param(n_dma_bufs, uint, 0644);
MODULE_PARM_DESC(n_dma_bufs,
		"The number of DMA buffers to allocate. Can be either two "
		"(saves memory, makes timing tighter) or three.");

static unsigned int dma_buf_size = VGA_WIDTH * VGA_HEIGHT * 2; /* Worst case */
module_param(dma_buf_size, uint, 0444);
MODULE_PARM_DESC(dma_buf_size,
		"The size of the allocated DMA buffers. If actual operating "
		"parameters require larger buffers, an attempt to reallocate "
		"will be made.");
#else /* MCAM_MODE_VMALLOC */
static const bool alloc_bufs_at_read = false;
static const int n_dma_bufs = 3; /* Used by S/G_PARM */
#endif /* MCAM_MODE_VMALLOC */

static bool flip;
module_param(flip, bool, 0444);
MODULE_PARM_DESC(flip,
		"If set, the sensor will be instructed to flip the image "
		"vertically.");

static int buffer_mode = -1;
module_param(buffer_mode, int, 0444);
MODULE_PARM_DESC(buffer_mode,
		"Set the buffer mode to be used; default is to go with what "
		"the platform driver asks for. Set to 0 for vmalloc, 1 for "
		"DMA contiguous.");

/*
 * Status flags. Always manipulated with bit operations.
 */
#define CF_BUF0_VALID	 0	/* Buffers valid - first three */
#define CF_BUF1_VALID	 1
#define CF_BUF2_VALID	 2
#define CF_DMA_ACTIVE	 3	/* A frame is incoming */
#define CF_CONFIG_NEEDED 4	/* Must configure hardware */
#define CF_SINGLE_BUFFER 5	/* Running with a single buffer */
#define CF_SG_RESTART	 6	/* SG restart needed */

#define sensor_call(cam, o, f, args...) \
	v4l2_subdev_call(cam->sensor, o, f, ##args)

static struct mcam_format_struct {
	__u8 *desc;
	__u32 pixelformat;
	int bpp;	/* Bytes per pixel */
	enum v4l2_mbus_pixelcode mbus_code;
} mcam_formats[] = {
	{
		.desc		= "YUYV 4:2:2",
		.pixelformat	= V4L2_PIX_FMT_YUYV,
		.mbus_code	= V4L2_MBUS_FMT_YUYV8_2X8,
		.bpp		= 2,
	},
	{
		.desc		= "RGB 444",
		.pixelformat	= V4L2_PIX_FMT_RGB444,
		.mbus_code	= V4L2_MBUS_FMT_RGB444_2X8_PADHI_LE,
		.bpp		= 2,
	},
	{
		.desc		= "RGB 565",
		.pixelformat	= V4L2_PIX_FMT_RGB565,
		.mbus_code	= V4L2_MBUS_FMT_RGB565_2X8_LE,
		.bpp		= 2,
	},
	{
		.desc		= "Raw RGB Bayer",
		.pixelformat	= V4L2_PIX_FMT_SBGGR8,
		.mbus_code	= V4L2_MBUS_FMT_SBGGR8_1X8,
		.bpp		= 1,
	},
};
#define N_MCAM_FMTS ARRAY_SIZE(mcam_formats)

static struct mcam_format_struct *mcam_find_format(u32 pixelformat)
{
	unsigned i;

	for (i = 0; i < N_MCAM_FMTS; i++)
		if (mcam_formats[i].pixelformat == pixelformat)
			return mcam_formats + i;
	/* Not found? Then return the first format. */
	return mcam_formats;
}

/*
 * The default format we use until somebody says otherwise.
 */
static const struct v4l2_pix_format mcam_def_pix_format = {
	.width		= VGA_WIDTH,
	.height		= VGA_HEIGHT,
	.pixelformat	= V4L2_PIX_FMT_YUYV,
	.field		= V4L2_FIELD_NONE,
	.bytesperline	= VGA_WIDTH*2,
	.sizeimage	= VGA_WIDTH*VGA_HEIGHT*2,
};

static const enum v4l2_mbus_pixelcode mcam_def_mbus_code =
					V4L2_MBUS_FMT_YUYV8_2X8;

/*
 * The two-word DMA descriptor format used by the Armada 610 and the like.
 * There is a three-word format as well (set C1_DESC_3WORD) where the third
 * word is a pointer to the next descriptor, but we don't use it. Two-word
 * descriptors have to be contiguous in memory.
 */
struct mcam_dma_desc {
	u32 dma_addr;
	u32 segment_len;
};
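/*
 * A rough sizing sketch (assuming 4096-byte pages and the default VGA YUYV
 * format): a 614400-byte frame split into page-sized segments needs about
 * 150 descriptors, which matches the "sizeimage/PAGE_SIZE + 1" allocation
 * done in mcam_vb_sg_buf_init() below - 151 eight-byte entries, a bit over
 * 1 KB of descriptor memory per buffer.
 */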
/*
 * Our buffer type for working with videobuf2. Note that the vb2
 * developers have decreed that struct vb2_buffer must be at the
 * beginning of this structure.
 */
struct mcam_vb_buffer {
	struct vb2_buffer vb_buf;
	struct list_head queue;
	struct mcam_dma_desc *dma_desc;	/* Descriptor virtual address */
	dma_addr_t dma_desc_pa;		/* Descriptor physical address */
	int dma_desc_nent;		/* Number of mapped descriptors */
};

static inline struct mcam_vb_buffer *vb_to_mvb(struct vb2_buffer *vb)
{
	return container_of(vb, struct mcam_vb_buffer, vb_buf);
}

/*
 * Hand a completed buffer back to user space.
 */
static void mcam_buffer_done(struct mcam_camera *cam, int frame,
		struct vb2_buffer *vbuf)
{
	vbuf->v4l2_buf.bytesused = cam->pix_format.sizeimage;
	vbuf->v4l2_buf.sequence = cam->buf_seq[frame];
	vb2_set_plane_payload(vbuf, 0, cam->pix_format.sizeimage);
	vb2_buffer_done(vbuf, VB2_BUF_STATE_DONE);
}

/*
 * Debugging and related.
 */
#define cam_err(cam, fmt, arg...) \
	dev_err((cam)->dev, fmt, ##arg);
#define cam_warn(cam, fmt, arg...) \
	dev_warn((cam)->dev, fmt, ##arg);
#define cam_dbg(cam, fmt, arg...) \
	dev_dbg((cam)->dev, fmt, ##arg);

/*
 * Flag manipulation helpers
 */
static void mcam_reset_buffers(struct mcam_camera *cam)
{
	int i;

	cam->next_buf = -1;
	for (i = 0; i < cam->nbufs; i++)
		clear_bit(i, &cam->flags);
}

static inline int mcam_needs_config(struct mcam_camera *cam)
{
	return test_bit(CF_CONFIG_NEEDED, &cam->flags);
}

static void mcam_set_config_needed(struct mcam_camera *cam, int needed)
{
	if (needed)
		set_bit(CF_CONFIG_NEEDED, &cam->flags);
	else
		clear_bit(CF_CONFIG_NEEDED, &cam->flags);
}

/* ------------------------------------------------------------------- */
/*
 * Make the controller start grabbing images. Everything must
 * be set up before doing this.
 */
static void mcam_ctlr_start(struct mcam_camera *cam)
{
	/*
	 * set_bit performs a read, so no other barrier should be
	 * needed here.
	 */
	mcam_reg_set_bit(cam, REG_CTRL0, C0_ENABLE);
}

static void mcam_ctlr_stop(struct mcam_camera *cam)
{
	mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
}

static void mcam_enable_mipi(struct mcam_camera *mcam)
{
	/* MIPI mode: program the DPHY values and enable the MIPI block */
	cam_dbg(mcam, "camera: DPHY3=0x%x, DPHY5=0x%x, DPHY6=0x%x\n",
			mcam->dphy[0], mcam->dphy[1], mcam->dphy[2]);
	mcam_reg_write(mcam, REG_CSI2_DPHY3, mcam->dphy[0]);
	mcam_reg_write(mcam, REG_CSI2_DPHY5, mcam->dphy[1]);
	mcam_reg_write(mcam, REG_CSI2_DPHY6, mcam->dphy[2]);

	if (!mcam->mipi_enabled) {
		if (mcam->lane > 4 || mcam->lane <= 0) {
			cam_warn(mcam, "lane number error\n");
			mcam->lane = 1;	/* set the default value */
		}
		/*
		 * 0x41 activates 1 lane
		 * 0x43 activates 2 lanes
		 * 0x45 activates 3 lanes (never happens)
		 * 0x47 activates 4 lanes
		 */
		mcam_reg_write(mcam, REG_CSI2_CTRL0,
			CSI2_C0_MIPI_EN | CSI2_C0_ACT_LANE(mcam->lane));
		mcam_reg_write(mcam, REG_CLKCTRL,
			(mcam->mclk_src << 29) | mcam->mclk_div);

		mcam->mipi_enabled = true;
	}
}

static void mcam_disable_mipi(struct mcam_camera *mcam)
{
	/* Using parallel mode, or disabling MIPI */
	mcam_reg_write(mcam, REG_CSI2_CTRL0, 0x0);
	mcam_reg_write(mcam, REG_CSI2_DPHY3, 0x0);
	mcam_reg_write(mcam, REG_CSI2_DPHY5, 0x0);
	mcam_reg_write(mcam, REG_CSI2_DPHY6, 0x0);
	mcam->mipi_enabled = false;
}
/* ------------------------------------------------------------------- */

#ifdef MCAM_MODE_VMALLOC
/*
 * Code specific to the vmalloc buffer mode.
 */

/*
 * Allocate in-kernel DMA buffers for vmalloc mode.
 */
static int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
{
	int i;

	mcam_set_config_needed(cam, 1);
	if (loadtime)
		cam->dma_buf_size = dma_buf_size;
	else
		cam->dma_buf_size = cam->pix_format.sizeimage;
	if (n_dma_bufs > 3)
		n_dma_bufs = 3;

	cam->nbufs = 0;
	for (i = 0; i < n_dma_bufs; i++) {
		cam->dma_bufs[i] = dma_alloc_coherent(cam->dev,
				cam->dma_buf_size, cam->dma_handles + i,
				GFP_KERNEL);
		if (cam->dma_bufs[i] == NULL) {
			cam_warn(cam, "Failed to allocate DMA buffer\n");
			break;
		}
		(cam->nbufs)++;
	}

	switch (cam->nbufs) {
	case 1:
		dma_free_coherent(cam->dev, cam->dma_buf_size,
				cam->dma_bufs[0], cam->dma_handles[0]);
		cam->nbufs = 0;
		/* fall through - a single buffer is no better than none */
	case 0:
		cam_err(cam, "Insufficient DMA buffers, cannot operate\n");
		return -ENOMEM;
	case 2:
		if (n_dma_bufs > 2)
			cam_warn(cam, "Will limp along with only 2 buffers\n");
		break;
	}
	return 0;
}

static void mcam_free_dma_bufs(struct mcam_camera *cam)
{
	int i;

	for (i = 0; i < cam->nbufs; i++) {
		dma_free_coherent(cam->dev, cam->dma_buf_size,
				cam->dma_bufs[i], cam->dma_handles[i]);
		cam->dma_bufs[i] = NULL;
	}
	cam->nbufs = 0;
}

/*
 * Set up DMA buffers when operating in vmalloc mode
 */
static void mcam_ctlr_dma_vmalloc(struct mcam_camera *cam)
{
	/*
	 * Store the first two Y buffers (we aren't supporting
	 * planar formats for now, so no UV bufs). Then either
	 * set the third if it exists, or tell the controller
	 * to just use two.
	 */
	mcam_reg_write(cam, REG_Y0BAR, cam->dma_handles[0]);
	mcam_reg_write(cam, REG_Y1BAR, cam->dma_handles[1]);
	if (cam->nbufs > 2) {
		mcam_reg_write(cam, REG_Y2BAR, cam->dma_handles[2]);
		mcam_reg_clear_bit(cam, REG_CTRL1, C1_TWOBUFS);
	} else
		mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
	if (cam->chip_id == MCAM_CAFE)
		mcam_reg_write(cam, REG_UBAR, 0); /* 32 bits only */
}

/*
 * Copy data out to user space in the vmalloc case
 */
static void mcam_frame_tasklet(unsigned long data)
{
	struct mcam_camera *cam = (struct mcam_camera *) data;
	int i;
	unsigned long flags;
	struct mcam_vb_buffer *buf;

	spin_lock_irqsave(&cam->dev_lock, flags);
	for (i = 0; i < cam->nbufs; i++) {
		int bufno = cam->next_buf;

		if (cam->state != S_STREAMING || bufno < 0)
			break;	/* I/O got stopped */
		if (++(cam->next_buf) >= cam->nbufs)
			cam->next_buf = 0;
		if (!test_bit(bufno, &cam->flags))
			continue;
		if (list_empty(&cam->buffers)) {
			cam->frame_state.singles++;
			break;	/* Leave it valid, hope for better later */
		}
		cam->frame_state.delivered++;
		clear_bit(bufno, &cam->flags);
		buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
				queue);
		list_del_init(&buf->queue);
		/*
		 * Drop the lock during the big copy. This *should* be safe...
		 */
		spin_unlock_irqrestore(&cam->dev_lock, flags);
		memcpy(vb2_plane_vaddr(&buf->vb_buf, 0), cam->dma_bufs[bufno],
				cam->pix_format.sizeimage);
		mcam_buffer_done(cam, bufno, &buf->vb_buf);
		spin_lock_irqsave(&cam->dev_lock, flags);
	}
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}

/*
 * Make sure our allocated buffers are up to the task.
 */
static int mcam_check_dma_buffers(struct mcam_camera *cam)
{
	if (cam->nbufs > 0 && cam->dma_buf_size < cam->pix_format.sizeimage)
		mcam_free_dma_bufs(cam);
	if (cam->nbufs == 0)
		return mcam_alloc_dma_bufs(cam, 0);
	return 0;
}

static void mcam_vmalloc_done(struct mcam_camera *cam, int frame)
{
	tasklet_schedule(&cam->s_tasklet);
}

#else /* MCAM_MODE_VMALLOC */

static inline int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
{
	return 0;
}

static inline void mcam_free_dma_bufs(struct mcam_camera *cam)
{
	return;
}

static inline int mcam_check_dma_buffers(struct mcam_camera *cam)
{
	return 0;
}

#endif /* MCAM_MODE_VMALLOC */
#ifdef MCAM_MODE_DMA_CONTIG
/* ---------------------------------------------------------------------- */
/*
 * DMA-contiguous code.
 */

/*
 * Set up a contiguous buffer for the given frame. Here also is where
 * the underrun strategy is set: if there is no buffer available, reuse
 * the buffer from the other BAR and set the CF_SINGLE_BUFFER flag to
 * keep the interrupt handler from giving that buffer back to user
 * space. In this way, we always have a buffer to DMA to and don't
 * have to try to play games stopping and restarting the controller.
 */
static void mcam_set_contig_buffer(struct mcam_camera *cam, int frame)
{
	struct mcam_vb_buffer *buf;

	/*
	 * If there are no available buffers, go into single mode
	 */
	if (list_empty(&cam->buffers)) {
		buf = cam->vb_bufs[frame ^ 0x1];
		set_bit(CF_SINGLE_BUFFER, &cam->flags);
		cam->frame_state.singles++;
	} else {
		/*
		 * OK, we have a buffer we can use.
		 */
		buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
				queue);
		list_del_init(&buf->queue);
		clear_bit(CF_SINGLE_BUFFER, &cam->flags);
	}
	cam->vb_bufs[frame] = buf;
	mcam_reg_write(cam, frame == 0 ? REG_Y0BAR : REG_Y1BAR,
			vb2_dma_contig_plane_dma_addr(&buf->vb_buf, 0));
}

/*
 * Initial B_DMA_contig setup.
 */
static void mcam_ctlr_dma_contig(struct mcam_camera *cam)
{
	mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
	cam->nbufs = 2;
	mcam_set_contig_buffer(cam, 0);
	mcam_set_contig_buffer(cam, 1);
}

/*
 * Frame completion handling.
 */
static void mcam_dma_contig_done(struct mcam_camera *cam, int frame)
{
	struct mcam_vb_buffer *buf = cam->vb_bufs[frame];

	if (!test_bit(CF_SINGLE_BUFFER, &cam->flags)) {
		cam->frame_state.delivered++;
		mcam_buffer_done(cam, frame, &buf->vb_buf);
	}
	mcam_set_contig_buffer(cam, frame);
}

#endif /* MCAM_MODE_DMA_CONTIG */
#ifdef MCAM_MODE_DMA_SG
/* ---------------------------------------------------------------------- */
/*
 * Scatter/gather-specific code.
 */

/*
 * Set up the next buffer for S/G I/O; caller should be sure that
 * the controller is stopped and a buffer is available.
 */
static void mcam_sg_next_buffer(struct mcam_camera *cam)
{
	struct mcam_vb_buffer *buf;

	buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);
	list_del_init(&buf->queue);
	/*
	 * Very Bad Not Good Things happen if you don't clear
	 * C1_DESC_ENA before making any descriptor changes.
	 */
	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_ENA);
	mcam_reg_write(cam, REG_DMA_DESC_Y, buf->dma_desc_pa);
	mcam_reg_write(cam, REG_DESC_LEN_Y,
			buf->dma_desc_nent*sizeof(struct mcam_dma_desc));
	mcam_reg_write(cam, REG_DESC_LEN_U, 0);
	mcam_reg_write(cam, REG_DESC_LEN_V, 0);
	mcam_reg_set_bit(cam, REG_CTRL1, C1_DESC_ENA);
	cam->vb_bufs[0] = buf;
}

/*
 * Initial B_DMA_sg setup
 */
static void mcam_ctlr_dma_sg(struct mcam_camera *cam)
{
	/*
	 * The list-empty condition can hit us at resume time
	 * if the buffer list was empty when the system was suspended.
	 */
	if (list_empty(&cam->buffers)) {
		set_bit(CF_SG_RESTART, &cam->flags);
		return;
	}

	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_3WORD);
	mcam_sg_next_buffer(cam);
	cam->nbufs = 3;
}

/*
 * Frame completion with S/G is trickier. We can't muck with
 * a descriptor chain on the fly, since the controller buffers it
 * internally. So we have to actually stop and restart; Marvell
 * says this is the way to do it.
 *
 * Of course, stopping is easier said than done; experience shows
 * that the controller can start a frame *after* C0_ENABLE has been
 * cleared. So when running in S/G mode, the controller is "stopped"
 * on receipt of the start-of-frame interrupt. That means we can
 * safely change the DMA descriptor array here and restart things
 * (assuming there's another buffer waiting to go).
 */
static void mcam_dma_sg_done(struct mcam_camera *cam, int frame)
{
	struct mcam_vb_buffer *buf = cam->vb_bufs[0];

	/*
	 * If we're no longer supposed to be streaming, don't do anything.
	 */
	if (cam->state != S_STREAMING)
		return;
	/*
	 * If we have another buffer available, put it in and
	 * restart the engine.
	 */
	if (!list_empty(&cam->buffers)) {
		mcam_sg_next_buffer(cam);
		mcam_ctlr_start(cam);
	/*
	 * Otherwise set CF_SG_RESTART and the controller will
	 * be restarted once another buffer shows up.
	 */
	} else {
		set_bit(CF_SG_RESTART, &cam->flags);
		cam->frame_state.singles++;
		cam->vb_bufs[0] = NULL;
	}
	/*
	 * Now we can give the completed frame back to user space.
	 */
	cam->frame_state.delivered++;
	mcam_buffer_done(cam, frame, &buf->vb_buf);
}

/*
 * Scatter/gather mode requires stopping the controller between
 * frames so we can put in a new DMA descriptor array. If no new
 * buffer exists at frame completion, the controller is left stopped;
 * this function is charged with getting things going again.
 */
static void mcam_sg_restart(struct mcam_camera *cam)
{
	mcam_ctlr_dma_sg(cam);
	mcam_ctlr_start(cam);
	clear_bit(CF_SG_RESTART, &cam->flags);
}

#else /* MCAM_MODE_DMA_SG */

static inline void mcam_sg_restart(struct mcam_camera *cam)
{
	return;
}

#endif /* MCAM_MODE_DMA_SG */

/* ---------------------------------------------------------------------- */
/*
 * Buffer-mode-independent controller code.
 */

/*
 * Image format setup
 */
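/*
 * Worked example (a sketch, assuming the default VGA YUYV format): with
 * bytesperline = 640 * 2 = 1280, the horizontal field of REG_IMGSIZE holds
 * 1280 and the vertical field holds 480; the pitch written to REG_IMGPITCH
 * is likewise 1280 (its low two bits are dropped by the mask).
 */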
static void mcam_ctlr_image(struct mcam_camera *cam)
{
	int imgsz;
	struct v4l2_pix_format *fmt = &cam->pix_format;

	imgsz = ((fmt->height << IMGSZ_V_SHIFT) & IMGSZ_V_MASK) |
		(fmt->bytesperline & IMGSZ_H_MASK);
	mcam_reg_write(cam, REG_IMGSIZE, imgsz);
	mcam_reg_write(cam, REG_IMGOFFSET, 0);
	/* YPITCH just drops the last two bits */
	mcam_reg_write_mask(cam, REG_IMGPITCH, fmt->bytesperline,
			IMGP_YP_MASK);
	/*
	 * Tell the controller about the image format we are using.
	 */
	switch (cam->pix_format.pixelformat) {
	case V4L2_PIX_FMT_YUYV:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_YUV|C0_YUV_PACKED|C0_YUVE_YUYV,
			C0_DF_MASK);
		break;
	case V4L2_PIX_FMT_RGB444:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_RGB|C0_RGBF_444|C0_RGB4_XRGB,
			C0_DF_MASK);
		/* Alpha value? */
		break;
	case V4L2_PIX_FMT_RGB565:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_RGB|C0_RGBF_565|C0_RGB5_BGGR,
			C0_DF_MASK);
		break;
	default:
		cam_err(cam, "Unknown format %x\n", cam->pix_format.pixelformat);
		break;
	}
	/*
	 * Make sure it knows we want to use hsync/vsync.
	 */
	mcam_reg_write_mask(cam, REG_CTRL0, C0_SIF_HVSYNC,
			C0_SIFM_MASK);
	/*
	 * This field controls the generation of EOF (DVP only)
	 */
	if (cam->bus_type != V4L2_MBUS_CSI2)
		mcam_reg_set_bit(cam, REG_CTRL0,
				C0_EOF_VSYNC | C0_VEDGE_CTRL);
}

/*
 * Configure the controller for operation; caller holds the
 * device mutex.
 */
static int mcam_ctlr_configure(struct mcam_camera *cam)
{
	unsigned long flags;

	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_SG_RESTART, &cam->flags);
	cam->dma_setup(cam);
	mcam_ctlr_image(cam);
	mcam_set_config_needed(cam, 0);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}

static void mcam_ctlr_irq_enable(struct mcam_camera *cam)
{
	/*
	 * Clear any pending interrupts, since we do not
	 * expect to have I/O active prior to enabling.
	 */
	mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS);
	mcam_reg_set_bit(cam, REG_IRQMASK, FRAMEIRQS);
}

static void mcam_ctlr_irq_disable(struct mcam_camera *cam)
{
	mcam_reg_clear_bit(cam, REG_IRQMASK, FRAMEIRQS);
}

static void mcam_ctlr_init(struct mcam_camera *cam)
{
	unsigned long flags;

	spin_lock_irqsave(&cam->dev_lock, flags);
	/*
	 * Make sure it's not powered down.
	 */
	mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
	/*
	 * Turn off the enable bit. It sure should be off anyway,
	 * but it's good to be sure.
	 */
	mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
	/*
	 * Clock the sensor appropriately. Controller clock should
	 * be 48MHz, sensor "typical" value is half that.
	 */
	mcam_reg_write_mask(cam, REG_CLKCTRL, 2, CLK_DIV_MASK);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}

/*
 * Stop the controller, and don't return until we're really sure that no
 * further DMA is going on.
 */
static void mcam_ctlr_stop_dma(struct mcam_camera *cam)
{
	unsigned long flags;

	/*
	 * Theory: stop the camera controller (whether it is operating
	 * or not). Delay briefly just in case we race with the SOF
	 * interrupt, then wait until no DMA is active.
	 */
	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_SG_RESTART, &cam->flags);
	mcam_ctlr_stop(cam);
	cam->state = S_IDLE;
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	/*
	 * This is a brutally long sleep, but experience shows that
	 * it can take the controller a while to get the message that
	 * it needs to stop grabbing frames. In particular, we can
	 * sometimes (on mmp) get a frame at the end WITHOUT the
	 * start-of-frame indication.
	 */
	msleep(150);
	if (test_bit(CF_DMA_ACTIVE, &cam->flags))
		cam_err(cam, "Timeout waiting for DMA to end\n");
		/* This would be bad news - what now? */
	spin_lock_irqsave(&cam->dev_lock, flags);
	mcam_ctlr_irq_disable(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}
/*
 * Power up and down.
 */
static int mcam_ctlr_power_up(struct mcam_camera *cam)
{
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&cam->dev_lock, flags);
	ret = cam->plat_power_up(cam);
	if (ret) {
		spin_unlock_irqrestore(&cam->dev_lock, flags);
		return ret;
	}
	mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	msleep(5); /* Just to be sure */
	return 0;
}

static void mcam_ctlr_power_down(struct mcam_camera *cam)
{
	unsigned long flags;

	spin_lock_irqsave(&cam->dev_lock, flags);
	/*
	 * School of hard knocks department: be sure we do any register
	 * twiddling on the controller *before* calling the platform
	 * power down routine.
	 */
	mcam_reg_set_bit(cam, REG_CTRL1, C1_PWRDWN);
	cam->plat_power_down(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}

/* -------------------------------------------------------------------- */
/*
 * Communications with the sensor.
 */
static int __mcam_cam_reset(struct mcam_camera *cam)
{
	return sensor_call(cam, core, reset, 0);
}

/*
 * We have found the sensor on the i2c. Let's try to have a
 * conversation.
 */
static int mcam_cam_init(struct mcam_camera *cam)
{
	int ret;

	mutex_lock(&cam->s_mutex);
	if (cam->state != S_NOTREADY)
		cam_warn(cam, "Cam init with device in funky state %d",
				cam->state);
	ret = __mcam_cam_reset(cam);
	/* Get/set parameters? */
	cam->state = S_IDLE;
	mcam_ctlr_power_down(cam);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

/*
 * Configure the sensor to match the parameters we have. Caller should
 * hold s_mutex.
 */
static int mcam_cam_set_flip(struct mcam_camera *cam)
{
	struct v4l2_control ctrl;

	memset(&ctrl, 0, sizeof(ctrl));
	ctrl.id = V4L2_CID_VFLIP;
	ctrl.value = flip;
	return sensor_call(cam, core, s_ctrl, &ctrl);
}

static int mcam_cam_configure(struct mcam_camera *cam)
{
	struct v4l2_mbus_framefmt mbus_fmt;
	int ret;

	v4l2_fill_mbus_format(&mbus_fmt, &cam->pix_format, cam->mbus_code);
	ret = sensor_call(cam, core, init, 0);
	if (ret == 0)
		ret = sensor_call(cam, video, s_mbus_fmt, &mbus_fmt);
	/*
	 * OV7670 does weird things if flip is set *before* format...
	 */
	ret += mcam_cam_set_flip(cam);
	return ret;
}

/*
 * Get everything ready, and start grabbing frames.
 */
static int mcam_read_setup(struct mcam_camera *cam)
{
	int ret;
	unsigned long flags;

	/*
	 * Configuration. If we still don't have DMA buffers,
	 * make one last, desperate attempt.
	 */
	if (cam->buffer_mode == B_vmalloc && cam->nbufs == 0 &&
			mcam_alloc_dma_bufs(cam, 0))
		return -ENOMEM;

	if (mcam_needs_config(cam)) {
		mcam_cam_configure(cam);
		ret = mcam_ctlr_configure(cam);
		if (ret)
			return ret;
	}

	/*
	 * Turn it loose.
	 */
	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_DMA_ACTIVE, &cam->flags);
	mcam_reset_buffers(cam);
	/*
	 * Update CSI2_DPHY value
	 */
	if (cam->calc_dphy)
		cam->calc_dphy(cam);
	cam_dbg(cam, "camera: DPHY sets: dphy3=0x%x, dphy5=0x%x, dphy6=0x%x\n",
			cam->dphy[0], cam->dphy[1], cam->dphy[2]);
	if (cam->bus_type == V4L2_MBUS_CSI2)
		mcam_enable_mipi(cam);
	else
		mcam_disable_mipi(cam);
	mcam_ctlr_irq_enable(cam);
	cam->state = S_STREAMING;
	if (!test_bit(CF_SG_RESTART, &cam->flags))
		mcam_ctlr_start(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}
/* ----------------------------------------------------------------------- */
/*
 * Videobuf2 interface code.
 */
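/*
 * For orientation, the rough order in which vb2 drives the callbacks below
 * during a capture session (a sketch based on the generic videobuf2 flow,
 * not anything specific to this driver):
 *
 *	queue_setup -> (buf_init/buf_prepare, S/G mode only) -> buf_queue
 *	  -> start_streaming -> frames returned via mcam_buffer_done()
 *	  -> stop_streaming -> (buf_cleanup, S/G mode only)
 */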
static int mcam_vb_queue_setup(struct vb2_queue *vq,
		const struct v4l2_format *fmt, unsigned int *nbufs,
		unsigned int *num_planes, unsigned int sizes[],
		void *alloc_ctxs[])
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);
	int minbufs = (cam->buffer_mode == B_DMA_contig) ? 3 : 2;

	sizes[0] = cam->pix_format.sizeimage;
	*num_planes = 1; /* Someday we have to support planar formats... */
	if (*nbufs < minbufs)
		*nbufs = minbufs;
	if (cam->buffer_mode == B_DMA_contig)
		alloc_ctxs[0] = cam->vb_alloc_ctx;
	return 0;
}

static void mcam_vb_buf_queue(struct vb2_buffer *vb)
{
	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	unsigned long flags;
	int start;

	spin_lock_irqsave(&cam->dev_lock, flags);
	start = (cam->state == S_BUFWAIT) && !list_empty(&cam->buffers);
	list_add(&mvb->queue, &cam->buffers);
	if (cam->state == S_STREAMING && test_bit(CF_SG_RESTART, &cam->flags))
		mcam_sg_restart(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	if (start)
		mcam_read_setup(cam);
}

/*
 * vb2 uses these to release the mutex when waiting in dqbuf. I'm
 * not actually sure we need to do this (I'm not sure that vb2_dqbuf() needs
 * to be called with the mutex held), but better safe than sorry.
 */
static void mcam_vb_wait_prepare(struct vb2_queue *vq)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);

	mutex_unlock(&cam->s_mutex);
}

static void mcam_vb_wait_finish(struct vb2_queue *vq)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);

	mutex_lock(&cam->s_mutex);
}

/*
 * These need to be called with the mutex held from vb2
 */
static int mcam_vb_start_streaming(struct vb2_queue *vq, unsigned int count)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);

	if (cam->state != S_IDLE) {
		INIT_LIST_HEAD(&cam->buffers);
		return -EINVAL;
	}
	cam->sequence = 0;
	/*
	 * Videobuf2 sneakily hoards all the buffers and won't
	 * give them to us until *after* streaming starts. But
	 * we can't actually start streaming until we have a
	 * destination. So go into a wait state and hope they
	 * give us buffers soon.
	 */
	if (cam->buffer_mode != B_vmalloc && list_empty(&cam->buffers)) {
		cam->state = S_BUFWAIT;
		return 0;
	}
	return mcam_read_setup(cam);
}

static int mcam_vb_stop_streaming(struct vb2_queue *vq)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);
	unsigned long flags;

	if (cam->state == S_BUFWAIT) {
		/* They never gave us buffers */
		cam->state = S_IDLE;
		return 0;
	}
	if (cam->state != S_STREAMING)
		return -EINVAL;
	mcam_ctlr_stop_dma(cam);
	/*
	 * Reset the CCIC PHY after stopping streaming,
	 * otherwise, the CCIC may be unstable.
	 */
	if (cam->ctlr_reset)
		cam->ctlr_reset(cam);
	/*
	 * VB2 reclaims the buffers, so we need to forget
	 * about them.
	 */
	spin_lock_irqsave(&cam->dev_lock, flags);
	INIT_LIST_HEAD(&cam->buffers);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}

static const struct vb2_ops mcam_vb2_ops = {
	.queue_setup		= mcam_vb_queue_setup,
	.buf_queue		= mcam_vb_buf_queue,
	.start_streaming	= mcam_vb_start_streaming,
	.stop_streaming		= mcam_vb_stop_streaming,
	.wait_prepare		= mcam_vb_wait_prepare,
	.wait_finish		= mcam_vb_wait_finish,
};

#ifdef MCAM_MODE_DMA_SG
/*
 * Scatter/gather mode uses all of the above functions plus a
 * few extras to deal with DMA mapping.
 */
static int mcam_vb_sg_buf_init(struct vb2_buffer *vb)
{
	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;

	mvb->dma_desc = dma_alloc_coherent(cam->dev,
			ndesc * sizeof(struct mcam_dma_desc),
			&mvb->dma_desc_pa, GFP_KERNEL);
	if (mvb->dma_desc == NULL) {
		cam_err(cam, "Unable to get DMA descriptor array\n");
		return -ENOMEM;
	}
	return 0;
}

static int mcam_vb_sg_buf_prepare(struct vb2_buffer *vb)
{
	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	struct vb2_dma_sg_desc *sgd = vb2_dma_sg_plane_desc(vb, 0);
	struct mcam_dma_desc *desc = mvb->dma_desc;
	struct scatterlist *sg;
	int i;

	mvb->dma_desc_nent = dma_map_sg(cam->dev, sgd->sglist, sgd->num_pages,
			DMA_FROM_DEVICE);
	if (mvb->dma_desc_nent <= 0)
		return -EIO; /* Not sure what's right here */
	for_each_sg(sgd->sglist, sg, mvb->dma_desc_nent, i) {
		desc->dma_addr = sg_dma_address(sg);
		desc->segment_len = sg_dma_len(sg);
		desc++;
	}
	return 0;
}

static int mcam_vb_sg_buf_finish(struct vb2_buffer *vb)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	struct vb2_dma_sg_desc *sgd = vb2_dma_sg_plane_desc(vb, 0);

	dma_unmap_sg(cam->dev, sgd->sglist, sgd->num_pages, DMA_FROM_DEVICE);
	return 0;
}

static void mcam_vb_sg_buf_cleanup(struct vb2_buffer *vb)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	struct mcam_vb_buffer *mvb = vb_to_mvb(vb);
	int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;

	dma_free_coherent(cam->dev, ndesc * sizeof(struct mcam_dma_desc),
			mvb->dma_desc, mvb->dma_desc_pa);
}

static const struct vb2_ops mcam_vb2_sg_ops = {
	.queue_setup		= mcam_vb_queue_setup,
	.buf_init		= mcam_vb_sg_buf_init,
	.buf_prepare		= mcam_vb_sg_buf_prepare,
	.buf_queue		= mcam_vb_buf_queue,
	.buf_finish		= mcam_vb_sg_buf_finish,
	.buf_cleanup		= mcam_vb_sg_buf_cleanup,
	.start_streaming	= mcam_vb_start_streaming,
	.stop_streaming		= mcam_vb_stop_streaming,
	.wait_prepare		= mcam_vb_wait_prepare,
	.wait_finish		= mcam_vb_wait_finish,
};

#endif /* MCAM_MODE_DMA_SG */
static int mcam_setup_vb2(struct mcam_camera *cam)
{
	struct vb2_queue *vq = &cam->vb_queue;

	memset(vq, 0, sizeof(*vq));
	vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	vq->drv_priv = cam;
	INIT_LIST_HEAD(&cam->buffers);
	switch (cam->buffer_mode) {
	case B_DMA_contig:
#ifdef MCAM_MODE_DMA_CONTIG
		vq->ops = &mcam_vb2_ops;
		vq->mem_ops = &vb2_dma_contig_memops;
		cam->vb_alloc_ctx = vb2_dma_contig_init_ctx(cam->dev);
		vq->io_modes = VB2_MMAP | VB2_USERPTR;
		cam->dma_setup = mcam_ctlr_dma_contig;
		cam->frame_complete = mcam_dma_contig_done;
#endif
		break;
	case B_DMA_sg:
#ifdef MCAM_MODE_DMA_SG
		vq->ops = &mcam_vb2_sg_ops;
		vq->mem_ops = &vb2_dma_sg_memops;
		vq->io_modes = VB2_MMAP | VB2_USERPTR;
		cam->dma_setup = mcam_ctlr_dma_sg;
		cam->frame_complete = mcam_dma_sg_done;
#endif
		break;
	case B_vmalloc:
#ifdef MCAM_MODE_VMALLOC
		tasklet_init(&cam->s_tasklet, mcam_frame_tasklet,
				(unsigned long) cam);
		vq->ops = &mcam_vb2_ops;
		vq->mem_ops = &vb2_vmalloc_memops;
		vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
		vq->io_modes = VB2_MMAP;
		cam->dma_setup = mcam_ctlr_dma_vmalloc;
		cam->frame_complete = mcam_vmalloc_done;
#endif
		break;
	}
	return vb2_queue_init(vq);
}

static void mcam_cleanup_vb2(struct mcam_camera *cam)
{
	vb2_queue_release(&cam->vb_queue);
#ifdef MCAM_MODE_DMA_CONTIG
	if (cam->buffer_mode == B_DMA_contig)
		vb2_dma_contig_cleanup_ctx(cam->vb_alloc_ctx);
#endif
}
/* ---------------------------------------------------------------------- */
/*
 * The long list of V4L2 ioctl() operations.
 */
static int mcam_vidioc_streamon(struct file *filp, void *priv,
		enum v4l2_buf_type type)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_streamon(&cam->vb_queue, type);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_streamoff(struct file *filp, void *priv,
		enum v4l2_buf_type type)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_streamoff(&cam->vb_queue, type);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_reqbufs(struct file *filp, void *priv,
		struct v4l2_requestbuffers *req)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_reqbufs(&cam->vb_queue, req);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_querybuf(struct file *filp, void *priv,
		struct v4l2_buffer *buf)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_querybuf(&cam->vb_queue, buf);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_qbuf(struct file *filp, void *priv,
		struct v4l2_buffer *buf)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_qbuf(&cam->vb_queue, buf);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_dqbuf(struct file *filp, void *priv,
		struct v4l2_buffer *buf)
{
	struct mcam_camera *cam = filp->private_data;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = vb2_dqbuf(&cam->vb_queue, buf, filp->f_flags & O_NONBLOCK);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_querycap(struct file *file, void *priv,
		struct v4l2_capability *cap)
{
	strcpy(cap->driver, "marvell_ccic");
	strcpy(cap->card, "marvell_ccic");
	cap->version = 1;
	cap->capabilities = V4L2_CAP_VIDEO_CAPTURE |
		V4L2_CAP_READWRITE | V4L2_CAP_STREAMING;
	return 0;
}

static int mcam_vidioc_enum_fmt_vid_cap(struct file *filp,
		void *priv, struct v4l2_fmtdesc *fmt)
{
	if (fmt->index >= N_MCAM_FMTS)
		return -EINVAL;
	strlcpy(fmt->description, mcam_formats[fmt->index].desc,
			sizeof(fmt->description));
	fmt->pixelformat = mcam_formats[fmt->index].pixelformat;
	return 0;
}

static int mcam_vidioc_try_fmt_vid_cap(struct file *filp, void *priv,
		struct v4l2_format *fmt)
{
	struct mcam_camera *cam = priv;
	struct mcam_format_struct *f;
	struct v4l2_pix_format *pix = &fmt->fmt.pix;
	struct v4l2_mbus_framefmt mbus_fmt;
	int ret;

	f = mcam_find_format(pix->pixelformat);
	pix->pixelformat = f->pixelformat;
	v4l2_fill_mbus_format(&mbus_fmt, pix, f->mbus_code);
	mutex_lock(&cam->s_mutex);
	ret = sensor_call(cam, video, try_mbus_fmt, &mbus_fmt);
	mutex_unlock(&cam->s_mutex);
	v4l2_fill_pix_format(pix, &mbus_fmt);
	pix->bytesperline = pix->width * f->bpp;
	pix->sizeimage = pix->height * pix->bytesperline;
	return ret;
}

static int mcam_vidioc_s_fmt_vid_cap(struct file *filp, void *priv,
		struct v4l2_format *fmt)
{
	struct mcam_camera *cam = priv;
	struct mcam_format_struct *f;
	int ret;

	/*
	 * Can't do anything if the device is not idle.
	 * Also can't if there are streaming buffers in place.
	 */
	if (cam->state != S_IDLE || cam->vb_queue.num_buffers > 0)
		return -EBUSY;

	f = mcam_find_format(fmt->fmt.pix.pixelformat);

	/*
	 * See if the formatting works in principle.
	 */
	ret = mcam_vidioc_try_fmt_vid_cap(filp, priv, fmt);
	if (ret)
		return ret;
	/*
	 * Now we start to change things for real, so let's do it
	 * under lock.
	 */
	mutex_lock(&cam->s_mutex);
	cam->pix_format = fmt->fmt.pix;
	cam->mbus_code = f->mbus_code;

	/*
	 * Make sure we have appropriate DMA buffers.
	 */
	if (cam->buffer_mode == B_vmalloc) {
		ret = mcam_check_dma_buffers(cam);
		if (ret)
			goto out;
	}
	mcam_set_config_needed(cam, 1);
out:
	mutex_unlock(&cam->s_mutex);
	return ret;
}

/*
 * Return our stored notion of how the camera is/should be configured.
 * The V4L2 spec wants us to be smarter, and actually get this from
 * the camera (and not mess with it at open time). Someday.
 */
static int mcam_vidioc_g_fmt_vid_cap(struct file *filp, void *priv,
		struct v4l2_format *f)
{
	struct mcam_camera *cam = priv;

	f->fmt.pix = cam->pix_format;
	return 0;
}

/*
 * We only have one input - the sensor - so minimize the nonsense here.
 */
static int mcam_vidioc_enum_input(struct file *filp, void *priv,
		struct v4l2_input *input)
{
	if (input->index != 0)
		return -EINVAL;

	input->type = V4L2_INPUT_TYPE_CAMERA;
	input->std = V4L2_STD_ALL; /* Not sure what should go here */
	strcpy(input->name, "Camera");
	return 0;
}

static int mcam_vidioc_g_input(struct file *filp, void *priv, unsigned int *i)
{
	*i = 0;
	return 0;
}

static int mcam_vidioc_s_input(struct file *filp, void *priv, unsigned int i)
{
	if (i != 0)
		return -EINVAL;
	return 0;
}

/* from vivi.c */
static int mcam_vidioc_s_std(struct file *filp, void *priv, v4l2_std_id a)
{
	return 0;
}

static int mcam_vidioc_g_std(struct file *filp, void *priv, v4l2_std_id *a)
{
	*a = V4L2_STD_NTSC_M;
	return 0;
}

/*
 * G/S_PARM. Most of this is done by the sensor, but we are
 * the level which controls the number of read buffers.
 */
static int mcam_vidioc_g_parm(struct file *filp, void *priv,
		struct v4l2_streamparm *parms)
{
	struct mcam_camera *cam = priv;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = sensor_call(cam, video, g_parm, parms);
	mutex_unlock(&cam->s_mutex);
	parms->parm.capture.readbuffers = n_dma_bufs;
	return ret;
}

static int mcam_vidioc_s_parm(struct file *filp, void *priv,
		struct v4l2_streamparm *parms)
{
	struct mcam_camera *cam = priv;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = sensor_call(cam, video, s_parm, parms);
	mutex_unlock(&cam->s_mutex);
	parms->parm.capture.readbuffers = n_dma_bufs;
	return ret;
}

static int mcam_vidioc_enum_framesizes(struct file *filp, void *priv,
		struct v4l2_frmsizeenum *sizes)
{
	struct mcam_camera *cam = priv;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = sensor_call(cam, video, enum_framesizes, sizes);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

static int mcam_vidioc_enum_frameintervals(struct file *filp, void *priv,
		struct v4l2_frmivalenum *interval)
{
	struct mcam_camera *cam = priv;
	int ret;

	mutex_lock(&cam->s_mutex);
	ret = sensor_call(cam, video, enum_frameintervals, interval);
	mutex_unlock(&cam->s_mutex);
	return ret;
}

#ifdef CONFIG_VIDEO_ADV_DEBUG
static int mcam_vidioc_g_register(struct file *file, void *priv,
		struct v4l2_dbg_register *reg)
{
	struct mcam_camera *cam = priv;

	if (reg->reg > cam->regs_size - 4)
		return -EINVAL;
	reg->val = mcam_reg_read(cam, reg->reg);
	reg->size = 4;
	return 0;
}

static int mcam_vidioc_s_register(struct file *file, void *priv,
		const struct v4l2_dbg_register *reg)
{
	struct mcam_camera *cam = priv;

	if (reg->reg > cam->regs_size - 4)
		return -EINVAL;
	mcam_reg_write(cam, reg->reg, reg->val);
	return 0;
}
#endif

static const struct v4l2_ioctl_ops mcam_v4l_ioctl_ops = {
	.vidioc_querycap	= mcam_vidioc_querycap,
	.vidioc_enum_fmt_vid_cap = mcam_vidioc_enum_fmt_vid_cap,
	.vidioc_try_fmt_vid_cap	= mcam_vidioc_try_fmt_vid_cap,
	.vidioc_s_fmt_vid_cap	= mcam_vidioc_s_fmt_vid_cap,
	.vidioc_g_fmt_vid_cap	= mcam_vidioc_g_fmt_vid_cap,
	.vidioc_enum_input	= mcam_vidioc_enum_input,
	.vidioc_g_input		= mcam_vidioc_g_input,
	.vidioc_s_input		= mcam_vidioc_s_input,
	.vidioc_s_std		= mcam_vidioc_s_std,
	.vidioc_g_std		= mcam_vidioc_g_std,
	.vidioc_reqbufs		= mcam_vidioc_reqbufs,
	.vidioc_querybuf	= mcam_vidioc_querybuf,
	.vidioc_qbuf		= mcam_vidioc_qbuf,
	.vidioc_dqbuf		= mcam_vidioc_dqbuf,
	.vidioc_streamon	= mcam_vidioc_streamon,
	.vidioc_streamoff	= mcam_vidioc_streamoff,
	.vidioc_g_parm		= mcam_vidioc_g_parm,
	.vidioc_s_parm		= mcam_vidioc_s_parm,
	.vidioc_enum_framesizes = mcam_vidioc_enum_framesizes,
	.vidioc_enum_frameintervals = mcam_vidioc_enum_frameintervals,
#ifdef CONFIG_VIDEO_ADV_DEBUG
	.vidioc_g_register	= mcam_vidioc_g_register,
	.vidioc_s_register	= mcam_vidioc_s_register,
#endif
};
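/*
 * For orientation, a minimal user-space capture sequence that exercises the
 * handlers above (sketch only, error handling and buffer mapping omitted):
 *
 *	VIDIOC_S_FMT		-> mcam_vidioc_s_fmt_vid_cap()
 *	VIDIOC_REQBUFS		-> mcam_vidioc_reqbufs()
 *	VIDIOC_QBUF (per buffer)-> mcam_vidioc_qbuf()
 *	VIDIOC_STREAMON		-> mcam_vidioc_streamon()
 *	loop: VIDIOC_DQBUF / VIDIOC_QBUF
 *	VIDIOC_STREAMOFF	-> mcam_vidioc_streamoff()
 */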
  1334. /* ---------------------------------------------------------------------- */
  1335. /*
  1336. * Our various file operations.
  1337. */
  1338. static int mcam_v4l_open(struct file *filp)
  1339. {
  1340. struct mcam_camera *cam = video_drvdata(filp);
  1341. int ret = 0;
  1342. filp->private_data = cam;
  1343. cam->frame_state.frames = 0;
  1344. cam->frame_state.singles = 0;
  1345. cam->frame_state.delivered = 0;
  1346. mutex_lock(&cam->s_mutex);
  1347. if (cam->users == 0) {
  1348. ret = mcam_setup_vb2(cam);
  1349. if (ret)
  1350. goto out;
  1351. ret = mcam_ctlr_power_up(cam);
  1352. if (ret)
  1353. goto out;
  1354. __mcam_cam_reset(cam);
  1355. mcam_set_config_needed(cam, 1);
  1356. }
  1357. (cam->users)++;
  1358. out:
  1359. mutex_unlock(&cam->s_mutex);
  1360. return ret;
  1361. }
  1362. static int mcam_v4l_release(struct file *filp)
  1363. {
  1364. struct mcam_camera *cam = filp->private_data;
  1365. cam_dbg(cam, "Release, %d frames, %d singles, %d delivered\n",
  1366. cam->frame_state.frames, cam->frame_state.singles,
  1367. cam->frame_state.delivered);
  1368. mutex_lock(&cam->s_mutex);
  1369. (cam->users)--;
  1370. if (cam->users == 0) {
  1371. mcam_ctlr_stop_dma(cam);
  1372. mcam_cleanup_vb2(cam);
  1373. mcam_disable_mipi(cam);
  1374. mcam_ctlr_power_down(cam);
  1375. if (cam->buffer_mode == B_vmalloc && alloc_bufs_at_read)
  1376. mcam_free_dma_bufs(cam);
  1377. }
  1378. mutex_unlock(&cam->s_mutex);
  1379. return 0;
  1380. }
  1381. static ssize_t mcam_v4l_read(struct file *filp,
  1382. char __user *buffer, size_t len, loff_t *pos)
  1383. {
  1384. struct mcam_camera *cam = filp->private_data;
  1385. int ret;
  1386. mutex_lock(&cam->s_mutex);
  1387. ret = vb2_read(&cam->vb_queue, buffer, len, pos,
  1388. filp->f_flags & O_NONBLOCK);
  1389. mutex_unlock(&cam->s_mutex);
  1390. return ret;
  1391. }
  1392. static unsigned int mcam_v4l_poll(struct file *filp,
  1393. struct poll_table_struct *pt)
  1394. {
  1395. struct mcam_camera *cam = filp->private_data;
  1396. int ret;
  1397. mutex_lock(&cam->s_mutex);
  1398. ret = vb2_poll(&cam->vb_queue, filp, pt);
  1399. mutex_unlock(&cam->s_mutex);
  1400. return ret;
  1401. }
  1402. static int mcam_v4l_mmap(struct file *filp, struct vm_area_struct *vma)
  1403. {
  1404. struct mcam_camera *cam = filp->private_data;
  1405. int ret;
  1406. mutex_lock(&cam->s_mutex);
  1407. ret = vb2_mmap(&cam->vb_queue, vma);
  1408. mutex_unlock(&cam->s_mutex);
  1409. return ret;
  1410. }
static const struct v4l2_file_operations mcam_v4l_fops = {
	.owner = THIS_MODULE,
	.open = mcam_v4l_open,
	.release = mcam_v4l_release,
	.read = mcam_v4l_read,
	.poll = mcam_v4l_poll,
	.mmap = mcam_v4l_mmap,
	.unlocked_ioctl = video_ioctl2,
};

/*
 * This template device holds all of those v4l2 methods; we
 * clone it for specific real devices.
 */
static struct video_device mcam_v4l_template = {
	.name = "mcam",
	.tvnorms = V4L2_STD_NTSC_M,
	.fops = &mcam_v4l_fops,
	.ioctl_ops = &mcam_v4l_ioctl_ops,
	.release = video_device_release_empty,
};

/* ---------------------------------------------------------------------- */
/*
 * Interrupt handler stuff
 */
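/*
 * Called when the controller signals end-of-frame: do the common
 * bookkeeping (flags, sequence number, statistics), then hand the
 * frame to the buffer-mode-specific completion handler.
 */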
static void mcam_frame_complete(struct mcam_camera *cam, int frame)
{
	/*
	 * Basic frame housekeeping.
	 */
	set_bit(frame, &cam->flags);
	clear_bit(CF_DMA_ACTIVE, &cam->flags);
	cam->next_buf = frame;
	cam->buf_seq[frame] = ++(cam->sequence);
	cam->frame_state.frames++;
	/*
	 * "This should never happen"
	 */
	if (cam->state != S_STREAMING)
		return;
	/*
	 * Process the frame and set up the next one.
	 */
	cam->frame_complete(cam, frame);
}

/*
 * The interrupt handler; this needs to be called from the
 * platform irq handler with the lock held.
 */
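/*
 * For illustration only: a platform glue driver's interrupt handler
 * might look roughly like this, assuming dev_lock is the lock referred
 * to above (the wrapper name my_plat_irq is hypothetical):
 *
 *	static irqreturn_t my_plat_irq(int irq, void *data)
 *	{
 *		struct mcam_camera *mcam = data;
 *		unsigned int irqs, handled;
 *
 *		spin_lock(&mcam->dev_lock);
 *		irqs = mcam_reg_read(mcam, REG_IRQSTAT);
 *		handled = mccic_irq(mcam, irqs);
 *		spin_unlock(&mcam->dev_lock);
 *		return IRQ_RETVAL(handled);
 *	}
 */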
int mccic_irq(struct mcam_camera *cam, unsigned int irqs)
{
	unsigned int frame, handled = 0;

	mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS); /* Clear'em all */
	/*
	 * Handle any frame completions.  There really should
	 * not be more than one of these, or we have fallen
	 * far behind.
	 *
	 * When running in S/G mode, the frame number lacks any
	 * real meaning - there's only one descriptor array - but
	 * the controller still picks a different one to signal
	 * each time.
	 */
	for (frame = 0; frame < cam->nbufs; frame++)
		if (irqs & (IRQ_EOF0 << frame)) {
			mcam_frame_complete(cam, frame);
			handled = 1;
			if (cam->buffer_mode == B_DMA_sg)
				break;
		}
	/*
	 * If a frame starts, note that we have DMA active.  This
	 * code assumes that we won't get multiple frame interrupts
	 * at once; may want to rethink that.
	 */
	if (irqs & (IRQ_SOF0 | IRQ_SOF1 | IRQ_SOF2)) {
		set_bit(CF_DMA_ACTIVE, &cam->flags);
		handled = 1;
		if (cam->buffer_mode == B_DMA_sg)
			mcam_ctlr_stop(cam);
	}
	return handled;
}

/* ---------------------------------------------------------------------- */
/*
 * Registration and such.
 */
static struct ov7670_config sensor_cfg = {
	/*
	 * Exclude QCIF mode, because it only captures a tiny portion
	 * of the sensor FOV
	 */
	.min_width = 320,
	.min_height = 240,
};

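/*
 * mccic_register() is called by the platform glue once the controller
 * registers and the i2c adapter are available; it wires the core into
 * V4L2, locates the ov7670 sensor, and registers the video device.
 * Returns zero or a negative error code.
 */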
int mccic_register(struct mcam_camera *cam)
{
	struct i2c_board_info ov7670_info = {
		.type = "ov7670",
		.addr = 0x42 >> 1,
		.platform_data = &sensor_cfg,
	};
	int ret;

	/*
	 * Validate the requested buffer mode.
	 */
	if (buffer_mode >= 0)
		cam->buffer_mode = buffer_mode;
	if (cam->buffer_mode == B_DMA_sg &&
			cam->chip_id == MCAM_CAFE) {
		printk(KERN_ERR "marvell-cam: Cafe can't do S/G I/O, "
			"attempting vmalloc mode instead\n");
		cam->buffer_mode = B_vmalloc;
	}
	if (!mcam_buffer_mode_supported(cam->buffer_mode)) {
		printk(KERN_ERR "marvell-cam: buffer mode %d unsupported\n",
				cam->buffer_mode);
		return -EINVAL;
	}
	/*
	 * Register with V4L
	 */
	ret = v4l2_device_register(cam->dev, &cam->v4l2_dev);
	if (ret)
		return ret;

	mutex_init(&cam->s_mutex);
	cam->state = S_NOTREADY;
	mcam_set_config_needed(cam, 1);
	cam->pix_format = mcam_def_pix_format;
	cam->mbus_code = mcam_def_mbus_code;
	INIT_LIST_HEAD(&cam->buffers);
	mcam_ctlr_init(cam);

	/*
	 * Try to find the sensor.
	 */
	sensor_cfg.clock_speed = cam->clock_speed;
	sensor_cfg.use_smbus = cam->use_smbus;
	cam->sensor_addr = ov7670_info.addr;
	cam->sensor = v4l2_i2c_new_subdev_board(&cam->v4l2_dev,
			cam->i2c_adapter, &ov7670_info, NULL);
	if (cam->sensor == NULL) {
		ret = -ENODEV;
		goto out_unregister;
	}

	ret = mcam_cam_init(cam);
	if (ret)
		goto out_unregister;
	/*
	 * Get the v4l2 setup done.
	 */
	ret = v4l2_ctrl_handler_init(&cam->ctrl_handler, 10);
	if (ret)
		goto out_unregister;
	cam->v4l2_dev.ctrl_handler = &cam->ctrl_handler;

	mutex_lock(&cam->s_mutex);
	cam->vdev = mcam_v4l_template;
	cam->vdev.debug = 0;
	cam->vdev.v4l2_dev = &cam->v4l2_dev;
	video_set_drvdata(&cam->vdev, cam);
	ret = video_register_device(&cam->vdev, VFL_TYPE_GRABBER, -1);
	if (ret)
		goto out;
	/*
	 * If so requested, try to get our DMA buffers now.
	 */
	if (cam->buffer_mode == B_vmalloc && !alloc_bufs_at_read) {
		if (mcam_alloc_dma_bufs(cam, 1))
			cam_warn(cam, "Unable to alloc DMA buffers at load; "
					"will try again later\n");
	}
	mutex_unlock(&cam->s_mutex);
	return 0;

	/*
	 * Error paths only beyond this point; a successfully registered
	 * device keeps its control handler.
	 */
out:
	v4l2_ctrl_handler_free(&cam->ctrl_handler);
	mutex_unlock(&cam->s_mutex);
out_unregister:
	v4l2_device_unregister(&cam->v4l2_dev);
	return ret;
}
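/*
 * mccic_shutdown() undoes mccic_register(); the platform glue calls it
 * when the device is being removed.
 */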
void mccic_shutdown(struct mcam_camera *cam)
{
	/*
	 * If we have no users (and we really, really should have no
	 * users) the device will already be powered down.  Trying to
	 * take it down again will wedge the machine, which is frowned
	 * upon.
	 */
	if (cam->users > 0) {
		cam_warn(cam, "Removing a device with users!\n");
		mcam_ctlr_power_down(cam);
	}
	vb2_queue_release(&cam->vb_queue);
	if (cam->buffer_mode == B_vmalloc)
		mcam_free_dma_bufs(cam);
	video_unregister_device(&cam->vdev);
	v4l2_ctrl_handler_free(&cam->ctrl_handler);
	v4l2_device_unregister(&cam->v4l2_dev);
}

/*
 * Power management
 */
#ifdef CONFIG_PM
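/*
 * Suspend: if somebody has the device open, stop any DMA in flight and
 * power the controller down, but preserve the recorded state so that
 * resume knows whether we were streaming.
 */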
void mccic_suspend(struct mcam_camera *cam)
{
	mutex_lock(&cam->s_mutex);
	if (cam->users > 0) {
		enum mcam_state cstate = cam->state;

		mcam_ctlr_stop_dma(cam);
		mcam_ctlr_power_down(cam);
		cam->state = cstate;
	}
	mutex_unlock(&cam->s_mutex);
}
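/*
 * Resume: power the controller back up if it is in use, force a full
 * reconfiguration, and restart capture if we were streaming when the
 * system went down.
 */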
int mccic_resume(struct mcam_camera *cam)
{
	int ret = 0;

	mutex_lock(&cam->s_mutex);
	if (cam->users > 0) {
		ret = mcam_ctlr_power_up(cam);
		if (ret) {
			mutex_unlock(&cam->s_mutex);
			return ret;
		}
		__mcam_cam_reset(cam);
	} else {
		mcam_ctlr_power_down(cam);
	}
	mutex_unlock(&cam->s_mutex);

	set_bit(CF_CONFIG_NEEDED, &cam->flags);
	if (cam->state == S_STREAMING) {
		/*
		 * If there was a buffer in the DMA engine at suspend
		 * time, put it back on the queue or we'll forget about it.
		 */
		if (cam->buffer_mode == B_DMA_sg && cam->vb_bufs[0])
			list_add(&cam->vb_bufs[0]->queue, &cam->buffers);
		ret = mcam_read_setup(cam);
	}
	return ret;
}
#endif /* CONFIG_PM */