/*
 * drivers/media/platform/samsung/mfc5/s5p_mfc_opr_v5.c
 *
 * Samsung MFC (Multi Function Codec - FIMV) driver
 * This file contains hw related functions.
 *
 * Kamil Debski, Copyright (c) 2011 Samsung Electronics
 * http://www.samsung.com/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include "s5p_mfc_common.h"
#include "s5p_mfc_cmd.h"
#include "s5p_mfc_ctrl.h"
#include "s5p_mfc_debug.h"
#include "s5p_mfc_intr.h"
#include "s5p_mfc_pm.h"
#include "s5p_mfc_opr.h"
#include "s5p_mfc_opr_v5.h"
#include <asm/cacheflush.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/firmware.h>
#include <linux/io.h>
#include <linux/jiffies.h>
#include <linux/mm.h>
#include <linux/sched.h>
#define OFFSETA(x)	(((x) - dev->bank1) >> MFC_OFFSET_SHIFT)
#define OFFSETB(x)	(((x) - dev->bank2) >> MFC_OFFSET_SHIFT)
/* Allocate temporary buffers for decoding */
int s5p_mfc_alloc_dec_temp_buffers_v5(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	struct s5p_mfc_buf_size_v5 *buf_size = dev->variant->buf_size->priv;
	ctx->dsc.alloc = vb2_dma_contig_memops.alloc(
			dev->alloc_ctx[MFC_BANK1_ALLOC_CTX],
			buf_size->dsc);
	if (IS_ERR_VALUE((int)ctx->dsc.alloc)) {
		ctx->dsc.alloc = NULL;
		mfc_err("Allocating DESC buffer failed\n");
		return -ENOMEM;
	}
	ctx->dsc.dma = s5p_mfc_mem_cookie(
			dev->alloc_ctx[MFC_BANK1_ALLOC_CTX], ctx->dsc.alloc);
	BUG_ON(ctx->dsc.dma & ((1 << MFC_BANK1_ALIGN_ORDER) - 1));
	ctx->dsc.virt = vb2_dma_contig_memops.vaddr(ctx->dsc.alloc);
	if (ctx->dsc.virt == NULL) {
		vb2_dma_contig_memops.put(ctx->dsc.alloc);
		ctx->dsc.dma = 0;
		ctx->dsc.alloc = NULL;
		mfc_err("Remapping DESC buffer failed\n");
		return -ENOMEM;
	}
	memset(ctx->dsc.virt, 0, buf_size->dsc);
	wmb();
	return 0;
}
/* Release temporary buffers for decoding */
void s5p_mfc_release_dec_desc_buffer_v5(struct s5p_mfc_ctx *ctx)
{
	if (ctx->dsc.dma) {
		vb2_dma_contig_memops.put(ctx->dsc.alloc);
		ctx->dsc.alloc = NULL;
		ctx->dsc.dma = 0;
	}
}
/* Allocate codec buffers */
int s5p_mfc_alloc_codec_buffers_v5(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	unsigned int enc_ref_y_size = 0;
	unsigned int enc_ref_c_size = 0;
	unsigned int guard_width, guard_height;
	if (ctx->type == MFCINST_DECODER) {
		mfc_debug(2, "Luma size:%d Chroma size:%d MV size:%d\n",
			ctx->luma_size, ctx->chroma_size, ctx->mv_size);
		mfc_debug(2, "Total bufs: %d\n", ctx->total_dpb_count);
	} else if (ctx->type == MFCINST_ENCODER) {
		enc_ref_y_size = ALIGN(ctx->img_width, S5P_FIMV_NV12MT_HALIGN)
			* ALIGN(ctx->img_height, S5P_FIMV_NV12MT_VALIGN);
		enc_ref_y_size = ALIGN(enc_ref_y_size, S5P_FIMV_NV12MT_SALIGN);
		if (ctx->codec_mode == S5P_MFC_CODEC_H264_ENC) {
			enc_ref_c_size = ALIGN(ctx->img_width,
						S5P_FIMV_NV12MT_HALIGN)
					* ALIGN(ctx->img_height >> 1,
						S5P_FIMV_NV12MT_VALIGN);
			enc_ref_c_size = ALIGN(enc_ref_c_size,
						S5P_FIMV_NV12MT_SALIGN);
		} else {
			guard_width = ALIGN(ctx->img_width + 16,
						S5P_FIMV_NV12MT_HALIGN);
			guard_height = ALIGN((ctx->img_height >> 1) + 4,
						S5P_FIMV_NV12MT_VALIGN);
			enc_ref_c_size = ALIGN(guard_width * guard_height,
						S5P_FIMV_NV12MT_SALIGN);
		}
		mfc_debug(2, "recon luma size: %d chroma size: %d\n",
			enc_ref_y_size, enc_ref_c_size);
	} else {
		return -EINVAL;
	}
	/* Codecs have different memory requirements */
	switch (ctx->codec_mode) {
	case S5P_MFC_CODEC_H264_DEC:
		ctx->bank1_size =
			ALIGN(S5P_FIMV_DEC_NB_IP_SIZE +
					S5P_FIMV_DEC_VERT_NB_MV_SIZE,
					S5P_FIMV_DEC_BUF_ALIGN);
		ctx->bank2_size = ctx->total_dpb_count * ctx->mv_size;
		break;
	case S5P_MFC_CODEC_MPEG4_DEC:
		ctx->bank1_size =
			ALIGN(S5P_FIMV_DEC_NB_DCAC_SIZE +
					S5P_FIMV_DEC_UPNB_MV_SIZE +
					S5P_FIMV_DEC_SUB_ANCHOR_MV_SIZE +
					S5P_FIMV_DEC_STX_PARSER_SIZE +
					S5P_FIMV_DEC_OVERLAP_TRANSFORM_SIZE,
					S5P_FIMV_DEC_BUF_ALIGN);
		ctx->bank2_size = 0;
		break;
	case S5P_MFC_CODEC_VC1RCV_DEC:
	case S5P_MFC_CODEC_VC1_DEC:
		ctx->bank1_size =
			ALIGN(S5P_FIMV_DEC_OVERLAP_TRANSFORM_SIZE +
					S5P_FIMV_DEC_UPNB_MV_SIZE +
					S5P_FIMV_DEC_SUB_ANCHOR_MV_SIZE +
					S5P_FIMV_DEC_NB_DCAC_SIZE +
					3 * S5P_FIMV_DEC_VC1_BITPLANE_SIZE,
					S5P_FIMV_DEC_BUF_ALIGN);
		ctx->bank2_size = 0;
		break;
	case S5P_MFC_CODEC_MPEG2_DEC:
		ctx->bank1_size = 0;
		ctx->bank2_size = 0;
		break;
	case S5P_MFC_CODEC_H263_DEC:
		ctx->bank1_size =
			ALIGN(S5P_FIMV_DEC_OVERLAP_TRANSFORM_SIZE +
					S5P_FIMV_DEC_UPNB_MV_SIZE +
					S5P_FIMV_DEC_SUB_ANCHOR_MV_SIZE +
					S5P_FIMV_DEC_NB_DCAC_SIZE,
					S5P_FIMV_DEC_BUF_ALIGN);
		ctx->bank2_size = 0;
		break;
	case S5P_MFC_CODEC_H264_ENC:
		ctx->bank1_size = (enc_ref_y_size * 2) +
				S5P_FIMV_ENC_UPMV_SIZE +
				S5P_FIMV_ENC_COLFLG_SIZE +
				S5P_FIMV_ENC_INTRAMD_SIZE +
				S5P_FIMV_ENC_NBORINFO_SIZE;
		ctx->bank2_size = (enc_ref_y_size * 2) +
				(enc_ref_c_size * 4) +
				S5P_FIMV_ENC_INTRAPRED_SIZE;
		break;
	case S5P_MFC_CODEC_MPEG4_ENC:
		ctx->bank1_size = (enc_ref_y_size * 2) +
				S5P_FIMV_ENC_UPMV_SIZE +
				S5P_FIMV_ENC_COLFLG_SIZE +
				S5P_FIMV_ENC_ACDCCOEF_SIZE;
		ctx->bank2_size = (enc_ref_y_size * 2) +
				(enc_ref_c_size * 4);
		break;
	case S5P_MFC_CODEC_H263_ENC:
		ctx->bank1_size = (enc_ref_y_size * 2) +
				S5P_FIMV_ENC_UPMV_SIZE +
				S5P_FIMV_ENC_ACDCCOEF_SIZE;
		ctx->bank2_size = (enc_ref_y_size * 2) +
				(enc_ref_c_size * 4);
		break;
	default:
		break;
	}
	/* Allocate only if memory from bank 1 is necessary */
	if (ctx->bank1_size > 0) {
		ctx->bank1_buf = vb2_dma_contig_memops.alloc(
			dev->alloc_ctx[MFC_BANK1_ALLOC_CTX], ctx->bank1_size);
		if (IS_ERR(ctx->bank1_buf)) {
			ctx->bank1_buf = NULL;
			printk(KERN_ERR
				"Buf alloc for decoding failed (port A)\n");
			return -ENOMEM;
		}
		ctx->bank1_phys = s5p_mfc_mem_cookie(
			dev->alloc_ctx[MFC_BANK1_ALLOC_CTX], ctx->bank1_buf);
		BUG_ON(ctx->bank1_phys & ((1 << MFC_BANK1_ALIGN_ORDER) - 1));
	}
	/* Allocate only if memory from bank 2 is necessary */
	if (ctx->bank2_size > 0) {
		ctx->bank2_buf = vb2_dma_contig_memops.alloc(
			dev->alloc_ctx[MFC_BANK2_ALLOC_CTX], ctx->bank2_size);
		if (IS_ERR(ctx->bank2_buf)) {
			ctx->bank2_buf = NULL;
			mfc_err("Buf alloc for decoding failed (port B)\n");
			return -ENOMEM;
		}
		ctx->bank2_phys = s5p_mfc_mem_cookie(
			dev->alloc_ctx[MFC_BANK2_ALLOC_CTX], ctx->bank2_buf);
		BUG_ON(ctx->bank2_phys & ((1 << MFC_BANK2_ALIGN_ORDER) - 1));
	}
	return 0;
}
/* Release buffers allocated for codec */
void s5p_mfc_release_codec_buffers_v5(struct s5p_mfc_ctx *ctx)
{
	if (ctx->bank1_buf) {
		vb2_dma_contig_memops.put(ctx->bank1_buf);
		ctx->bank1_buf = NULL;
		ctx->bank1_phys = 0;
		ctx->bank1_size = 0;
	}
	if (ctx->bank2_buf) {
		vb2_dma_contig_memops.put(ctx->bank2_buf);
		ctx->bank2_buf = NULL;
		ctx->bank2_phys = 0;
		ctx->bank2_size = 0;
	}
}
/* Allocate memory for instance data buffer */
int s5p_mfc_alloc_instance_buffer_v5(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	struct s5p_mfc_buf_size_v5 *buf_size = dev->variant->buf_size->priv;
	if (ctx->codec_mode == S5P_MFC_CODEC_H264_DEC ||
	    ctx->codec_mode == S5P_MFC_CODEC_H264_ENC)
		ctx->ctx.size = buf_size->h264_ctx;
	else
		ctx->ctx.size = buf_size->non_h264_ctx;
	ctx->ctx.alloc = vb2_dma_contig_memops.alloc(
		dev->alloc_ctx[MFC_BANK1_ALLOC_CTX], ctx->ctx.size);
	if (IS_ERR(ctx->ctx.alloc)) {
		mfc_err("Allocating context buffer failed\n");
		ctx->ctx.alloc = NULL;
		return -ENOMEM;
	}
	ctx->ctx.dma = s5p_mfc_mem_cookie(
		dev->alloc_ctx[MFC_BANK1_ALLOC_CTX], ctx->ctx.alloc);
	BUG_ON(ctx->ctx.dma & ((1 << MFC_BANK1_ALIGN_ORDER) - 1));
	ctx->ctx.ofs = OFFSETA(ctx->ctx.dma);
	ctx->ctx.virt = vb2_dma_contig_memops.vaddr(ctx->ctx.alloc);
	if (!ctx->ctx.virt) {
		mfc_err("Remapping instance buffer failed\n");
		vb2_dma_contig_memops.put(ctx->ctx.alloc);
		ctx->ctx.alloc = NULL;
		ctx->ctx.ofs = 0;
		ctx->ctx.dma = 0;
		return -ENOMEM;
	}
	/* Zero content of the allocated memory */
	memset(ctx->ctx.virt, 0, ctx->ctx.size);
	wmb();
	/* Initialize shared memory */
	ctx->shm.alloc = vb2_dma_contig_memops.alloc(
			dev->alloc_ctx[MFC_BANK1_ALLOC_CTX], buf_size->shm);
	if (IS_ERR(ctx->shm.alloc)) {
		mfc_err("failed to allocate shared memory\n");
		return PTR_ERR(ctx->shm.alloc);
	}
	/* shared memory offset only keeps the offset from base (port a) */
	ctx->shm.ofs = s5p_mfc_mem_cookie(
			dev->alloc_ctx[MFC_BANK1_ALLOC_CTX], ctx->shm.alloc)
			- dev->bank1;
	BUG_ON(ctx->shm.ofs & ((1 << MFC_BANK1_ALIGN_ORDER) - 1));
	ctx->shm.virt = vb2_dma_contig_memops.vaddr(ctx->shm.alloc);
	if (!ctx->shm.virt) {
		vb2_dma_contig_memops.put(ctx->shm.alloc);
		ctx->shm.alloc = NULL;
		ctx->shm.ofs = 0;
		mfc_err("failed to get virtual address of shared memory\n");
		return -ENOMEM;
	}
	memset((void *)ctx->shm.virt, 0, buf_size->shm);
	wmb();
	return 0;
}
/* Release instance buffer */
void s5p_mfc_release_instance_buffer_v5(struct s5p_mfc_ctx *ctx)
{
	if (ctx->ctx.alloc) {
		vb2_dma_contig_memops.put(ctx->ctx.alloc);
		ctx->ctx.alloc = NULL;
		ctx->ctx.ofs = 0;
		ctx->ctx.virt = NULL;
		ctx->ctx.dma = 0;
	}
	if (ctx->shm.alloc) {
		vb2_dma_contig_memops.put(ctx->shm.alloc);
		ctx->shm.alloc = NULL;
		ctx->shm.ofs = 0;
		ctx->shm.virt = NULL;
	}
}
int s5p_mfc_alloc_dev_context_buffer_v5(struct s5p_mfc_dev *dev)
{
	/* NOP */
	return 0;
}
void s5p_mfc_release_dev_context_buffer_v5(struct s5p_mfc_dev *dev)
{
	/* NOP */
}
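/* Helpers for accessing the per-context shared memory area that is used to
 * exchange additional parameters with the MFC firmware */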
static void s5p_mfc_write_info_v5(struct s5p_mfc_ctx *ctx, unsigned int data,
			unsigned int ofs)
{
	writel(data, (ctx->shm.virt + ofs));
	wmb();
}
static unsigned int s5p_mfc_read_info_v5(struct s5p_mfc_ctx *ctx,
				unsigned int ofs)
{
	rmb();
	return readl(ctx->shm.virt + ofs);
}
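/* Calculate the decoded picture buffer plane sizes (luma, chroma and, for
 * H264, motion vectors) for the current stream dimensions */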
void s5p_mfc_dec_calc_dpb_size_v5(struct s5p_mfc_ctx *ctx)
{
	unsigned int guard_width, guard_height;
	ctx->buf_width = ALIGN(ctx->img_width, S5P_FIMV_NV12MT_HALIGN);
	ctx->buf_height = ALIGN(ctx->img_height, S5P_FIMV_NV12MT_VALIGN);
	mfc_debug(2,
		"SEQ Done: Movie dimensions %dx%d, buffer dimensions: %dx%d\n",
		ctx->img_width, ctx->img_height, ctx->buf_width,
		ctx->buf_height);
	if (ctx->codec_mode == S5P_MFC_CODEC_H264_DEC) {
		ctx->luma_size = ALIGN(ctx->buf_width * ctx->buf_height,
				S5P_FIMV_DEC_BUF_ALIGN);
		ctx->chroma_size = ALIGN(ctx->buf_width *
				ALIGN((ctx->img_height >> 1),
					S5P_FIMV_NV12MT_VALIGN),
				S5P_FIMV_DEC_BUF_ALIGN);
		ctx->mv_size = ALIGN(ctx->buf_width *
				ALIGN((ctx->buf_height >> 2),
					S5P_FIMV_NV12MT_VALIGN),
				S5P_FIMV_DEC_BUF_ALIGN);
	} else {
		guard_width =
			ALIGN(ctx->img_width + 24, S5P_FIMV_NV12MT_HALIGN);
		guard_height =
			ALIGN(ctx->img_height + 16, S5P_FIMV_NV12MT_VALIGN);
		ctx->luma_size = ALIGN(guard_width * guard_height,
				S5P_FIMV_DEC_BUF_ALIGN);
		guard_width =
			ALIGN(ctx->img_width + 16, S5P_FIMV_NV12MT_HALIGN);
		guard_height =
			ALIGN((ctx->img_height >> 1) + 4,
				S5P_FIMV_NV12MT_VALIGN);
		ctx->chroma_size = ALIGN(guard_width * guard_height,
				S5P_FIMV_DEC_BUF_ALIGN);
		ctx->mv_size = 0;
	}
}
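/* Calculate the encoder source (raw) plane sizes for the selected input
 * pixel format */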
void s5p_mfc_enc_calc_src_size_v5(struct s5p_mfc_ctx *ctx)
{
	if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV12M) {
		ctx->buf_width = ALIGN(ctx->img_width, S5P_FIMV_NV12M_HALIGN);
		ctx->luma_size = ALIGN(ctx->img_width, S5P_FIMV_NV12M_HALIGN)
			* ALIGN(ctx->img_height, S5P_FIMV_NV12M_LVALIGN);
		ctx->chroma_size = ALIGN(ctx->img_width, S5P_FIMV_NV12M_HALIGN)
			* ALIGN((ctx->img_height >> 1), S5P_FIMV_NV12M_CVALIGN);
		ctx->luma_size = ALIGN(ctx->luma_size, S5P_FIMV_NV12M_SALIGN);
		ctx->chroma_size =
			ALIGN(ctx->chroma_size, S5P_FIMV_NV12M_SALIGN);
	} else if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV12MT) {
		ctx->buf_width = ALIGN(ctx->img_width, S5P_FIMV_NV12MT_HALIGN);
		ctx->luma_size = ALIGN(ctx->img_width, S5P_FIMV_NV12MT_HALIGN)
			* ALIGN(ctx->img_height, S5P_FIMV_NV12MT_VALIGN);
		ctx->chroma_size =
			ALIGN(ctx->img_width, S5P_FIMV_NV12MT_HALIGN)
			* ALIGN((ctx->img_height >> 1), S5P_FIMV_NV12MT_VALIGN);
		ctx->luma_size = ALIGN(ctx->luma_size, S5P_FIMV_NV12MT_SALIGN);
		ctx->chroma_size =
			ALIGN(ctx->chroma_size, S5P_FIMV_NV12MT_SALIGN);
	}
}
/* Set registers for decoding temporary buffers */
static void s5p_mfc_set_dec_desc_buffer(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	struct s5p_mfc_buf_size_v5 *buf_size = dev->variant->buf_size->priv;
	mfc_write(dev, OFFSETA(ctx->dsc.dma), S5P_FIMV_SI_CH0_DESC_ADR);
	mfc_write(dev, buf_size->dsc, S5P_FIMV_SI_CH0_DESC_SIZE);
}
/* Set registers for shared buffer */
static void s5p_mfc_set_shared_buffer(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	mfc_write(dev, ctx->shm.ofs, S5P_FIMV_SI_CH0_HOST_WR_ADR);
}
/* Set registers for decoding stream buffer */
int s5p_mfc_set_dec_stream_buffer_v5(struct s5p_mfc_ctx *ctx, int buf_addr,
		unsigned int start_num_byte, unsigned int buf_size)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	mfc_write(dev, OFFSETA(buf_addr), S5P_FIMV_SI_CH0_SB_ST_ADR);
	mfc_write(dev, ctx->dec_src_buf_size, S5P_FIMV_SI_CH0_CPB_SIZE);
	mfc_write(dev, buf_size, S5P_FIMV_SI_CH0_SB_FRM_SIZE);
	s5p_mfc_write_info_v5(ctx, start_num_byte, START_BYTE_NUM);
	return 0;
}
/* Set decoding frame buffer */
int s5p_mfc_set_dec_frame_buffer_v5(struct s5p_mfc_ctx *ctx)
{
	unsigned int frame_size, i;
	unsigned int frame_size_ch, frame_size_mv;
	struct s5p_mfc_dev *dev = ctx->dev;
	unsigned int dpb;
	size_t buf_addr1, buf_addr2;
	int buf_size1, buf_size2;
	buf_addr1 = ctx->bank1_phys;
	buf_size1 = ctx->bank1_size;
	buf_addr2 = ctx->bank2_phys;
	buf_size2 = ctx->bank2_size;
	dpb = mfc_read(dev, S5P_FIMV_SI_CH0_DPB_CONF_CTRL) &
				~S5P_FIMV_DPB_COUNT_MASK;
	mfc_write(dev, ctx->total_dpb_count | dpb,
				S5P_FIMV_SI_CH0_DPB_CONF_CTRL);
	s5p_mfc_set_shared_buffer(ctx);
	switch (ctx->codec_mode) {
	case S5P_MFC_CODEC_H264_DEC:
		mfc_write(dev, OFFSETA(buf_addr1),
				S5P_FIMV_H264_VERT_NB_MV_ADR);
		buf_addr1 += S5P_FIMV_DEC_VERT_NB_MV_SIZE;
		buf_size1 -= S5P_FIMV_DEC_VERT_NB_MV_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_H264_NB_IP_ADR);
		buf_addr1 += S5P_FIMV_DEC_NB_IP_SIZE;
		buf_size1 -= S5P_FIMV_DEC_NB_IP_SIZE;
		break;
	case S5P_MFC_CODEC_MPEG4_DEC:
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_MPEG4_NB_DCAC_ADR);
		buf_addr1 += S5P_FIMV_DEC_NB_DCAC_SIZE;
		buf_size1 -= S5P_FIMV_DEC_NB_DCAC_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_MPEG4_UP_NB_MV_ADR);
		buf_addr1 += S5P_FIMV_DEC_UPNB_MV_SIZE;
		buf_size1 -= S5P_FIMV_DEC_UPNB_MV_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_MPEG4_SA_MV_ADR);
		buf_addr1 += S5P_FIMV_DEC_SUB_ANCHOR_MV_SIZE;
		buf_size1 -= S5P_FIMV_DEC_SUB_ANCHOR_MV_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_MPEG4_SP_ADR);
		buf_addr1 += S5P_FIMV_DEC_STX_PARSER_SIZE;
		buf_size1 -= S5P_FIMV_DEC_STX_PARSER_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_MPEG4_OT_LINE_ADR);
		buf_addr1 += S5P_FIMV_DEC_OVERLAP_TRANSFORM_SIZE;
		buf_size1 -= S5P_FIMV_DEC_OVERLAP_TRANSFORM_SIZE;
		break;
	case S5P_MFC_CODEC_H263_DEC:
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_H263_OT_LINE_ADR);
		buf_addr1 += S5P_FIMV_DEC_OVERLAP_TRANSFORM_SIZE;
		buf_size1 -= S5P_FIMV_DEC_OVERLAP_TRANSFORM_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_H263_UP_NB_MV_ADR);
		buf_addr1 += S5P_FIMV_DEC_UPNB_MV_SIZE;
		buf_size1 -= S5P_FIMV_DEC_UPNB_MV_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_H263_SA_MV_ADR);
		buf_addr1 += S5P_FIMV_DEC_SUB_ANCHOR_MV_SIZE;
		buf_size1 -= S5P_FIMV_DEC_SUB_ANCHOR_MV_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_H263_NB_DCAC_ADR);
		buf_addr1 += S5P_FIMV_DEC_NB_DCAC_SIZE;
		buf_size1 -= S5P_FIMV_DEC_NB_DCAC_SIZE;
		break;
	case S5P_MFC_CODEC_VC1_DEC:
	case S5P_MFC_CODEC_VC1RCV_DEC:
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_VC1_NB_DCAC_ADR);
		buf_addr1 += S5P_FIMV_DEC_NB_DCAC_SIZE;
		buf_size1 -= S5P_FIMV_DEC_NB_DCAC_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_VC1_OT_LINE_ADR);
		buf_addr1 += S5P_FIMV_DEC_OVERLAP_TRANSFORM_SIZE;
		buf_size1 -= S5P_FIMV_DEC_OVERLAP_TRANSFORM_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_VC1_UP_NB_MV_ADR);
		buf_addr1 += S5P_FIMV_DEC_UPNB_MV_SIZE;
		buf_size1 -= S5P_FIMV_DEC_UPNB_MV_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_VC1_SA_MV_ADR);
		buf_addr1 += S5P_FIMV_DEC_SUB_ANCHOR_MV_SIZE;
		buf_size1 -= S5P_FIMV_DEC_SUB_ANCHOR_MV_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_VC1_BITPLANE3_ADR);
		buf_addr1 += S5P_FIMV_DEC_VC1_BITPLANE_SIZE;
		buf_size1 -= S5P_FIMV_DEC_VC1_BITPLANE_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_VC1_BITPLANE2_ADR);
		buf_addr1 += S5P_FIMV_DEC_VC1_BITPLANE_SIZE;
		buf_size1 -= S5P_FIMV_DEC_VC1_BITPLANE_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_VC1_BITPLANE1_ADR);
		buf_addr1 += S5P_FIMV_DEC_VC1_BITPLANE_SIZE;
		buf_size1 -= S5P_FIMV_DEC_VC1_BITPLANE_SIZE;
		break;
	case S5P_MFC_CODEC_MPEG2_DEC:
		break;
	default:
		mfc_err("Unknown codec for decoding (%x)\n",
			ctx->codec_mode);
		return -EINVAL;
		break;
	}
	frame_size = ctx->luma_size;
	frame_size_ch = ctx->chroma_size;
	frame_size_mv = ctx->mv_size;
	mfc_debug(2, "Frm size: %d ch: %d mv: %d\n", frame_size, frame_size_ch,
								frame_size_mv);
	for (i = 0; i < ctx->total_dpb_count; i++) {
		/* Bank2 */
		mfc_debug(2, "Luma %d: %x\n", i,
					ctx->dst_bufs[i].cookie.raw.luma);
		mfc_write(dev, OFFSETB(ctx->dst_bufs[i].cookie.raw.luma),
					S5P_FIMV_DEC_LUMA_ADR + i * 4);
		mfc_debug(2, "\tChroma %d: %x\n", i,
					ctx->dst_bufs[i].cookie.raw.chroma);
		mfc_write(dev, OFFSETA(ctx->dst_bufs[i].cookie.raw.chroma),
					S5P_FIMV_DEC_CHROMA_ADR + i * 4);
		if (ctx->codec_mode == S5P_MFC_CODEC_H264_DEC) {
			mfc_debug(2, "\tBuf2: %x, size: %d\n",
					buf_addr2, buf_size2);
			mfc_write(dev, OFFSETB(buf_addr2),
					S5P_FIMV_H264_MV_ADR + i * 4);
			buf_addr2 += frame_size_mv;
			buf_size2 -= frame_size_mv;
		}
	}
	mfc_debug(2, "Buf1: %u, buf_size1: %d\n", buf_addr1, buf_size1);
	mfc_debug(2, "Buf 1/2 size after: %d/%d (frames %d)\n",
			buf_size1, buf_size2, ctx->total_dpb_count);
	if (buf_size1 < 0 || buf_size2 < 0) {
		mfc_debug(2, "Not enough memory has been allocated\n");
		return -ENOMEM;
	}
	s5p_mfc_write_info_v5(ctx, frame_size, ALLOC_LUMA_DPB_SIZE);
	s5p_mfc_write_info_v5(ctx, frame_size_ch, ALLOC_CHROMA_DPB_SIZE);
	if (ctx->codec_mode == S5P_MFC_CODEC_H264_DEC)
		s5p_mfc_write_info_v5(ctx, frame_size_mv, ALLOC_MV_SIZE);
	mfc_write(dev, ((S5P_FIMV_CH_INIT_BUFS & S5P_FIMV_CH_MASK)
				<< S5P_FIMV_CH_SHIFT) | (ctx->inst_no),
				S5P_FIMV_SI_CH0_INST_ID);
	return 0;
}
/* Set registers for encoding stream buffer */
int s5p_mfc_set_enc_stream_buffer_v5(struct s5p_mfc_ctx *ctx,
		unsigned long addr, unsigned int size)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	mfc_write(dev, OFFSETA(addr), S5P_FIMV_ENC_SI_CH0_SB_ADR);
	mfc_write(dev, size, S5P_FIMV_ENC_SI_CH0_SB_SIZE);
	return 0;
}
void s5p_mfc_set_enc_frame_buffer_v5(struct s5p_mfc_ctx *ctx,
		unsigned long y_addr, unsigned long c_addr)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	mfc_write(dev, OFFSETB(y_addr), S5P_FIMV_ENC_SI_CH0_CUR_Y_ADR);
	mfc_write(dev, OFFSETB(c_addr), S5P_FIMV_ENC_SI_CH0_CUR_C_ADR);
}
void s5p_mfc_get_enc_frame_buffer_v5(struct s5p_mfc_ctx *ctx,
		unsigned long *y_addr, unsigned long *c_addr)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	*y_addr = dev->bank2 + (mfc_read(dev, S5P_FIMV_ENCODED_Y_ADDR)
							<< MFC_OFFSET_SHIFT);
	*c_addr = dev->bank2 + (mfc_read(dev, S5P_FIMV_ENCODED_C_ADDR)
							<< MFC_OFFSET_SHIFT);
}
/* Set encoding ref & codec buffer */
int s5p_mfc_set_enc_ref_buffer_v5(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	size_t buf_addr1, buf_addr2;
	size_t buf_size1, buf_size2;
	unsigned int enc_ref_y_size, enc_ref_c_size;
	unsigned int guard_width, guard_height;
	int i;
	buf_addr1 = ctx->bank1_phys;
	buf_size1 = ctx->bank1_size;
	buf_addr2 = ctx->bank2_phys;
	buf_size2 = ctx->bank2_size;
	enc_ref_y_size = ALIGN(ctx->img_width, S5P_FIMV_NV12MT_HALIGN)
		* ALIGN(ctx->img_height, S5P_FIMV_NV12MT_VALIGN);
	enc_ref_y_size = ALIGN(enc_ref_y_size, S5P_FIMV_NV12MT_SALIGN);
	if (ctx->codec_mode == S5P_MFC_CODEC_H264_ENC) {
		enc_ref_c_size = ALIGN(ctx->img_width, S5P_FIMV_NV12MT_HALIGN)
			* ALIGN((ctx->img_height >> 1), S5P_FIMV_NV12MT_VALIGN);
		enc_ref_c_size = ALIGN(enc_ref_c_size, S5P_FIMV_NV12MT_SALIGN);
	} else {
		guard_width = ALIGN(ctx->img_width + 16,
					S5P_FIMV_NV12MT_HALIGN);
		guard_height = ALIGN((ctx->img_height >> 1) + 4,
					S5P_FIMV_NV12MT_VALIGN);
		enc_ref_c_size = ALIGN(guard_width * guard_height,
					S5P_FIMV_NV12MT_SALIGN);
	}
	mfc_debug(2, "buf_size1: %d, buf_size2: %d\n", buf_size1, buf_size2);
	switch (ctx->codec_mode) {
	case S5P_MFC_CODEC_H264_ENC:
		for (i = 0; i < 2; i++) {
			mfc_write(dev, OFFSETA(buf_addr1),
				S5P_FIMV_ENC_REF0_LUMA_ADR + (4 * i));
			buf_addr1 += enc_ref_y_size;
			buf_size1 -= enc_ref_y_size;
			mfc_write(dev, OFFSETB(buf_addr2),
				S5P_FIMV_ENC_REF2_LUMA_ADR + (4 * i));
			buf_addr2 += enc_ref_y_size;
			buf_size2 -= enc_ref_y_size;
		}
		for (i = 0; i < 4; i++) {
			mfc_write(dev, OFFSETB(buf_addr2),
				S5P_FIMV_ENC_REF0_CHROMA_ADR + (4 * i));
			buf_addr2 += enc_ref_c_size;
			buf_size2 -= enc_ref_c_size;
		}
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_H264_UP_MV_ADR);
		buf_addr1 += S5P_FIMV_ENC_UPMV_SIZE;
		buf_size1 -= S5P_FIMV_ENC_UPMV_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1),
				S5P_FIMV_H264_COZERO_FLAG_ADR);
		buf_addr1 += S5P_FIMV_ENC_COLFLG_SIZE;
		buf_size1 -= S5P_FIMV_ENC_COLFLG_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1),
				S5P_FIMV_H264_UP_INTRA_MD_ADR);
		buf_addr1 += S5P_FIMV_ENC_INTRAMD_SIZE;
		buf_size1 -= S5P_FIMV_ENC_INTRAMD_SIZE;
		mfc_write(dev, OFFSETB(buf_addr2),
				S5P_FIMV_H264_UP_INTRA_PRED_ADR);
		buf_addr2 += S5P_FIMV_ENC_INTRAPRED_SIZE;
		buf_size2 -= S5P_FIMV_ENC_INTRAPRED_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1),
				S5P_FIMV_H264_NBOR_INFO_ADR);
		buf_addr1 += S5P_FIMV_ENC_NBORINFO_SIZE;
		buf_size1 -= S5P_FIMV_ENC_NBORINFO_SIZE;
		mfc_debug(2, "buf_size1: %d, buf_size2: %d\n",
			buf_size1, buf_size2);
		break;
	case S5P_MFC_CODEC_MPEG4_ENC:
		for (i = 0; i < 2; i++) {
			mfc_write(dev, OFFSETA(buf_addr1),
				S5P_FIMV_ENC_REF0_LUMA_ADR + (4 * i));
			buf_addr1 += enc_ref_y_size;
			buf_size1 -= enc_ref_y_size;
			mfc_write(dev, OFFSETB(buf_addr2),
				S5P_FIMV_ENC_REF2_LUMA_ADR + (4 * i));
			buf_addr2 += enc_ref_y_size;
			buf_size2 -= enc_ref_y_size;
		}
		for (i = 0; i < 4; i++) {
			mfc_write(dev, OFFSETB(buf_addr2),
				S5P_FIMV_ENC_REF0_CHROMA_ADR + (4 * i));
			buf_addr2 += enc_ref_c_size;
			buf_size2 -= enc_ref_c_size;
		}
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_MPEG4_UP_MV_ADR);
		buf_addr1 += S5P_FIMV_ENC_UPMV_SIZE;
		buf_size1 -= S5P_FIMV_ENC_UPMV_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1),
				S5P_FIMV_MPEG4_COZERO_FLAG_ADR);
		buf_addr1 += S5P_FIMV_ENC_COLFLG_SIZE;
		buf_size1 -= S5P_FIMV_ENC_COLFLG_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1),
				S5P_FIMV_MPEG4_ACDC_COEF_ADR);
		buf_addr1 += S5P_FIMV_ENC_ACDCCOEF_SIZE;
		buf_size1 -= S5P_FIMV_ENC_ACDCCOEF_SIZE;
		mfc_debug(2, "buf_size1: %d, buf_size2: %d\n",
			buf_size1, buf_size2);
		break;
	case S5P_MFC_CODEC_H263_ENC:
		for (i = 0; i < 2; i++) {
			mfc_write(dev, OFFSETA(buf_addr1),
				S5P_FIMV_ENC_REF0_LUMA_ADR + (4 * i));
			buf_addr1 += enc_ref_y_size;
			buf_size1 -= enc_ref_y_size;
			mfc_write(dev, OFFSETB(buf_addr2),
				S5P_FIMV_ENC_REF2_LUMA_ADR + (4 * i));
			buf_addr2 += enc_ref_y_size;
			buf_size2 -= enc_ref_y_size;
		}
		for (i = 0; i < 4; i++) {
			mfc_write(dev, OFFSETB(buf_addr2),
				S5P_FIMV_ENC_REF0_CHROMA_ADR + (4 * i));
			buf_addr2 += enc_ref_c_size;
			buf_size2 -= enc_ref_c_size;
		}
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_H263_UP_MV_ADR);
		buf_addr1 += S5P_FIMV_ENC_UPMV_SIZE;
		buf_size1 -= S5P_FIMV_ENC_UPMV_SIZE;
		mfc_write(dev, OFFSETA(buf_addr1), S5P_FIMV_H263_ACDC_COEF_ADR);
		buf_addr1 += S5P_FIMV_ENC_ACDCCOEF_SIZE;
		buf_size1 -= S5P_FIMV_ENC_ACDCCOEF_SIZE;
		mfc_debug(2, "buf_size1: %d, buf_size2: %d\n",
			buf_size1, buf_size2);
		break;
	default:
		mfc_err("Unknown codec set for encoding: %d\n",
			ctx->codec_mode);
		return -EINVAL;
	}
	return 0;
}
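/* Set the encoding parameters that are common to all supported codecs */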
static int s5p_mfc_set_enc_params(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	struct s5p_mfc_enc_params *p = &ctx->enc_params;
	unsigned int reg;
	unsigned int shm;
	/* width */
	mfc_write(dev, ctx->img_width, S5P_FIMV_ENC_HSIZE_PX);
	/* height */
	mfc_write(dev, ctx->img_height, S5P_FIMV_ENC_VSIZE_PX);
	/* pictype : enable, IDR period */
	reg = mfc_read(dev, S5P_FIMV_ENC_PIC_TYPE_CTRL);
	reg |= (1 << 18);
	reg &= ~(0xFFFF);
	reg |= p->gop_size;
	mfc_write(dev, reg, S5P_FIMV_ENC_PIC_TYPE_CTRL);
	mfc_write(dev, 0, S5P_FIMV_ENC_B_RECON_WRITE_ON);
	/* multi-slice control */
	/* multi-slice MB number or bit size */
	mfc_write(dev, p->slice_mode, S5P_FIMV_ENC_MSLICE_CTRL);
	if (p->slice_mode == V4L2_MPEG_VIDEO_MULTI_SICE_MODE_MAX_MB) {
		mfc_write(dev, p->slice_mb, S5P_FIMV_ENC_MSLICE_MB);
	} else if (p->slice_mode == V4L2_MPEG_VIDEO_MULTI_SICE_MODE_MAX_BYTES) {
		mfc_write(dev, p->slice_bit, S5P_FIMV_ENC_MSLICE_BIT);
	} else {
		mfc_write(dev, 0, S5P_FIMV_ENC_MSLICE_MB);
		mfc_write(dev, 0, S5P_FIMV_ENC_MSLICE_BIT);
	}
	/* cyclic intra refresh */
	mfc_write(dev, p->intra_refresh_mb, S5P_FIMV_ENC_CIR_CTRL);
	/* memory structure cur. frame */
	if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV12M)
		mfc_write(dev, 0, S5P_FIMV_ENC_MAP_FOR_CUR);
	else if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV12MT)
		mfc_write(dev, 3, S5P_FIMV_ENC_MAP_FOR_CUR);
	/* padding control & value */
	reg = mfc_read(dev, S5P_FIMV_ENC_PADDING_CTRL);
	if (p->pad) {
		/** enable */
		reg |= (1 << 31);
		/** cr value */
		reg &= ~(0xFF << 16);
		reg |= (p->pad_cr << 16);
		/** cb value */
		reg &= ~(0xFF << 8);
		reg |= (p->pad_cb << 8);
		/** y value */
		reg &= ~(0xFF);
		reg |= (p->pad_luma);
	} else {
		/** disable & all value clear */
		reg = 0;
	}
	mfc_write(dev, reg, S5P_FIMV_ENC_PADDING_CTRL);
	/* rate control config. */
	reg = mfc_read(dev, S5P_FIMV_ENC_RC_CONFIG);
	/** frame-level rate control */
	reg &= ~(0x1 << 9);
	reg |= (p->rc_frame << 9);
	mfc_write(dev, reg, S5P_FIMV_ENC_RC_CONFIG);
	/* bit rate */
	if (p->rc_frame)
		mfc_write(dev, p->rc_bitrate,
			S5P_FIMV_ENC_RC_BIT_RATE);
	else
		mfc_write(dev, 0, S5P_FIMV_ENC_RC_BIT_RATE);
	/* reaction coefficient */
	if (p->rc_frame)
		mfc_write(dev, p->rc_reaction_coeff, S5P_FIMV_ENC_RC_RPARA);
	shm = s5p_mfc_read_info_v5(ctx, EXT_ENC_CONTROL);
	/* seq header ctrl */
	shm &= ~(0x1 << 3);
	shm |= (p->seq_hdr_mode << 3);
	/* frame skip mode */
	shm &= ~(0x3 << 1);
	shm |= (p->frame_skip_mode << 1);
	s5p_mfc_write_info_v5(ctx, shm, EXT_ENC_CONTROL);
	/* fixed target bit */
	s5p_mfc_write_info_v5(ctx, p->fixed_target_bit, RC_CONTROL_CONFIG);
	return 0;
}
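/* Set the H.264 specific encoding parameters */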
static int s5p_mfc_set_enc_params_h264(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	struct s5p_mfc_enc_params *p = &ctx->enc_params;
	struct s5p_mfc_h264_enc_params *p_264 = &p->codec.h264;
	unsigned int reg;
	unsigned int shm;
	s5p_mfc_set_enc_params(ctx);
	/* pictype : number of B */
	reg = mfc_read(dev, S5P_FIMV_ENC_PIC_TYPE_CTRL);
	/* num_b_frame - 0 ~ 2 */
	reg &= ~(0x3 << 16);
	reg |= (p->num_b_frame << 16);
	mfc_write(dev, reg, S5P_FIMV_ENC_PIC_TYPE_CTRL);
	/* profile & level */
	reg = mfc_read(dev, S5P_FIMV_ENC_PROFILE);
	/* level */
	reg &= ~(0xFF << 8);
	reg |= (p_264->level << 8);
	/* profile - 0 ~ 2 */
	reg &= ~(0x3F);
	reg |= p_264->profile;
	mfc_write(dev, reg, S5P_FIMV_ENC_PROFILE);
	/* interlace */
	mfc_write(dev, p_264->interlace, S5P_FIMV_ENC_PIC_STRUCT);
	/* height */
	if (p_264->interlace)
		mfc_write(dev, ctx->img_height >> 1, S5P_FIMV_ENC_VSIZE_PX);
	/* loopfilter ctrl */
	mfc_write(dev, p_264->loop_filter_mode, S5P_FIMV_ENC_LF_CTRL);
	/* loopfilter alpha offset */
	if (p_264->loop_filter_alpha < 0) {
		reg = 0x10;
		reg |= (0xFF - p_264->loop_filter_alpha) + 1;
	} else {
		reg = 0x00;
		reg |= (p_264->loop_filter_alpha & 0xF);
	}
	mfc_write(dev, reg, S5P_FIMV_ENC_ALPHA_OFF);
	/* loopfilter beta offset */
	if (p_264->loop_filter_beta < 0) {
		reg = 0x10;
		reg |= (0xFF - p_264->loop_filter_beta) + 1;
	} else {
		reg = 0x00;
		reg |= (p_264->loop_filter_beta & 0xF);
	}
	mfc_write(dev, reg, S5P_FIMV_ENC_BETA_OFF);
	/* entropy coding mode */
	if (p_264->entropy_mode == V4L2_MPEG_VIDEO_H264_ENTROPY_MODE_CABAC)
		mfc_write(dev, 1, S5P_FIMV_ENC_H264_ENTROPY_MODE);
	else
		mfc_write(dev, 0, S5P_FIMV_ENC_H264_ENTROPY_MODE);
	/* number of ref. picture */
	reg = mfc_read(dev, S5P_FIMV_ENC_H264_NUM_OF_REF);
	/* num of ref. pictures of P */
	reg &= ~(0x3 << 5);
	reg |= (p_264->num_ref_pic_4p << 5);
	/* max number of ref. pictures */
	reg &= ~(0x1F);
	reg |= p_264->max_ref_pic;
	mfc_write(dev, reg, S5P_FIMV_ENC_H264_NUM_OF_REF);
	/* 8x8 transform enable */
	mfc_write(dev, p_264->_8x8_transform, S5P_FIMV_ENC_H264_TRANS_FLAG);
	/* rate control config. */
	reg = mfc_read(dev, S5P_FIMV_ENC_RC_CONFIG);
	/* macroblock level rate control */
	reg &= ~(0x1 << 8);
	reg |= (p->rc_mb << 8);
	/* frame QP */
	reg &= ~(0x3F);
	reg |= p_264->rc_frame_qp;
	mfc_write(dev, reg, S5P_FIMV_ENC_RC_CONFIG);
	/* frame rate */
	if (p->rc_frame && p->rc_framerate_denom)
		mfc_write(dev, p->rc_framerate_num * 1000
			/ p->rc_framerate_denom, S5P_FIMV_ENC_RC_FRAME_RATE);
	else
		mfc_write(dev, 0, S5P_FIMV_ENC_RC_FRAME_RATE);
	/* max & min value of QP */
	reg = mfc_read(dev, S5P_FIMV_ENC_RC_QBOUND);
	/* max QP */
	reg &= ~(0x3F << 8);
	reg |= (p_264->rc_max_qp << 8);
	/* min QP */
	reg &= ~(0x3F);
	reg |= p_264->rc_min_qp;
	mfc_write(dev, reg, S5P_FIMV_ENC_RC_QBOUND);
	/* macroblock adaptive scaling features */
	if (p->rc_mb) {
		reg = mfc_read(dev, S5P_FIMV_ENC_RC_MB_CTRL);
		/* dark region */
		reg &= ~(0x1 << 3);
		reg |= (p_264->rc_mb_dark << 3);
		/* smooth region */
		reg &= ~(0x1 << 2);
		reg |= (p_264->rc_mb_smooth << 2);
		/* static region */
		reg &= ~(0x1 << 1);
		reg |= (p_264->rc_mb_static << 1);
		/* high activity region */
		reg &= ~(0x1);
		reg |= p_264->rc_mb_activity;
		mfc_write(dev, reg, S5P_FIMV_ENC_RC_MB_CTRL);
	}
	if (!p->rc_frame && !p->rc_mb) {
		shm = s5p_mfc_read_info_v5(ctx, P_B_FRAME_QP);
		shm &= ~(0xFFF);
		shm |= ((p_264->rc_b_frame_qp & 0x3F) << 6);
		shm |= (p_264->rc_p_frame_qp & 0x3F);
		s5p_mfc_write_info_v5(ctx, shm, P_B_FRAME_QP);
	}
	/* extended encoder ctrl */
	shm = s5p_mfc_read_info_v5(ctx, EXT_ENC_CONTROL);
	/* AR VUI control */
	shm &= ~(0x1 << 15);
	shm |= (p_264->vui_sar << 1);
	s5p_mfc_write_info_v5(ctx, shm, EXT_ENC_CONTROL);
	if (p_264->vui_sar) {
		/* aspect ratio IDC */
		shm = s5p_mfc_read_info_v5(ctx, SAMPLE_ASPECT_RATIO_IDC);
		shm &= ~(0xFF);
		shm |= p_264->vui_sar_idc;
		s5p_mfc_write_info_v5(ctx, shm, SAMPLE_ASPECT_RATIO_IDC);
		if (p_264->vui_sar_idc == 0xFF) {
			/* sample AR info */
			shm = s5p_mfc_read_info_v5(ctx, EXTENDED_SAR);
			shm &= ~(0xFFFFFFFF);
			shm |= p_264->vui_ext_sar_width << 16;
			shm |= p_264->vui_ext_sar_height;
			s5p_mfc_write_info_v5(ctx, shm, EXTENDED_SAR);
		}
	}
	/* intra picture period for H.264 */
	shm = s5p_mfc_read_info_v5(ctx, H264_I_PERIOD);
	/* control */
	shm &= ~(0x1 << 16);
	shm |= (p_264->open_gop << 16);
	/* value */
	if (p_264->open_gop) {
		shm &= ~(0xFFFF);
		shm |= p_264->open_gop_size;
	}
	s5p_mfc_write_info_v5(ctx, shm, H264_I_PERIOD);
	/* extended encoder ctrl */
	shm = s5p_mfc_read_info_v5(ctx, EXT_ENC_CONTROL);
	/* vbv buffer size */
	if (p->frame_skip_mode ==
			V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT) {
		shm &= ~(0xFFFF << 16);
		shm |= (p_264->cpb_size << 16);
	}
	s5p_mfc_write_info_v5(ctx, shm, EXT_ENC_CONTROL);
	return 0;
}
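/* Set the MPEG4 specific encoding parameters */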
static int s5p_mfc_set_enc_params_mpeg4(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	struct s5p_mfc_enc_params *p = &ctx->enc_params;
	struct s5p_mfc_mpeg4_enc_params *p_mpeg4 = &p->codec.mpeg4;
	unsigned int reg;
	unsigned int shm;
	unsigned int framerate;
	s5p_mfc_set_enc_params(ctx);
	/* pictype : number of B */
	reg = mfc_read(dev, S5P_FIMV_ENC_PIC_TYPE_CTRL);
	/* num_b_frame - 0 ~ 2 */
	reg &= ~(0x3 << 16);
	reg |= (p->num_b_frame << 16);
	mfc_write(dev, reg, S5P_FIMV_ENC_PIC_TYPE_CTRL);
	/* profile & level */
	reg = mfc_read(dev, S5P_FIMV_ENC_PROFILE);
	/* level */
	reg &= ~(0xFF << 8);
	reg |= (p_mpeg4->level << 8);
	/* profile - 0 ~ 2 */
	reg &= ~(0x3F);
	reg |= p_mpeg4->profile;
	mfc_write(dev, reg, S5P_FIMV_ENC_PROFILE);
	/* quarter_pixel */
	mfc_write(dev, p_mpeg4->quarter_pixel, S5P_FIMV_ENC_MPEG4_QUART_PXL);
	/* qp */
	if (!p->rc_frame) {
		shm = s5p_mfc_read_info_v5(ctx, P_B_FRAME_QP);
		shm &= ~(0xFFF);
		shm |= ((p_mpeg4->rc_b_frame_qp & 0x3F) << 6);
		shm |= (p_mpeg4->rc_p_frame_qp & 0x3F);
		s5p_mfc_write_info_v5(ctx, shm, P_B_FRAME_QP);
	}
	/* frame rate */
	if (p->rc_frame) {
		if (p->rc_framerate_denom > 0) {
			framerate = p->rc_framerate_num * 1000 /
				p->rc_framerate_denom;
			mfc_write(dev, framerate,
				S5P_FIMV_ENC_RC_FRAME_RATE);
			shm = s5p_mfc_read_info_v5(ctx, RC_VOP_TIMING);
			shm &= ~(0xFFFFFFFF);
			shm |= (1 << 31);
			shm |= ((p->rc_framerate_num & 0x7FFF) << 16);
			shm |= (p->rc_framerate_denom & 0xFFFF);
			s5p_mfc_write_info_v5(ctx, shm, RC_VOP_TIMING);
		}
	} else {
		mfc_write(dev, 0, S5P_FIMV_ENC_RC_FRAME_RATE);
	}
	/* rate control config. */
	reg = mfc_read(dev, S5P_FIMV_ENC_RC_CONFIG);
	/* frame QP */
	reg &= ~(0x3F);
	reg |= p_mpeg4->rc_frame_qp;
	mfc_write(dev, reg, S5P_FIMV_ENC_RC_CONFIG);
	/* max & min value of QP */
	reg = mfc_read(dev, S5P_FIMV_ENC_RC_QBOUND);
	/* max QP */
	reg &= ~(0x3F << 8);
	reg |= (p_mpeg4->rc_max_qp << 8);
	/* min QP */
	reg &= ~(0x3F);
	reg |= p_mpeg4->rc_min_qp;
	mfc_write(dev, reg, S5P_FIMV_ENC_RC_QBOUND);
	/* extended encoder ctrl */
	shm = s5p_mfc_read_info_v5(ctx, EXT_ENC_CONTROL);
	/* vbv buffer size */
	if (p->frame_skip_mode ==
			V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT) {
		shm &= ~(0xFFFF << 16);
		shm |= (p->vbv_size << 16);
	}
	s5p_mfc_write_info_v5(ctx, shm, EXT_ENC_CONTROL);
	return 0;
}
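/* Set the H263 specific encoding parameters */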
static int s5p_mfc_set_enc_params_h263(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	struct s5p_mfc_enc_params *p = &ctx->enc_params;
	struct s5p_mfc_mpeg4_enc_params *p_h263 = &p->codec.mpeg4;
	unsigned int reg;
	unsigned int shm;
	s5p_mfc_set_enc_params(ctx);
	/* qp */
	if (!p->rc_frame) {
		shm = s5p_mfc_read_info_v5(ctx, P_B_FRAME_QP);
		shm &= ~(0xFFF);
		shm |= (p_h263->rc_p_frame_qp & 0x3F);
		s5p_mfc_write_info_v5(ctx, shm, P_B_FRAME_QP);
	}
	/* frame rate */
	if (p->rc_frame && p->rc_framerate_denom)
		mfc_write(dev, p->rc_framerate_num * 1000
			/ p->rc_framerate_denom, S5P_FIMV_ENC_RC_FRAME_RATE);
	else
		mfc_write(dev, 0, S5P_FIMV_ENC_RC_FRAME_RATE);
	/* rate control config. */
	reg = mfc_read(dev, S5P_FIMV_ENC_RC_CONFIG);
	/* frame QP */
	reg &= ~(0x3F);
	reg |= p_h263->rc_frame_qp;
	mfc_write(dev, reg, S5P_FIMV_ENC_RC_CONFIG);
	/* max & min value of QP */
	reg = mfc_read(dev, S5P_FIMV_ENC_RC_QBOUND);
	/* max QP */
	reg &= ~(0x3F << 8);
	reg |= (p_h263->rc_max_qp << 8);
	/* min QP */
	reg &= ~(0x3F);
	reg |= p_h263->rc_min_qp;
	mfc_write(dev, reg, S5P_FIMV_ENC_RC_QBOUND);
	/* extended encoder ctrl */
	shm = s5p_mfc_read_info_v5(ctx, EXT_ENC_CONTROL);
	/* vbv buffer size */
	if (p->frame_skip_mode ==
			V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT) {
		shm &= ~(0xFFFF << 16);
		shm |= (p->vbv_size << 16);
	}
	s5p_mfc_write_info_v5(ctx, shm, EXT_ENC_CONTROL);
	return 0;
}
/* Initialize decoding */
int s5p_mfc_init_decode_v5(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	s5p_mfc_set_shared_buffer(ctx);
	/* Setup loop filter, for decoding this is only valid for MPEG4 */
	if (ctx->codec_mode == S5P_MFC_CODEC_MPEG4_DEC)
		mfc_write(dev, ctx->loop_filter_mpeg4, S5P_FIMV_ENC_LF_CTRL);
	else
		mfc_write(dev, 0, S5P_FIMV_ENC_LF_CTRL);
	mfc_write(dev, ((ctx->slice_interface & S5P_FIMV_SLICE_INT_MASK) <<
		S5P_FIMV_SLICE_INT_SHIFT) | (ctx->display_delay_enable <<
		S5P_FIMV_DDELAY_ENA_SHIFT) | ((ctx->display_delay &
		S5P_FIMV_DDELAY_VAL_MASK) << S5P_FIMV_DDELAY_VAL_SHIFT),
		S5P_FIMV_SI_CH0_DPB_CONF_CTRL);
	mfc_write(dev,
		((S5P_FIMV_CH_SEQ_HEADER & S5P_FIMV_CH_MASK) << S5P_FIMV_CH_SHIFT)
		| (ctx->inst_no), S5P_FIMV_SI_CH0_INST_ID);
	return 0;
}
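/* Enable or disable flushing of the decoded picture buffers */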
static void s5p_mfc_set_flush(struct s5p_mfc_ctx *ctx, int flush)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	unsigned int dpb;
	if (flush)
		dpb = mfc_read(dev, S5P_FIMV_SI_CH0_DPB_CONF_CTRL) | (
			S5P_FIMV_DPB_FLUSH_MASK << S5P_FIMV_DPB_FLUSH_SHIFT);
	else
		dpb = mfc_read(dev, S5P_FIMV_SI_CH0_DPB_CONF_CTRL) &
			~(S5P_FIMV_DPB_FLUSH_MASK << S5P_FIMV_DPB_FLUSH_SHIFT);
	mfc_write(dev, dpb, S5P_FIMV_SI_CH0_DPB_CONF_CTRL);
}
/* Decode a single frame */
int s5p_mfc_decode_one_frame_v5(struct s5p_mfc_ctx *ctx,
					enum s5p_mfc_decode_arg last_frame)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	mfc_write(dev, ctx->dec_dst_flag, S5P_FIMV_SI_CH0_RELEASE_BUF);
	s5p_mfc_set_shared_buffer(ctx);
	s5p_mfc_set_flush(ctx, ctx->dpb_flush_flag);
	/* Issue different commands to the instance depending on whether it
	 * is the last frame or not. */
	switch (last_frame) {
	case MFC_DEC_FRAME:
		mfc_write(dev, ((S5P_FIMV_CH_FRAME_START & S5P_FIMV_CH_MASK) <<
		S5P_FIMV_CH_SHIFT) | (ctx->inst_no), S5P_FIMV_SI_CH0_INST_ID);
		break;
	case MFC_DEC_LAST_FRAME:
		mfc_write(dev, ((S5P_FIMV_CH_LAST_FRAME & S5P_FIMV_CH_MASK) <<
		S5P_FIMV_CH_SHIFT) | (ctx->inst_no), S5P_FIMV_SI_CH0_INST_ID);
		break;
	case MFC_DEC_RES_CHANGE:
		mfc_write(dev, ((S5P_FIMV_CH_FRAME_START_REALLOC &
		S5P_FIMV_CH_MASK) << S5P_FIMV_CH_SHIFT) | (ctx->inst_no),
		S5P_FIMV_SI_CH0_INST_ID);
		break;
	}
	mfc_debug(2, "Decoding a usual frame\n");
	return 0;
}
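/* Initialize encoding */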
int s5p_mfc_init_encode_v5(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	if (ctx->codec_mode == S5P_MFC_CODEC_H264_ENC)
		s5p_mfc_set_enc_params_h264(ctx);
	else if (ctx->codec_mode == S5P_MFC_CODEC_MPEG4_ENC)
		s5p_mfc_set_enc_params_mpeg4(ctx);
	else if (ctx->codec_mode == S5P_MFC_CODEC_H263_ENC)
		s5p_mfc_set_enc_params_h263(ctx);
	else {
		mfc_err("Unknown codec for encoding (%x)\n",
			ctx->codec_mode);
		return -EINVAL;
	}
	s5p_mfc_set_shared_buffer(ctx);
	mfc_write(dev, ((S5P_FIMV_CH_SEQ_HEADER << 16) & 0x70000) |
		(ctx->inst_no), S5P_FIMV_SI_CH0_INST_ID);
	return 0;
}
/* Encode a single frame */
int s5p_mfc_encode_one_frame_v5(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	int cmd;
	/* memory structure cur. frame */
	if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV12M)
		mfc_write(dev, 0, S5P_FIMV_ENC_MAP_FOR_CUR);
	else if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV12MT)
		mfc_write(dev, 3, S5P_FIMV_ENC_MAP_FOR_CUR);
	s5p_mfc_set_shared_buffer(ctx);
	if (ctx->state == MFCINST_FINISHING)
		cmd = S5P_FIMV_CH_LAST_FRAME;
	else
		cmd = S5P_FIMV_CH_FRAME_START;
	mfc_write(dev, ((cmd & S5P_FIMV_CH_MASK) << S5P_FIMV_CH_SHIFT)
		| (ctx->inst_no), S5P_FIMV_SI_CH0_INST_ID);
	return 0;
}
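/* Find the next context that is scheduled for processing */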
static int s5p_mfc_get_new_ctx(struct s5p_mfc_dev *dev)
{
	unsigned long flags;
	int new_ctx;
	int cnt;
	spin_lock_irqsave(&dev->condlock, flags);
	new_ctx = (dev->curr_ctx + 1) % MFC_NUM_CONTEXTS;
	cnt = 0;
	while (!test_bit(new_ctx, &dev->ctx_work_bits)) {
		new_ctx = (new_ctx + 1) % MFC_NUM_CONTEXTS;
		if (++cnt > MFC_NUM_CONTEXTS) {
			/* No contexts to run */
			spin_unlock_irqrestore(&dev->condlock, flags);
			return -EAGAIN;
		}
	}
	spin_unlock_irqrestore(&dev->condlock, flags);
	return new_ctx;
}
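/* Restart decoding after a resolution change has been detected */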
static void s5p_mfc_run_res_change(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	s5p_mfc_set_dec_stream_buffer_v5(ctx, 0, 0, 0);
	dev->curr_ctx = ctx->num;
	s5p_mfc_clean_ctx_int_flags(ctx);
	s5p_mfc_decode_one_frame_v5(ctx, MFC_DEC_RES_CHANGE);
}
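/* Pass the next queued source buffer to the hardware and decode one frame */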
static int s5p_mfc_run_dec_frame(struct s5p_mfc_ctx *ctx, int last_frame)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	struct s5p_mfc_buf *temp_vb;
	unsigned long flags;
	unsigned int index;
	spin_lock_irqsave(&dev->irqlock, flags);
	/* Frames are being decoded */
	if (list_empty(&ctx->src_queue)) {
		mfc_debug(2, "No src buffers\n");
		spin_unlock_irqrestore(&dev->irqlock, flags);
		return -EAGAIN;
	}
	/* Get the next source buffer */
	temp_vb = list_entry(ctx->src_queue.next, struct s5p_mfc_buf, list);
	temp_vb->flags |= MFC_BUF_FLAG_USED;
	s5p_mfc_set_dec_stream_buffer_v5(ctx,
		vb2_dma_contig_plane_dma_addr(temp_vb->b, 0),
		ctx->consumed_stream, temp_vb->b->v4l2_planes[0].bytesused);
	spin_unlock_irqrestore(&dev->irqlock, flags);
	index = temp_vb->b->v4l2_buf.index;
	dev->curr_ctx = ctx->num;
	s5p_mfc_clean_ctx_int_flags(ctx);
	if (temp_vb->b->v4l2_planes[0].bytesused == 0) {
		last_frame = MFC_DEC_LAST_FRAME;
		mfc_debug(2, "Setting ctx->state to FINISHING\n");
		ctx->state = MFCINST_FINISHING;
	}
	s5p_mfc_decode_one_frame_v5(ctx, last_frame);
	return 0;
}
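/* Set up the source frame and destination stream buffers and encode one
 * frame */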
static int s5p_mfc_run_enc_frame(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	unsigned long flags;
	struct s5p_mfc_buf *dst_mb;
	struct s5p_mfc_buf *src_mb;
	unsigned long src_y_addr, src_c_addr, dst_addr;
	unsigned int dst_size;
	spin_lock_irqsave(&dev->irqlock, flags);
	if (list_empty(&ctx->src_queue) && ctx->state != MFCINST_FINISHING) {
		mfc_debug(2, "no src buffers\n");
		spin_unlock_irqrestore(&dev->irqlock, flags);
		return -EAGAIN;
	}
	if (list_empty(&ctx->dst_queue)) {
		mfc_debug(2, "no dst buffers\n");
		spin_unlock_irqrestore(&dev->irqlock, flags);
		return -EAGAIN;
	}
	if (list_empty(&ctx->src_queue)) {
		/* send null frame */
		s5p_mfc_set_enc_frame_buffer_v5(ctx, dev->bank2, dev->bank2);
		src_mb = NULL;
	} else {
		src_mb = list_entry(ctx->src_queue.next, struct s5p_mfc_buf,
									list);
		src_mb->flags |= MFC_BUF_FLAG_USED;
		if (src_mb->b->v4l2_planes[0].bytesused == 0) {
			/* send null frame */
			s5p_mfc_set_enc_frame_buffer_v5(ctx, dev->bank2,
								dev->bank2);
			ctx->state = MFCINST_FINISHING;
		} else {
			src_y_addr = vb2_dma_contig_plane_dma_addr(src_mb->b,
									0);
			src_c_addr = vb2_dma_contig_plane_dma_addr(src_mb->b,
									1);
			s5p_mfc_set_enc_frame_buffer_v5(ctx, src_y_addr,
								src_c_addr);
			if (src_mb->flags & MFC_BUF_FLAG_EOS)
				ctx->state = MFCINST_FINISHING;
		}
	}
	dst_mb = list_entry(ctx->dst_queue.next, struct s5p_mfc_buf, list);
	dst_mb->flags |= MFC_BUF_FLAG_USED;
	dst_addr = vb2_dma_contig_plane_dma_addr(dst_mb->b, 0);
	dst_size = vb2_plane_size(dst_mb->b, 0);
	s5p_mfc_set_enc_stream_buffer_v5(ctx, dst_addr, dst_size);
	spin_unlock_irqrestore(&dev->irqlock, flags);
	dev->curr_ctx = ctx->num;
	s5p_mfc_clean_ctx_int_flags(ctx);
	mfc_debug(2, "encoding buffer with index=%d state=%d",
			src_mb ? src_mb->b->v4l2_buf.index : -1, ctx->state);
	s5p_mfc_encode_one_frame_v5(ctx);
	return 0;
}
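/* Prepare header parsing for a newly opened decoding instance */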
static void s5p_mfc_run_init_dec(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	unsigned long flags;
	struct s5p_mfc_buf *temp_vb;
	/* Initializing decoding - parsing header */
	spin_lock_irqsave(&dev->irqlock, flags);
	mfc_debug(2, "Preparing to init decoding\n");
	temp_vb = list_entry(ctx->src_queue.next, struct s5p_mfc_buf, list);
	s5p_mfc_set_dec_desc_buffer(ctx);
	mfc_debug(2, "Header size: %d\n", temp_vb->b->v4l2_planes[0].bytesused);
	s5p_mfc_set_dec_stream_buffer_v5(ctx,
		vb2_dma_contig_plane_dma_addr(temp_vb->b, 0),
		0, temp_vb->b->v4l2_planes[0].bytesused);
	spin_unlock_irqrestore(&dev->irqlock, flags);
	dev->curr_ctx = ctx->num;
	s5p_mfc_clean_ctx_int_flags(ctx);
	s5p_mfc_init_decode_v5(ctx);
}
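/* Prepare stream header generation for a newly opened encoding instance */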
static void s5p_mfc_run_init_enc(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	unsigned long flags;
	struct s5p_mfc_buf *dst_mb;
	unsigned long dst_addr;
	unsigned int dst_size;
	s5p_mfc_set_enc_ref_buffer_v5(ctx);
	spin_lock_irqsave(&dev->irqlock, flags);
	dst_mb = list_entry(ctx->dst_queue.next, struct s5p_mfc_buf, list);
	dst_addr = vb2_dma_contig_plane_dma_addr(dst_mb->b, 0);
	dst_size = vb2_plane_size(dst_mb->b, 0);
	s5p_mfc_set_enc_stream_buffer_v5(ctx, dst_addr, dst_size);
	spin_unlock_irqrestore(&dev->irqlock, flags);
	dev->curr_ctx = ctx->num;
	s5p_mfc_clean_ctx_int_flags(ctx);
	s5p_mfc_init_encode_v5(ctx);
}

static int s5p_mfc_run_init_dec_buffers(struct s5p_mfc_ctx *ctx)
{
	struct s5p_mfc_dev *dev = ctx->dev;
	unsigned long flags;
	struct s5p_mfc_buf *temp_vb;
	int ret;

	/*
	 * Header was parsed; now start processing.
	 * First set the output frame buffers.
	 */
	if (ctx->capture_state != QUEUE_BUFS_MMAPED) {
		mfc_err("It seems that not all destination buffers were "
			"mmapped\nMFC requires that all destination buffers "
			"are mmapped before starting processing\n");
		return -EAGAIN;
	}
	spin_lock_irqsave(&dev->irqlock, flags);
	if (list_empty(&ctx->src_queue)) {
		mfc_err("Header has been deallocated in the middle of "
			"initialization\n");
		spin_unlock_irqrestore(&dev->irqlock, flags);
		return -EIO;
	}
	temp_vb = list_entry(ctx->src_queue.next, struct s5p_mfc_buf, list);
	mfc_debug(2, "Header size: %d\n", temp_vb->b->v4l2_planes[0].bytesused);
	s5p_mfc_set_dec_stream_buffer_v5(ctx,
			vb2_dma_contig_plane_dma_addr(temp_vb->b, 0),
			0, temp_vb->b->v4l2_planes[0].bytesused);
	spin_unlock_irqrestore(&dev->irqlock, flags);
	dev->curr_ctx = ctx->num;
	s5p_mfc_clean_ctx_int_flags(ctx);
	ret = s5p_mfc_set_dec_frame_buffer_v5(ctx);
	if (ret) {
		mfc_err("Failed to alloc frame mem\n");
		ctx->state = MFCINST_ERROR;
	}
	return ret;
}

/* Try running an operation on hardware */
void s5p_mfc_try_run_v5(struct s5p_mfc_dev *dev)
{
	struct s5p_mfc_ctx *ctx;
	int new_ctx;
	unsigned int ret = 0;

	if (test_bit(0, &dev->enter_suspend)) {
		mfc_debug(1, "Entering suspend so do not schedule any jobs\n");
		return;
	}
	/* Check whether hardware is not running */
	if (test_and_set_bit(0, &dev->hw_lock) != 0) {
		/* This is perfectly ok, the scheduled ctx should wait */
		mfc_debug(1, "Couldn't lock HW\n");
		return;
	}
	/* Choose the context to run */
	new_ctx = s5p_mfc_get_new_ctx(dev);
	if (new_ctx < 0) {
		/* No contexts to run */
		if (test_and_clear_bit(0, &dev->hw_lock) == 0) {
			mfc_err("Failed to unlock hardware\n");
			return;
		}
		mfc_debug(1, "No ctx is scheduled to be run\n");
		return;
	}
	ctx = dev->ctx[new_ctx];
	/* Got context to run in ctx */
	/*
	 * Last frame has already been sent to MFC.
	 * Now obtaining frames from MFC buffer.
	 */
	s5p_mfc_clock_on();
	if (ctx->type == MFCINST_DECODER) {
		s5p_mfc_set_dec_desc_buffer(ctx);
		switch (ctx->state) {
		case MFCINST_FINISHING:
			s5p_mfc_run_dec_frame(ctx, MFC_DEC_LAST_FRAME);
			break;
		case MFCINST_RUNNING:
			ret = s5p_mfc_run_dec_frame(ctx, MFC_DEC_FRAME);
			break;
		case MFCINST_INIT:
			s5p_mfc_clean_ctx_int_flags(ctx);
			ret = s5p_mfc_hw_call(dev->mfc_cmds, open_inst_cmd,
					ctx);
			break;
		case MFCINST_RETURN_INST:
			s5p_mfc_clean_ctx_int_flags(ctx);
			ret = s5p_mfc_hw_call(dev->mfc_cmds, close_inst_cmd,
					ctx);
			break;
		case MFCINST_GOT_INST:
			s5p_mfc_run_init_dec(ctx);
			break;
		case MFCINST_HEAD_PARSED:
			ret = s5p_mfc_run_init_dec_buffers(ctx);
			mfc_debug(1, "head parsed\n");
			break;
		case MFCINST_RES_CHANGE_INIT:
			s5p_mfc_run_res_change(ctx);
			break;
		case MFCINST_RES_CHANGE_FLUSH:
			s5p_mfc_run_dec_frame(ctx, MFC_DEC_FRAME);
			break;
		case MFCINST_RES_CHANGE_END:
			mfc_debug(2, "Finished remaining frames after resolution change\n");
			ctx->capture_state = QUEUE_FREE;
			mfc_debug(2, "Will re-init the codec\n");
			s5p_mfc_run_init_dec(ctx);
			break;
		default:
			ret = -EAGAIN;
		}
	} else if (ctx->type == MFCINST_ENCODER) {
		switch (ctx->state) {
		case MFCINST_FINISHING:
		case MFCINST_RUNNING:
			ret = s5p_mfc_run_enc_frame(ctx);
			break;
		case MFCINST_INIT:
			s5p_mfc_clean_ctx_int_flags(ctx);
			ret = s5p_mfc_hw_call(dev->mfc_cmds, open_inst_cmd,
					ctx);
			break;
		case MFCINST_RETURN_INST:
			s5p_mfc_clean_ctx_int_flags(ctx);
			ret = s5p_mfc_hw_call(dev->mfc_cmds, close_inst_cmd,
					ctx);
			break;
		case MFCINST_GOT_INST:
			s5p_mfc_run_init_enc(ctx);
			break;
		default:
			ret = -EAGAIN;
		}
	} else {
		mfc_err("Invalid context type: %d\n", ctx->type);
		ret = -EAGAIN;
	}
	if (ret) {
		/* Free hardware lock */
		if (test_and_clear_bit(0, &dev->hw_lock) == 0)
			mfc_err("Failed to unlock hardware\n");
		/*
		 * This is indeed important: since no operation has been
		 * scheduled, drop the clock reference here, because no
		 * interrupt related to this try_run will ever come from
		 * the hardware to do it for us.
		 */
		s5p_mfc_clock_off();
	}
}
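
/* Return every buffer on the given queue list to vb2 in the error state */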
void s5p_mfc_cleanup_queue_v5(struct list_head *lh, struct vb2_queue *vq)
{
	struct s5p_mfc_buf *b;
	int i;

	while (!list_empty(lh)) {
		b = list_entry(lh->next, struct s5p_mfc_buf, list);
		for (i = 0; i < b->b->num_planes; i++)
			vb2_set_plane_payload(b->b, i, 0);
		vb2_buffer_done(b->b, VB2_BUF_STATE_ERROR);
		list_del(&b->list);
	}
}
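
/*
 * Acknowledge a pending interrupt: clear the RISC-to-host command
 * registers and invalidate the returned channel ID.
 */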
void s5p_mfc_clear_int_flags_v5(struct s5p_mfc_dev *dev)
{
	mfc_write(dev, 0, S5P_FIMV_RISC_HOST_INT);
	mfc_write(dev, 0, S5P_FIMV_RISC2HOST_CMD);
	mfc_write(dev, 0xffff, S5P_FIMV_SI_RTN_CHID);
}

int s5p_mfc_get_dspl_y_adr_v5(struct s5p_mfc_dev *dev)
{
	return mfc_read(dev, S5P_FIMV_SI_DISPLAY_Y_ADR) << MFC_OFFSET_SHIFT;
}

int s5p_mfc_get_dec_y_adr_v5(struct s5p_mfc_dev *dev)
{
	return mfc_read(dev, S5P_FIMV_SI_DECODE_Y_ADR) << MFC_OFFSET_SHIFT;
}

int s5p_mfc_get_dspl_status_v5(struct s5p_mfc_dev *dev)
{
	return mfc_read(dev, S5P_FIMV_SI_DISPLAY_STATUS);
}

int s5p_mfc_get_dec_status_v5(struct s5p_mfc_dev *dev)
{
	return mfc_read(dev, S5P_FIMV_SI_DECODE_STATUS);
}

int s5p_mfc_get_dec_frame_type_v5(struct s5p_mfc_dev *dev)
{
	return mfc_read(dev, S5P_FIMV_DECODE_FRAME_TYPE) &
		S5P_FIMV_DECODE_FRAME_MASK;
}

int s5p_mfc_get_disp_frame_type_v5(struct s5p_mfc_ctx *ctx)
{
	return (s5p_mfc_read_info_v5(ctx, DISP_PIC_FRAME_TYPE) >>
		S5P_FIMV_SHARED_DISP_FRAME_TYPE_SHIFT) &
		S5P_FIMV_DECODE_FRAME_MASK;
}

int s5p_mfc_get_consumed_stream_v5(struct s5p_mfc_dev *dev)
{
	return mfc_read(dev, S5P_FIMV_SI_CONSUMED_BYTES);
}
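
/*
 * Map the firmware's RISC2HOST command code onto the driver's generic
 * interrupt reason codes.
 */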
int s5p_mfc_get_int_reason_v5(struct s5p_mfc_dev *dev)
{
	int reason;

	reason = mfc_read(dev, S5P_FIMV_RISC2HOST_CMD) &
					S5P_FIMV_RISC2HOST_CMD_MASK;
	switch (reason) {
	case S5P_FIMV_R2H_CMD_OPEN_INSTANCE_RET:
		reason = S5P_MFC_R2H_CMD_OPEN_INSTANCE_RET;
		break;
	case S5P_FIMV_R2H_CMD_CLOSE_INSTANCE_RET:
		reason = S5P_MFC_R2H_CMD_CLOSE_INSTANCE_RET;
		break;
	case S5P_FIMV_R2H_CMD_SEQ_DONE_RET:
		reason = S5P_MFC_R2H_CMD_SEQ_DONE_RET;
		break;
	case S5P_FIMV_R2H_CMD_FRAME_DONE_RET:
		reason = S5P_MFC_R2H_CMD_FRAME_DONE_RET;
		break;
	case S5P_FIMV_R2H_CMD_SLICE_DONE_RET:
		reason = S5P_MFC_R2H_CMD_SLICE_DONE_RET;
		break;
	case S5P_FIMV_R2H_CMD_SYS_INIT_RET:
		reason = S5P_MFC_R2H_CMD_SYS_INIT_RET;
		break;
	case S5P_FIMV_R2H_CMD_FW_STATUS_RET:
		reason = S5P_MFC_R2H_CMD_FW_STATUS_RET;
		break;
	case S5P_FIMV_R2H_CMD_SLEEP_RET:
		reason = S5P_MFC_R2H_CMD_SLEEP_RET;
		break;
	case S5P_FIMV_R2H_CMD_WAKEUP_RET:
		reason = S5P_MFC_R2H_CMD_WAKEUP_RET;
		break;
	case S5P_FIMV_R2H_CMD_INIT_BUFFERS_RET:
		reason = S5P_MFC_R2H_CMD_INIT_BUFFERS_RET;
		break;
	case S5P_FIMV_R2H_CMD_ENC_COMPLETE_RET:
		reason = S5P_MFC_R2H_CMD_COMPLETE_SEQ_RET;
		break;
	case S5P_FIMV_R2H_CMD_ERR_RET:
		reason = S5P_MFC_R2H_CMD_ERR_RET;
		break;
	default:
		reason = S5P_MFC_R2H_CMD_EMPTY;
	}
	return reason;
}

int s5p_mfc_get_int_err_v5(struct s5p_mfc_dev *dev)
{
	return mfc_read(dev, S5P_FIMV_RISC2HOST_ARG2);
}

int s5p_mfc_err_dec_v5(unsigned int err)
{
	return (err & S5P_FIMV_ERR_DEC_MASK) >> S5P_FIMV_ERR_DEC_SHIFT;
}

int s5p_mfc_err_dspl_v5(unsigned int err)
{
	return (err & S5P_FIMV_ERR_DSPL_MASK) >> S5P_FIMV_ERR_DSPL_SHIFT;
}

int s5p_mfc_get_img_width_v5(struct s5p_mfc_dev *dev)
{
	return mfc_read(dev, S5P_FIMV_SI_HRESOL);
}

int s5p_mfc_get_img_height_v5(struct s5p_mfc_dev *dev)
{
	return mfc_read(dev, S5P_FIMV_SI_VRESOL);
}

int s5p_mfc_get_dpb_count_v5(struct s5p_mfc_dev *dev)
{
	return mfc_read(dev, S5P_FIMV_SI_BUF_NUMBER);
}

int s5p_mfc_get_mv_count_v5(struct s5p_mfc_dev *dev)
{
	/* NOP */
	return -1;
}

int s5p_mfc_get_inst_no_v5(struct s5p_mfc_dev *dev)
{
	return mfc_read(dev, S5P_FIMV_RISC2HOST_ARG1);
}

int s5p_mfc_get_enc_strm_size_v5(struct s5p_mfc_dev *dev)
{
	return mfc_read(dev, S5P_FIMV_ENC_SI_STRM_SIZE);
}

int s5p_mfc_get_enc_slice_type_v5(struct s5p_mfc_dev *dev)
{
	return mfc_read(dev, S5P_FIMV_ENC_SI_SLICE_TYPE);
}

int s5p_mfc_get_enc_dpb_count_v5(struct s5p_mfc_dev *dev)
{
	return -1;
}

int s5p_mfc_get_enc_pic_count_v5(struct s5p_mfc_dev *dev)
{
	return mfc_read(dev, S5P_FIMV_ENC_SI_PIC_CNT);
}

int s5p_mfc_get_sei_avail_status_v5(struct s5p_mfc_ctx *ctx)
{
	return s5p_mfc_read_info_v5(ctx, FRAME_PACK_SEI_AVAIL);
}

int s5p_mfc_get_mvc_num_views_v5(struct s5p_mfc_dev *dev)
{
	return -1;
}

int s5p_mfc_get_mvc_view_id_v5(struct s5p_mfc_dev *dev)
{
	return -1;
}

unsigned int s5p_mfc_get_pic_type_top_v5(struct s5p_mfc_ctx *ctx)
{
	return s5p_mfc_read_info_v5(ctx, PIC_TIME_TOP);
}

unsigned int s5p_mfc_get_pic_type_bot_v5(struct s5p_mfc_ctx *ctx)
{
	return s5p_mfc_read_info_v5(ctx, PIC_TIME_BOT);
}

unsigned int s5p_mfc_get_crop_info_h_v5(struct s5p_mfc_ctx *ctx)
{
	return s5p_mfc_read_info_v5(ctx, CROP_INFO_H);
}

unsigned int s5p_mfc_get_crop_info_v_v5(struct s5p_mfc_ctx *ctx)
{
	return s5p_mfc_read_info_v5(ctx, CROP_INFO_V);
}

/* Initialize opr function pointers for MFC v5 */
static struct s5p_mfc_hw_ops s5p_mfc_ops_v5 = {
	.alloc_dec_temp_buffers = s5p_mfc_alloc_dec_temp_buffers_v5,
	.release_dec_desc_buffer = s5p_mfc_release_dec_desc_buffer_v5,
	.alloc_codec_buffers = s5p_mfc_alloc_codec_buffers_v5,
	.release_codec_buffers = s5p_mfc_release_codec_buffers_v5,
	.alloc_instance_buffer = s5p_mfc_alloc_instance_buffer_v5,
	.release_instance_buffer = s5p_mfc_release_instance_buffer_v5,
	.alloc_dev_context_buffer = s5p_mfc_alloc_dev_context_buffer_v5,
	.release_dev_context_buffer = s5p_mfc_release_dev_context_buffer_v5,
	.dec_calc_dpb_size = s5p_mfc_dec_calc_dpb_size_v5,
	.enc_calc_src_size = s5p_mfc_enc_calc_src_size_v5,
	.set_dec_stream_buffer = s5p_mfc_set_dec_stream_buffer_v5,
	.set_dec_frame_buffer = s5p_mfc_set_dec_frame_buffer_v5,
	.set_enc_stream_buffer = s5p_mfc_set_enc_stream_buffer_v5,
	.set_enc_frame_buffer = s5p_mfc_set_enc_frame_buffer_v5,
	.get_enc_frame_buffer = s5p_mfc_get_enc_frame_buffer_v5,
	.set_enc_ref_buffer = s5p_mfc_set_enc_ref_buffer_v5,
	.init_decode = s5p_mfc_init_decode_v5,
	.init_encode = s5p_mfc_init_encode_v5,
	.encode_one_frame = s5p_mfc_encode_one_frame_v5,
	.try_run = s5p_mfc_try_run_v5,
	.cleanup_queue = s5p_mfc_cleanup_queue_v5,
	.clear_int_flags = s5p_mfc_clear_int_flags_v5,
	.write_info = s5p_mfc_write_info_v5,
	.read_info = s5p_mfc_read_info_v5,
	.get_dspl_y_adr = s5p_mfc_get_dspl_y_adr_v5,
	.get_dec_y_adr = s5p_mfc_get_dec_y_adr_v5,
	.get_dspl_status = s5p_mfc_get_dspl_status_v5,
	.get_dec_status = s5p_mfc_get_dec_status_v5,
	.get_dec_frame_type = s5p_mfc_get_dec_frame_type_v5,
	.get_disp_frame_type = s5p_mfc_get_disp_frame_type_v5,
	.get_consumed_stream = s5p_mfc_get_consumed_stream_v5,
	.get_int_reason = s5p_mfc_get_int_reason_v5,
	.get_int_err = s5p_mfc_get_int_err_v5,
	.err_dec = s5p_mfc_err_dec_v5,
	.err_dspl = s5p_mfc_err_dspl_v5,
	.get_img_width = s5p_mfc_get_img_width_v5,
	.get_img_height = s5p_mfc_get_img_height_v5,
	.get_dpb_count = s5p_mfc_get_dpb_count_v5,
	.get_mv_count = s5p_mfc_get_mv_count_v5,
	.get_inst_no = s5p_mfc_get_inst_no_v5,
	.get_enc_strm_size = s5p_mfc_get_enc_strm_size_v5,
	.get_enc_slice_type = s5p_mfc_get_enc_slice_type_v5,
	.get_enc_dpb_count = s5p_mfc_get_enc_dpb_count_v5,
	.get_enc_pic_count = s5p_mfc_get_enc_pic_count_v5,
	.get_sei_avail_status = s5p_mfc_get_sei_avail_status_v5,
	.get_mvc_num_views = s5p_mfc_get_mvc_num_views_v5,
	.get_mvc_view_id = s5p_mfc_get_mvc_view_id_v5,
	.get_pic_type_top = s5p_mfc_get_pic_type_top_v5,
	.get_pic_type_bot = s5p_mfc_get_pic_type_bot_v5,
	.get_crop_info_h = s5p_mfc_get_crop_info_h_v5,
	.get_crop_info_v = s5p_mfc_get_crop_info_v_v5,
};

struct s5p_mfc_hw_ops *s5p_mfc_init_hw_ops_v5(void)
{
	return &s5p_mfc_ops_v5;
}