ppc4xx_dma.c 18 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712
  1. /*
  2. * arch/ppc/kernel/ppc4xx_dma.c
  3. *
  4. * IBM PPC4xx DMA engine core library
  5. *
  6. * Copyright 2000-2004 MontaVista Software Inc.
  7. *
  8. * Cleaned up and converted to new DCR access
  9. * Matt Porter <mporter@kernel.crashing.org>
  10. *
  11. * Original code by Armin Kuster <akuster@mvista.com>
  12. * and Pete Popov <ppopov@mvista.com>
  13. *
  14. * This program is free software; you can redistribute it and/or modify it
  15. * under the terms of the GNU General Public License as published by the
  16. * Free Software Foundation; either version 2 of the License, or (at your
  17. * option) any later version.
  18. *
  19. * You should have received a copy of the GNU General Public License along
  20. * with this program; if not, write to the Free Software Foundation, Inc.,
  21. * 675 Mass Ave, Cambridge, MA 02139, USA.
  22. */
  23. #include <linux/config.h>
  24. #include <linux/kernel.h>
  25. #include <linux/mm.h>
  26. #include <linux/miscdevice.h>
  27. #include <linux/init.h>
  28. #include <linux/module.h>
  29. #include <asm/system.h>
  30. #include <asm/io.h>
  31. #include <asm/ppc4xx_dma.h>
  32. ppc_dma_ch_t dma_channels[MAX_PPC4xx_DMA_CHANNELS];
  33. int
  34. ppc4xx_get_dma_status(void)
  35. {
  36. return (mfdcr(DCRN_DMASR));
  37. }
  38. void
  39. ppc4xx_set_src_addr(int dmanr, phys_addr_t src_addr)
  40. {
  41. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  42. printk("set_src_addr: bad channel: %d\n", dmanr);
  43. return;
  44. }
  45. #ifdef PPC4xx_DMA_64BIT
  46. mtdcr(DCRN_DMASAH0 + dmanr*2, (u32)(src_addr >> 32));
  47. #else
  48. mtdcr(DCRN_DMASA0 + dmanr*2, (u32)src_addr);
  49. #endif
  50. }
  51. void
  52. ppc4xx_set_dst_addr(int dmanr, phys_addr_t dst_addr)
  53. {
  54. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  55. printk("set_dst_addr: bad channel: %d\n", dmanr);
  56. return;
  57. }
  58. #ifdef PPC4xx_DMA_64BIT
  59. mtdcr(DCRN_DMADAH0 + dmanr*2, (u32)(dst_addr >> 32));
  60. #else
  61. mtdcr(DCRN_DMADA0 + dmanr*2, (u32)dst_addr);
  62. #endif
  63. }
  64. void
  65. ppc4xx_enable_dma(unsigned int dmanr)
  66. {
  67. unsigned int control;
  68. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  69. unsigned int status_bits[] = { DMA_CS0 | DMA_TS0 | DMA_CH0_ERR,
  70. DMA_CS1 | DMA_TS1 | DMA_CH1_ERR,
  71. DMA_CS2 | DMA_TS2 | DMA_CH2_ERR,
  72. DMA_CS3 | DMA_TS3 | DMA_CH3_ERR};
  73. if (p_dma_ch->in_use) {
  74. printk("enable_dma: channel %d in use\n", dmanr);
  75. return;
  76. }
  77. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  78. printk("enable_dma: bad channel: %d\n", dmanr);
  79. return;
  80. }
  81. if (p_dma_ch->mode == DMA_MODE_READ) {
  82. /* peripheral to memory */
  83. ppc4xx_set_src_addr(dmanr, 0);
  84. ppc4xx_set_dst_addr(dmanr, p_dma_ch->addr);
  85. } else if (p_dma_ch->mode == DMA_MODE_WRITE) {
  86. /* memory to peripheral */
  87. ppc4xx_set_src_addr(dmanr, p_dma_ch->addr);
  88. ppc4xx_set_dst_addr(dmanr, 0);
  89. }
  90. /* for other xfer modes, the addresses are already set */
  91. control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
  92. control &= ~(DMA_TM_MASK | DMA_TD); /* clear all mode bits */
  93. if (p_dma_ch->mode == DMA_MODE_MM) {
  94. /* software initiated memory to memory */
  95. control |= DMA_ETD_OUTPUT | DMA_TCE_ENABLE;
  96. }
  97. mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
  98. /*
  99. * Clear the CS, TS, RI bits for the channel from DMASR. This
  100. * has been observed to happen correctly only after the mode and
  101. * ETD/DCE bits in DMACRx are set above. Must do this before
  102. * enabling the channel.
  103. */
  104. mtdcr(DCRN_DMASR, status_bits[dmanr]);
  105. /*
  106. * For device-paced transfers, Terminal Count Enable apparently
  107. * must be on, and this must be turned on after the mode, etc.
  108. * bits are cleared above (at least on Redwood-6).
  109. */
  110. if ((p_dma_ch->mode == DMA_MODE_MM_DEVATDST) ||
  111. (p_dma_ch->mode == DMA_MODE_MM_DEVATSRC))
  112. control |= DMA_TCE_ENABLE;
  113. /*
  114. * Now enable the channel.
  115. */
  116. control |= (p_dma_ch->mode | DMA_CE_ENABLE);
  117. mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
  118. p_dma_ch->in_use = 1;
  119. }
  120. void
  121. ppc4xx_disable_dma(unsigned int dmanr)
  122. {
  123. unsigned int control;
  124. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  125. if (!p_dma_ch->in_use) {
  126. printk("disable_dma: channel %d not in use\n", dmanr);
  127. return;
  128. }
  129. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  130. printk("disable_dma: bad channel: %d\n", dmanr);
  131. return;
  132. }
  133. control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
  134. control &= ~DMA_CE_ENABLE;
  135. mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
  136. p_dma_ch->in_use = 0;
  137. }
  138. /*
  139. * Sets the dma mode for single DMA transfers only.
  140. * For scatter/gather transfers, the mode is passed to the
  141. * alloc_dma_handle() function as one of the parameters.
  142. *
  143. * The mode is simply saved and used later. This allows
  144. * the driver to call set_dma_mode() and set_dma_addr() in
  145. * any order.
  146. *
  147. * Valid mode values are:
  148. *
  149. * DMA_MODE_READ peripheral to memory
  150. * DMA_MODE_WRITE memory to peripheral
  151. * DMA_MODE_MM memory to memory
  152. * DMA_MODE_MM_DEVATSRC device-paced memory to memory, device at src
  153. * DMA_MODE_MM_DEVATDST device-paced memory to memory, device at dst
  154. */
  155. int
  156. ppc4xx_set_dma_mode(unsigned int dmanr, unsigned int mode)
  157. {
  158. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  159. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  160. printk("set_dma_mode: bad channel 0x%x\n", dmanr);
  161. return DMA_STATUS_BAD_CHANNEL;
  162. }
  163. p_dma_ch->mode = mode;
  164. return DMA_STATUS_GOOD;
  165. }
  166. /*
  167. * Sets the DMA Count register. Note that 'count' is in bytes.
  168. * However, the DMA Count register counts the number of "transfers",
  169. * where each transfer is equal to the bus width. Thus, count
  170. * MUST be a multiple of the bus width.
  171. */
  172. void
  173. ppc4xx_set_dma_count(unsigned int dmanr, unsigned int count)
  174. {
  175. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  176. #ifdef DEBUG_4xxDMA
  177. {
  178. int error = 0;
  179. switch (p_dma_ch->pwidth) {
  180. case PW_8:
  181. break;
  182. case PW_16:
  183. if (count & 0x1)
  184. error = 1;
  185. break;
  186. case PW_32:
  187. if (count & 0x3)
  188. error = 1;
  189. break;
  190. case PW_64:
  191. if (count & 0x7)
  192. error = 1;
  193. break;
  194. default:
  195. printk("set_dma_count: invalid bus width: 0x%x\n",
  196. p_dma_ch->pwidth);
  197. return;
  198. }
  199. if (error)
  200. printk
  201. ("Warning: set_dma_count count 0x%x bus width %d\n",
  202. count, p_dma_ch->pwidth);
  203. }
  204. #endif
  205. count = count >> p_dma_ch->shift;
  206. mtdcr(DCRN_DMACT0 + (dmanr * 0x8), count);
  207. }
  208. /*
  209. * Returns the number of bytes left to be transfered.
  210. * After a DMA transfer, this should return zero.
  211. * Reading this while a DMA transfer is still in progress will return
  212. * unpredictable results.
  213. */
  214. int
  215. ppc4xx_get_dma_residue(unsigned int dmanr)
  216. {
  217. unsigned int count;
  218. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  219. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  220. printk("ppc4xx_get_dma_residue: bad channel 0x%x\n", dmanr);
  221. return DMA_STATUS_BAD_CHANNEL;
  222. }
  223. count = mfdcr(DCRN_DMACT0 + (dmanr * 0x8));
  224. return (count << p_dma_ch->shift);
  225. }
  226. /*
  227. * Sets the DMA address for a memory to peripheral or peripheral
  228. * to memory transfer. The address is just saved in the channel
  229. * structure for now and used later in enable_dma().
  230. */
  231. void
  232. ppc4xx_set_dma_addr(unsigned int dmanr, phys_addr_t addr)
  233. {
  234. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  235. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  236. printk("ppc4xx_set_dma_addr: bad channel: %d\n", dmanr);
  237. return;
  238. }
  239. #ifdef DEBUG_4xxDMA
  240. {
  241. int error = 0;
  242. switch (p_dma_ch->pwidth) {
  243. case PW_8:
  244. break;
  245. case PW_16:
  246. if ((unsigned) addr & 0x1)
  247. error = 1;
  248. break;
  249. case PW_32:
  250. if ((unsigned) addr & 0x3)
  251. error = 1;
  252. break;
  253. case PW_64:
  254. if ((unsigned) addr & 0x7)
  255. error = 1;
  256. break;
  257. default:
  258. printk("ppc4xx_set_dma_addr: invalid bus width: 0x%x\n",
  259. p_dma_ch->pwidth);
  260. return;
  261. }
  262. if (error)
  263. printk("Warning: ppc4xx_set_dma_addr addr 0x%x bus width %d\n",
  264. addr, p_dma_ch->pwidth);
  265. }
  266. #endif
  267. /* save dma address and program it later after we know the xfer mode */
  268. p_dma_ch->addr = addr;
  269. }
  270. /*
  271. * Sets both DMA addresses for a memory to memory transfer.
  272. * For memory to peripheral or peripheral to memory transfers
  273. * the function set_dma_addr() should be used instead.
  274. */
  275. void
  276. ppc4xx_set_dma_addr2(unsigned int dmanr, phys_addr_t src_dma_addr,
  277. phys_addr_t dst_dma_addr)
  278. {
  279. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  280. printk("ppc4xx_set_dma_addr2: bad channel: %d\n", dmanr);
  281. return;
  282. }
  283. #ifdef DEBUG_4xxDMA
  284. {
  285. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  286. int error = 0;
  287. switch (p_dma_ch->pwidth) {
  288. case PW_8:
  289. break;
  290. case PW_16:
  291. if (((unsigned) src_dma_addr & 0x1) ||
  292. ((unsigned) dst_dma_addr & 0x1)
  293. )
  294. error = 1;
  295. break;
  296. case PW_32:
  297. if (((unsigned) src_dma_addr & 0x3) ||
  298. ((unsigned) dst_dma_addr & 0x3)
  299. )
  300. error = 1;
  301. break;
  302. case PW_64:
  303. if (((unsigned) src_dma_addr & 0x7) ||
  304. ((unsigned) dst_dma_addr & 0x7)
  305. )
  306. error = 1;
  307. break;
  308. default:
  309. printk("ppc4xx_set_dma_addr2: invalid bus width: 0x%x\n",
  310. p_dma_ch->pwidth);
  311. return;
  312. }
  313. if (error)
  314. printk
  315. ("Warning: ppc4xx_set_dma_addr2 src 0x%x dst 0x%x bus width %d\n",
  316. src_dma_addr, dst_dma_addr, p_dma_ch->pwidth);
  317. }
  318. #endif
  319. ppc4xx_set_src_addr(dmanr, src_dma_addr);
  320. ppc4xx_set_dst_addr(dmanr, dst_dma_addr);
  321. }
  322. /*
  323. * Enables the channel interrupt.
  324. *
  325. * If performing a scatter/gatter transfer, this function
  326. * MUST be called before calling alloc_dma_handle() and building
  327. * the sgl list. Otherwise, interrupts will not be enabled, if
  328. * they were previously disabled.
  329. */
  330. int
  331. ppc4xx_enable_dma_interrupt(unsigned int dmanr)
  332. {
  333. unsigned int control;
  334. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  335. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  336. printk("ppc4xx_enable_dma_interrupt: bad channel: %d\n", dmanr);
  337. return DMA_STATUS_BAD_CHANNEL;
  338. }
  339. p_dma_ch->int_enable = 1;
  340. control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
  341. control |= DMA_CIE_ENABLE; /* Channel Interrupt Enable */
  342. mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
  343. return DMA_STATUS_GOOD;
  344. }
  345. /*
  346. * Disables the channel interrupt.
  347. *
  348. * If performing a scatter/gatter transfer, this function
  349. * MUST be called before calling alloc_dma_handle() and building
  350. * the sgl list. Otherwise, interrupts will not be disabled, if
  351. * they were previously enabled.
  352. */
  353. int
  354. ppc4xx_disable_dma_interrupt(unsigned int dmanr)
  355. {
  356. unsigned int control;
  357. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  358. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  359. printk("ppc4xx_disable_dma_interrupt: bad channel: %d\n", dmanr);
  360. return DMA_STATUS_BAD_CHANNEL;
  361. }
  362. p_dma_ch->int_enable = 0;
  363. control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
  364. control &= ~DMA_CIE_ENABLE; /* Channel Interrupt Enable */
  365. mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
  366. return DMA_STATUS_GOOD;
  367. }
  368. /*
  369. * Configures a DMA channel, including the peripheral bus width, if a
  370. * peripheral is attached to the channel, the polarity of the DMAReq and
  371. * DMAAck signals, etc. This information should really be setup by the boot
  372. * code, since most likely the configuration won't change dynamically.
  373. * If the kernel has to call this function, it's recommended that it's
  374. * called from platform specific init code. The driver should not need to
  375. * call this function.
  376. */
/*
 * Configure channel 'dmanr' from the settings in *p_init: write the
 * control register and (where present) the polarity register, cache
 * the configuration in dma_channels[], and clear DMASR.
 *
 * Returns DMA_STATUS_GOOD, DMA_STATUS_NULL_POINTER for a NULL p_init,
 * or DMA_STATUS_BAD_CHANNEL for a bad dmanr.
 */
int
ppc4xx_init_dma_channel(unsigned int dmanr, ppc_dma_ch_t * p_init)
{
	unsigned int polarity;
	uint32_t control = 0;
	/* NOTE(review): address formed before the bounds check below --
	 * harmless address-of, but validation-first would be cleaner. */
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	/* DMA_MODE_READ/WRITE are assignable globals on this platform,
	 * initialized here rather than at compile time. */
	DMA_MODE_READ = (unsigned long) DMA_TD;	/* Peripheral to Memory */
	DMA_MODE_WRITE = 0;	/* Memory to Peripheral */

	if (!p_init) {
		printk("ppc4xx_init_dma_channel: NULL p_init\n");
		return DMA_STATUS_NULL_POINTER;
	}

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_init_dma_channel: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	/* some 4xx variants have no polarity DCR at all */
#if DCRN_POL > 0
	polarity = mfdcr(DCRN_POL);
#else
	polarity = 0;
#endif

	/* Setup the control register based on the values passed to
	 * us in p_init. Then, over-write the control register with this
	 * new value.
	 */
	/* presumably SET_DMA_CONTROL expands using p_init's fields --
	 * see ppc4xx_dma.h; do not reorder relative to p_init checks. */
	control |= SET_DMA_CONTROL;

	/* clear all polarity signals and then "or" in new signal levels */
	polarity &= ~GET_DMA_POLARITY(dmanr);
	polarity |= p_init->polarity;
#if DCRN_POL > 0
	mtdcr(DCRN_POL, polarity);
#endif
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	/* save these values in our dma channel structure */
	memcpy(p_dma_ch, p_init, sizeof (ppc_dma_ch_t));

	/*
	 * The peripheral width values written in the control register are:
	 *   PW_8   0
	 *   PW_16  1
	 *   PW_32  2
	 *   PW_64  3
	 *
	 * Since the DMA count register takes the number of "transfers",
	 * we need to divide the count sent to us in certain
	 * functions by the appropriate number. It so happens that our
	 * right shift value is equal to the peripheral width value.
	 */
	p_dma_ch->shift = p_init->pwidth;

	/*
	 * Save the control word for easy access.
	 */
	p_dma_ch->control = control;

	mtdcr(DCRN_DMASR, 0xffffffff);	/* clear status register */
	return DMA_STATUS_GOOD;
}
  432. /*
  433. * This function returns the channel configuration.
  434. */
  435. int
  436. ppc4xx_get_channel_config(unsigned int dmanr, ppc_dma_ch_t * p_dma_ch)
  437. {
  438. unsigned int polarity;
  439. unsigned int control;
  440. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  441. printk("ppc4xx_get_channel_config: bad channel %d\n", dmanr);
  442. return DMA_STATUS_BAD_CHANNEL;
  443. }
  444. memcpy(p_dma_ch, &dma_channels[dmanr], sizeof (ppc_dma_ch_t));
  445. #if DCRN_POL > 0
  446. polarity = mfdcr(DCRN_POL);
  447. #else
  448. polarity = 0;
  449. #endif
  450. p_dma_ch->polarity = polarity & GET_DMA_POLARITY(dmanr);
  451. control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
  452. p_dma_ch->cp = GET_DMA_PRIORITY(control);
  453. p_dma_ch->pwidth = GET_DMA_PW(control);
  454. p_dma_ch->psc = GET_DMA_PSC(control);
  455. p_dma_ch->pwc = GET_DMA_PWC(control);
  456. p_dma_ch->phc = GET_DMA_PHC(control);
  457. p_dma_ch->ce = GET_DMA_CE_ENABLE(control);
  458. p_dma_ch->int_enable = GET_DMA_CIE_ENABLE(control);
  459. p_dma_ch->shift = GET_DMA_PW(control);
  460. #ifdef CONFIG_PPC4xx_EDMA
  461. p_dma_ch->pf = GET_DMA_PREFETCH(control);
  462. #else
  463. p_dma_ch->ch_enable = GET_DMA_CH(control);
  464. p_dma_ch->ece_enable = GET_DMA_ECE(control);
  465. p_dma_ch->tcd_disable = GET_DMA_TCD(control);
  466. #endif
  467. return DMA_STATUS_GOOD;
  468. }
  469. /*
  470. * Sets the priority for the DMA channel dmanr.
  471. * Since this is setup by the hardware init function, this function
  472. * can be used to dynamically change the priority of a channel.
  473. *
  474. * Acceptable priorities:
  475. *
  476. * PRIORITY_LOW
  477. * PRIORITY_MID_LOW
  478. * PRIORITY_MID_HIGH
  479. * PRIORITY_HIGH
  480. *
  481. */
/*
 * Set the hardware priority of channel 'dmanr' to one of the
 * PRIORITY_* values listed in the comment block above.
 */
int
ppc4xx_set_channel_priority(unsigned int dmanr, unsigned int priority)
{
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_channel_priority: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	if ((priority != PRIORITY_LOW) &&
	    (priority != PRIORITY_MID_LOW) &&
	    (priority != PRIORITY_MID_HIGH) && (priority != PRIORITY_HIGH)) {
		/* NOTE(review): only warns -- the bogus priority is still
		 * written below; confirm whether this should return early. */
		printk("ppc4xx_set_channel_priority: bad priority: 0x%x\n", priority);
	}

	/* NOTE(review): priority bits are OR-ed in without clearing the
	 * previous priority field, so lowering a channel's priority may
	 * not take effect -- verify against the DMACRx field layout. */
	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control |= SET_DMA_PRIORITY(priority);
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	return DMA_STATUS_GOOD;
}
  500. /*
  501. * Returns the width of the peripheral attached to this channel. This assumes
  502. * that someone who knows the hardware configuration, boot code or some other
  503. * init code, already set the width.
  504. *
  505. * The return value is one of:
  506. * PW_8
  507. * PW_16
  508. * PW_32
  509. * PW_64
  510. *
 * The function returns DMA_STATUS_BAD_CHANNEL (not 0) on error.
  512. */
/* Read back the peripheral-width field (PW_*) from channel dmanr's
 * control register. */
unsigned int
ppc4xx_get_peripheral_width(unsigned int dmanr)
{
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_get_peripheral_width: bad channel %d\n", dmanr);
		/* NOTE(review): the comment block above says 0 is returned
		 * on error, but the code returns DMA_STATUS_BAD_CHANNEL
		 * (converted to unsigned) -- callers must compare against
		 * that constant, not 0. */
		return DMA_STATUS_BAD_CHANNEL;
	}

	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	return (GET_DMA_PW(control));
}
  524. /*
  525. * Clears the channel status bits
  526. */
  527. int
  528. ppc4xx_clr_dma_status(unsigned int dmanr)
  529. {
  530. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  531. printk(KERN_ERR "ppc4xx_clr_dma_status: bad channel: %d\n", dmanr);
  532. return DMA_STATUS_BAD_CHANNEL;
  533. }
  534. mtdcr(DCRN_DMASR, ((u32)DMA_CH0_ERR | (u32)DMA_CS0 | (u32)DMA_TS0) >> dmanr);
  535. return DMA_STATUS_GOOD;
  536. }
  537. #ifdef CONFIG_PPC4xx_EDMA
  538. /*
  539. * Enables the burst on the channel (BTEN bit in the control/count register)
  540. * Note:
  541. * For scatter/gather dma, this function MUST be called before the
  542. * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
  543. * sgl list and used as each sgl element is added.
  544. */
  545. int
  546. ppc4xx_enable_burst(unsigned int dmanr)
  547. {
  548. unsigned int ctc;
  549. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  550. printk(KERN_ERR "ppc4xx_enable_burst: bad channel: %d\n", dmanr);
  551. return DMA_STATUS_BAD_CHANNEL;
  552. }
  553. ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) | DMA_CTC_BTEN;
  554. mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
  555. return DMA_STATUS_GOOD;
  556. }
  557. /*
  558. * Disables the burst on the channel (BTEN bit in the control/count register)
  559. * Note:
  560. * For scatter/gather dma, this function MUST be called before the
  561. * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
  562. * sgl list and used as each sgl element is added.
  563. */
  564. int
  565. ppc4xx_disable_burst(unsigned int dmanr)
  566. {
  567. unsigned int ctc;
  568. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  569. printk(KERN_ERR "ppc4xx_disable_burst: bad channel: %d\n", dmanr);
  570. return DMA_STATUS_BAD_CHANNEL;
  571. }
  572. ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) &~ DMA_CTC_BTEN;
  573. mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
  574. return DMA_STATUS_GOOD;
  575. }
  576. /*
  577. * Sets the burst size (number of peripheral widths) for the channel
  578. * (BSIZ bits in the control/count register))
  579. * must be one of:
  580. * DMA_CTC_BSIZ_2
  581. * DMA_CTC_BSIZ_4
  582. * DMA_CTC_BSIZ_8
  583. * DMA_CTC_BSIZ_16
  584. * Note:
  585. * For scatter/gather dma, this function MUST be called before the
  586. * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
  587. * sgl list and used as each sgl element is added.
  588. */
  589. int
  590. ppc4xx_set_burst_size(unsigned int dmanr, unsigned int bsize)
  591. {
  592. unsigned int ctc;
  593. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  594. printk(KERN_ERR "ppc4xx_set_burst_size: bad channel: %d\n", dmanr);
  595. return DMA_STATUS_BAD_CHANNEL;
  596. }
  597. ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) &~ DMA_CTC_BSIZ_MSK;
  598. ctc |= (bsize & DMA_CTC_BSIZ_MSK);
  599. mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
  600. return DMA_STATUS_GOOD;
  601. }
/* burst helpers exist (and are exported) only on EDMA-capable parts */
EXPORT_SYMBOL(ppc4xx_enable_burst);
EXPORT_SYMBOL(ppc4xx_disable_burst);
EXPORT_SYMBOL(ppc4xx_set_burst_size);
#endif /* CONFIG_PPC4xx_EDMA */
/* public API of the 4xx DMA core library, usable from modules */
EXPORT_SYMBOL(ppc4xx_init_dma_channel);
EXPORT_SYMBOL(ppc4xx_get_channel_config);
EXPORT_SYMBOL(ppc4xx_set_channel_priority);
EXPORT_SYMBOL(ppc4xx_get_peripheral_width);
EXPORT_SYMBOL(dma_channels);
EXPORT_SYMBOL(ppc4xx_set_src_addr);
EXPORT_SYMBOL(ppc4xx_set_dst_addr);
EXPORT_SYMBOL(ppc4xx_set_dma_addr);
EXPORT_SYMBOL(ppc4xx_set_dma_addr2);
EXPORT_SYMBOL(ppc4xx_enable_dma);
EXPORT_SYMBOL(ppc4xx_disable_dma);
EXPORT_SYMBOL(ppc4xx_set_dma_mode);
EXPORT_SYMBOL(ppc4xx_set_dma_count);
EXPORT_SYMBOL(ppc4xx_get_dma_residue);
EXPORT_SYMBOL(ppc4xx_enable_dma_interrupt);
EXPORT_SYMBOL(ppc4xx_disable_dma_interrupt);
EXPORT_SYMBOL(ppc4xx_get_dma_status);
EXPORT_SYMBOL(ppc4xx_clr_dma_status);