ppc4xx_dma.c 18 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711
  1. /*
  2. * IBM PPC4xx DMA engine core library
  3. *
  4. * Copyright 2000-2004 MontaVista Software Inc.
  5. *
  6. * Cleaned up and converted to new DCR access
  7. * Matt Porter <mporter@kernel.crashing.org>
  8. *
  9. * Original code by Armin Kuster <akuster@mvista.com>
  10. * and Pete Popov <ppopov@mvista.com>
  11. *
  12. * This program is free software; you can redistribute it and/or modify it
  13. * under the terms of the GNU General Public License as published by the
  14. * Free Software Foundation; either version 2 of the License, or (at your
  15. * option) any later version.
  16. *
  17. * You should have received a copy of the GNU General Public License along
  18. * with this program; if not, write to the Free Software Foundation, Inc.,
  19. * 675 Mass Ave, Cambridge, MA 02139, USA.
  20. */
  21. #include <linux/config.h>
  22. #include <linux/kernel.h>
  23. #include <linux/mm.h>
  24. #include <linux/miscdevice.h>
  25. #include <linux/init.h>
  26. #include <linux/module.h>
  27. #include <asm/system.h>
  28. #include <asm/io.h>
  29. #include <asm/dma.h>
  30. #include <asm/ppc4xx_dma.h>
  31. ppc_dma_ch_t dma_channels[MAX_PPC4xx_DMA_CHANNELS];
  32. int
  33. ppc4xx_get_dma_status(void)
  34. {
  35. return (mfdcr(DCRN_DMASR));
  36. }
  37. void
  38. ppc4xx_set_src_addr(int dmanr, phys_addr_t src_addr)
  39. {
  40. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  41. printk("set_src_addr: bad channel: %d\n", dmanr);
  42. return;
  43. }
  44. #ifdef PPC4xx_DMA_64BIT
  45. mtdcr(DCRN_DMASAH0 + dmanr*2, (u32)(src_addr >> 32));
  46. #else
  47. mtdcr(DCRN_DMASA0 + dmanr*2, (u32)src_addr);
  48. #endif
  49. }
  50. void
  51. ppc4xx_set_dst_addr(int dmanr, phys_addr_t dst_addr)
  52. {
  53. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  54. printk("set_dst_addr: bad channel: %d\n", dmanr);
  55. return;
  56. }
  57. #ifdef PPC4xx_DMA_64BIT
  58. mtdcr(DCRN_DMADAH0 + dmanr*2, (u32)(dst_addr >> 32));
  59. #else
  60. mtdcr(DCRN_DMADA0 + dmanr*2, (u32)dst_addr);
  61. #endif
  62. }
  63. void
  64. ppc4xx_enable_dma(unsigned int dmanr)
  65. {
  66. unsigned int control;
  67. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  68. unsigned int status_bits[] = { DMA_CS0 | DMA_TS0 | DMA_CH0_ERR,
  69. DMA_CS1 | DMA_TS1 | DMA_CH1_ERR,
  70. DMA_CS2 | DMA_TS2 | DMA_CH2_ERR,
  71. DMA_CS3 | DMA_TS3 | DMA_CH3_ERR};
  72. if (p_dma_ch->in_use) {
  73. printk("enable_dma: channel %d in use\n", dmanr);
  74. return;
  75. }
  76. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  77. printk("enable_dma: bad channel: %d\n", dmanr);
  78. return;
  79. }
  80. if (p_dma_ch->mode == DMA_MODE_READ) {
  81. /* peripheral to memory */
  82. ppc4xx_set_src_addr(dmanr, 0);
  83. ppc4xx_set_dst_addr(dmanr, p_dma_ch->addr);
  84. } else if (p_dma_ch->mode == DMA_MODE_WRITE) {
  85. /* memory to peripheral */
  86. ppc4xx_set_src_addr(dmanr, p_dma_ch->addr);
  87. ppc4xx_set_dst_addr(dmanr, 0);
  88. }
  89. /* for other xfer modes, the addresses are already set */
  90. control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
  91. control &= ~(DMA_TM_MASK | DMA_TD); /* clear all mode bits */
  92. if (p_dma_ch->mode == DMA_MODE_MM) {
  93. /* software initiated memory to memory */
  94. control |= DMA_ETD_OUTPUT | DMA_TCE_ENABLE;
  95. }
  96. mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
  97. /*
  98. * Clear the CS, TS, RI bits for the channel from DMASR. This
  99. * has been observed to happen correctly only after the mode and
  100. * ETD/DCE bits in DMACRx are set above. Must do this before
  101. * enabling the channel.
  102. */
  103. mtdcr(DCRN_DMASR, status_bits[dmanr]);
  104. /*
  105. * For device-paced transfers, Terminal Count Enable apparently
  106. * must be on, and this must be turned on after the mode, etc.
  107. * bits are cleared above (at least on Redwood-6).
  108. */
  109. if ((p_dma_ch->mode == DMA_MODE_MM_DEVATDST) ||
  110. (p_dma_ch->mode == DMA_MODE_MM_DEVATSRC))
  111. control |= DMA_TCE_ENABLE;
  112. /*
  113. * Now enable the channel.
  114. */
  115. control |= (p_dma_ch->mode | DMA_CE_ENABLE);
  116. mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
  117. p_dma_ch->in_use = 1;
  118. }
  119. void
  120. ppc4xx_disable_dma(unsigned int dmanr)
  121. {
  122. unsigned int control;
  123. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  124. if (!p_dma_ch->in_use) {
  125. printk("disable_dma: channel %d not in use\n", dmanr);
  126. return;
  127. }
  128. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  129. printk("disable_dma: bad channel: %d\n", dmanr);
  130. return;
  131. }
  132. control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
  133. control &= ~DMA_CE_ENABLE;
  134. mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
  135. p_dma_ch->in_use = 0;
  136. }
  137. /*
  138. * Sets the dma mode for single DMA transfers only.
  139. * For scatter/gather transfers, the mode is passed to the
  140. * alloc_dma_handle() function as one of the parameters.
  141. *
  142. * The mode is simply saved and used later. This allows
  143. * the driver to call set_dma_mode() and set_dma_addr() in
  144. * any order.
  145. *
  146. * Valid mode values are:
  147. *
  148. * DMA_MODE_READ peripheral to memory
  149. * DMA_MODE_WRITE memory to peripheral
  150. * DMA_MODE_MM memory to memory
  151. * DMA_MODE_MM_DEVATSRC device-paced memory to memory, device at src
  152. * DMA_MODE_MM_DEVATDST device-paced memory to memory, device at dst
  153. */
  154. int
  155. ppc4xx_set_dma_mode(unsigned int dmanr, unsigned int mode)
  156. {
  157. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  158. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  159. printk("set_dma_mode: bad channel 0x%x\n", dmanr);
  160. return DMA_STATUS_BAD_CHANNEL;
  161. }
  162. p_dma_ch->mode = mode;
  163. return DMA_STATUS_GOOD;
  164. }
  165. /*
  166. * Sets the DMA Count register. Note that 'count' is in bytes.
  167. * However, the DMA Count register counts the number of "transfers",
  168. * where each transfer is equal to the bus width. Thus, count
  169. * MUST be a multiple of the bus width.
  170. */
  171. void
  172. ppc4xx_set_dma_count(unsigned int dmanr, unsigned int count)
  173. {
  174. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  175. #ifdef DEBUG_4xxDMA
  176. {
  177. int error = 0;
  178. switch (p_dma_ch->pwidth) {
  179. case PW_8:
  180. break;
  181. case PW_16:
  182. if (count & 0x1)
  183. error = 1;
  184. break;
  185. case PW_32:
  186. if (count & 0x3)
  187. error = 1;
  188. break;
  189. case PW_64:
  190. if (count & 0x7)
  191. error = 1;
  192. break;
  193. default:
  194. printk("set_dma_count: invalid bus width: 0x%x\n",
  195. p_dma_ch->pwidth);
  196. return;
  197. }
  198. if (error)
  199. printk
  200. ("Warning: set_dma_count count 0x%x bus width %d\n",
  201. count, p_dma_ch->pwidth);
  202. }
  203. #endif
  204. count = count >> p_dma_ch->shift;
  205. mtdcr(DCRN_DMACT0 + (dmanr * 0x8), count);
  206. }
  207. /*
  208. * Returns the number of bytes left to be transfered.
  209. * After a DMA transfer, this should return zero.
  210. * Reading this while a DMA transfer is still in progress will return
  211. * unpredictable results.
  212. */
  213. int
  214. ppc4xx_get_dma_residue(unsigned int dmanr)
  215. {
  216. unsigned int count;
  217. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  218. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  219. printk("ppc4xx_get_dma_residue: bad channel 0x%x\n", dmanr);
  220. return DMA_STATUS_BAD_CHANNEL;
  221. }
  222. count = mfdcr(DCRN_DMACT0 + (dmanr * 0x8));
  223. return (count << p_dma_ch->shift);
  224. }
  225. /*
  226. * Sets the DMA address for a memory to peripheral or peripheral
  227. * to memory transfer. The address is just saved in the channel
  228. * structure for now and used later in enable_dma().
  229. */
  230. void
  231. ppc4xx_set_dma_addr(unsigned int dmanr, phys_addr_t addr)
  232. {
  233. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  234. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  235. printk("ppc4xx_set_dma_addr: bad channel: %d\n", dmanr);
  236. return;
  237. }
  238. #ifdef DEBUG_4xxDMA
  239. {
  240. int error = 0;
  241. switch (p_dma_ch->pwidth) {
  242. case PW_8:
  243. break;
  244. case PW_16:
  245. if ((unsigned) addr & 0x1)
  246. error = 1;
  247. break;
  248. case PW_32:
  249. if ((unsigned) addr & 0x3)
  250. error = 1;
  251. break;
  252. case PW_64:
  253. if ((unsigned) addr & 0x7)
  254. error = 1;
  255. break;
  256. default:
  257. printk("ppc4xx_set_dma_addr: invalid bus width: 0x%x\n",
  258. p_dma_ch->pwidth);
  259. return;
  260. }
  261. if (error)
  262. printk("Warning: ppc4xx_set_dma_addr addr 0x%x bus width %d\n",
  263. addr, p_dma_ch->pwidth);
  264. }
  265. #endif
  266. /* save dma address and program it later after we know the xfer mode */
  267. p_dma_ch->addr = addr;
  268. }
  269. /*
  270. * Sets both DMA addresses for a memory to memory transfer.
  271. * For memory to peripheral or peripheral to memory transfers
  272. * the function set_dma_addr() should be used instead.
  273. */
  274. void
  275. ppc4xx_set_dma_addr2(unsigned int dmanr, phys_addr_t src_dma_addr,
  276. phys_addr_t dst_dma_addr)
  277. {
  278. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  279. printk("ppc4xx_set_dma_addr2: bad channel: %d\n", dmanr);
  280. return;
  281. }
  282. #ifdef DEBUG_4xxDMA
  283. {
  284. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  285. int error = 0;
  286. switch (p_dma_ch->pwidth) {
  287. case PW_8:
  288. break;
  289. case PW_16:
  290. if (((unsigned) src_dma_addr & 0x1) ||
  291. ((unsigned) dst_dma_addr & 0x1)
  292. )
  293. error = 1;
  294. break;
  295. case PW_32:
  296. if (((unsigned) src_dma_addr & 0x3) ||
  297. ((unsigned) dst_dma_addr & 0x3)
  298. )
  299. error = 1;
  300. break;
  301. case PW_64:
  302. if (((unsigned) src_dma_addr & 0x7) ||
  303. ((unsigned) dst_dma_addr & 0x7)
  304. )
  305. error = 1;
  306. break;
  307. default:
  308. printk("ppc4xx_set_dma_addr2: invalid bus width: 0x%x\n",
  309. p_dma_ch->pwidth);
  310. return;
  311. }
  312. if (error)
  313. printk
  314. ("Warning: ppc4xx_set_dma_addr2 src 0x%x dst 0x%x bus width %d\n",
  315. src_dma_addr, dst_dma_addr, p_dma_ch->pwidth);
  316. }
  317. #endif
  318. ppc4xx_set_src_addr(dmanr, src_dma_addr);
  319. ppc4xx_set_dst_addr(dmanr, dst_dma_addr);
  320. }
  321. /*
  322. * Enables the channel interrupt.
  323. *
  324. * If performing a scatter/gatter transfer, this function
  325. * MUST be called before calling alloc_dma_handle() and building
  326. * the sgl list. Otherwise, interrupts will not be enabled, if
  327. * they were previously disabled.
  328. */
  329. int
  330. ppc4xx_enable_dma_interrupt(unsigned int dmanr)
  331. {
  332. unsigned int control;
  333. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  334. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  335. printk("ppc4xx_enable_dma_interrupt: bad channel: %d\n", dmanr);
  336. return DMA_STATUS_BAD_CHANNEL;
  337. }
  338. p_dma_ch->int_enable = 1;
  339. control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
  340. control |= DMA_CIE_ENABLE; /* Channel Interrupt Enable */
  341. mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
  342. return DMA_STATUS_GOOD;
  343. }
  344. /*
  345. * Disables the channel interrupt.
  346. *
  347. * If performing a scatter/gatter transfer, this function
  348. * MUST be called before calling alloc_dma_handle() and building
  349. * the sgl list. Otherwise, interrupts will not be disabled, if
  350. * they were previously enabled.
  351. */
  352. int
  353. ppc4xx_disable_dma_interrupt(unsigned int dmanr)
  354. {
  355. unsigned int control;
  356. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  357. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  358. printk("ppc4xx_disable_dma_interrupt: bad channel: %d\n", dmanr);
  359. return DMA_STATUS_BAD_CHANNEL;
  360. }
  361. p_dma_ch->int_enable = 0;
  362. control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
  363. control &= ~DMA_CIE_ENABLE; /* Channel Interrupt Enable */
  364. mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
  365. return DMA_STATUS_GOOD;
  366. }
/*
 * Configures a DMA channel, including the peripheral bus width, if a
 * peripheral is attached to the channel, the polarity of the DMAReq and
 * DMAAck signals, etc.  This information should really be setup by the boot
 * code, since most likely the configuration won't change dynamically.
 * If the kernel has to call this function, it's recommended that it's
 * called from platform specific init code.  The driver should not need to
 * call this function.
 */
int
ppc4xx_init_dma_channel(unsigned int dmanr, ppc_dma_ch_t * p_init)
{
	unsigned int polarity;
	uint32_t control = 0;
	/* NOTE(review): computed before the bounds check below; it is
	 * not dereferenced until after validation, so this is only an
	 * out-of-range address computation for a bad dmanr. */
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	/* DMA_MODE_READ/WRITE are writable globals (presumably declared
	 * in ppc4xx_dma.h); (re)set them to the direction encodings
	 * every time a channel is initialized. */
	DMA_MODE_READ = (unsigned long) DMA_TD;	/* Peripheral to Memory */
	DMA_MODE_WRITE = 0;	/* Memory to Peripheral */

	if (!p_init) {
		printk("ppc4xx_init_dma_channel: NULL p_init\n");
		return DMA_STATUS_NULL_POINTER;
	}

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_init_dma_channel: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

#if DCRN_POL > 0
	polarity = mfdcr(DCRN_POL);
#else
	/* this 4xx variant has no polarity register */
	polarity = 0;
#endif

	/* Setup the control register based on the values passed to
	 * us in p_init.  Then, over-write the control register with this
	 * new value.
	 */
	/* NOTE(review): SET_DMA_CONTROL presumably expands to an
	 * expression over p_init's fields -- confirm in ppc4xx_dma.h. */
	control |= SET_DMA_CONTROL;

	/* clear all polarity signals and then "or" in new signal levels */
	polarity &= ~GET_DMA_POLARITY(dmanr);
	polarity |= p_init->polarity;
#if DCRN_POL > 0
	mtdcr(DCRN_POL, polarity);
#endif
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	/* save these values in our dma channel structure */
	memcpy(p_dma_ch, p_init, sizeof (ppc_dma_ch_t));

	/*
	 * The peripheral width values written in the control register are:
	 *   PW_8                 0
	 *   PW_16                1
	 *   PW_32                2
	 *   PW_64                3
	 *
	 * Since the DMA count register takes the number of "transfers",
	 * we need to divide the count sent to us in certain
	 * functions by the appropriate number.  It so happens that our
	 * right shift value is equal to the peripheral width value.
	 */
	p_dma_ch->shift = p_init->pwidth;

	/*
	 * Save the control word for easy access.
	 */
	p_dma_ch->control = control;

	mtdcr(DCRN_DMASR, 0xffffffff);	/* clear status register */
	return DMA_STATUS_GOOD;
}
  431. /*
  432. * This function returns the channel configuration.
  433. */
  434. int
  435. ppc4xx_get_channel_config(unsigned int dmanr, ppc_dma_ch_t * p_dma_ch)
  436. {
  437. unsigned int polarity;
  438. unsigned int control;
  439. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  440. printk("ppc4xx_get_channel_config: bad channel %d\n", dmanr);
  441. return DMA_STATUS_BAD_CHANNEL;
  442. }
  443. memcpy(p_dma_ch, &dma_channels[dmanr], sizeof (ppc_dma_ch_t));
  444. #if DCRN_POL > 0
  445. polarity = mfdcr(DCRN_POL);
  446. #else
  447. polarity = 0;
  448. #endif
  449. p_dma_ch->polarity = polarity & GET_DMA_POLARITY(dmanr);
  450. control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
  451. p_dma_ch->cp = GET_DMA_PRIORITY(control);
  452. p_dma_ch->pwidth = GET_DMA_PW(control);
  453. p_dma_ch->psc = GET_DMA_PSC(control);
  454. p_dma_ch->pwc = GET_DMA_PWC(control);
  455. p_dma_ch->phc = GET_DMA_PHC(control);
  456. p_dma_ch->ce = GET_DMA_CE_ENABLE(control);
  457. p_dma_ch->int_enable = GET_DMA_CIE_ENABLE(control);
  458. p_dma_ch->shift = GET_DMA_PW(control);
  459. #ifdef CONFIG_PPC4xx_EDMA
  460. p_dma_ch->pf = GET_DMA_PREFETCH(control);
  461. #else
  462. p_dma_ch->ch_enable = GET_DMA_CH(control);
  463. p_dma_ch->ece_enable = GET_DMA_ECE(control);
  464. p_dma_ch->tcd_disable = GET_DMA_TCD(control);
  465. #endif
  466. return DMA_STATUS_GOOD;
  467. }
  468. /*
  469. * Sets the priority for the DMA channel dmanr.
  470. * Since this is setup by the hardware init function, this function
  471. * can be used to dynamically change the priority of a channel.
  472. *
  473. * Acceptable priorities:
  474. *
  475. * PRIORITY_LOW
  476. * PRIORITY_MID_LOW
  477. * PRIORITY_MID_HIGH
  478. * PRIORITY_HIGH
  479. *
  480. */
  481. int
  482. ppc4xx_set_channel_priority(unsigned int dmanr, unsigned int priority)
  483. {
  484. unsigned int control;
  485. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  486. printk("ppc4xx_set_channel_priority: bad channel %d\n", dmanr);
  487. return DMA_STATUS_BAD_CHANNEL;
  488. }
  489. if ((priority != PRIORITY_LOW) &&
  490. (priority != PRIORITY_MID_LOW) &&
  491. (priority != PRIORITY_MID_HIGH) && (priority != PRIORITY_HIGH)) {
  492. printk("ppc4xx_set_channel_priority: bad priority: 0x%x\n", priority);
  493. }
  494. control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
  495. control |= SET_DMA_PRIORITY(priority);
  496. mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
  497. return DMA_STATUS_GOOD;
  498. }
  499. /*
  500. * Returns the width of the peripheral attached to this channel. This assumes
  501. * that someone who knows the hardware configuration, boot code or some other
  502. * init code, already set the width.
  503. *
  504. * The return value is one of:
  505. * PW_8
  506. * PW_16
  507. * PW_32
  508. * PW_64
  509. *
  510. * The function returns 0 on error.
  511. */
  512. unsigned int
  513. ppc4xx_get_peripheral_width(unsigned int dmanr)
  514. {
  515. unsigned int control;
  516. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  517. printk("ppc4xx_get_peripheral_width: bad channel %d\n", dmanr);
  518. return DMA_STATUS_BAD_CHANNEL;
  519. }
  520. control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
  521. return (GET_DMA_PW(control));
  522. }
/*
 * Clears the channel status bits
 */
int
ppc4xx_clr_dma_status(unsigned int dmanr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_clr_dma_status: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	/*
	 * Writing a bit to DMASR clears it (see the status-clearing
	 * write in ppc4xx_enable_dma).  Shifting the channel-0 mask
	 * right by the channel number selects this channel's bit in
	 * each status field; this presumably relies on each field
	 * allotting one adjacent bit per channel -- TODO confirm
	 * against the DMASR bit layout in ppc4xx_dma.h.
	 */
	mtdcr(DCRN_DMASR, ((u32)DMA_CH0_ERR | (u32)DMA_CS0 | (u32)DMA_TS0) >> dmanr);
	return DMA_STATUS_GOOD;
}
  536. #ifdef CONFIG_PPC4xx_EDMA
  537. /*
  538. * Enables the burst on the channel (BTEN bit in the control/count register)
  539. * Note:
  540. * For scatter/gather dma, this function MUST be called before the
  541. * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
  542. * sgl list and used as each sgl element is added.
  543. */
  544. int
  545. ppc4xx_enable_burst(unsigned int dmanr)
  546. {
  547. unsigned int ctc;
  548. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  549. printk(KERN_ERR "ppc4xx_enable_burst: bad channel: %d\n", dmanr);
  550. return DMA_STATUS_BAD_CHANNEL;
  551. }
  552. ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) | DMA_CTC_BTEN;
  553. mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
  554. return DMA_STATUS_GOOD;
  555. }
  556. /*
  557. * Disables the burst on the channel (BTEN bit in the control/count register)
  558. * Note:
  559. * For scatter/gather dma, this function MUST be called before the
  560. * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
  561. * sgl list and used as each sgl element is added.
  562. */
  563. int
  564. ppc4xx_disable_burst(unsigned int dmanr)
  565. {
  566. unsigned int ctc;
  567. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  568. printk(KERN_ERR "ppc4xx_disable_burst: bad channel: %d\n", dmanr);
  569. return DMA_STATUS_BAD_CHANNEL;
  570. }
  571. ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) &~ DMA_CTC_BTEN;
  572. mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
  573. return DMA_STATUS_GOOD;
  574. }
  575. /*
  576. * Sets the burst size (number of peripheral widths) for the channel
  577. * (BSIZ bits in the control/count register))
  578. * must be one of:
  579. * DMA_CTC_BSIZ_2
  580. * DMA_CTC_BSIZ_4
  581. * DMA_CTC_BSIZ_8
  582. * DMA_CTC_BSIZ_16
  583. * Note:
  584. * For scatter/gather dma, this function MUST be called before the
  585. * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
  586. * sgl list and used as each sgl element is added.
  587. */
  588. int
  589. ppc4xx_set_burst_size(unsigned int dmanr, unsigned int bsize)
  590. {
  591. unsigned int ctc;
  592. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  593. printk(KERN_ERR "ppc4xx_set_burst_size: bad channel: %d\n", dmanr);
  594. return DMA_STATUS_BAD_CHANNEL;
  595. }
  596. ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) &~ DMA_CTC_BSIZ_MSK;
  597. ctc |= (bsize & DMA_CTC_BSIZ_MSK);
  598. mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
  599. return DMA_STATUS_GOOD;
  600. }
  601. EXPORT_SYMBOL(ppc4xx_enable_burst);
  602. EXPORT_SYMBOL(ppc4xx_disable_burst);
  603. EXPORT_SYMBOL(ppc4xx_set_burst_size);
  604. #endif /* CONFIG_PPC4xx_EDMA */
  605. EXPORT_SYMBOL(ppc4xx_init_dma_channel);
  606. EXPORT_SYMBOL(ppc4xx_get_channel_config);
  607. EXPORT_SYMBOL(ppc4xx_set_channel_priority);
  608. EXPORT_SYMBOL(ppc4xx_get_peripheral_width);
  609. EXPORT_SYMBOL(dma_channels);
  610. EXPORT_SYMBOL(ppc4xx_set_src_addr);
  611. EXPORT_SYMBOL(ppc4xx_set_dst_addr);
  612. EXPORT_SYMBOL(ppc4xx_set_dma_addr);
  613. EXPORT_SYMBOL(ppc4xx_set_dma_addr2);
  614. EXPORT_SYMBOL(ppc4xx_enable_dma);
  615. EXPORT_SYMBOL(ppc4xx_disable_dma);
  616. EXPORT_SYMBOL(ppc4xx_set_dma_mode);
  617. EXPORT_SYMBOL(ppc4xx_set_dma_count);
  618. EXPORT_SYMBOL(ppc4xx_get_dma_residue);
  619. EXPORT_SYMBOL(ppc4xx_enable_dma_interrupt);
  620. EXPORT_SYMBOL(ppc4xx_disable_dma_interrupt);
  621. EXPORT_SYMBOL(ppc4xx_get_dma_status);
  622. EXPORT_SYMBOL(ppc4xx_clr_dma_status);