ppc4xx_dma.c

/*
 * IBM PPC4xx DMA engine core library
 *
 * Copyright 2000-2004 MontaVista Software Inc.
 *
 * Cleaned up and converted to new DCR access
 * Matt Porter <mporter@kernel.crashing.org>
 *
 * Original code by Armin Kuster <akuster@mvista.com>
 * and Pete Popov <ppopov@mvista.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 675 Mass Ave, Cambridge, MA 02139, USA.
 */

#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/miscdevice.h>
#include <linux/init.h>
#include <linux/module.h>
#include <asm/system.h>
#include <asm/io.h>
#include <asm/dma.h>
#include <asm/ppc4xx_dma.h>

ppc_dma_ch_t dma_channels[MAX_PPC4xx_DMA_CHANNELS];

int
ppc4xx_get_dma_status(void)
{
	return (mfdcr(DCRN_DMASR));
}

void
ppc4xx_set_src_addr(int dmanr, phys_addr_t src_addr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("set_src_addr: bad channel: %d\n", dmanr);
		return;
	}

#ifdef PPC4xx_DMA_64BIT
	mtdcr(DCRN_DMASAH0 + dmanr*2, (u32)(src_addr >> 32));
#else
	mtdcr(DCRN_DMASA0 + dmanr*2, (u32)src_addr);
#endif
}

void
ppc4xx_set_dst_addr(int dmanr, phys_addr_t dst_addr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("set_dst_addr: bad channel: %d\n", dmanr);
		return;
	}

#ifdef PPC4xx_DMA_64BIT
	mtdcr(DCRN_DMADAH0 + dmanr*2, (u32)(dst_addr >> 32));
#else
	mtdcr(DCRN_DMADA0 + dmanr*2, (u32)dst_addr);
#endif
}

void
ppc4xx_enable_dma(unsigned int dmanr)
{
	unsigned int control;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
	unsigned int status_bits[] = { DMA_CS0 | DMA_TS0 | DMA_CH0_ERR,
				       DMA_CS1 | DMA_TS1 | DMA_CH1_ERR,
				       DMA_CS2 | DMA_TS2 | DMA_CH2_ERR,
				       DMA_CS3 | DMA_TS3 | DMA_CH3_ERR};

	if (p_dma_ch->in_use) {
		printk("enable_dma: channel %d in use\n", dmanr);
		return;
	}

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("enable_dma: bad channel: %d\n", dmanr);
		return;
	}

	if (p_dma_ch->mode == DMA_MODE_READ) {
		/* peripheral to memory */
		ppc4xx_set_src_addr(dmanr, 0);
		ppc4xx_set_dst_addr(dmanr, p_dma_ch->addr);
	} else if (p_dma_ch->mode == DMA_MODE_WRITE) {
		/* memory to peripheral */
		ppc4xx_set_src_addr(dmanr, p_dma_ch->addr);
		ppc4xx_set_dst_addr(dmanr, 0);
	}

	/* for other xfer modes, the addresses are already set */
	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control &= ~(DMA_TM_MASK | DMA_TD);	/* clear all mode bits */
	if (p_dma_ch->mode == DMA_MODE_MM) {
		/* software initiated memory to memory */
		control |= DMA_ETD_OUTPUT | DMA_TCE_ENABLE;
	}
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	/*
	 * Clear the CS, TS, RI bits for the channel from DMASR. This
	 * has been observed to happen correctly only after the mode and
	 * ETD/DCE bits in DMACRx are set above. Must do this before
	 * enabling the channel.
	 */
	mtdcr(DCRN_DMASR, status_bits[dmanr]);

	/*
	 * For device-paced transfers, Terminal Count Enable apparently
	 * must be on, and this must be turned on after the mode, etc.
	 * bits are cleared above (at least on Redwood-6).
	 */
	if ((p_dma_ch->mode == DMA_MODE_MM_DEVATDST) ||
	    (p_dma_ch->mode == DMA_MODE_MM_DEVATSRC))
		control |= DMA_TCE_ENABLE;

	/*
	 * Now enable the channel.
	 */
	control |= (p_dma_ch->mode | DMA_CE_ENABLE);
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	p_dma_ch->in_use = 1;
}

void
ppc4xx_disable_dma(unsigned int dmanr)
{
	unsigned int control;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (!p_dma_ch->in_use) {
		printk("disable_dma: channel %d not in use\n", dmanr);
		return;
	}

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("disable_dma: bad channel: %d\n", dmanr);
		return;
	}

	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control &= ~DMA_CE_ENABLE;
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	p_dma_ch->in_use = 0;
}

/*
 * Sets the dma mode for single DMA transfers only.
 * For scatter/gather transfers, the mode is passed to the
 * alloc_dma_handle() function as one of the parameters.
 *
 * The mode is simply saved and used later.  This allows
 * the driver to call set_dma_mode() and set_dma_addr() in
 * any order.
 *
 * Valid mode values are:
 *
 *	DMA_MODE_READ		peripheral to memory
 *	DMA_MODE_WRITE		memory to peripheral
 *	DMA_MODE_MM		memory to memory
 *	DMA_MODE_MM_DEVATSRC	device-paced memory to memory, device at src
 *	DMA_MODE_MM_DEVATDST	device-paced memory to memory, device at dst
 */
int
ppc4xx_set_dma_mode(unsigned int dmanr, unsigned int mode)
{
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("set_dma_mode: bad channel 0x%x\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	p_dma_ch->mode = mode;

	return DMA_STATUS_GOOD;
}
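
/*
 * A minimal usage sketch for a single peripheral-to-memory transfer,
 * assuming a hypothetical channel number MY_CHAN, a hypothetical byte
 * count nbytes, and a DMA-able physical buffer address buf_phys obtained
 * elsewhere (e.g. via dma_map_single()).  As noted above, the mode and
 * address calls may come in either order before the channel is enabled:
 *
 *	ppc4xx_set_dma_mode(MY_CHAN, DMA_MODE_READ);
 *	ppc4xx_set_dma_addr(MY_CHAN, buf_phys);
 *	ppc4xx_set_dma_count(MY_CHAN, nbytes);
 *	ppc4xx_enable_dma(MY_CHAN);
 */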

/*
 * Sets the DMA Count register. Note that 'count' is in bytes.
 * However, the DMA Count register counts the number of "transfers",
 * where each transfer is equal to the bus width. Thus, count
 * MUST be a multiple of the bus width.
 */
void
ppc4xx_set_dma_count(unsigned int dmanr, unsigned int count)
{
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

#ifdef DEBUG_4xxDMA
	{
		int error = 0;
		switch (p_dma_ch->pwidth) {
		case PW_8:
			break;
		case PW_16:
			if (count & 0x1)
				error = 1;
			break;
		case PW_32:
			if (count & 0x3)
				error = 1;
			break;
		case PW_64:
			if (count & 0x7)
				error = 1;
			break;
		default:
			printk("set_dma_count: invalid bus width: 0x%x\n",
			       p_dma_ch->pwidth);
			return;
		}
		if (error)
			printk("Warning: set_dma_count count 0x%x bus width %d\n",
			       count, p_dma_ch->pwidth);
	}
#endif

	count = count >> p_dma_ch->shift;
	mtdcr(DCRN_DMACT0 + (dmanr * 0x8), count);
}
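
/*
 * A worked example of the byte-count requirement, assuming a 32-bit
 * peripheral (PW_32, shift of 2): 512 bytes programs 128 "transfers"
 * into the count register (512 >> 2), while 510 bytes would trip the
 * DEBUG_4xxDMA warning above because it is not a multiple of the 4-byte
 * bus width.  A hypothetical caller might round the length up first:
 *
 *	nbytes = ALIGN(nbytes, 4);	(round up to the bus width)
 *	ppc4xx_set_dma_count(MY_CHAN, nbytes);
 */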

/*
 * Returns the number of bytes left to be transferred.
 * After a DMA transfer, this should return zero.
 * Reading this while a DMA transfer is still in progress will return
 * unpredictable results.
 */
int
ppc4xx_get_dma_residue(unsigned int dmanr)
{
	unsigned int count;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_get_dma_residue: bad channel 0x%x\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	count = mfdcr(DCRN_DMACT0 + (dmanr * 0x8));

	return (count << p_dma_ch->shift);
}
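
/*
 * A minimal sketch of checking for a short transfer, assuming the channel
 * interrupt (or a status poll) has already signalled that the transfer is
 * finished, so the residue read is stable:
 *
 *	if (ppc4xx_get_dma_residue(MY_CHAN) != 0)
 *		printk(KERN_WARNING "DMA transfer ended early\n");
 */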

/*
 * Sets the DMA address for a memory to peripheral or peripheral
 * to memory transfer.  The address is just saved in the channel
 * structure for now and used later in enable_dma().
 */
void
ppc4xx_set_dma_addr(unsigned int dmanr, phys_addr_t addr)
{
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_dma_addr: bad channel: %d\n", dmanr);
		return;
	}

#ifdef DEBUG_4xxDMA
	{
		int error = 0;
		switch (p_dma_ch->pwidth) {
		case PW_8:
			break;
		case PW_16:
			if ((unsigned) addr & 0x1)
				error = 1;
			break;
		case PW_32:
			if ((unsigned) addr & 0x3)
				error = 1;
			break;
		case PW_64:
			if ((unsigned) addr & 0x7)
				error = 1;
			break;
		default:
			printk("ppc4xx_set_dma_addr: invalid bus width: 0x%x\n",
			       p_dma_ch->pwidth);
			return;
		}
		if (error)
			printk("Warning: ppc4xx_set_dma_addr addr 0x%x bus width %d\n",
			       addr, p_dma_ch->pwidth);
	}
#endif

	/* save dma address and program it later after we know the xfer mode */
	p_dma_ch->addr = addr;
}

/*
 * Sets both DMA addresses for a memory to memory transfer.
 * For memory to peripheral or peripheral to memory transfers
 * the function set_dma_addr() should be used instead.
 */
void
ppc4xx_set_dma_addr2(unsigned int dmanr, phys_addr_t src_dma_addr,
		     phys_addr_t dst_dma_addr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_dma_addr2: bad channel: %d\n", dmanr);
		return;
	}

#ifdef DEBUG_4xxDMA
	{
		ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
		int error = 0;
		switch (p_dma_ch->pwidth) {
		case PW_8:
			break;
		case PW_16:
			if (((unsigned) src_dma_addr & 0x1) ||
			    ((unsigned) dst_dma_addr & 0x1))
				error = 1;
			break;
		case PW_32:
			if (((unsigned) src_dma_addr & 0x3) ||
			    ((unsigned) dst_dma_addr & 0x3))
				error = 1;
			break;
		case PW_64:
			if (((unsigned) src_dma_addr & 0x7) ||
			    ((unsigned) dst_dma_addr & 0x7))
				error = 1;
			break;
		default:
			printk("ppc4xx_set_dma_addr2: invalid bus width: 0x%x\n",
			       p_dma_ch->pwidth);
			return;
		}
		if (error)
			printk("Warning: ppc4xx_set_dma_addr2 src 0x%x dst 0x%x bus width %d\n",
			       src_dma_addr, dst_dma_addr, p_dma_ch->pwidth);
	}
#endif

	ppc4xx_set_src_addr(dmanr, src_dma_addr);
	ppc4xx_set_dst_addr(dmanr, dst_dma_addr);
}
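
/*
 * A minimal sketch of a software-initiated memory to memory copy,
 * assuming hypothetical physical addresses src_phys/dst_phys and a byte
 * count that is a multiple of the bus width:
 *
 *	ppc4xx_set_dma_mode(MY_CHAN, DMA_MODE_MM);
 *	ppc4xx_set_dma_addr2(MY_CHAN, src_phys, dst_phys);
 *	ppc4xx_set_dma_count(MY_CHAN, nbytes);
 *	ppc4xx_enable_dma(MY_CHAN);
 */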

/*
 * Enables the channel interrupt.
 *
 * If performing a scatter/gather transfer, this function
 * MUST be called before calling alloc_dma_handle() and building
 * the sgl list.  Otherwise, interrupts will not be enabled if
 * they were previously disabled.
 */
int
ppc4xx_enable_dma_interrupt(unsigned int dmanr)
{
	unsigned int control;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_enable_dma_interrupt: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	p_dma_ch->int_enable = 1;

	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control |= DMA_CIE_ENABLE;	/* Channel Interrupt Enable */
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	return DMA_STATUS_GOOD;
}
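
/*
 * A minimal ordering sketch for scatter/gather use, assuming the
 * scatter/gather companion code provides ppc4xx_alloc_dma_handle()
 * (its exact signature is not shown here).  Per the note above, the
 * interrupt state must be set first, presumably so that it is picked up
 * when the sgl entries are built:
 *
 *	ppc4xx_enable_dma_interrupt(MY_CHAN);
 *	... then call ppc4xx_alloc_dma_handle() and add the sgl entries ...
 */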

/*
 * Disables the channel interrupt.
 *
 * If performing a scatter/gather transfer, this function
 * MUST be called before calling alloc_dma_handle() and building
 * the sgl list.  Otherwise, interrupts will not be disabled if
 * they were previously enabled.
 */
int
ppc4xx_disable_dma_interrupt(unsigned int dmanr)
{
	unsigned int control;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_disable_dma_interrupt: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	p_dma_ch->int_enable = 0;

	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control &= ~DMA_CIE_ENABLE;	/* Channel Interrupt Enable */
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	return DMA_STATUS_GOOD;
}

/*
 * Configures a DMA channel, including the peripheral bus width (if a
 * peripheral is attached to the channel), the polarity of the DMAReq and
 * DMAAck signals, etc.  This information should really be set up by the
 * boot code, since the configuration is unlikely to change dynamically.
 * If the kernel has to call this function, it is recommended that it be
 * called from platform-specific init code.  The driver should not need to
 * call this function.
 */
int
ppc4xx_init_dma_channel(unsigned int dmanr, ppc_dma_ch_t * p_init)
{
	unsigned int polarity;
	uint32_t control = 0;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	DMA_MODE_READ = (unsigned long) DMA_TD;	/* Peripheral to Memory */
	DMA_MODE_WRITE = 0;			/* Memory to Peripheral */

	if (!p_init) {
		printk("ppc4xx_init_dma_channel: NULL p_init\n");
		return DMA_STATUS_NULL_POINTER;
	}

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_init_dma_channel: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

#if DCRN_POL > 0
	polarity = mfdcr(DCRN_POL);
#else
	polarity = 0;
#endif

	/* Set up the control register based on the values passed to
	 * us in p_init.  Then, overwrite the control register with this
	 * new value.
	 */
	control |= SET_DMA_CONTROL;

	/* clear all polarity signals and then "or" in new signal levels */
	polarity &= ~GET_DMA_POLARITY(dmanr);
	polarity |= p_init->polarity;
#if DCRN_POL > 0
	mtdcr(DCRN_POL, polarity);
#endif
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	/* save these values in our dma channel structure */
	memcpy(p_dma_ch, p_init, sizeof (ppc_dma_ch_t));

	/*
	 * The peripheral width values written in the control register are:
	 *	PW_8	0
	 *	PW_16	1
	 *	PW_32	2
	 *	PW_64	3
	 *
	 * Since the DMA count register takes the number of "transfers",
	 * we need to divide the count sent to us in certain
	 * functions by the appropriate number.  It so happens that our
	 * right shift value is equal to the peripheral width value.
	 */
	p_dma_ch->shift = p_init->pwidth;

	/*
	 * Save the control word for easy access.
	 */
	p_dma_ch->control = control;

	mtdcr(DCRN_DMASR, 0xffffffff);	/* clear status register */
	return DMA_STATUS_GOOD;
}
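
/*
 * A minimal sketch of platform init code configuring a channel, assuming
 * the peripheral on a hypothetical channel MY_CHAN is 32 bits wide and
 * uses the board's default DMAReq/DMAAck polarity; any ppc_dma_ch_t
 * fields not shown are simply left zeroed:
 *
 *	static ppc_dma_ch_t my_chan_init;
 *
 *	memset(&my_chan_init, 0, sizeof(my_chan_init));
 *	my_chan_init.polarity   = 0;
 *	my_chan_init.pwidth     = PW_32;
 *	my_chan_init.int_enable = 0;
 *	ppc4xx_init_dma_channel(MY_CHAN, &my_chan_init);
 */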

/*
 * This function returns the channel configuration.
 */
int
ppc4xx_get_channel_config(unsigned int dmanr, ppc_dma_ch_t * p_dma_ch)
{
	unsigned int polarity;
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_get_channel_config: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	memcpy(p_dma_ch, &dma_channels[dmanr], sizeof (ppc_dma_ch_t));

#if DCRN_POL > 0
	polarity = mfdcr(DCRN_POL);
#else
	polarity = 0;
#endif

	p_dma_ch->polarity = polarity & GET_DMA_POLARITY(dmanr);
	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));

	p_dma_ch->cp = GET_DMA_PRIORITY(control);
	p_dma_ch->pwidth = GET_DMA_PW(control);
	p_dma_ch->psc = GET_DMA_PSC(control);
	p_dma_ch->pwc = GET_DMA_PWC(control);
	p_dma_ch->phc = GET_DMA_PHC(control);
	p_dma_ch->ce = GET_DMA_CE_ENABLE(control);
	p_dma_ch->int_enable = GET_DMA_CIE_ENABLE(control);
	p_dma_ch->shift = GET_DMA_PW(control);

#ifdef CONFIG_PPC4xx_EDMA
	p_dma_ch->pf = GET_DMA_PREFETCH(control);
#else
	p_dma_ch->ch_enable = GET_DMA_CH(control);
	p_dma_ch->ece_enable = GET_DMA_ECE(control);
	p_dma_ch->tcd_disable = GET_DMA_TCD(control);
#endif
	return DMA_STATUS_GOOD;
}

/*
 * Sets the priority for the DMA channel dmanr.
 * Since this is set up by the hardware init function, this function
 * can be used to dynamically change the priority of a channel.
 *
 * Acceptable priorities:
 *
 *	PRIORITY_LOW
 *	PRIORITY_MID_LOW
 *	PRIORITY_MID_HIGH
 *	PRIORITY_HIGH
 *
 */
int
ppc4xx_set_channel_priority(unsigned int dmanr, unsigned int priority)
{
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_channel_priority: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	if ((priority != PRIORITY_LOW) &&
	    (priority != PRIORITY_MID_LOW) &&
	    (priority != PRIORITY_MID_HIGH) && (priority != PRIORITY_HIGH)) {
		printk("ppc4xx_set_channel_priority: bad priority: 0x%x\n", priority);
	}

	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control |= SET_DMA_PRIORITY(priority);
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	return DMA_STATUS_GOOD;
}

/*
 * Returns the width of the peripheral attached to this channel. This assumes
 * that someone who knows the hardware configuration, boot code or some other
 * init code, already set the width.
 *
 * The return value is one of:
 *	PW_8
 *	PW_16
 *	PW_32
 *	PW_64
 *
 * The function returns 0 on error.
 */
unsigned int
ppc4xx_get_peripheral_width(unsigned int dmanr)
{
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_get_peripheral_width: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));

	return (GET_DMA_PW(control));
}

/*
 * Clears the channel status bits
 */
int
ppc4xx_clr_dma_status(unsigned int dmanr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_clr_dma_status: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	mtdcr(DCRN_DMASR, ((u32)DMA_CH0_ERR | (u32)DMA_CS0 | (u32)DMA_TS0) >> dmanr);
	return DMA_STATUS_GOOD;
}
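
/*
 * A minimal polling sketch built on the same bit layout used above (the
 * channel-0 masks shifted right by the channel number select channel n).
 * This is only an illustration; a real driver would normally wait for the
 * channel interrupt instead of busy-waiting on DMASR:
 *
 *	while (!(ppc4xx_get_dma_status() &
 *		 (((u32)DMA_CS0 | (u32)DMA_TS0 | (u32)DMA_CH0_ERR) >> MY_CHAN)))
 *		cpu_relax();
 *	ppc4xx_clr_dma_status(MY_CHAN);
 */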

#ifdef CONFIG_PPC4xx_EDMA
/*
 * Enables the burst on the channel (BTEN bit in the control/count register)
 * Note:
 * For scatter/gather dma, this function MUST be called before the
 * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
 * sgl list and used as each sgl element is added.
 */
int
ppc4xx_enable_burst(unsigned int dmanr)
{
	unsigned int ctc;
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_enable_burst: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) | DMA_CTC_BTEN;
	mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
	return DMA_STATUS_GOOD;
}

/*
 * Disables the burst on the channel (BTEN bit in the control/count register)
 * Note:
 * For scatter/gather dma, this function MUST be called before the
 * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
 * sgl list and used as each sgl element is added.
 */
int
ppc4xx_disable_burst(unsigned int dmanr)
{
	unsigned int ctc;
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_disable_burst: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) &~ DMA_CTC_BTEN;
	mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
	return DMA_STATUS_GOOD;
}

/*
 * Sets the burst size (number of peripheral widths) for the channel
 * (BSIZ bits in the control/count register).
 * Must be one of:
 *	DMA_CTC_BSIZ_2
 *	DMA_CTC_BSIZ_4
 *	DMA_CTC_BSIZ_8
 *	DMA_CTC_BSIZ_16
 * Note:
 * For scatter/gather dma, this function MUST be called before the
 * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
 * sgl list and used as each sgl element is added.
 */
int
ppc4xx_set_burst_size(unsigned int dmanr, unsigned int bsize)
{
	unsigned int ctc;
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_set_burst_size: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) &~ DMA_CTC_BSIZ_MSK;
	ctc |= (bsize & DMA_CTC_BSIZ_MSK);
	mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
	return DMA_STATUS_GOOD;
}
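
/*
 * A minimal sketch of configuring bursting before building a
 * scatter/gather list, assuming a hypothetical channel MY_CHAN; per the
 * notes above, both calls must come before ppc4xx_alloc_dma_handle() so
 * the burst bits are captured in the saved count register:
 *
 *	ppc4xx_set_burst_size(MY_CHAN, DMA_CTC_BSIZ_8);
 *	ppc4xx_enable_burst(MY_CHAN);
 */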

EXPORT_SYMBOL(ppc4xx_enable_burst);
EXPORT_SYMBOL(ppc4xx_disable_burst);
EXPORT_SYMBOL(ppc4xx_set_burst_size);
#endif /* CONFIG_PPC4xx_EDMA */

EXPORT_SYMBOL(ppc4xx_init_dma_channel);
EXPORT_SYMBOL(ppc4xx_get_channel_config);
EXPORT_SYMBOL(ppc4xx_set_channel_priority);
EXPORT_SYMBOL(ppc4xx_get_peripheral_width);
EXPORT_SYMBOL(dma_channels);
EXPORT_SYMBOL(ppc4xx_set_src_addr);
EXPORT_SYMBOL(ppc4xx_set_dst_addr);
EXPORT_SYMBOL(ppc4xx_set_dma_addr);
EXPORT_SYMBOL(ppc4xx_set_dma_addr2);
EXPORT_SYMBOL(ppc4xx_enable_dma);
EXPORT_SYMBOL(ppc4xx_disable_dma);
EXPORT_SYMBOL(ppc4xx_set_dma_mode);
EXPORT_SYMBOL(ppc4xx_set_dma_count);
EXPORT_SYMBOL(ppc4xx_get_dma_residue);
EXPORT_SYMBOL(ppc4xx_enable_dma_interrupt);
EXPORT_SYMBOL(ppc4xx_disable_dma_interrupt);
EXPORT_SYMBOL(ppc4xx_get_dma_status);
EXPORT_SYMBOL(ppc4xx_clr_dma_status);