/*
 * arch/ppc/kernel/ppc4xx_dma.c
 *
 * IBM PPC4xx DMA engine core library
 *
 * Copyright 2000-2004 MontaVista Software Inc.
 *
 * Cleaned up and converted to new DCR access
 * Matt Porter <mporter@kernel.crashing.org>
 *
 * Original code by Armin Kuster <akuster@mvista.com>
 * and Pete Popov <ppopov@mvista.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 675 Mass Ave, Cambridge, MA 02139, USA.
 */
#include <linux/config.h>
#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/miscdevice.h>
#include <linux/init.h>
#include <linux/module.h>
#include <asm/system.h>
#include <asm/io.h>
#include <asm/dma.h>
#include <asm/ppc4xx_dma.h>

/* Per-channel software state (mode, address, shift, in_use, ...);
 * exported below so board and driver code can inspect it. */
ppc_dma_ch_t dma_channels[MAX_PPC4xx_DMA_CHANNELS];
  34. int
  35. ppc4xx_get_dma_status(void)
  36. {
  37. return (mfdcr(DCRN_DMASR));
  38. }
/*
 * Programs the DMA source address register for channel 'dmanr'.
 */
void
ppc4xx_set_src_addr(int dmanr, phys_addr_t src_addr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("set_src_addr: bad channel: %d\n", dmanr);
		return;
	}

#ifdef PPC4xx_DMA_64BIT
	/* NOTE(review): on the 64-bit path only the high word (DMASAH)
	 * is written; the low 32 bits of src_addr are never programmed.
	 * Verify against the chip's DCR map whether that is intentional. */
	mtdcr(DCRN_DMASAH0 + dmanr*2, (u32)(src_addr >> 32));
#else
	mtdcr(DCRN_DMASA0 + dmanr*2, (u32)src_addr);
#endif
}
/*
 * Programs the DMA destination address register for channel 'dmanr'.
 */
void
ppc4xx_set_dst_addr(int dmanr, phys_addr_t dst_addr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("set_dst_addr: bad channel: %d\n", dmanr);
		return;
	}

#ifdef PPC4xx_DMA_64BIT
	/* NOTE(review): as in ppc4xx_set_src_addr(), only the high word
	 * (DMADAH) is written on the 64-bit path; the low 32 bits of
	 * dst_addr are never programmed -- confirm against the DCR map. */
	mtdcr(DCRN_DMADAH0 + dmanr*2, (u32)(dst_addr >> 32));
#else
	mtdcr(DCRN_DMADA0 + dmanr*2, (u32)dst_addr);
#endif
}
  65. void
  66. ppc4xx_enable_dma(unsigned int dmanr)
  67. {
  68. unsigned int control;
  69. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  70. unsigned int status_bits[] = { DMA_CS0 | DMA_TS0 | DMA_CH0_ERR,
  71. DMA_CS1 | DMA_TS1 | DMA_CH1_ERR,
  72. DMA_CS2 | DMA_TS2 | DMA_CH2_ERR,
  73. DMA_CS3 | DMA_TS3 | DMA_CH3_ERR};
  74. if (p_dma_ch->in_use) {
  75. printk("enable_dma: channel %d in use\n", dmanr);
  76. return;
  77. }
  78. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  79. printk("enable_dma: bad channel: %d\n", dmanr);
  80. return;
  81. }
  82. if (p_dma_ch->mode == DMA_MODE_READ) {
  83. /* peripheral to memory */
  84. ppc4xx_set_src_addr(dmanr, 0);
  85. ppc4xx_set_dst_addr(dmanr, p_dma_ch->addr);
  86. } else if (p_dma_ch->mode == DMA_MODE_WRITE) {
  87. /* memory to peripheral */
  88. ppc4xx_set_src_addr(dmanr, p_dma_ch->addr);
  89. ppc4xx_set_dst_addr(dmanr, 0);
  90. }
  91. /* for other xfer modes, the addresses are already set */
  92. control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
  93. control &= ~(DMA_TM_MASK | DMA_TD); /* clear all mode bits */
  94. if (p_dma_ch->mode == DMA_MODE_MM) {
  95. /* software initiated memory to memory */
  96. control |= DMA_ETD_OUTPUT | DMA_TCE_ENABLE;
  97. }
  98. mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
  99. /*
  100. * Clear the CS, TS, RI bits for the channel from DMASR. This
  101. * has been observed to happen correctly only after the mode and
  102. * ETD/DCE bits in DMACRx are set above. Must do this before
  103. * enabling the channel.
  104. */
  105. mtdcr(DCRN_DMASR, status_bits[dmanr]);
  106. /*
  107. * For device-paced transfers, Terminal Count Enable apparently
  108. * must be on, and this must be turned on after the mode, etc.
  109. * bits are cleared above (at least on Redwood-6).
  110. */
  111. if ((p_dma_ch->mode == DMA_MODE_MM_DEVATDST) ||
  112. (p_dma_ch->mode == DMA_MODE_MM_DEVATSRC))
  113. control |= DMA_TCE_ENABLE;
  114. /*
  115. * Now enable the channel.
  116. */
  117. control |= (p_dma_ch->mode | DMA_CE_ENABLE);
  118. mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
  119. p_dma_ch->in_use = 1;
  120. }
  121. void
  122. ppc4xx_disable_dma(unsigned int dmanr)
  123. {
  124. unsigned int control;
  125. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  126. if (!p_dma_ch->in_use) {
  127. printk("disable_dma: channel %d not in use\n", dmanr);
  128. return;
  129. }
  130. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  131. printk("disable_dma: bad channel: %d\n", dmanr);
  132. return;
  133. }
  134. control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
  135. control &= ~DMA_CE_ENABLE;
  136. mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
  137. p_dma_ch->in_use = 0;
  138. }
  139. /*
  140. * Sets the dma mode for single DMA transfers only.
  141. * For scatter/gather transfers, the mode is passed to the
  142. * alloc_dma_handle() function as one of the parameters.
  143. *
  144. * The mode is simply saved and used later. This allows
  145. * the driver to call set_dma_mode() and set_dma_addr() in
  146. * any order.
  147. *
  148. * Valid mode values are:
  149. *
  150. * DMA_MODE_READ peripheral to memory
  151. * DMA_MODE_WRITE memory to peripheral
  152. * DMA_MODE_MM memory to memory
  153. * DMA_MODE_MM_DEVATSRC device-paced memory to memory, device at src
  154. * DMA_MODE_MM_DEVATDST device-paced memory to memory, device at dst
  155. */
  156. int
  157. ppc4xx_set_dma_mode(unsigned int dmanr, unsigned int mode)
  158. {
  159. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  160. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  161. printk("set_dma_mode: bad channel 0x%x\n", dmanr);
  162. return DMA_STATUS_BAD_CHANNEL;
  163. }
  164. p_dma_ch->mode = mode;
  165. return DMA_STATUS_GOOD;
  166. }
  167. /*
  168. * Sets the DMA Count register. Note that 'count' is in bytes.
  169. * However, the DMA Count register counts the number of "transfers",
  170. * where each transfer is equal to the bus width. Thus, count
  171. * MUST be a multiple of the bus width.
  172. */
  173. void
  174. ppc4xx_set_dma_count(unsigned int dmanr, unsigned int count)
  175. {
  176. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  177. #ifdef DEBUG_4xxDMA
  178. {
  179. int error = 0;
  180. switch (p_dma_ch->pwidth) {
  181. case PW_8:
  182. break;
  183. case PW_16:
  184. if (count & 0x1)
  185. error = 1;
  186. break;
  187. case PW_32:
  188. if (count & 0x3)
  189. error = 1;
  190. break;
  191. case PW_64:
  192. if (count & 0x7)
  193. error = 1;
  194. break;
  195. default:
  196. printk("set_dma_count: invalid bus width: 0x%x\n",
  197. p_dma_ch->pwidth);
  198. return;
  199. }
  200. if (error)
  201. printk
  202. ("Warning: set_dma_count count 0x%x bus width %d\n",
  203. count, p_dma_ch->pwidth);
  204. }
  205. #endif
  206. count = count >> p_dma_ch->shift;
  207. mtdcr(DCRN_DMACT0 + (dmanr * 0x8), count);
  208. }
  209. /*
  210. * Returns the number of bytes left to be transfered.
  211. * After a DMA transfer, this should return zero.
  212. * Reading this while a DMA transfer is still in progress will return
  213. * unpredictable results.
  214. */
  215. int
  216. ppc4xx_get_dma_residue(unsigned int dmanr)
  217. {
  218. unsigned int count;
  219. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  220. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  221. printk("ppc4xx_get_dma_residue: bad channel 0x%x\n", dmanr);
  222. return DMA_STATUS_BAD_CHANNEL;
  223. }
  224. count = mfdcr(DCRN_DMACT0 + (dmanr * 0x8));
  225. return (count << p_dma_ch->shift);
  226. }
  227. /*
  228. * Sets the DMA address for a memory to peripheral or peripheral
  229. * to memory transfer. The address is just saved in the channel
  230. * structure for now and used later in enable_dma().
  231. */
  232. void
  233. ppc4xx_set_dma_addr(unsigned int dmanr, phys_addr_t addr)
  234. {
  235. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  236. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  237. printk("ppc4xx_set_dma_addr: bad channel: %d\n", dmanr);
  238. return;
  239. }
  240. #ifdef DEBUG_4xxDMA
  241. {
  242. int error = 0;
  243. switch (p_dma_ch->pwidth) {
  244. case PW_8:
  245. break;
  246. case PW_16:
  247. if ((unsigned) addr & 0x1)
  248. error = 1;
  249. break;
  250. case PW_32:
  251. if ((unsigned) addr & 0x3)
  252. error = 1;
  253. break;
  254. case PW_64:
  255. if ((unsigned) addr & 0x7)
  256. error = 1;
  257. break;
  258. default:
  259. printk("ppc4xx_set_dma_addr: invalid bus width: 0x%x\n",
  260. p_dma_ch->pwidth);
  261. return;
  262. }
  263. if (error)
  264. printk("Warning: ppc4xx_set_dma_addr addr 0x%x bus width %d\n",
  265. addr, p_dma_ch->pwidth);
  266. }
  267. #endif
  268. /* save dma address and program it later after we know the xfer mode */
  269. p_dma_ch->addr = addr;
  270. }
  271. /*
  272. * Sets both DMA addresses for a memory to memory transfer.
  273. * For memory to peripheral or peripheral to memory transfers
  274. * the function set_dma_addr() should be used instead.
  275. */
  276. void
  277. ppc4xx_set_dma_addr2(unsigned int dmanr, phys_addr_t src_dma_addr,
  278. phys_addr_t dst_dma_addr)
  279. {
  280. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  281. printk("ppc4xx_set_dma_addr2: bad channel: %d\n", dmanr);
  282. return;
  283. }
  284. #ifdef DEBUG_4xxDMA
  285. {
  286. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  287. int error = 0;
  288. switch (p_dma_ch->pwidth) {
  289. case PW_8:
  290. break;
  291. case PW_16:
  292. if (((unsigned) src_dma_addr & 0x1) ||
  293. ((unsigned) dst_dma_addr & 0x1)
  294. )
  295. error = 1;
  296. break;
  297. case PW_32:
  298. if (((unsigned) src_dma_addr & 0x3) ||
  299. ((unsigned) dst_dma_addr & 0x3)
  300. )
  301. error = 1;
  302. break;
  303. case PW_64:
  304. if (((unsigned) src_dma_addr & 0x7) ||
  305. ((unsigned) dst_dma_addr & 0x7)
  306. )
  307. error = 1;
  308. break;
  309. default:
  310. printk("ppc4xx_set_dma_addr2: invalid bus width: 0x%x\n",
  311. p_dma_ch->pwidth);
  312. return;
  313. }
  314. if (error)
  315. printk
  316. ("Warning: ppc4xx_set_dma_addr2 src 0x%x dst 0x%x bus width %d\n",
  317. src_dma_addr, dst_dma_addr, p_dma_ch->pwidth);
  318. }
  319. #endif
  320. ppc4xx_set_src_addr(dmanr, src_dma_addr);
  321. ppc4xx_set_dst_addr(dmanr, dst_dma_addr);
  322. }
  323. /*
  324. * Enables the channel interrupt.
  325. *
  326. * If performing a scatter/gatter transfer, this function
  327. * MUST be called before calling alloc_dma_handle() and building
  328. * the sgl list. Otherwise, interrupts will not be enabled, if
  329. * they were previously disabled.
  330. */
  331. int
  332. ppc4xx_enable_dma_interrupt(unsigned int dmanr)
  333. {
  334. unsigned int control;
  335. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  336. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  337. printk("ppc4xx_enable_dma_interrupt: bad channel: %d\n", dmanr);
  338. return DMA_STATUS_BAD_CHANNEL;
  339. }
  340. p_dma_ch->int_enable = 1;
  341. control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
  342. control |= DMA_CIE_ENABLE; /* Channel Interrupt Enable */
  343. mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
  344. return DMA_STATUS_GOOD;
  345. }
  346. /*
  347. * Disables the channel interrupt.
  348. *
  349. * If performing a scatter/gatter transfer, this function
  350. * MUST be called before calling alloc_dma_handle() and building
  351. * the sgl list. Otherwise, interrupts will not be disabled, if
  352. * they were previously enabled.
  353. */
  354. int
  355. ppc4xx_disable_dma_interrupt(unsigned int dmanr)
  356. {
  357. unsigned int control;
  358. ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];
  359. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  360. printk("ppc4xx_disable_dma_interrupt: bad channel: %d\n", dmanr);
  361. return DMA_STATUS_BAD_CHANNEL;
  362. }
  363. p_dma_ch->int_enable = 0;
  364. control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
  365. control &= ~DMA_CIE_ENABLE; /* Channel Interrupt Enable */
  366. mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);
  367. return DMA_STATUS_GOOD;
  368. }
/*
 * Configures a DMA channel, including the peripheral bus width, if a
 * peripheral is attached to the channel, the polarity of the DMAReq and
 * DMAAck signals, etc. This information should really be setup by the boot
 * code, since most likely the configuration won't change dynamically.
 * If the kernel has to call this function, it's recommended that it's
 * called from platform specific init code. The driver should not need to
 * call this function.
 */
int
ppc4xx_init_dma_channel(unsigned int dmanr, ppc_dma_ch_t * p_init)
{
	unsigned int polarity;
	uint32_t control = 0;
	ppc_dma_ch_t *p_dma_ch = &dma_channels[dmanr];

	/* DMA_MODE_READ/DMA_MODE_WRITE are globals (re)assigned here,
	 * not locals: they encode the direction bit for this engine. */
	DMA_MODE_READ = (unsigned long) DMA_TD;	/* Peripheral to Memory */
	DMA_MODE_WRITE = 0;	/* Memory to Peripheral */

	if (!p_init) {
		printk("ppc4xx_init_dma_channel: NULL p_init\n");
		return DMA_STATUS_NULL_POINTER;
	}

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_init_dma_channel: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

#if DCRN_POL > 0
	polarity = mfdcr(DCRN_POL);
#else
	/* this part has no polarity register */
	polarity = 0;
#endif

	/* Setup the control register based on the values passed to
	 * us in p_init. Then, over-write the control register with this
	 * new value.
	 */
	/* NOTE(review): SET_DMA_CONTROL presumably expands to an
	 * expression reading p_init's fields -- confirm in ppc4xx_dma.h. */
	control |= SET_DMA_CONTROL;

	/* clear all polarity signals and then "or" in new signal levels */
	polarity &= ~GET_DMA_POLARITY(dmanr);
	polarity |= p_init->polarity;
#if DCRN_POL > 0
	mtdcr(DCRN_POL, polarity);
#endif
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	/* save these values in our dma channel structure */
	memcpy(p_dma_ch, p_init, sizeof (ppc_dma_ch_t));

	/*
	 * The peripheral width values written in the control register are:
	 *   PW_8  0
	 *   PW_16 1
	 *   PW_32 2
	 *   PW_64 3
	 *
	 * Since the DMA count register takes the number of "transfers",
	 * we need to divide the count sent to us in certain
	 * functions by the appropriate number. It so happens that our
	 * right shift value is equal to the peripheral width value.
	 */
	p_dma_ch->shift = p_init->pwidth;

	/*
	 * Save the control word for easy access.
	 */
	p_dma_ch->control = control;

	mtdcr(DCRN_DMASR, 0xffffffff);	/* clear status register */
	return DMA_STATUS_GOOD;
}
/*
 * This function returns the channel configuration.
 *
 * Fills *p_dma_ch from the cached software state in dma_channels[],
 * then overrides the fields that can be read back live from the
 * hardware (polarity and the DMACRx-derived fields below).
 */
int
ppc4xx_get_channel_config(unsigned int dmanr, ppc_dma_ch_t * p_dma_ch)
{
	unsigned int polarity;
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_get_channel_config: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	/* start from the cached software copy */
	memcpy(p_dma_ch, &dma_channels[dmanr], sizeof (ppc_dma_ch_t));

#if DCRN_POL > 0
	polarity = mfdcr(DCRN_POL);
#else
	/* this part has no polarity register */
	polarity = 0;
#endif
	p_dma_ch->polarity = polarity & GET_DMA_POLARITY(dmanr);

	/* decode the live channel control register */
	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));

	p_dma_ch->cp = GET_DMA_PRIORITY(control);
	p_dma_ch->pwidth = GET_DMA_PW(control);
	p_dma_ch->psc = GET_DMA_PSC(control);
	p_dma_ch->pwc = GET_DMA_PWC(control);
	p_dma_ch->phc = GET_DMA_PHC(control);
	p_dma_ch->ce = GET_DMA_CE_ENABLE(control);
	p_dma_ch->int_enable = GET_DMA_CIE_ENABLE(control);
	/* shift mirrors pwidth; see ppc4xx_init_dma_channel() */
	p_dma_ch->shift = GET_DMA_PW(control);

#ifdef CONFIG_PPC4xx_EDMA
	p_dma_ch->pf = GET_DMA_PREFETCH(control);
#else
	p_dma_ch->ch_enable = GET_DMA_CH(control);
	p_dma_ch->ece_enable = GET_DMA_ECE(control);
	p_dma_ch->tcd_disable = GET_DMA_TCD(control);
#endif
	return DMA_STATUS_GOOD;
}
/*
 * Sets the priority for the DMA channel dmanr.
 * Since this is setup by the hardware init function, this function
 * can be used to dynamically change the priority of a channel.
 *
 * Acceptable priorities:
 *
 * PRIORITY_LOW
 * PRIORITY_MID_LOW
 * PRIORITY_MID_HIGH
 * PRIORITY_HIGH
 *
 */
int
ppc4xx_set_channel_priority(unsigned int dmanr, unsigned int priority)
{
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_set_channel_priority: bad channel %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}

	if ((priority != PRIORITY_LOW) &&
	    (priority != PRIORITY_MID_LOW) &&
	    (priority != PRIORITY_MID_HIGH) && (priority != PRIORITY_HIGH)) {
		/* NOTE(review): an invalid priority is only reported --
		 * there is no early return, so the bogus value is still
		 * OR-ed into the control register below.  Verify whether
		 * a return was intended here. */
		printk("ppc4xx_set_channel_priority: bad priority: 0x%x\n", priority);
	}

	/* NOTE(review): the existing priority bits are not cleared
	 * before the OR, so lowering a channel's priority may have no
	 * effect on the hardware value. */
	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	control |= SET_DMA_PRIORITY(priority);
	mtdcr(DCRN_DMACR0 + (dmanr * 0x8), control);

	return DMA_STATUS_GOOD;
}
/*
 * Returns the width of the peripheral attached to this channel. This assumes
 * that someone who knows the hardware configuration, boot code or some other
 * init code, already set the width.
 *
 * The return value is one of:
 *   PW_8
 *   PW_16
 *   PW_32
 *   PW_64
 *
 * The function returns 0 on error.
 */
unsigned int
ppc4xx_get_peripheral_width(unsigned int dmanr)
{
	unsigned int control;

	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk("ppc4xx_get_peripheral_width: bad channel %d\n", dmanr);
		/* NOTE(review): the comment above promises 0 on error,
		 * but this returns DMA_STATUS_BAD_CHANNEL (converted to
		 * unsigned) -- callers checking for 0 will not see the
		 * failure.  Reconcile one way or the other. */
		return DMA_STATUS_BAD_CHANNEL;
	}

	control = mfdcr(DCRN_DMACR0 + (dmanr * 0x8));
	return (GET_DMA_PW(control));
}
/*
 * Clears the channel status bits
 *
 * Writes the error/complete/terminal-count bits for channel 'dmanr'
 * back to DMASR to clear them.
 */
int
ppc4xx_clr_dma_status(unsigned int dmanr)
{
	if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
		printk(KERN_ERR "ppc4xx_clr_dma_status: bad channel: %d\n", dmanr);
		return DMA_STATUS_BAD_CHANNEL;
	}
	/* Shifting the channel-0 bits right by the channel number selects
	 * the same bits for channel 'dmanr' -- assumes each status group
	 * is laid out one bit per channel (consistent with status_bits[]
	 * in ppc4xx_enable_dma(); confirm against the DMASR layout). */
	mtdcr(DCRN_DMASR, ((u32)DMA_CH0_ERR | (u32)DMA_CS0 | (u32)DMA_TS0) >> dmanr);
	return DMA_STATUS_GOOD;
}
  538. #ifdef CONFIG_PPC4xx_EDMA
  539. /*
  540. * Enables the burst on the channel (BTEN bit in the control/count register)
  541. * Note:
  542. * For scatter/gather dma, this function MUST be called before the
  543. * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
  544. * sgl list and used as each sgl element is added.
  545. */
  546. int
  547. ppc4xx_enable_burst(unsigned int dmanr)
  548. {
  549. unsigned int ctc;
  550. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  551. printk(KERN_ERR "ppc4xx_enable_burst: bad channel: %d\n", dmanr);
  552. return DMA_STATUS_BAD_CHANNEL;
  553. }
  554. ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) | DMA_CTC_BTEN;
  555. mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
  556. return DMA_STATUS_GOOD;
  557. }
  558. /*
  559. * Disables the burst on the channel (BTEN bit in the control/count register)
  560. * Note:
  561. * For scatter/gather dma, this function MUST be called before the
  562. * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
  563. * sgl list and used as each sgl element is added.
  564. */
  565. int
  566. ppc4xx_disable_burst(unsigned int dmanr)
  567. {
  568. unsigned int ctc;
  569. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  570. printk(KERN_ERR "ppc4xx_disable_burst: bad channel: %d\n", dmanr);
  571. return DMA_STATUS_BAD_CHANNEL;
  572. }
  573. ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) &~ DMA_CTC_BTEN;
  574. mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
  575. return DMA_STATUS_GOOD;
  576. }
  577. /*
  578. * Sets the burst size (number of peripheral widths) for the channel
  579. * (BSIZ bits in the control/count register))
  580. * must be one of:
  581. * DMA_CTC_BSIZ_2
  582. * DMA_CTC_BSIZ_4
  583. * DMA_CTC_BSIZ_8
  584. * DMA_CTC_BSIZ_16
  585. * Note:
  586. * For scatter/gather dma, this function MUST be called before the
  587. * ppc4xx_alloc_dma_handle() func as the chan count register is copied into the
  588. * sgl list and used as each sgl element is added.
  589. */
  590. int
  591. ppc4xx_set_burst_size(unsigned int dmanr, unsigned int bsize)
  592. {
  593. unsigned int ctc;
  594. if (dmanr >= MAX_PPC4xx_DMA_CHANNELS) {
  595. printk(KERN_ERR "ppc4xx_set_burst_size: bad channel: %d\n", dmanr);
  596. return DMA_STATUS_BAD_CHANNEL;
  597. }
  598. ctc = mfdcr(DCRN_DMACT0 + (dmanr * 0x8)) &~ DMA_CTC_BSIZ_MSK;
  599. ctc |= (bsize & DMA_CTC_BSIZ_MSK);
  600. mtdcr(DCRN_DMACT0 + (dmanr * 0x8), ctc);
  601. return DMA_STATUS_GOOD;
  602. }
/* burst helpers exist only on EDMA-capable parts */
EXPORT_SYMBOL(ppc4xx_enable_burst);
EXPORT_SYMBOL(ppc4xx_disable_burst);
EXPORT_SYMBOL(ppc4xx_set_burst_size);
#endif /* CONFIG_PPC4xx_EDMA */

/* public API exported to modules */
EXPORT_SYMBOL(ppc4xx_init_dma_channel);
EXPORT_SYMBOL(ppc4xx_get_channel_config);
EXPORT_SYMBOL(ppc4xx_set_channel_priority);
EXPORT_SYMBOL(ppc4xx_get_peripheral_width);
EXPORT_SYMBOL(dma_channels);
EXPORT_SYMBOL(ppc4xx_set_src_addr);
EXPORT_SYMBOL(ppc4xx_set_dst_addr);
EXPORT_SYMBOL(ppc4xx_set_dma_addr);
EXPORT_SYMBOL(ppc4xx_set_dma_addr2);
EXPORT_SYMBOL(ppc4xx_enable_dma);
EXPORT_SYMBOL(ppc4xx_disable_dma);
EXPORT_SYMBOL(ppc4xx_set_dma_mode);
EXPORT_SYMBOL(ppc4xx_set_dma_count);
EXPORT_SYMBOL(ppc4xx_get_dma_residue);
EXPORT_SYMBOL(ppc4xx_enable_dma_interrupt);
EXPORT_SYMBOL(ppc4xx_disable_dma_interrupt);
EXPORT_SYMBOL(ppc4xx_get_dma_status);
EXPORT_SYMBOL(ppc4xx_clr_dma_status);