dmabuf.c 35 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298
  1. /*
  2. * sound/dmabuf.c
  3. *
  4. * The DMA buffer manager for digitized voice applications
  5. */
  6. /*
  7. * Copyright (C) by Hannu Savolainen 1993-1997
  8. *
  9. * OSS/Free for Linux is distributed under the GNU GENERAL PUBLIC LICENSE (GPL)
  10. * Version 2 (June 1991). See the "COPYING" file distributed with this software
  11. * for more info.
  12. *
  13. * Thomas Sailer : moved several static variables into struct audio_operations
  14. * (which is grossly misnamed btw.) because they have the same
  15. * lifetime as the rest in there and dynamic allocation saves
  16. * 12k or so
  17. * Thomas Sailer : remove {in,out}_sleep_flag. It was used for the sleeper to
  18. * determine if it was woken up by the expiring timeout or by
  19. * an explicit wake_up. The return value from schedule_timeout
  20. * can be used instead; if 0, the wakeup was due to the timeout.
  21. *
  22. * Rob Riggs Added persistent DMA buffers (1998/10/17)
  23. */
  24. #define BE_CONSERVATIVE
  25. #define SAMPLE_ROUNDUP 0
  26. #include "sound_config.h"
  27. #define DMAP_FREE_ON_CLOSE 0
  28. #define DMAP_KEEP_ON_CLOSE 1
  29. extern int sound_dmap_flag;
  30. static void dma_reset_output(int dev);
  31. static void dma_reset_input(int dev);
  32. static int local_start_dma(struct audio_operations *adev, unsigned long physaddr, int count, int dma_mode);
  33. static int debugmem; /* switched off by default */
  34. static int dma_buffsize = DSP_BUFFSIZE;
  35. static long dmabuf_timeout(struct dma_buffparms *dmap)
  36. {
  37. long tmout;
  38. tmout = (dmap->fragment_size * HZ) / dmap->data_rate;
  39. tmout += HZ / 5; /* Some safety distance */
  40. if (tmout < (HZ / 2))
  41. tmout = HZ / 2;
  42. if (tmout > 20 * HZ)
  43. tmout = 20 * HZ;
  44. return tmout;
  45. }
/*
 * Allocate the raw DMA buffer for @dmap, if not already allocated.
 *
 * The request starts at dma_buffsize (at least 4 KB), clamped to the DMA
 * page size for the channel (64 KB for channels < 4, otherwise 128 KB;
 * 32 KB when isa_dma_bridge_buggy == 2, the Cyrix/bridge workaround).
 * On allocation failure the size is halved and retried, but never below
 * PAGE_SIZE.
 *
 * Returns 0 on success, -ENOMEM when no pages could be obtained, or
 * -EFAULT when the buffer straddles a DMA page boundary or lies above
 * MAX_DMA_ADDRESS.
 */
static int sound_alloc_dmap(struct dma_buffparms *dmap)
{
	char *start_addr, *end_addr;
	int dma_pagesize;
	int sz, size;
	struct page *page;

	dmap->mapping_flags &= ~DMA_MAP_MAPPED;

	if (dmap->raw_buf != NULL)
		return 0;	/* Already done */
	if (dma_buffsize < 4096)
		dma_buffsize = 4096;
	dma_pagesize = (dmap->dma < 4) ? (64 * 1024) : (128 * 1024);
	/*
	 * Now check for the Cyrix problem.
	 */
	if (isa_dma_bridge_buggy == 2)
		dma_pagesize = 32768;

	dmap->raw_buf = NULL;
	dmap->buffsize = dma_buffsize;
	if (dmap->buffsize > dma_pagesize)
		dmap->buffsize = dma_pagesize;
	start_addr = NULL;
	/*
	 * Now loop until we get a free buffer. Try to get smaller buffer if
	 * it fails. Don't accept smaller than 8k buffer for performance
	 * reasons.
	 */
	while (start_addr == NULL && dmap->buffsize > PAGE_SIZE) {
		/* Round buffsize up to a power-of-two number of pages; sz is the page order. */
		for (sz = 0, size = PAGE_SIZE; size < dmap->buffsize; sz++, size <<= 1);
		dmap->buffsize = PAGE_SIZE * (1 << sz);
		start_addr = (char *) __get_free_pages(GFP_ATOMIC|GFP_DMA|__GFP_NOWARN, sz);
		if (start_addr == NULL)
			dmap->buffsize /= 2;
	}

	if (start_addr == NULL) {
		printk(KERN_WARNING "Sound error: Couldn't allocate DMA buffer\n");
		return -ENOMEM;
	} else {
		/* make some checks */
		end_addr = start_addr + dmap->buffsize - 1;	/* last byte, inclusive */
		if (debugmem)
			printk(KERN_DEBUG "sound: start 0x%lx, end 0x%lx\n", (long) start_addr, (long) end_addr);
		/* now check if it fits into the same dma-pagesize */
		if (((long) start_addr & ~(dma_pagesize - 1)) != ((long) end_addr & ~(dma_pagesize - 1))
		    || end_addr >= (char *) (MAX_DMA_ADDRESS)) {
			printk(KERN_ERR "sound: Got invalid address 0x%lx for %db DMA-buffer\n", (long) start_addr, dmap->buffsize);
			return -EFAULT;
		}
	}
	dmap->raw_buf = start_addr;
	dmap->raw_buf_phys = virt_to_bus(start_addr);
	/* Mark the pages reserved so the VM leaves them alone (needed for mmap). */
	for (page = virt_to_page(start_addr); page <= virt_to_page(end_addr); page++)
		SetPageReserved(page);
	return 0;
}
  101. static void sound_free_dmap(struct dma_buffparms *dmap)
  102. {
  103. int sz, size;
  104. struct page *page;
  105. unsigned long start_addr, end_addr;
  106. if (dmap->raw_buf == NULL)
  107. return;
  108. if (dmap->mapping_flags & DMA_MAP_MAPPED)
  109. return; /* Don't free mmapped buffer. Will use it next time */
  110. for (sz = 0, size = PAGE_SIZE; size < dmap->buffsize; sz++, size <<= 1);
  111. start_addr = (unsigned long) dmap->raw_buf;
  112. end_addr = start_addr + dmap->buffsize;
  113. for (page = virt_to_page(start_addr); page <= virt_to_page(end_addr); page++)
  114. ClearPageReserved(page);
  115. free_pages((unsigned long) dmap->raw_buf, sz);
  116. dmap->raw_buf = NULL;
  117. }
  118. /* Intel version !!!!!!!!! */
  119. static int sound_start_dma(struct dma_buffparms *dmap, unsigned long physaddr, int count, int dma_mode)
  120. {
  121. unsigned long flags;
  122. int chan = dmap->dma;
  123. /* printk( "Start DMA%d %d, %d\n", chan, (int)(physaddr-dmap->raw_buf_phys), count); */
  124. flags = claim_dma_lock();
  125. disable_dma(chan);
  126. clear_dma_ff(chan);
  127. set_dma_mode(chan, dma_mode);
  128. set_dma_addr(chan, physaddr);
  129. set_dma_count(chan, count);
  130. enable_dma(chan);
  131. release_dma_lock(flags);
  132. return 0;
  133. }
  134. static void dma_init_buffers(struct dma_buffparms *dmap)
  135. {
  136. dmap->qlen = dmap->qhead = dmap->qtail = dmap->user_counter = 0;
  137. dmap->byte_counter = 0;
  138. dmap->max_byte_counter = 8000 * 60 * 60;
  139. dmap->bytes_in_use = dmap->buffsize;
  140. dmap->dma_mode = DMODE_NONE;
  141. dmap->mapping_flags = 0;
  142. dmap->neutral_byte = 0x80;
  143. dmap->data_rate = 8000;
  144. dmap->cfrag = -1;
  145. dmap->closing = 0;
  146. dmap->nbufs = 1;
  147. dmap->flags = DMA_BUSY; /* Other flags off */
  148. }
/*
 * Second-stage open for one direction: verify @dmap is free, make sure
 * its raw DMA buffer exists, claim the ISA DMA channel (if any) and
 * reset all bookkeeping fields to their defaults.
 *
 * Returns 0 on success; -EBUSY if the dmap is already open or the DMA
 * channel cannot be grabbed; -ENOSPC/-ENOMEM when no buffer memory is
 * available.
 */
static int open_dmap(struct audio_operations *adev, int mode, struct dma_buffparms *dmap)
{
	int err;

	if (dmap->flags & DMA_BUSY)
		return -EBUSY;
	if ((err = sound_alloc_dmap(dmap)) < 0)
		return err;

	if (dmap->raw_buf == NULL) {
		printk(KERN_WARNING "Sound: DMA buffers not available\n");
		return -ENOSPC;	/* Memory allocation failed during boot */
	}
	if (dmap->dma >= 0 && sound_open_dma(dmap->dma, adev->name)) {
		printk(KERN_WARNING "Unable to grab(2) DMA%d for the audio driver\n", dmap->dma);
		return -EBUSY;
	}
	dma_init_buffers(dmap);
	spin_lock_init(&dmap->lock);
	dmap->open_mode = mode;
	dmap->subdivision = dmap->underrun_count = 0;
	dmap->fragment_size = 0;	/* recomputed by reorganize_buffers() */
	dmap->max_fragments = 65536;	/* Just a large value */
	dmap->byte_counter = 0;
	dmap->max_byte_counter = 8000 * 60 * 60;
	dmap->applic_profile = APF_NORMAL;
	dmap->needs_reorg = 1;
	dmap->audio_callback = NULL;
	dmap->callback_parm = 0;
	return 0;
}
  178. static void close_dmap(struct audio_operations *adev, struct dma_buffparms *dmap)
  179. {
  180. unsigned long flags;
  181. if (dmap->dma >= 0) {
  182. sound_close_dma(dmap->dma);
  183. flags=claim_dma_lock();
  184. disable_dma(dmap->dma);
  185. release_dma_lock(flags);
  186. }
  187. if (dmap->flags & DMA_BUSY)
  188. dmap->dma_mode = DMODE_NONE;
  189. dmap->flags &= ~DMA_BUSY;
  190. if (sound_dmap_flag == DMAP_FREE_ON_CLOSE)
  191. sound_free_dmap(dmap);
  192. }
  193. static unsigned int default_set_bits(int dev, unsigned int bits)
  194. {
  195. mm_segment_t fs = get_fs();
  196. set_fs(get_ds());
  197. audio_devs[dev]->d->ioctl(dev, SNDCTL_DSP_SETFMT, (void __user *)&bits);
  198. set_fs(fs);
  199. return bits;
  200. }
  201. static int default_set_speed(int dev, int speed)
  202. {
  203. mm_segment_t fs = get_fs();
  204. set_fs(get_ds());
  205. audio_devs[dev]->d->ioctl(dev, SNDCTL_DSP_SPEED, (void __user *)&speed);
  206. set_fs(fs);
  207. return speed;
  208. }
  209. static short default_set_channels(int dev, short channels)
  210. {
  211. int c = channels;
  212. mm_segment_t fs = get_fs();
  213. set_fs(get_ds());
  214. audio_devs[dev]->d->ioctl(dev, SNDCTL_DSP_CHANNELS, (void __user *)&c);
  215. set_fs(fs);
  216. return c;
  217. }
  218. static void check_driver(struct audio_driver *d)
  219. {
  220. if (d->set_speed == NULL)
  221. d->set_speed = default_set_speed;
  222. if (d->set_bits == NULL)
  223. d->set_bits = default_set_bits;
  224. if (d->set_channels == NULL)
  225. d->set_channels = default_set_channels;
  226. }
/*
 * Open audio device @dev for @mode (OPEN_READ and/or OPEN_WRITE bits).
 *
 * For non-duplex hardware the input dmap is aliased onto the output
 * dmap.  The low-level driver's open() runs first; a dmap is then set
 * up per requested direction, with rollback (driver close + output
 * dmap close) if the second setup fails.  Finally the device is put in
 * the OSS default mode: 8 bits, mono, DSP_DEFAULT_SPEED.
 *
 * Returns 0 or a negative errno.
 */
int DMAbuf_open(int dev, int mode)
{
	struct audio_operations *adev = audio_devs[dev];
	int retval;
	struct dma_buffparms *dmap_in = NULL;
	struct dma_buffparms *dmap_out = NULL;

	if (!adev)
		return -ENXIO;	/* no such device */
	if (!(adev->flags & DMA_DUPLEX))
		adev->dmap_in = adev->dmap_out;	/* half duplex: share one dmap */
	check_driver(adev->d);

	if ((retval = adev->d->open(dev, mode)) < 0)
		return retval;
	dmap_out = adev->dmap_out;
	dmap_in = adev->dmap_in;
	if (dmap_in == dmap_out)
		adev->flags &= ~DMA_DUPLEX;	/* driver aliased them itself */

	if (mode & OPEN_WRITE) {
		if ((retval = open_dmap(adev, mode, dmap_out)) < 0) {
			adev->d->close(dev);
			return retval;
		}
	}
	adev->enable_bits = mode;

	/* Open the input dmap only for pure reads, or for read+write on
	 * true full-duplex hardware (where it is a separate buffer). */
	if (mode == OPEN_READ || (mode != OPEN_WRITE && (adev->flags & DMA_DUPLEX))) {
		if ((retval = open_dmap(adev, mode, dmap_in)) < 0) {
			adev->d->close(dev);
			if (mode & OPEN_WRITE)
				close_dmap(adev, dmap_out);	/* roll back output side */
			return retval;
		}
	}
	adev->open_mode = mode;
	adev->go = 1;

	adev->d->set_bits(dev, 8);
	adev->d->set_channels(dev, 1);
	adev->d->set_speed(dev, DSP_DEFAULT_SPEED);
	if (adev->dmap_out->dma_mode == DMODE_OUTPUT)
		/* fill the output buffer with silence */
		memset(adev->dmap_out->raw_buf, adev->dmap_out->neutral_byte,
		       adev->dmap_out->bytes_in_use);
	return 0;
}
  269. /* MUST not hold the spinlock */
  270. void DMAbuf_reset(int dev)
  271. {
  272. if (audio_devs[dev]->open_mode & OPEN_WRITE)
  273. dma_reset_output(dev);
  274. if (audio_devs[dev]->open_mode & OPEN_READ)
  275. dma_reset_input(dev);
  276. }
/*
 * Stop output: wait briefly for the current fragment to finish, halt
 * the device and the ISA DMA channel, and empty the output queue.
 * May sleep; must be called without dmap->lock held.
 */
static void dma_reset_output(int dev)
{
	struct audio_operations *adev = audio_devs[dev];
	unsigned long flags, f;
	struct dma_buffparms *dmap = adev->dmap_out;

	if (!(dmap->flags & DMA_STARTED))	/* DMA is not active */
		return;

	/*
	 * First wait until the current fragment has been played completely
	 */
	spin_lock_irqsave(&dmap->lock, flags);
	adev->dmap_out->flags |= DMA_SYNCING;

	adev->dmap_out->underrun_count = 0;
	if (!signal_pending(current) && adev->dmap_out->qlen &&
	    adev->dmap_out->underrun_count == 0) {
		/* Drop the lock while sleeping; the IRQ side wakes out_sleeper. */
		spin_unlock_irqrestore(&dmap->lock, flags);
		interruptible_sleep_on_timeout(&adev->out_sleeper,
					       dmabuf_timeout(dmap));
		spin_lock_irqsave(&dmap->lock, flags);
	}
	adev->dmap_out->flags &= ~(DMA_SYNCING | DMA_ACTIVE);

	/*
	 * Finally shut the device off
	 */
	if (!(adev->flags & DMA_DUPLEX) || !adev->d->halt_output)
		adev->d->halt_io(dev);	/* no direction-specific halt available */
	else
		adev->d->halt_output(dev);
	adev->dmap_out->flags &= ~DMA_STARTED;

	/* Quiesce the ISA DMA controller channel as well. */
	f = claim_dma_lock();
	clear_dma_ff(dmap->dma);
	disable_dma(dmap->dma);
	release_dma_lock(f);

	dmap->byte_counter = 0;
	reorganize_buffers(dev, adev->dmap_out, 0);
	dmap->qlen = dmap->qhead = dmap->qtail = dmap->user_counter = 0;
	spin_unlock_irqrestore(&dmap->lock, flags);
}
  315. static void dma_reset_input(int dev)
  316. {
  317. struct audio_operations *adev = audio_devs[dev];
  318. unsigned long flags;
  319. struct dma_buffparms *dmap = adev->dmap_in;
  320. spin_lock_irqsave(&dmap->lock,flags);
  321. if (!(adev->flags & DMA_DUPLEX) || !adev->d->halt_input)
  322. adev->d->halt_io(dev);
  323. else
  324. adev->d->halt_input(dev);
  325. adev->dmap_in->flags &= ~DMA_STARTED;
  326. dmap->qlen = dmap->qhead = dmap->qtail = dmap->user_counter = 0;
  327. dmap->byte_counter = 0;
  328. reorganize_buffers(dev, adev->dmap_in, 1);
  329. spin_unlock_irqrestore(&dmap->lock,flags);
  330. }
/* MUST be called with holding the dmap->lock */
/*
 * Start (or restart) output DMA on the fragment at the queue head.
 * Does nothing unless output is enabled and the device is "go".
 */
void DMAbuf_launch_output(int dev, struct dma_buffparms *dmap)
{
	struct audio_operations *adev = audio_devs[dev];

	if (!((adev->enable_bits * adev->go) & PCM_ENABLE_OUTPUT))
		return;	/* Don't start DMA yet */

	dmap->dma_mode = DMODE_OUTPUT;

	/* Auto-mode DMA keeps running by itself; only re-kick when the
	 * transfer is inactive, not auto-mode, or the device does its own DMA. */
	if (!(dmap->flags & DMA_ACTIVE) || !(adev->flags & DMA_AUTOMODE) || (dmap->flags & DMA_NODMA)) {
		if (!(dmap->flags & DMA_STARTED)) {
			reorganize_buffers(dev, dmap, 0);
			if (adev->d->prepare_for_output(dev, dmap->fragment_size, dmap->nbufs))
				return;
			if (!(dmap->flags & DMA_NODMA))
				/* program the whole ring; the device chops it into fragments */
				local_start_dma(adev, dmap->raw_buf_phys, dmap->bytes_in_use, DMA_MODE_WRITE);
			dmap->flags |= DMA_STARTED;
		}
		if (dmap->counts[dmap->qhead] == 0)
			dmap->counts[dmap->qhead] = dmap->fragment_size;
		dmap->dma_mode = DMODE_OUTPUT;
		adev->d->output_block(dev, dmap->raw_buf_phys + dmap->qhead * dmap->fragment_size,
				      dmap->counts[dmap->qhead], 1);
		if (adev->d->trigger)
			adev->d->trigger(dev, adev->enable_bits * adev->go);
	}
	dmap->flags |= DMA_ACTIVE;
}
/*
 * Wait until all queued output has been played (or a signal arrives).
 * Also waits for device-internal buffers when the driver reports a
 * local queue (e.g. on-board RAM).  Returns the number of fragments
 * still queued (0 when fully drained).  May sleep.
 */
int DMAbuf_sync(int dev)
{
	struct audio_operations *adev = audio_devs[dev];
	unsigned long flags;
	int n = 0;
	struct dma_buffparms *dmap;

	if (!adev->go && !(adev->enable_bits & PCM_ENABLE_OUTPUT))
		return 0;	/* output not running - nothing to wait for */

	if (adev->dmap_out->dma_mode == DMODE_OUTPUT) {
		dmap = adev->dmap_out;
		spin_lock_irqsave(&dmap->lock, flags);
		if (dmap->qlen > 0 && !(dmap->flags & DMA_ACTIVE))
			DMAbuf_launch_output(dev, dmap);	/* kick it so it can drain */
		adev->dmap_out->flags |= DMA_SYNCING;
		adev->dmap_out->underrun_count = 0;
		/* One timed sleep per fragment (bounded by nbufs+1 rounds). */
		while (!signal_pending(current) && n++ <= adev->dmap_out->nbufs &&
		       adev->dmap_out->qlen && adev->dmap_out->underrun_count == 0) {
			long t = dmabuf_timeout(dmap);
			spin_unlock_irqrestore(&dmap->lock, flags);
			/* FIXME: not safe may miss events */
			t = interruptible_sleep_on_timeout(&adev->out_sleeper, t);
			spin_lock_irqsave(&dmap->lock, flags);
			if (!t) {
				/* timed out: give up and report what is left */
				adev->dmap_out->flags &= ~DMA_SYNCING;
				spin_unlock_irqrestore(&dmap->lock, flags);
				return adev->dmap_out->qlen;
			}
		}
		adev->dmap_out->flags &= ~(DMA_SYNCING | DMA_ACTIVE);

		/*
		 * Some devices such as GUS have huge amount of on board RAM for the
		 * audio data. We have to wait until the device has finished playing.
		 */
		/* still holding the lock */
		if (adev->d->local_qlen) {	/* Device has hidden buffers */
			while (!signal_pending(current) &&
			       adev->d->local_qlen(dev)) {
				spin_unlock_irqrestore(&dmap->lock, flags);
				interruptible_sleep_on_timeout(&adev->out_sleeper,
							       dmabuf_timeout(dmap));
				spin_lock_irqsave(&dmap->lock, flags);
			}
		}
		spin_unlock_irqrestore(&dmap->lock, flags);
	}
	adev->dmap_out->dma_mode = DMODE_NONE;
	return adev->dmap_out->qlen;
}
/*
 * Close the device: drain pending output (unless mmapped or a signal
 * is pending), silence the output buffer, reset both directions, call
 * the driver's close() and tear down the dmap(s) that were opened.
 * Always returns 0.
 */
int DMAbuf_release(int dev, int mode)
{
	struct audio_operations *adev = audio_devs[dev];
	struct dma_buffparms *dmap;
	unsigned long flags;

	/* pick a dmap whose lock guards the teardown; prefer input if open */
	dmap = adev->dmap_out;
	if (adev->open_mode & OPEN_WRITE)
		adev->dmap_out->closing = 1;

	if (adev->open_mode & OPEN_READ) {
		adev->dmap_in->closing = 1;
		dmap = adev->dmap_in;
	}

	/* Flush queued output before halting, but not for mmapped buffers
	 * and not if the process has already been signalled. */
	if (adev->open_mode & OPEN_WRITE)
		if (!(adev->dmap_out->mapping_flags & DMA_MAP_MAPPED))
			if (!signal_pending(current) && (adev->dmap_out->dma_mode == DMODE_OUTPUT))
				DMAbuf_sync(dev);

	if (adev->dmap_out->dma_mode == DMODE_OUTPUT)
		/* leave silence in the buffer, not stale audio */
		memset(adev->dmap_out->raw_buf, adev->dmap_out->neutral_byte, adev->dmap_out->bytes_in_use);

	DMAbuf_reset(dev);
	spin_lock_irqsave(&dmap->lock, flags);
	adev->d->close(dev);

	if (adev->open_mode & OPEN_WRITE)
		close_dmap(adev, adev->dmap_out);

	/* close the input dmap only if it was opened separately (see DMAbuf_open) */
	if (adev->open_mode == OPEN_READ ||
	    (adev->open_mode != OPEN_WRITE &&
	     (adev->flags & DMA_DUPLEX)))
		close_dmap(adev, adev->dmap_in);
	adev->open_mode = 0;
	spin_unlock_irqrestore(&dmap->lock, flags);
	return 0;
}
/* called with dmap->lock held */
/*
 * Make sure recording is running on @dmap: handles an output-to-input
 * direction change (temporarily dropping the lock to sync and reset),
 * prepares the driver for input, and starts the first capture block.
 * Returns 0 or a negative errno from prepare_for_input().
 */
int DMAbuf_activate_recording(int dev, struct dma_buffparms *dmap)
{
	struct audio_operations *adev = audio_devs[dev];
	int err;

	if (!(adev->open_mode & OPEN_READ))
		return 0;
	if (!(adev->enable_bits & PCM_ENABLE_INPUT))
		return 0;

	if (dmap->dma_mode == DMODE_OUTPUT) {	/* Direction change */
		/* release lock - it's not recursive */
		spin_unlock_irq(&dmap->lock);
		DMAbuf_sync(dev);
		DMAbuf_reset(dev);
		spin_lock_irq(&dmap->lock);
		dmap->dma_mode = DMODE_NONE;
	}

	if (!dmap->dma_mode) {
		reorganize_buffers(dev, dmap, 1);
		if ((err = adev->d->prepare_for_input(dev,
				dmap->fragment_size, dmap->nbufs)) < 0)
			return err;
		dmap->dma_mode = DMODE_INPUT;
	}

	if (!(dmap->flags & DMA_ACTIVE)) {
		if (dmap->needs_reorg)
			reorganize_buffers(dev, dmap, 0);
		/* program the whole ring, then ask for the first fragment */
		local_start_dma(adev, dmap->raw_buf_phys, dmap->bytes_in_use, DMA_MODE_READ);
		adev->d->start_input(dev, dmap->raw_buf_phys + dmap->qtail * dmap->fragment_size,
				     dmap->fragment_size, 0);
		dmap->flags |= DMA_ACTIVE;
		if (adev->d->trigger)
			adev->d->trigger(dev, adev->enable_bits * adev->go);
	}
	return 0;
}
/* acquires lock */
/*
 * Hand the caller a pointer into the next captured fragment.
 *
 * Starts recording if necessary and, unless @dontblock, sleeps (up to
 * 10 rounds) until a fragment is available.  On success *buf/*len
 * point at the unread part of the head fragment and the fragment
 * index is returned; otherwise a negative errno (-EINVAL for mmapped
 * buffers, -EAGAIN, -EINTR, -EIO on DMA timeout).
 */
int DMAbuf_getrdbuffer(int dev, char **buf, int *len, int dontblock)
{
	struct audio_operations *adev = audio_devs[dev];
	unsigned long flags;
	int err = 0, n = 0;
	struct dma_buffparms *dmap = adev->dmap_in;
	int go;

	if (!(adev->open_mode & OPEN_READ))
		return -EIO;
	spin_lock_irqsave(&dmap->lock, flags);
	if (dmap->needs_reorg)
		reorganize_buffers(dev, dmap, 0);
	if (adev->dmap_in->mapping_flags & DMA_MAP_MAPPED) {
		/* printk(KERN_WARNING "Sound: Can't read from mmapped device (1)\n");*/
		spin_unlock_irqrestore(&dmap->lock, flags);
		return -EINVAL;
	} else while (dmap->qlen <= 0 && n++ < 10) {	/* no data yet - wait */
		long timeout = MAX_SCHEDULE_TIMEOUT;
		if (!(adev->enable_bits & PCM_ENABLE_INPUT) || !adev->go) {
			spin_unlock_irqrestore(&dmap->lock, flags);
			return -EAGAIN;
		}
		if ((err = DMAbuf_activate_recording(dev, dmap)) < 0) {
			spin_unlock_irqrestore(&dmap->lock, flags);
			return err;
		}
		/* Wait for the next block */
		if (dontblock) {
			spin_unlock_irqrestore(&dmap->lock, flags);
			return -EAGAIN;
		}
		/* only apply a timeout when the device is actually running */
		if ((go = adev->go))
			timeout = dmabuf_timeout(dmap);

		spin_unlock_irqrestore(&dmap->lock, flags);
		timeout = interruptible_sleep_on_timeout(&adev->in_sleeper,
							 timeout);
		if (!timeout) {
			/* FIXME: include device name */
			err = -EIO;
			printk(KERN_WARNING "Sound: DMA (input) timed out - IRQ/DRQ config error?\n");
			dma_reset_input(dev);
		} else
			err = -EINTR;	/* woken early; overwritten if data arrived */
		spin_lock_irqsave(&dmap->lock, flags);
	}
	spin_unlock_irqrestore(&dmap->lock, flags);

	if (dmap->qlen <= 0)
		return err ? err : -EINTR;

	/* skip the already-consumed part of the head fragment */
	*buf = &dmap->raw_buf[dmap->qhead * dmap->fragment_size + dmap->counts[dmap->qhead]];
	*len = dmap->fragment_size - dmap->counts[dmap->qhead];

	return dmap->qhead;
}
  525. int DMAbuf_rmchars(int dev, int buff_no, int c)
  526. {
  527. struct audio_operations *adev = audio_devs[dev];
  528. struct dma_buffparms *dmap = adev->dmap_in;
  529. int p = dmap->counts[dmap->qhead] + c;
  530. if (dmap->mapping_flags & DMA_MAP_MAPPED)
  531. {
  532. /* printk("Sound: Can't read from mmapped device (2)\n");*/
  533. return -EINVAL;
  534. }
  535. else if (dmap->qlen <= 0)
  536. return -EIO;
  537. else if (p >= dmap->fragment_size) { /* This buffer is completely empty */
  538. dmap->counts[dmap->qhead] = 0;
  539. dmap->qlen--;
  540. dmap->qhead = (dmap->qhead + 1) % dmap->nbufs;
  541. }
  542. else dmap->counts[dmap->qhead] = p;
  543. return 0;
  544. }
/* MUST be called with dmap->lock held */
int DMAbuf_get_buffer_pointer(int dev, struct dma_buffparms *dmap, int direction)
{
	/*
	 * Try to approximate the active byte position of the DMA pointer within the
	 * buffer area as well as possible.  Returns a byte offset in
	 * [0, bytes_in_use); 0 when DMA is inactive or the reading looks bogus.
	 */
	int pos;
	unsigned long f;

	if (!(dmap->flags & DMA_ACTIVE))
		pos = 0;
	else {
		int chan = dmap->dma;

		f = claim_dma_lock();
		clear_dma_ff(chan);
		/* pausing the channel gives a stable residue reading, but is
		 * unsafe on the buggy bridge chipsets */
		if (!isa_dma_bridge_buggy)
			disable_dma(dmap->dma);
		pos = get_dma_residue(chan);
		pos = dmap->bytes_in_use - pos;	/* residue counts down */

		if (!(dmap->mapping_flags & DMA_MAP_MAPPED)) {
			/* At queue position 0 a reading past one fragment is
			 * most likely a stale/wrapped value - treat as 0. */
			if (direction == DMODE_OUTPUT) {
				if (dmap->qhead == 0)
					if (pos > dmap->fragment_size)
						pos = 0;
			} else {
				if (dmap->qtail == 0)
					if (pos > dmap->fragment_size)
						pos = 0;
			}
		}
		if (pos < 0)
			pos = 0;
		if (pos >= dmap->bytes_in_use)
			pos = 0;

		if (!isa_dma_bridge_buggy)
			enable_dma(dmap->dma);
		release_dma_lock(f);
	}
	/* printk( "%04x ", pos); */

	return pos;
}
  586. /*
  587. * DMAbuf_start_devices() is called by the /dev/music driver to start
  588. * one or more audio devices at desired moment.
  589. */
  590. void DMAbuf_start_devices(unsigned int devmask)
  591. {
  592. struct audio_operations *adev;
  593. int dev;
  594. for (dev = 0; dev < num_audiodevs; dev++) {
  595. if (!(devmask & (1 << dev)))
  596. continue;
  597. if (!(adev = audio_devs[dev]))
  598. continue;
  599. if (adev->open_mode == 0)
  600. continue;
  601. if (adev->go)
  602. continue;
  603. /* OK to start the device */
  604. adev->go = 1;
  605. if (adev->d->trigger)
  606. adev->d->trigger(dev,adev->enable_bits * adev->go);
  607. }
  608. }
  609. /* via poll called without a lock ?*/
  610. int DMAbuf_space_in_queue(int dev)
  611. {
  612. struct audio_operations *adev = audio_devs[dev];
  613. int len, max, tmp;
  614. struct dma_buffparms *dmap = adev->dmap_out;
  615. int lim = dmap->nbufs;
  616. if (lim < 2)
  617. lim = 2;
  618. if (dmap->qlen >= lim) /* No space at all */
  619. return 0;
  620. /*
  621. * Verify that there are no more pending buffers than the limit
  622. * defined by the process.
  623. */
  624. max = dmap->max_fragments;
  625. if (max > lim)
  626. max = lim;
  627. len = dmap->qlen;
  628. if (adev->d->local_qlen) {
  629. tmp = adev->d->local_qlen(dev);
  630. if (tmp && len)
  631. tmp--; /* This buffer has been counted twice */
  632. len += tmp;
  633. }
  634. if (dmap->byte_counter % dmap->fragment_size) /* There is a partial fragment */
  635. len = len + 1;
  636. if (len >= max)
  637. return 0;
  638. return max - len;
  639. }
  640. /* MUST not hold the spinlock - this function may sleep */
  641. static int output_sleep(int dev, int dontblock)
  642. {
  643. struct audio_operations *adev = audio_devs[dev];
  644. int err = 0;
  645. struct dma_buffparms *dmap = adev->dmap_out;
  646. long timeout;
  647. long timeout_value;
  648. if (dontblock)
  649. return -EAGAIN;
  650. if (!(adev->enable_bits & PCM_ENABLE_OUTPUT))
  651. return -EAGAIN;
  652. /*
  653. * Wait for free space
  654. */
  655. if (signal_pending(current))
  656. return -EINTR;
  657. timeout = (adev->go && !(dmap->flags & DMA_NOTIMEOUT));
  658. if (timeout)
  659. timeout_value = dmabuf_timeout(dmap);
  660. else
  661. timeout_value = MAX_SCHEDULE_TIMEOUT;
  662. timeout_value = interruptible_sleep_on_timeout(&adev->out_sleeper,
  663. timeout_value);
  664. if (timeout != MAX_SCHEDULE_TIMEOUT && !timeout_value) {
  665. printk(KERN_WARNING "Sound: DMA (output) timed out - IRQ/DRQ config error?\n");
  666. dma_reset_output(dev);
  667. } else {
  668. if (signal_pending(current))
  669. err = -EINTR;
  670. }
  671. return err;
  672. }
/* called with the lock held */
/*
 * Compute the largest contiguous writable region of the output ring.
 * On success sets *buf/*size to the region after the user write pointer
 * and returns 1; returns 0 when no space is available.
 */
static int find_output_space(int dev, char **buf, int *size)
{
	struct audio_operations *adev = audio_devs[dev];
	struct dma_buffparms *dmap = adev->dmap_out;
	unsigned long active_offs;
	long len, offs;
	int maxfrags;
	/* bytes already written into the current (partial) fragment */
	int occupied_bytes = (dmap->user_counter % dmap->fragment_size);

	*buf = dmap->raw_buf;
	if (!(maxfrags = DMAbuf_space_in_queue(dev)) && !occupied_bytes)
		return 0;

#ifdef BE_CONSERVATIVE
	active_offs = dmap->byte_counter + dmap->qhead * dmap->fragment_size;
#else
	active_offs = DMAbuf_get_buffer_pointer(dev, dmap, DMODE_OUTPUT);
	/* Check for pointer wrapping situation */
	if (active_offs < 0 || active_offs >= dmap->bytes_in_use)
		active_offs = 0;
	active_offs += dmap->byte_counter;
#endif

	offs = (dmap->user_counter % dmap->bytes_in_use) & ~SAMPLE_ROUNDUP;
	if (offs < 0 || offs >= dmap->bytes_in_use) {	/* Shouldn't happen */
		printk(KERN_ERR "Sound: Got unexpected offs %ld. Giving up.\n", offs);
		printk("Counter = %ld, bytes=%d\n", dmap->user_counter, dmap->bytes_in_use);
		return 0;
	}
	*buf = dmap->raw_buf + offs;

	len = active_offs + dmap->bytes_in_use - dmap->user_counter;	/* Number of unused bytes in buffer */

	if ((offs + len) > dmap->bytes_in_use)
		len = dmap->bytes_in_use - offs;	/* clip at end of ring */

	if (len < 0) {
		return 0;
	}
	/* don't offer more than the fragment-count limit allows */
	if (len > ((maxfrags * dmap->fragment_size) - occupied_bytes))
		len = (maxfrags * dmap->fragment_size) - occupied_bytes;
	*size = len & ~SAMPLE_ROUNDUP;
	return (*size > 0);
}
/* acquires lock */
/*
 * Get a writable region of the output buffer for the caller, blocking
 * in output_sleep() until space appears (unless @dontblock).  Handles
 * an input-to-output direction change by resetting the device first.
 * Returns 0 with *buf/*size set, or a negative errno.
 */
int DMAbuf_getwrbuffer(int dev, char **buf, int *size, int dontblock)
{
	struct audio_operations *adev = audio_devs[dev];
	unsigned long flags;
	int err = -EIO;
	struct dma_buffparms *dmap = adev->dmap_out;

	if (dmap->mapping_flags & DMA_MAP_MAPPED) {
		/* printk(KERN_DEBUG "Sound: Can't write to mmapped device (3)\n");*/
		return -EINVAL;
	}
	spin_lock_irqsave(&dmap->lock, flags);
	if (dmap->needs_reorg)
		reorganize_buffers(dev, dmap, 0);

	if (dmap->dma_mode == DMODE_INPUT) {	/* Direction change */
		/* DMAbuf_reset() sleeps - must drop the lock around it */
		spin_unlock_irqrestore(&dmap->lock, flags);
		DMAbuf_reset(dev);
		spin_lock_irqsave(&dmap->lock, flags);
	}
	dmap->dma_mode = DMODE_OUTPUT;

	while (find_output_space(dev, buf, size) <= 0) {
		spin_unlock_irqrestore(&dmap->lock, flags);
		if ((err = output_sleep(dev, dontblock)) < 0) {
			return err;
		}
		spin_lock_irqsave(&dmap->lock, flags);
	}

	spin_unlock_irqrestore(&dmap->lock, flags);
	return 0;
}
/* has to acquire dmap->lock */
/*
 * Account for @l bytes just written by the caller: advance the user
 * write pointer, update the per-fragment counts/queue, remember the
 * last written byte as the neutral (fill) byte, and launch output when
 * enough data has accumulated.  Returns 0.
 */
int DMAbuf_move_wrpointer(int dev, int l)
{
	struct audio_operations *adev = audio_devs[dev];
	struct dma_buffparms *dmap = adev->dmap_out;
	unsigned long ptr;
	unsigned long end_ptr, p;
	int post;
	unsigned long flags;

	spin_lock_irqsave(&dmap->lock, flags);
	post = (dmap->flags & DMA_POST);

	/* start of the fragment the write pointer was in before this write */
	ptr = (dmap->user_counter / dmap->fragment_size) * dmap->fragment_size;

	dmap->flags &= ~DMA_POST;
	dmap->cfrag = -1;
	dmap->user_counter += l;
	dmap->flags |= DMA_DIRTY;

	if (dmap->byte_counter >= dmap->max_byte_counter) {
		/* Wrap the byte counters */
		long decr = dmap->byte_counter;
		dmap->byte_counter = (dmap->byte_counter % dmap->bytes_in_use);
		decr -= dmap->byte_counter;
		dmap->user_counter -= decr;	/* keep both counters in step */
	}
	end_ptr = (dmap->user_counter / dmap->fragment_size) * dmap->fragment_size;

	p = (dmap->user_counter - 1) % dmap->bytes_in_use;
	dmap->neutral_byte = dmap->raw_buf[p];	/* last sample written */

	/* Update the fragment based bookkeeping too */
	while (ptr < end_ptr) {
		/* every fragment fully covered by this write is complete */
		dmap->counts[dmap->qtail] = dmap->fragment_size;
		dmap->qtail = (dmap->qtail + 1) % dmap->nbufs;
		dmap->qlen++;
		ptr += dmap->fragment_size;
	}

	dmap->counts[dmap->qtail] = dmap->user_counter - ptr;	/* partial tail fragment */

	/*
	 * Let the low level driver perform some postprocessing to
	 * the written data.
	 */
	if (adev->d->postprocess_write)
		adev->d->postprocess_write(dev);

	/* start playback once >1 fragment is queued, or on POST/near-full */
	if (!(dmap->flags & DMA_ACTIVE))
		if (dmap->qlen > 1 || (dmap->qlen > 0 && (post || dmap->qlen >= dmap->nbufs - 1)))
			DMAbuf_launch_output(dev, dmap);

	spin_unlock_irqrestore(&dmap->lock, flags);
	return 0;
}
  788. int DMAbuf_start_dma(int dev, unsigned long physaddr, int count, int dma_mode)
  789. {
  790. struct audio_operations *adev = audio_devs[dev];
  791. struct dma_buffparms *dmap = (dma_mode == DMA_MODE_WRITE) ? adev->dmap_out : adev->dmap_in;
  792. if (dmap->raw_buf == NULL) {
  793. printk(KERN_ERR "sound: DMA buffer(1) == NULL\n");
  794. printk("Device %d, chn=%s\n", dev, (dmap == adev->dmap_out) ? "out" : "in");
  795. return 0;
  796. }
  797. if (dmap->dma < 0)
  798. return 0;
  799. sound_start_dma(dmap, physaddr, count, dma_mode);
  800. return count;
  801. }
/*
 * (Re)start the ISA DMA engine for this device in auto-init (cyclic)
 * mode over the entire raw buffer.
 *
 * NOTE(review): the physaddr/count arguments are ignored; the transfer
 * is always programmed with raw_buf_phys/bytes_in_use because
 * DMA_AUTOINIT cycles over the whole buffer.  Presumably intentional —
 * confirm against callers before changing the signature.
 *
 * Returns non-zero when the caller may proceed (DMA started, or device
 * uses DMA_NODMA), 0 on error (missing buffer or no DMA channel).
 */
static int local_start_dma(struct audio_operations *adev, unsigned long physaddr, int count, int dma_mode)
{
	struct dma_buffparms *dmap = (dma_mode == DMA_MODE_WRITE) ? adev->dmap_out : adev->dmap_in;

	if (dmap->raw_buf == NULL) {
		printk(KERN_ERR "sound: DMA buffer(2) == NULL\n");
		printk(KERN_ERR "Device %s, chn=%s\n", adev->name, (dmap == adev->dmap_out) ? "out" : "in");
		return 0;
	}
	if (dmap->flags & DMA_NODMA)	/* device moves data by itself */
		return 1;
	if (dmap->dma < 0)
		return 0;
	sound_start_dma(dmap, dmap->raw_buf_phys, dmap->bytes_in_use, dma_mode | DMA_AUTOINIT);
	dmap->flags |= DMA_STARTED;
	return count;
}
  818. static void finish_output_interrupt(int dev, struct dma_buffparms *dmap)
  819. {
  820. struct audio_operations *adev = audio_devs[dev];
  821. if (dmap->audio_callback != NULL)
  822. dmap->audio_callback(dev, dmap->callback_parm);
  823. wake_up(&adev->out_sleeper);
  824. wake_up(&adev->poll_sleeper);
  825. }
/* called with dmap->lock held in irq context*/
/*
 * Retire one completed output fragment.
 *
 * mmapped mode: the hardware plays straight from the user-mapped
 * buffer, so just advance qhead, maintain the wrapping byte counters,
 * relaunch output and wake waiters.
 *
 * Normal mode: pop the finished fragment from the queue; on underrun
 * (queue empty) synthesize fragments filled with neutral_byte so the
 * device keeps running, then relaunch output if data remains.
 */
static void do_outputintr(int dev, int dummy)
{
	struct audio_operations *adev = audio_devs[dev];
	struct dma_buffparms *dmap = adev->dmap_out;
	int this_fragment;

	if (dmap->raw_buf == NULL) {
		printk(KERN_ERR "Sound: Error. Audio interrupt (%d) after freeing buffers.\n", dev);
		return;
	}
	if (dmap->mapping_flags & DMA_MAP_MAPPED) { /* Virtual memory mapped access */
		/* mmapped access */
		dmap->qhead = (dmap->qhead + 1) % dmap->nbufs;
		if (dmap->qhead == 0) { /* Wrapped */
			dmap->byte_counter += dmap->bytes_in_use;
			if (dmap->byte_counter >= dmap->max_byte_counter) { /* Overflow */
				long decr = dmap->byte_counter;
				dmap->byte_counter = (dmap->byte_counter % dmap->bytes_in_use);
				decr -= dmap->byte_counter;
				/* keep user_counter in the same epoch */
				dmap->user_counter -= decr;
			}
		}
		dmap->qlen++; /* Yes increment it (don't decrement) */
		if (!(adev->flags & DMA_AUTOMODE))
			dmap->flags &= ~DMA_ACTIVE;
		dmap->counts[dmap->qhead] = dmap->fragment_size;
		DMAbuf_launch_output(dev, dmap);
		finish_output_interrupt(dev, dmap);
		return;
	}
	/* Normal (non-mmapped) path: one fragment consumed */
	dmap->qlen--;
	this_fragment = dmap->qhead;
	dmap->qhead = (dmap->qhead + 1) % dmap->nbufs;

	if (dmap->qhead == 0) { /* Wrapped */
		dmap->byte_counter += dmap->bytes_in_use;
		if (dmap->byte_counter >= dmap->max_byte_counter) { /* Overflow */
			long decr = dmap->byte_counter;
			dmap->byte_counter = (dmap->byte_counter % dmap->bytes_in_use);
			decr -= dmap->byte_counter;
			dmap->user_counter -= decr;
		}
	}
	if (!(adev->flags & DMA_AUTOMODE))
		dmap->flags &= ~DMA_ACTIVE;
	/*
	 * This is dmap->qlen <= 0 except when closing when
	 * dmap->qlen < 0
	 */
	while (dmap->qlen <= -dmap->closing) {
		/* Underrun: fabricate a silence fragment to keep DMA fed */
		dmap->underrun_count++;
		dmap->qlen++;
		if ((dmap->flags & DMA_DIRTY) && dmap->applic_profile != APF_CPUINTENS) {
			/* Blank the whole buffer once with the neutral byte */
			dmap->flags &= ~DMA_DIRTY;
			memset(adev->dmap_out->raw_buf, adev->dmap_out->neutral_byte,
			       adev->dmap_out->buffsize);
		}
		dmap->user_counter += dmap->fragment_size;
		dmap->qtail = (dmap->qtail + 1) % dmap->nbufs;
	}
	if (dmap->qlen > 0)
		DMAbuf_launch_output(dev, dmap);
	finish_output_interrupt(dev, dmap);
}
  889. /* called in irq context */
  890. void DMAbuf_outputintr(int dev, int notify_only)
  891. {
  892. struct audio_operations *adev = audio_devs[dev];
  893. unsigned long flags;
  894. struct dma_buffparms *dmap = adev->dmap_out;
  895. spin_lock_irqsave(&dmap->lock,flags);
  896. if (!(dmap->flags & DMA_NODMA)) {
  897. int chan = dmap->dma, pos, n;
  898. unsigned long f;
  899. f=claim_dma_lock();
  900. if(!isa_dma_bridge_buggy)
  901. disable_dma(dmap->dma);
  902. clear_dma_ff(chan);
  903. pos = dmap->bytes_in_use - get_dma_residue(chan);
  904. if(!isa_dma_bridge_buggy)
  905. enable_dma(dmap->dma);
  906. release_dma_lock(f);
  907. pos = pos / dmap->fragment_size; /* Actual qhead */
  908. if (pos < 0 || pos >= dmap->nbufs)
  909. pos = 0;
  910. n = 0;
  911. while (dmap->qhead != pos && n++ < dmap->nbufs)
  912. do_outputintr(dev, notify_only);
  913. }
  914. else
  915. do_outputintr(dev, notify_only);
  916. spin_unlock_irqrestore(&dmap->lock,flags);
  917. }
/* called with dmap->lock held in irq context */
/*
 * Record one captured input fragment.
 *
 * mmapped mode: advance qtail (user space reads the buffer directly),
 * maintain the wrapping byte counters and, for non-automode hardware,
 * reprogram the next transfer.
 *
 * Normal mode: enqueue the fragment; if the ring is (nearly) full,
 * report an overrun and drop the oldest fragment instead of stopping
 * the engine.  Finally wake readers/pollers if data is available.
 */
static void do_inputintr(int dev)
{
	struct audio_operations *adev = audio_devs[dev];
	struct dma_buffparms *dmap = adev->dmap_in;

	if (dmap->raw_buf == NULL) {
		printk(KERN_ERR "Sound: Fatal error. Audio interrupt after freeing buffers.\n");
		return;
	}
	if (dmap->mapping_flags & DMA_MAP_MAPPED) {
		dmap->qtail = (dmap->qtail + 1) % dmap->nbufs;
		if (dmap->qtail == 0) { /* Wrapped */
			dmap->byte_counter += dmap->bytes_in_use;
			if (dmap->byte_counter >= dmap->max_byte_counter) { /* Overflow */
				long decr = dmap->byte_counter;
				/*
				 * NOTE(review): unlike the output path this keeps one
				 * extra bytes_in_use after the modulo — presumably to
				 * keep the counter ahead of the reader; confirm before
				 * "fixing" the asymmetry.
				 */
				dmap->byte_counter = (dmap->byte_counter % dmap->bytes_in_use) + dmap->bytes_in_use;
				decr -= dmap->byte_counter;
				dmap->user_counter -= decr;
			}
		}
		dmap->qlen++;

		if (!(adev->flags & DMA_AUTOMODE)) {
			/* Hardware needs an explicit restart per fragment */
			if (dmap->needs_reorg)
				reorganize_buffers(dev, dmap, 0);
			local_start_dma(adev, dmap->raw_buf_phys, dmap->bytes_in_use,DMA_MODE_READ);
			adev->d->start_input(dev, dmap->raw_buf_phys + dmap->qtail * dmap->fragment_size,
					     dmap->fragment_size, 1);
			if (adev->d->trigger)
				adev->d->trigger(dev, adev->enable_bits * adev->go);
		}
		dmap->flags |= DMA_ACTIVE;
	} else if (dmap->qlen >= (dmap->nbufs - 1)) {
		printk(KERN_WARNING "Sound: Recording overrun\n");
		dmap->underrun_count++;
		/* Just throw away the oldest fragment but keep the engine running */
		dmap->qhead = (dmap->qhead + 1) % dmap->nbufs;
		dmap->qtail = (dmap->qtail + 1) % dmap->nbufs;
	} else if (dmap->qlen >= 0 && dmap->qlen < dmap->nbufs) {
		dmap->qlen++;
		dmap->qtail = (dmap->qtail + 1) % dmap->nbufs;
		if (dmap->qtail == 0) { /* Wrapped */
			dmap->byte_counter += dmap->bytes_in_use;
			if (dmap->byte_counter >= dmap->max_byte_counter) { /* Overflow */
				long decr = dmap->byte_counter;
				dmap->byte_counter = (dmap->byte_counter % dmap->bytes_in_use) + dmap->bytes_in_use;
				decr -= dmap->byte_counter;
				dmap->user_counter -= decr;
			}
		}
	}
	/* Restart capture when the hardware doesn't re-arm itself */
	if (!(adev->flags & DMA_AUTOMODE) || (dmap->flags & DMA_NODMA)) {
		local_start_dma(adev, dmap->raw_buf_phys, dmap->bytes_in_use, DMA_MODE_READ);
		adev->d->start_input(dev, dmap->raw_buf_phys + dmap->qtail * dmap->fragment_size, dmap->fragment_size, 1);
		if (adev->d->trigger)
			adev->d->trigger(dev,adev->enable_bits * adev->go);
	}
	dmap->flags |= DMA_ACTIVE;
	if (dmap->qlen > 0)
	{
		wake_up(&adev->in_sleeper);
		wake_up(&adev->poll_sleeper);
	}
}
/* called in irq context */
/*
 * Top-level input interrupt handler; mirror of DMAbuf_outputintr().
 * Reads the ISA DMA residue to locate the hardware's true fragment
 * position and calls do_inputintr() once per captured fragment
 * (bounded by nbufs).  DMA_NODMA devices get one call per interrupt.
 */
void DMAbuf_inputintr(int dev)
{
	struct audio_operations *adev = audio_devs[dev];
	struct dma_buffparms *dmap = adev->dmap_in;
	unsigned long flags;

	spin_lock_irqsave(&dmap->lock,flags);
	if (!(dmap->flags & DMA_NODMA)) {
		int chan = dmap->dma, pos, n;
		unsigned long f;

		f=claim_dma_lock();
		if(!isa_dma_bridge_buggy)
			disable_dma(dmap->dma);
		clear_dma_ff(chan);
		pos = dmap->bytes_in_use - get_dma_residue(chan);
		if(!isa_dma_bridge_buggy)
			enable_dma(dmap->dma);
		release_dma_lock(f);

		pos = pos / dmap->fragment_size;	/* Actual qhead */
		if (pos < 0 || pos >= dmap->nbufs)
			pos = 0;
		n = 0;
		/* Enqueue every fragment the hardware has filled */
		while (dmap->qtail != pos && ++n < dmap->nbufs)
			do_inputintr(dev);
	} else
		do_inputintr(dev);
	spin_unlock_irqrestore(&dmap->lock,flags);
}
  1009. int DMAbuf_open_dma(int dev)
  1010. {
  1011. /*
  1012. * NOTE! This routine opens only the primary DMA channel (output).
  1013. */
  1014. struct audio_operations *adev = audio_devs[dev];
  1015. int err;
  1016. if ((err = open_dmap(adev, OPEN_READWRITE, adev->dmap_out)) < 0)
  1017. return -EBUSY;
  1018. dma_init_buffers(adev->dmap_out);
  1019. adev->dmap_out->flags |= DMA_ALLOC_DONE;
  1020. adev->dmap_out->fragment_size = adev->dmap_out->buffsize;
  1021. if (adev->dmap_out->dma >= 0) {
  1022. unsigned long flags;
  1023. flags=claim_dma_lock();
  1024. clear_dma_ff(adev->dmap_out->dma);
  1025. disable_dma(adev->dmap_out->dma);
  1026. release_dma_lock(flags);
  1027. }
  1028. return 0;
  1029. }
  1030. void DMAbuf_close_dma(int dev)
  1031. {
  1032. close_dmap(audio_devs[dev], audio_devs[dev]->dmap_out);
  1033. }
/*
 * Attach DMA buffer parameter blocks to audio device 'dev'.
 *
 * Child devices share the parent's dmaps; otherwise dmap[0] handles
 * both directions (dma1), with dmap[1]/dma2 used for input when the
 * device is full duplex.  Optionally pre-allocates persistent DMA
 * buffers.  Safe to call more than once (guarded by dmap_out == NULL).
 */
void DMAbuf_init(int dev, int dma1, int dma2)
{
	struct audio_operations *adev = audio_devs[dev];
	/*
	 * NOTE! This routine could be called several times.
	 */

	/* drag in audio_syms.o */
	/* (dummy reference forces the linker to keep the exported symbols) */
	{
		extern char audio_syms_symbol;
		audio_syms_symbol = 0;
	}

	if (adev && adev->dmap_out == NULL) {
		if (adev->d == NULL)
			panic("OSS: audio_devs[%d]->d == NULL\n", dev);

		if (adev->parent_dev) {	/* Use DMA map of the parent dev */
			int parent = adev->parent_dev - 1;	/* parent_dev is 1-based */
			adev->dmap_out = audio_devs[parent]->dmap_out;
			adev->dmap_in = audio_devs[parent]->dmap_in;
		} else {
			adev->dmap_out = adev->dmap_in = &adev->dmaps[0];
			adev->dmap_out->dma = dma1;
			if (adev->flags & DMA_DUPLEX) {
				adev->dmap_in = &adev->dmaps[1];
				adev->dmap_in->dma = dma2;
			}
		}
		/* Persistent DMA buffers allocated here */
		if (sound_dmap_flag == DMAP_KEEP_ON_CLOSE) {
			if (adev->dmap_in->raw_buf == NULL)
				sound_alloc_dmap(adev->dmap_in);
			if (adev->dmap_out->raw_buf == NULL)
				sound_alloc_dmap(adev->dmap_out);
		}
	}
}
/* No kernel lock - DMAbuf_activate_recording protected by global cli/sti */
/*
 * Compute the input half of the poll mask.  Side effect: if the device
 * is idle, enabled for input and 'go', polling kicks recording off so
 * data eventually arrives (select()-driven capture).  Returns
 * POLLIN|POLLRDNORM when a fragment is ready, else 0.
 */
static unsigned int poll_input(struct file * file, int dev, poll_table *wait)
{
	struct audio_operations *adev = audio_devs[dev];
	struct dma_buffparms *dmap = adev->dmap_in;

	if (!(adev->open_mode & OPEN_READ))
		return 0;
	if (dmap->mapping_flags & DMA_MAP_MAPPED) {
		if (dmap->qlen)
			return POLLIN | POLLRDNORM;
		return 0;
	}
	if (dmap->dma_mode != DMODE_INPUT) {
		/* Not recording yet: start it if the device is idle and enabled */
		if (dmap->dma_mode == DMODE_NONE &&
		    adev->enable_bits & PCM_ENABLE_INPUT &&
		    !dmap->qlen && adev->go) {
			unsigned long flags;

			spin_lock_irqsave(&dmap->lock,flags);
			DMAbuf_activate_recording(dev, dmap);
			spin_unlock_irqrestore(&dmap->lock,flags);
		}
		return 0;
	}
	if (!dmap->qlen)
		return 0;
	return POLLIN | POLLRDNORM;
}
  1096. static unsigned int poll_output(struct file * file, int dev, poll_table *wait)
  1097. {
  1098. struct audio_operations *adev = audio_devs[dev];
  1099. struct dma_buffparms *dmap = adev->dmap_out;
  1100. if (!(adev->open_mode & OPEN_WRITE))
  1101. return 0;
  1102. if (dmap->mapping_flags & DMA_MAP_MAPPED) {
  1103. if (dmap->qlen)
  1104. return POLLOUT | POLLWRNORM;
  1105. return 0;
  1106. }
  1107. if (dmap->dma_mode == DMODE_INPUT)
  1108. return 0;
  1109. if (dmap->dma_mode == DMODE_NONE)
  1110. return POLLOUT | POLLWRNORM;
  1111. if (!DMAbuf_space_in_queue(dev))
  1112. return 0;
  1113. return POLLOUT | POLLWRNORM;
  1114. }
  1115. unsigned int DMAbuf_poll(struct file * file, int dev, poll_table *wait)
  1116. {
  1117. struct audio_operations *adev = audio_devs[dev];
  1118. poll_wait(file, &adev->poll_sleeper, wait);
  1119. return poll_input(file, dev, wait) | poll_output(file, dev, wait);
  1120. }
  1121. void DMAbuf_deinit(int dev)
  1122. {
  1123. struct audio_operations *adev = audio_devs[dev];
  1124. /* This routine is called when driver is being unloaded */
  1125. if (!adev)
  1126. return;
  1127. /* Persistent DMA buffers deallocated here */
  1128. if (sound_dmap_flag == DMAP_KEEP_ON_CLOSE) {
  1129. sound_free_dmap(adev->dmap_out);
  1130. if (adev->flags & DMA_DUPLEX)
  1131. sound_free_dmap(adev->dmap_in);
  1132. }
  1133. }