path: root/firmware/target/arm/imx31/gigabeat-s/pcm-imx31.c
Diffstat (limited to 'firmware/target/arm/imx31/gigabeat-s/pcm-imx31.c')
-rw-r--r--  firmware/target/arm/imx31/gigabeat-s/pcm-imx31.c | 366
1 file changed, 255 insertions(+), 111 deletions(-)
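In short, this change moves Gigabeat S PCM playback and recording from interrupt-driven FIFO feeding to SDMA scatter-gather transfers. Both paths share one locking pattern: while the upper PCM layer holds the lock, a completing DMA only sets callback_pending, and the unlock replays the deferred callback. The sketch below condenses that pattern from the playback side of the diff, with explanatory comments added; it is illustrative only and not standalone code (all names come from the new code in the diff, nothing beyond it is implied).

/* Condensed from the playback side of the diff below -- illustrative only */
void pcm_play_lock(void)
{
    /* First locker masks the SSI transmit DMA request so the SDMA
     * completion callback cannot rearm a transfer in the meantime */
    if (++dma_play_data.locked == 1)
        imx31_regclr32(&SSI_SIER1, SSI_SIER_TDMAE);
}

void pcm_play_unlock(void)
{
    if (--dma_play_data.locked == 0 && dma_play_data.state != 0)
    {
        bool pending = false;
        int oldstatus = disable_irq_save();

        /* A DMA interrupt that fired while locked only set this flag */
        if (dma_play_data.callback_pending)
        {
            pending = true;
            dma_play_data.callback_pending = 0;
        }

        SSI_SIER1 |= SSI_SIER_TDMAE; /* re-enable DMA requests */
        restore_irq(oldstatus);

        if (pending)
            play_dma_callback(); /* run the deferred refill now */
    }
}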
diff --git a/firmware/target/arm/imx31/gigabeat-s/pcm-imx31.c b/firmware/target/arm/imx31/gigabeat-s/pcm-imx31.c
index f53d98aee9..60801262b4 100644
--- a/firmware/target/arm/imx31/gigabeat-s/pcm-imx31.c
+++ b/firmware/target/arm/imx31/gigabeat-s/pcm-imx31.c
@@ -23,78 +23,93 @@
 #include "kernel.h"
 #include "audio.h"
 #include "sound.h"
-#include "avic-imx31.h"
 #include "clkctl-imx31.h"
+#include "sdma-imx31.h"
+#include "mmu-imx31.h"
 
-/* This isn't DMA-based at the moment and is handled like Portal Player but
- * will suffice for starters. */
+#define DMA_PLAY_CH_NUM 2
+#define DMA_REC_CH_NUM 1
+
+static struct buffer_descriptor dma_play_bd DEVBSS_ATTR;
+static struct channel_descriptor dma_play_cd DEVBSS_ATTR;
 
 struct dma_data
 {
-    uint16_t *p;
-    size_t size;
     int locked;
+    int callback_pending; /* DMA interrupt happened while locked */
     int state;
 };
 
 static struct dma_data dma_play_data =
 {
     /* Initialize to a locked, stopped state */
-    .p = NULL,
-    .size = 0,
     .locked = 0,
+    .callback_pending = 0,
     .state = 0
 };
 
-void pcm_play_lock(void)
+static void play_dma_callback(void)
 {
-    if (++dma_play_data.locked == 1)
+    unsigned char *start;
+    size_t size;
+    pcm_more_callback_type get_more = pcm_callback_for_more;
+
+    if (dma_play_data.locked)
     {
-        /* Atomically disable transmit interrupt */
-        imx31_regclr32(&SSI_SIER1, SSI_SIER_TIE);
+        /* Callback is locked out */
+        dma_play_data.callback_pending = 1;
+        return;
     }
-}
 
-void pcm_play_unlock(void)
-{
-    if (--dma_play_data.locked == 0 && dma_play_data.state != 0)
+    if (get_more == NULL || (get_more(&start, &size), size == 0))
     {
-        /* Atomically enable transmit interrupt */
-        imx31_regset32(&SSI_SIER1, SSI_SIER_TIE);
+        /* Callback missing or no more DMA to do */
+        pcm_play_dma_stop();
+        pcm_play_dma_stopped_callback();
+    }
+    else
+    {
+        start = (void*)(((unsigned long)start + 3) & ~3);
+        size &= ~3;
+
+        /* Flush any pending cache writes */
+        clean_dcache_range(start, size);
+        dma_play_bd.buf_addr = (void *)addr_virt_to_phys((unsigned long)start);
+        dma_play_bd.mode.count = size;
+        dma_play_bd.mode.command = TRANSFER_16BIT;
+        dma_play_bd.mode.status = BD_DONE | BD_WRAP | BD_INTR;
+        sdma_channel_run(DMA_PLAY_CH_NUM);
     }
 }
 
-static void __attribute__((interrupt("IRQ"))) SSI1_HANDLER(void)
+void pcm_play_lock(void)
 {
-    register pcm_more_callback_type get_more;
+    if (++dma_play_data.locked == 1)
+        imx31_regclr32(&SSI_SIER1, SSI_SIER_TDMAE);
+}
 
-    do
+void pcm_play_unlock(void)
+{
+    if (--dma_play_data.locked == 0 && dma_play_data.state != 0)
     {
-        while (dma_play_data.size > 0)
+        bool pending = false;
+        int oldstatus = disable_irq_save();
+
+        if (dma_play_data.callback_pending)
         {
-            if (SSI_SFCSR_TFCNT0r(SSI_SFCSR1) > 6)
-            {
-                return;
-            }
-            SSI_STX0_1 = *dma_play_data.p++;
-            SSI_STX0_1 = *dma_play_data.p++;
-            dma_play_data.size -= 4;
+            pending = true;
+            dma_play_data.callback_pending = 0;
         }
 
-        /* p is empty, get some more data */
-        get_more = pcm_callback_for_more;
+        SSI_SIER1 |= SSI_SIER_TDMAE;
+        restore_irq(oldstatus);
 
-        if (get_more)
-        {
-            get_more((unsigned char **)&dma_play_data.p,
-                     &dma_play_data.size);
-        }
+        /* Should an interrupt be forced instead? The upper pcm layer can
+         * call producer's callback in thread context so technically this is
+         * acceptable. */
+        if (pending)
+            play_dma_callback();
     }
-    while (dma_play_data.size > 0);
-
-    /* No more data, so disable the FIFO/interrupt */
-    pcm_play_dma_stop();
-    pcm_play_dma_stopped_callback();
 }
 
 void pcm_dma_apply_settings(void)
@@ -104,6 +119,17 @@ void pcm_dma_apply_settings(void)
 
 void pcm_play_dma_init(void)
 {
+    /* Init channel information */
+    dma_play_cd.bd_count = 1;
+    dma_play_cd.callback = play_dma_callback;
+    dma_play_cd.shp_addr = SDMA_PER_ADDR_SSI1_TX1;
+    dma_play_cd.wml = SDMA_SSI_TXFIFO_WML*2;
+    dma_play_cd.per_type = SDMA_PER_SSI;
+    dma_play_cd.tran_type = SDMA_TRAN_EMI_2_PER;
+    dma_play_cd.event_id1 = SDMA_REQ_SSI1_TX1;
+
+    sdma_channel_init(DMA_PLAY_CH_NUM, &dma_play_cd, &dma_play_bd);
+
     imx31_clkctl_module_clock_gating(CG_SSI1, CGM_ON_ALL);
     imx31_clkctl_module_clock_gating(CG_SSI2, CGM_ON_ALL);
 
@@ -111,8 +137,8 @@ void pcm_play_dma_init(void)
     SSI_SCR2 &= ~SSI_SCR_SSIEN;
     SSI_SCR1 &= ~SSI_SCR_SSIEN;
 
-    SSI_SIER1 = SSI_SIER_TFE0; /* TX0 can issue an interrupt */
-    SSI_SIER2 = SSI_SIER_RFF0; /* RX0 can issue an interrupt */
+    SSI_SIER1 = 0;
+    SSI_SIER2 = 0;
 
     /* Set up audio mux */
 
@@ -155,8 +181,9 @@ void pcm_play_dma_init(void)
     SSI_STCCR1 = SSI_STRCCR_WL16 | SSI_STRCCR_DCw(2-1) |
                  SSI_STRCCR_PMw(4-1);
 
-    /* Transmit low watermark - 2 samples in FIFO */
-    SSI_SFCSR1 = SSI_SFCSR_TFWM1w(1) | SSI_SFCSR_TFWM0w(2);
+    /* Transmit low watermark */
+    SSI_SFCSR1 = (SSI_SFCSR1 & ~SSI_SFCSR_TFWM0) |
+                 SSI_SFCSR_TFWM0w(8-SDMA_SSI_TXFIFO_WML);
     SSI_STMSK1 = 0;
 
     /* SSI2 - provides MCLK to codec. Receives data from codec. */
@@ -186,8 +213,9 @@ void pcm_play_dma_init(void)
     SSI_SRCCR2 = SSI_STRCCR_WL16 | SSI_STRCCR_DCw(2-1) |
                  SSI_STRCCR_PMw(4-1);
 
-    /* Receive high watermark - 6 samples in FIFO */
-    SSI_SFCSR2 = SSI_SFCSR_RFWM1w(8) | SSI_SFCSR_RFWM0w(6);
+    /* Receive high watermark */
+    SSI_SFCSR2 = (SSI_SFCSR2 & ~SSI_SFCSR_RFWM0) |
+                 SSI_SFCSR_RFWM0w(SDMA_SSI_RXFIFO_WML);
     SSI_SRMSK2 = 0;
 
     /* Enable SSI2 (codec clock) */
@@ -199,7 +227,6 @@ void pcm_play_dma_init(void)
 void pcm_postinit(void)
 {
     audiohw_postinit();
-    avic_enable_int(SSI1, IRQ, 8, SSI1_HANDLER);
 }
 
 static void play_start_pcm(void)
@@ -207,32 +234,23 @@ static void play_start_pcm(void)
     /* Stop transmission (if in progress) */
     SSI_SCR1 &= ~SSI_SCR_TE;
 
-    /* Enable interrupt on unlock */
-    dma_play_data.state = 1;
-
-    /* Fill the FIFO or start when data is used up */
     SSI_SCR1 |= SSI_SCR_SSIEN; /* Enable SSI */
     SSI_STCR1 |= SSI_STCR_TFEN0; /* Enable TX FIFO */
 
-    while (1)
-    {
-        if (SSI_SFCSR_TFCNT0r(SSI_SFCSR1) > 6 || dma_play_data.size == 0)
-        {
-            SSI_SCR1 |= SSI_SCR_TE; /* Start transmitting */
-            return;
-        }
+    dma_play_data.state = 1; /* Enable DMA requests on unlock */
 
-        SSI_STX0_1 = *dma_play_data.p++;
-        SSI_STX0_1 = *dma_play_data.p++;
-        dma_play_data.size -= 4;
-    }
+    /* Do prefill to prevent swapped channels (see TLSbo61214 in MCIMX31CE).
+     * No actual solution was offered but this appears to work. */
+    SSI_STX0_1 = 0;
+    SSI_STX0_1 = 0;
+    SSI_STX0_1 = 0;
+    SSI_STX0_1 = 0;
+
+    SSI_SCR1 |= SSI_SCR_TE; /* Start transmitting */
 }
 
 static void play_stop_pcm(void)
 {
-    /* Disable interrupt */
-    SSI_SIER1 &= ~SSI_SIER_TIE;
-
     /* Wait for FIFO to empty */
     while (SSI_SFCSR_TFCNT0r(SSI_SFCSR1) > 0);
 
@@ -240,135 +258,227 @@ static void play_stop_pcm(void)
     SSI_STCR1 &= ~SSI_STCR_TFEN0;
     SSI_SCR1 &= ~(SSI_SCR_TE | SSI_SCR_SSIEN);
 
-    /* Do not enable interrupt on unlock */
+    /* Do not enable DMA requests on unlock */
     dma_play_data.state = 0;
+    dma_play_data.callback_pending = 0;
 }
 
 void pcm_play_dma_start(const void *addr, size_t size)
 {
-    dma_play_data.p = (void *)(((uintptr_t)addr + 3) & ~3);
-    dma_play_data.size = (size & ~3);
+    sdma_channel_stop(DMA_PLAY_CH_NUM);
+
+    /* Disable transmission */
+    SSI_STCR1 &= ~SSI_STCR_TFEN0;
+    SSI_SCR1 &= ~(SSI_SCR_TE | SSI_SCR_SSIEN);
+
+    addr = (void *)(((unsigned long)addr + 3) & ~3);
+    size &= ~3;
+
+    clean_dcache_range(addr, size);
+    dma_play_bd.buf_addr =
+        (void *)addr_virt_to_phys((unsigned long)(void *)addr);
+    dma_play_bd.mode.count = size;
+    dma_play_bd.mode.command = TRANSFER_16BIT;
+    dma_play_bd.mode.status = BD_DONE | BD_WRAP | BD_INTR;
 
     play_start_pcm();
+    sdma_channel_start(DMA_PLAY_CH_NUM);
 }
 
 void pcm_play_dma_stop(void)
 {
+    sdma_channel_stop(DMA_PLAY_CH_NUM);
     play_stop_pcm();
-    dma_play_data.size = 0;
 }
 
 void pcm_play_dma_pause(bool pause)
 {
     if (pause)
     {
+        sdma_channel_pause(DMA_PLAY_CH_NUM);
         play_stop_pcm();
     }
     else
     {
-        uint32_t addr = (uint32_t)dma_play_data.p;
-        dma_play_data.p = (void *)((addr + 2) & ~3);
-        dma_play_data.size &= ~3;
        play_start_pcm();
+        sdma_channel_run(DMA_PLAY_CH_NUM);
     }
 }
 
 /* Return the number of bytes waiting - full L-R sample pairs only */
 size_t pcm_get_bytes_waiting(void)
 {
-    return dma_play_data.size & ~3;
+    static unsigned long dsa DEVBSS_ATTR;
+    long offs, size;
+    int oldstatus;
+
+    /* read burst dma source address register in channel context */
+    sdma_read_words(&dsa, CHANNEL_CONTEXT_ADDR(DMA_PLAY_CH_NUM)+0x0b, 1);
+
+    oldstatus = disable_irq_save();
+    offs = dsa - (unsigned long)dma_play_bd.buf_addr;
+    size = dma_play_bd.mode.count;
+    restore_irq(oldstatus);
+
+    /* Be addresses are coherent (no buffer change during read) */
+    if (offs >= 0 && offs < size)
+    {
+        return (size - offs) & ~3;
+    }
+
+    return 0;
 }
 
 /* Return a pointer to the samples and the number of them in *count */
 const void * pcm_play_dma_get_peak_buffer(int *count)
 {
-    uint32_t addr = (uint32_t)dma_play_data.p;
-    size_t cnt = dma_play_data.size;
-    *count = cnt >> 2;
-    return (void *)((addr + 2) & ~3);
+    static unsigned long dsa DEVBSS_ATTR;
+    unsigned long addr;
+    long offs, size;
+    int oldstatus;
+
+    /* read burst dma source address register in channel context */
+    sdma_read_words(&dsa, CHANNEL_CONTEXT_ADDR(DMA_PLAY_CH_NUM)+0x0b, 1);
+
+    oldstatus = disable_irq_save();
+    addr = dsa;
+    offs = addr - (unsigned long)dma_play_bd.buf_addr;
+    size = dma_play_bd.mode.count;
+    restore_irq(oldstatus);
+
+    /* Be addresses are coherent (no buffer change during read) */
+    if (offs >= 0 && offs < size)
+    {
+        *count = (size - offs) >> 2;
+        return (void *)((addr + 2) & ~3);
+    }
+
+    *count = 0;
+    return NULL;
+}
+
+void * pcm_dma_addr(void *addr)
+{
+    return (void *)addr_virt_to_phys((unsigned long)addr);
 }
 
 #ifdef HAVE_RECORDING
+static struct buffer_descriptor dma_rec_bd DEVBSS_ATTR;
+static struct channel_descriptor dma_rec_cd DEVBSS_ATTR;
+
 static struct dma_data dma_rec_data =
 {
     /* Initialize to a locked, stopped state */
-    .p = NULL,
-    .size = 0,
     .locked = 0,
     .state = 0
 };
 
-static void __attribute__((interrupt("IRQ"))) SSI2_HANDLER(void)
+static void rec_dma_callback(void)
 {
-    register pcm_more_callback_type2 more_ready;
+    pcm_more_callback_type2 more_ready;
+    int status = 0;
 
-    while (dma_rec_data.size > 0)
+    if (dma_rec_data.locked)
     {
-        if (SSI_SFCSR_RFCNT0r(SSI_SFCSR2) < 2)
-            return;
-
-        *dma_rec_data.p++ = SSI_SRX0_2;
-        *dma_rec_data.p++ = SSI_SRX0_2;
-        dma_rec_data.size -= 4;
+        dma_rec_data.callback_pending = 1;
+        return; /* Callback is locked out */
     }
 
+    if (dma_rec_bd.mode.status & BD_RROR)
+        status = DMA_REC_ERROR_DMA;
+
     more_ready = pcm_callback_more_ready;
 
-    if (more_ready == NULL || more_ready(0) < 0) {
-        /* Finished recording */
-        pcm_rec_dma_stop();
-        pcm_rec_dma_stopped_callback();
+    if (more_ready != NULL && more_ready(status) >= 0)
+    {
+        sdma_channel_run(DMA_REC_CH_NUM);
+        return;
     }
+
+    /* Finished recording */
+    pcm_rec_dma_stop();
+    pcm_rec_dma_stopped_callback();
 }
 
 void pcm_rec_lock(void)
 {
     if (++dma_rec_data.locked == 1)
-    {
-        /* Atomically disable receive interrupt */
-        imx31_regclr32(&SSI_SIER2, SSI_SIER_RIE);
-    }
+        imx31_regclr32(&SSI_SIER2, SSI_SIER_RDMAE);
 }
 
 void pcm_rec_unlock(void)
 {
     if (--dma_rec_data.locked == 0 && dma_rec_data.state != 0)
     {
-        /* Atomically enable receive interrupt */
-        imx31_regset32(&SSI_SIER2, SSI_SIER_RIE);
+        bool pending = false;
+        int oldstatus = disable_irq_save();
+
+        if (dma_rec_data.callback_pending)
+        {
+            pending = true;
+            dma_rec_data.callback_pending = 0;
+        }
+
+        SSI_SIER2 |= SSI_SIER_RDMAE;
+        restore_irq(oldstatus);
+
+        /* Should an interrupt be forced instead? The upper pcm layer can
+         * call consumer's callback in thread context so technically this is
+         * acceptable. */
+        if (pending)
+            rec_dma_callback();
     }
 }
 
 void pcm_record_more(void *start, size_t size)
 {
-    pcm_rec_peak_addr = start; /* Start peaking at dest */
-    dma_rec_data.p = start; /* Start of RX buffer */
-    dma_rec_data.size = size; /* Bytes to transfer */
+    start = (void *)(((unsigned long)start + 3) & ~3);
+    size &= ~3;
+
+    /* Write back and invalidate - buffer must be coherent */
+    invalidate_dcache_range(start, size);
+
+    start = (void *)addr_virt_to_phys((unsigned long)start);
+
+    pcm_rec_peak_addr = start;
+    dma_rec_bd.buf_addr = start;
+    dma_rec_bd.mode.count = size;
+    dma_rec_bd.mode.command = TRANSFER_16BIT;
+    dma_rec_bd.mode.status = BD_DONE | BD_WRAP | BD_INTR;
 }
 
 void pcm_rec_dma_stop(void)
 {
     /* Stop receiving data */
+    sdma_channel_stop(DMA_REC_CH_NUM);
+
+    imx31_regclr32(&SSI_SIER2, SSI_SIER_RDMAE);
+
     SSI_SCR2 &= ~SSI_SCR_RE; /* Disable RX */
     SSI_SRCR2 &= ~SSI_SRCR_RFEN0; /* Disable RX FIFO */
 
     dma_rec_data.state = 0;
-
-    avic_disable_int(SSI2);
+    dma_rec_data.callback_pending = 0;
 }
 
 void pcm_rec_dma_start(void *addr, size_t size)
 {
     pcm_rec_dma_stop();
 
+    addr = (void *)(((unsigned long)addr + 3) & ~3);
+    size &= ~3;
+
+    invalidate_dcache_range(addr, size);
+
+    addr = (void *)addr_virt_to_phys((unsigned long)addr);
     pcm_rec_peak_addr = addr;
-    dma_rec_data.p = addr;
-    dma_rec_data.size = size;
+    dma_rec_bd.buf_addr = addr;
+    dma_rec_bd.mode.count = size;
+    dma_rec_bd.mode.command = TRANSFER_16BIT;
+    dma_rec_bd.mode.status = BD_DONE | BD_WRAP | BD_INTR;
 
     dma_rec_data.state = 1;
 
-    avic_enable_int(SSI2, IRQ, 9, SSI2_HANDLER);
-
     SSI_SRCR2 |= SSI_SRCR_RFEN0; /* Enable RX FIFO */
 
     /* Ensure clear FIFO */
@@ -377,24 +487,58 @@ void pcm_rec_dma_start(void *addr, size_t size)
 
     /* Enable receive */
     SSI_SCR2 |= SSI_SCR_RE;
+
+    sdma_channel_start(DMA_REC_CH_NUM);
 }
 
 void pcm_rec_dma_close(void)
 {
     pcm_rec_dma_stop();
+    sdma_channel_close(DMA_REC_CH_NUM);
 }
 
 void pcm_rec_dma_init(void)
 {
     pcm_rec_dma_stop();
+
+    /* Init channel information */
+    dma_rec_cd.bd_count = 1;
+    dma_rec_cd.callback = rec_dma_callback;
+    dma_rec_cd.shp_addr = SDMA_PER_ADDR_SSI2_RX1;
+    dma_rec_cd.wml = SDMA_SSI_RXFIFO_WML*2;
+    dma_rec_cd.per_type = SDMA_PER_SSI;
+    dma_rec_cd.tran_type = SDMA_TRAN_PER_2_EMI;
+    dma_rec_cd.event_id1 = SDMA_REQ_SSI2_RX1;
+
+    sdma_channel_init(DMA_REC_CH_NUM, &dma_rec_cd, &dma_rec_bd);
 }
 
 const void * pcm_rec_dma_get_peak_buffer(int *count)
 {
-    unsigned long addr = (uint32_t)pcm_rec_peak_addr;
-    unsigned long end = (uint32_t)dma_rec_data.p;
-    *count = (end >> 2) - (addr >> 2);
-    return (void *)(addr & ~3);
+    static unsigned long pda DEVBSS_ATTR;
+    unsigned long buf, addr, end, bufend;
+    int oldstatus;
+
+    /* read burst dma destination address register in channel context */
+    sdma_read_words(&pda, CHANNEL_CONTEXT_ADDR(DMA_REC_CH_NUM)+0x0a, 1);
+
+    oldstatus = disable_irq_save();
+    end = pda;
+    buf = (unsigned long)dma_rec_bd.buf_addr;
+    addr = (unsigned long)pcm_rec_peak_addr;
+    bufend = buf + dma_rec_bd.mode.count;
+    restore_irq(oldstatus);
+
+    /* Be addresses are coherent (no buffer change during read) */
+    if (addr >= buf && addr < bufend &&
+        end >= buf && end < bufend)
+    {
+        *count = (end >> 2) - (addr >> 2);
+        return (void *)(addr & ~3);
+    }
+
+    *count = 0;
+    return NULL;
 }
 
 #endif /* HAVE_RECORDING */
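For orientation, the buffer hand-off that play_dma_callback() and pcm_play_dma_start() both perform can be read as the sketch below. The helper name queue_play_buffer() is hypothetical; the calls and fields it uses (clean_dcache_range(), addr_virt_to_phys(), the dma_play_bd buffer descriptor, sdma_channel_run()) are the ones appearing in the diff above, and the comments describe the intent stated there rather than anything beyond it.

/* Hypothetical helper -- condenses the playback refill path shown in the diff */
static void queue_play_buffer(void *start, size_t size)
{
    /* SSI moves 16-bit L/R pairs, so keep the buffer word-aligned and
     * trimmed to whole 4-byte sample pairs */
    start = (void *)(((unsigned long)start + 3) & ~3);
    size &= ~3;

    /* The SDMA engine reads memory directly, not through the CPU cache:
     * write dirty lines back first, then hand it a physical address */
    clean_dcache_range(start, size);
    dma_play_bd.buf_addr = (void *)addr_virt_to_phys((unsigned long)start);

    /* Re-arm the single wrapping buffer descriptor and let the channel
     * interrupt again once it has drained the buffer */
    dma_play_bd.mode.count = size;
    dma_play_bd.mode.command = TRANSFER_16BIT;
    dma_play_bd.mode.status = BD_DONE | BD_WRAP | BD_INTR;
    sdma_channel_run(DMA_PLAY_CH_NUM);
}

pcm_record_more() mirrors the same steps for the receive direction, except that it calls invalidate_dcache_range() because the SDMA engine writes into the buffer rather than reading from it.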