WaveIn Guru needed... streaming microphone data crashes

    Question

  • We are trying to stream microphone sound data, but when we open the wave-in device and begin receiving buffers, the program crashes.

    I can get it to run stably using ten 32 KB buffers, but as soon as you open any other program or click on the desktop, it crashes and unloads.  I do not understand what is causing this.  Can someone please help us? We have had two developers here look at it and neither can find the problem.

     

    public partial class AudioTestForm : Form
    	{
    		// number of buffers to use for waveIn
    		const int BufferCount = 10;
    
    		// size of each buffer - Note: we actually need to use buffers as small as possible, e.g. 4 KB, to avoid delay;
    		// however, 4 KB doesn't work well at all, so I have it set to 32 KB because that works for a while.
    		const int BufferSize = 32 * 1024;
    
    		IntPtr m_hwi;
    		IntPtr[] buffers = new IntPtr[BufferCount];
    		
    		Thread audioDataConsumerThread;
    		Queue<IntPtr> dataQueue = new Queue<IntPtr>();
    		Semaphore dataSemaphore = new Semaphore(0);
    		bool runConsumer = true;
    
    		public AudioTestForm()
    		{
    			InitializeComponent();
    
    			audioDataConsumerThread = new Thread(AudioDataConsumer)
    			{
    				Name = "Audio Data Consumer",
    				IsBackground = true
    			};
    			audioDataConsumerThread.Start();
    		}
    
    		private unsafe void GetAudioData()
    		{
    			int devices = NativeMethods.waveInGetNumDevs();
    
    			if (devices < 1)
    				return;
    
    			// How come WAVEINCAPS returns 1 channel, but supports 4S16? Stereo has worked before.. so is it just incorrect?
    			NativeMethods.WAVEINCAPS caps = new NativeMethods.WAVEINCAPS();
    			NativeMethods.waveInGetDevCaps(0, ref caps, (uint)Marshal.SizeOf(caps));
    
    			if ((caps.dwFormats & NativeMethods.WaveFormats.WAVE_FORMAT_4S16) == 0)
    				return;
    
    			// Initialize a WAVEFORMATEX structure specifying the desired format
    			NativeMethods.WAVEFORMATEX format = new NativeMethods.WAVEFORMATEX();
    			format.wFormatTag = NativeMethods.WAVE_FORMAT_PCM;
    			format.wBitsPerSample = 16;
    			format.nChannels = 2;
    			format.nSamplesPerSec = 44100;
    			format.nBlockAlign = (ushort)(format.wBitsPerSample * format.nChannels / 8);
    			format.nAvgBytesPerSec = (uint)(format.nSamplesPerSec * format.nBlockAlign);
    
    			NativeMethods.AudioCallback audioCallback = new NativeMethods.AudioCallback(AudioProc);
    			GC.KeepAlive(audioCallback); // I tried this to make sure the delegate didn't get collected, but no luck
    			IntPtr ptrFunc = Marshal.GetFunctionPointerForDelegate(audioCallback);
    
    			// Attempt to open the specified device with the desired wave format
    			NativeMethods.MMSYSERR result = NativeMethods.waveInOpen(ref m_hwi, 0, ref format, ptrFunc, 0, NativeMethods.WaveCallbackType.CALLBACK_FUNCTION);
    			if (result != NativeMethods.MMSYSERR.NOERROR)
    				return;
    
    			// create some buffers, add them
    			for(int i = 0; i < BufferCount; i++)
    			{
    				// create a buffer
    				buffers[i] = Marshal.AllocHGlobal(BufferSize);
    
    				// create a header
    				IntPtr mem = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(NativeMethods.WAVEHDR)));
    				NativeMethods.WAVEHDR* header = (NativeMethods.WAVEHDR*)mem.ToPointer();
    				header->lpData = buffers[i];
    				header->dwBufferLength = BufferSize;
    
    				NativeMethods.waveInPrepareHeader(m_hwi, mem, (uint)Marshal.SizeOf(typeof(NativeMethods.WAVEHDR)));
    				NativeMethods.waveInAddBuffer(m_hwi, mem, (uint)Marshal.SizeOf(typeof(NativeMethods.WAVEHDR)));
    			}
    	
    			result = NativeMethods.waveInStart(m_hwi);
    			if (result != NativeMethods.MMSYSERR.NOERROR)
    				return;
    		}
    
    		private unsafe void AudioProc(IntPtr hwi, uint uMsg, IntPtr dwInstance, IntPtr dwParam1, IntPtr dwParam2)
    		{
    			if(!Enum.IsDefined(typeof(NativeMethods.WaveInCallbackMessages), uMsg))
    				return;
    			NativeMethods.WaveInCallbackMessages message = (NativeMethods.WaveInCallbackMessages)uMsg;
    			
    			switch (message)
    			{
    				case NativeMethods.WaveInCallbackMessages.WIM_OPEN:
    					break;
    				case NativeMethods.WaveInCallbackMessages.WIM_CLOSE:
    					break;
    				case NativeMethods.WaveInCallbackMessages.WIM_DATA:
    					
    					// produce buffer
    					lock (dataQueue)
    					{
    						dataQueue.Enqueue(dwParam1);
    						dataSemaphore.Up();
    					}
    
    					break;
    				default:
    					break;
    			}
    		}
    		 
    		private unsafe void AudioDataConsumer()
    		{
    			while (runConsumer)
    			{
    				// wait on the semaphore
    				if (!dataSemaphore.Down())
    					break;
    				if (!runConsumer)
    					break;
    
    				// get the item
    				IntPtr mem;
    				lock (dataQueue)
    					mem = dataQueue.Dequeue();
    
    				// process the data
    				Invoke(new ThreadStart(CountBuffers));
    
    				NativeMethods.WAVEHDR* hdr = (NativeMethods.WAVEHDR*)mem.ToPointer();
    				
    				NativeMethods.waveInUnprepareHeader(m_hwi, mem, (uint)Marshal.SizeOf(typeof(NativeMethods.WAVEHDR)));
    				IntPtr buff = hdr->lpData;
    				NativeMethods.memset(mem.ToPointer(), 0x00, (uint)Marshal.SizeOf(typeof(NativeMethods.WAVEHDR)));
    				hdr->lpData = buff;
    				hdr->dwBufferLength = BufferSize;
    
    				// add
    				NativeMethods.waveInPrepareHeader(m_hwi, mem, (uint)Marshal.SizeOf(typeof(NativeMethods.WAVEHDR)));
    				NativeMethods.waveInAddBuffer(m_hwi, mem, (uint)Marshal.SizeOf(typeof(NativeMethods.WAVEHDR)));					
    			}
    		}
    
    		int count = 0;
    		private void CountBuffers()
    		{
    			lblCount.Text = (++count).ToString();
    		}
    
    		private void btnStreamClick(object sender, EventArgs e)
    		{
    			GetAudioData();
    		}
    
    		private void btnStopAudio_Click(object sender, EventArgs e)
    		{
    			// this stops the audio; it may not work or be thread-safe, but it crashes before this anyway, so this may need more work or synchronization
    			runConsumer = false;
    			NativeMethods.waveInReset(m_hwi);
    			NativeMethods.waveInClose(m_hwi);
    			dataSemaphore.ReleaseAll(0);
    			audioDataConsumerThread.Join();
    		}
    	}
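
    As an aside on the stop path: a minimal sketch of a more orderly shutdown, assuming the same NativeMethods wrapper and Semaphore helper as in the posted code (waveInStop and a class-level array of header pointers, called m_headers here, are assumptions not present in the original). The idea is to stop the consumer thread first so it cannot re-queue buffers, then reset the device so every pending header is returned, and only then unprepare and free everything before closing.

    private void StopAudio()
    {
    	// stop the consumer before touching the device so it cannot call waveInAddBuffer again
    	runConsumer = false;
    	dataSemaphore.ReleaseAll(0);          // wake the consumer so it sees runConsumer == false
    	audioDataConsumerThread.Join();

    	NativeMethods.waveInStop(m_hwi);      // assumed wrapper for the native waveInStop
    	NativeMethods.waveInReset(m_hwi);     // marks every queued buffer as done and returns it

    	// unprepare and free every header and data buffer allocated in GetAudioData
    	// (m_headers is a hypothetical IntPtr[] holding the WAVEHDR allocations)
    	foreach (IntPtr hdr in m_headers)
    	{
    		NativeMethods.waveInUnprepareHeader(m_hwi, hdr, (uint)Marshal.SizeOf(typeof(NativeMethods.WAVEHDR)));
    		Marshal.FreeHGlobal(hdr);
    	}
    	foreach (IntPtr buf in buffers)
    		Marshal.FreeHGlobal(buf);

    	NativeMethods.waveInClose(m_hwi);
    }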

     

    Test project link:

    http://rapidshare.com/files/385823141/AudioTestIMX31.zip.html

     

    Try running it on the emulator; then, while streaming audio, open another program.

    Monday, May 10, 2010 10:34 PM

Answers

  • I would guess the delegate you're passing as the native callback is getting collected. Consider keeping it in a class-level variable instead of a local variable (a sketch of this follows below).


    This posting is provided "AS IS" with no warranties, and confers no rights.
    • Marked as answer by Alan M_ Tuesday, May 11, 2010 2:11 PM
    Tuesday, May 11, 2010 3:37 AM
    Moderator
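
    A minimal sketch of that change, assuming the NativeMethods wrapper from the posted project (the field names m_audioCallback and m_ptrCallback are just illustrative). Note that GC.KeepAlive only extends a reference's lifetime up to the point of the call, which is why the attempt inside GetAudioData did not help; the delegate has to stay reachable for as long as the driver can still invoke the callback.

    public partial class AudioTestForm : Form
    {
    	// keep the callback delegate (and its function pointer) in fields so the GC
    	// cannot collect them while winmm is still delivering WIM_DATA callbacks
    	NativeMethods.AudioCallback m_audioCallback;
    	IntPtr m_ptrCallback;

    	private unsafe void GetAudioData()
    	{
    		// ... same device, WAVEINCAPS and WAVEFORMATEX setup as in the posted code ...

    		m_audioCallback = new NativeMethods.AudioCallback(AudioProc);
    		m_ptrCallback = Marshal.GetFunctionPointerForDelegate(m_audioCallback);

    		NativeMethods.MMSYSERR result = NativeMethods.waveInOpen(
    			ref m_hwi, 0, ref format, m_ptrCallback, 0,
    			NativeMethods.WaveCallbackType.CALLBACK_FUNCTION);

    		// ... buffer allocation, waveInPrepareHeader/waveInAddBuffer and waveInStart as before ...
    	}
    }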

All replies

  • Ilya, thanks for checking out my code. You were right. I had thought of that myself and tried using GC.KeepAlive, but that had not worked. I created a class variable for both the delegate and the IntPtr and it worked.
    Tuesday, May 11, 2010 2:12 PM