Index | Topic Contents
Previous Topic: Multimedia Streaming Component Objects Next Topic: DirectDrawEx |
Multimedia Streaming Sample Code
This article provides sample code that implements the Multimedia Streaming interfaces. The video streaming sample code demonstrates how to read a file and render it to a primary Microsoft® DirectDraw® surface. For brevity, this code contains no error checking; see Use Multimedia Streaming in DirectShow Applications for a more thorough, line-by-line description of the video streaming code.
The second code sample demonstrates how to use the audio streaming interfaces to stream audio data.
Contents of this article:
Video Streaming Sample Code
Audio Streaming Sample Code
Video Streaming Sample Code
This sample code reads a file and renders it to a primary DirectDraw surface.
#include <stdio.h> #include "ddraw.h" // DirectDraw interfaces #include "mmstream.h" // Multimedia stream interfaces #include "amstream.h" // DirectShow multimedia stream interfaces #include "ddstream.h" // DirectDraw multimedia stream interfaces void RenderStreamToSurface(IDirectDrawSurface *pSurface, IMultiMediaStream *pMMStream) { IMediaStream *pPrimaryVidStream; IDirectDrawMediaStream *pDDStream; IDirectDrawStreamSample *pSample; RECT rect; DDSURFACEDESC ddsd; pMMStream->GetMediaStream(MSPID_PrimaryVideo, &pPrimaryVidStream); pPrimaryVidStream->QueryInterface(IID_IDirectDrawMediaStream, (void **)&pDDStream); ddsd.dwSize = sizeof(ddsd); pDDStream->GetFormat(&ddsd, NULL, NULL, NULL); rect.top = rect.left = 0; rect.bottom = ddsd.dwHeight; rect.right = ddsd.dwWidth; pDDStream->CreateSample(pSurface, &rect, 0, &pSample); pMMStream->SetState(STREAMSTATE_RUN); while (pSample->Update(0, NULL, NULL, NULL) == S_OK); pMMStream->SetState(STREAMSTATE_STOP); pSample->Release(); pDDStream->Release(); pPrimaryVidStream->Release(); } void RenderFileToMMStream(const char * szFileName, IMultiMediaStream **ppMMStream, IDirectDraw *pDD) { IAMMultiMediaStream *pAMStream; CoCreateInstance(CLSID_AMMultiMediaStream, NULL, CLSCTX_INPROC_SERVER, IID_IAMMultiMediaStream, (void **)&pAMStream); WCHAR wPath[MAX_PATH]; // Wide (32-bit) string name MultiByteToWideChar(CP_ACP, 0, szFileName, -1, wPath, sizeof(wPath)/sizeof(wPath[0])); pAMStream->Initialize(STREAMTYPE_READ, AMMSF_NOGRAPHTHREAD, NULL); pAMStream->AddMediaStream(pDD, &MSPID_PrimaryVideo, 0, NULL); pAMStream->AddMediaStream(NULL, &MSPID_PrimaryAudio, AMMSF_ADDDEFAULTRENDERER, NULL); pAMStream->OpenFile(wPath, 0); *ppMMStream = pAMStream; } int _CRTAPI1 main(int argc, char *argv[]) { if (argc < 2) { printf("Usage : showstrm movie.ext\n"); exit(0);} DDSURFACEDESC ddsd; IDirectDraw *pDD; IDirectDrawSurface *pPrimarySurface; IMultiMediaStream *pMMStream; CoInitialize(NULL); DirectDrawCreate(NULL, &pDD, NULL); 
pDD->SetCooperativeLevel(GetDesktopWindow(), DDSCL_NORMAL); ddsd.dwSize = sizeof(ddsd); ddsd.dwFlags = DDSD_CAPS; ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE; pDD->CreateSurface(&ddsd, &pPrimarySurface, NULL); RenderFileToMMStream(argv[1], &pMMStream, pDD); RenderStreamToSurface(pPrimarySurface, pMMStream); pMMStream->Release(); pPrimarySurface->Release(); pDD->Release(); CoUninitialize(); return 0; }Audio Streaming Sample Code
The following code sample demonstrates how to stream audio data using the IAudioMediaStream, IAudioStreamSample, IMemoryData, and IAudioData interfaces.
#include <windows.h>
#include <mmsystem.h>
#include <amstream.h>
/********************************************************************
        Trivial wave player stuff
********************************************************************/

class CWaveBuffer;

// One prepared waveOut buffer. Init() allocates and prepares the data
// block; Write() accumulates bytes and automatically queues the buffer
// to the device when it fills. Flush() queues a partially filled buffer.
class CWaveBuffer {
public:
    CWaveBuffer();
    ~CWaveBuffer();
    BOOL Init(HWAVEOUT hWave, int Size);
    void Done();
    BOOL Write(PBYTE pData, int nBytes, int& BytesWritten);
    void Flush();
private:
    WAVEHDR  m_Hdr;    // header handed to waveOutWrite; lpData owns the buffer
    HWAVEOUT m_hWave;  // owning device (opened/closed by CWaveOut)
    int      m_nBytes; // bytes accumulated so far in this buffer
};

// Round-robin set of CWaveBuffers feeding a single waveOut device.
// A semaphore, released from the device callback on WOM_DONE, counts
// how many buffers are free for writing.
class CWaveOut {
public:
    CWaveOut(LPCWAVEFORMATEX Format, int nBuffers, int BufferSize);
    ~CWaveOut();
    void Write(PBYTE Data, int nBytes);
    void Flush();
    void Wait();
    void Reset();
private:
    const HANDLE m_hSem;           // counts buffers not queued at the device
    const int    m_nBuffers;
    int          m_CurrentBuffer;  // index of the buffer currently being filled
    BOOL         m_NoBuffer;       // TRUE => must acquire a free buffer first
    CWaveBuffer *m_Hdrs;
    HWAVEOUT     m_hWave;
};

/*  CWaveBuffer */

CWaveBuffer::CWaveBuffer()
{
    // FIX: the original default constructor left m_Hdr uninitialized,
    // so the destructor's m_Hdr.lpData test read garbage (and could
    // LocalFree a wild pointer) on a buffer whose Init() was never
    // called or failed. Zero everything up front.
    ZeroMemory(&m_Hdr, sizeof(m_Hdr));
    m_hWave = NULL;
    m_nBytes = 0;
}

BOOL CWaveBuffer::Init(HWAVEOUT hWave, int Size)
{
    m_hWave = hWave;
    m_nBytes = 0;

    /*  Allocate a buffer and initialize the header */
    m_Hdr.lpData = (LPSTR)LocalAlloc(LMEM_FIXED, Size);
    if (m_Hdr.lpData == NULL) {
        return FALSE;
    }
    m_Hdr.dwBufferLength  = Size;
    m_Hdr.dwBytesRecorded = 0;
    m_Hdr.dwUser          = 0;
    m_Hdr.dwFlags         = 0;
    m_Hdr.dwLoops         = 0;
    m_Hdr.lpNext          = 0;
    m_Hdr.reserved        = 0;

    /*  Prepare it */
    waveOutPrepareHeader(hWave, &m_Hdr, sizeof(WAVEHDR));
    return TRUE;
}

CWaveBuffer::~CWaveBuffer()
{
    if (m_Hdr.lpData) {
        waveOutUnprepareHeader(m_hWave, &m_Hdr, sizeof(WAVEHDR));
        LocalFree(m_Hdr.lpData);
    }
}

void CWaveBuffer::Flush()
{
    //ASSERT(m_nBytes != 0);
    m_nBytes = 0;
    waveOutWrite(m_hWave, &m_Hdr, sizeof(WAVEHDR));
}

// Copies up to nBytes into the buffer. Returns TRUE when the buffer
// became full and was queued to the device; BytesWritten receives the
// number of bytes actually consumed.
BOOL CWaveBuffer::Write(PBYTE pData, int nBytes, int& BytesWritten)
{
    //ASSERT((DWORD)m_nBytes != m_Hdr.dwBufferLength);
    BytesWritten = min((int)m_Hdr.dwBufferLength - m_nBytes, nBytes);
    CopyMemory((PVOID)(m_Hdr.lpData + m_nBytes), (PVOID)pData, BytesWritten);
    m_nBytes += BytesWritten;
    if (m_nBytes == (int)m_Hdr.dwBufferLength) {
        /*  Write it! */
        m_nBytes = 0;
        waveOutWrite(m_hWave, &m_Hdr, sizeof(WAVEHDR));
        return TRUE;
    }
    return FALSE;
}

// waveOut callback: when the device finishes with a buffer (WOM_DONE),
// release one count on the semaphore passed in dwUser so Write() can
// reuse that buffer.
void CALLBACK WaveCallback(HWAVEOUT hWave, UINT uMsg, DWORD dwUser,
                           DWORD dw1, DWORD dw2)
{
    if (uMsg == WOM_DONE) {
        ReleaseSemaphore((HANDLE)dwUser, 1, NULL);
    }
}

/*  CWaveOut */

// Initializer list reordered to match member declaration order (the
// compiler always initializes in declaration order anyway).
CWaveOut::CWaveOut(LPCWAVEFORMATEX Format, int nBuffers, int BufferSize) :
    m_hSem(CreateSemaphore(NULL, nBuffers, nBuffers, NULL)),
    m_nBuffers(nBuffers),
    m_CurrentBuffer(0),
    m_NoBuffer(TRUE),
    m_Hdrs(new CWaveBuffer[nBuffers]),
    m_hWave(NULL)
{
    /*  Create wave device */
    // NOTE(review): the DWORD casts truncate pointers on 64-bit builds;
    // modern code should use DWORD_PTR here.
    waveOutOpen(&m_hWave,
                WAVE_MAPPER,
                Format,
                (DWORD)WaveCallback,
                (DWORD)m_hSem,
                CALLBACK_FUNCTION);

    /*  Initialize the wave buffers */
    for (int i = 0; i < nBuffers; i++) {
        m_Hdrs[i].Init(m_hWave, BufferSize);
    }
}

CWaveOut::~CWaveOut()
{
    /*  First get our buffers back */
    waveOutReset(m_hWave);

    /*  Free the buffers (unprepared while the device is still open) */
    delete [] m_Hdrs;

    /*  Close the wave device */
    waveOutClose(m_hWave);

    /*  Free our semaphore */
    CloseHandle(m_hSem);
}

void CWaveOut::Flush()
{
    if (!m_NoBuffer) {
        m_Hdrs[m_CurrentBuffer].Flush();
        m_NoBuffer = TRUE;
        m_CurrentBuffer = (m_CurrentBuffer + 1) % m_nBuffers;
    }
}

void CWaveOut::Reset()
{
    waveOutReset(m_hWave);
}

// Streams nBytes of audio data to the device, blocking on the buffer
// semaphore whenever every buffer is queued at the device.
void CWaveOut::Write(PBYTE pData, int nBytes)
{
    while (nBytes != 0) {
        /*  Get a buffer if necessary */
        if (m_NoBuffer) {
            WaitForSingleObject(m_hSem, INFINITE);
            m_NoBuffer = FALSE;
        }

        /*  Write into a buffer */
        int nWritten;
        if (m_Hdrs[m_CurrentBuffer].Write(pData, nBytes, nWritten)) {
            // Buffer filled and queued; advance to the next one.
            m_NoBuffer = TRUE;
            m_CurrentBuffer = (m_CurrentBuffer + 1) % m_nBuffers;
            nBytes -= nWritten;
            pData += nWritten;
        } else {
            // Partial write: everything fitted in the current buffer.
            //ASSERT(nWritten == nBytes);
            break;
        }
    }
}

// Queues any partial buffer, then blocks until the device has returned
// every buffer, restoring the semaphore count before returning.
void CWaveOut::Wait()
{
    /*  Send any remaining buffers */
    Flush();

    /*  Wait for our buffers back */
    for (int i = 0; i < m_nBuffers; i++) {
        WaitForSingleObject(m_hSem, INFINITE);
    }
    LONG lPrevCount;
    ReleaseSemaphore(m_hSem, m_nBuffers, &lPrevCount);
}

/**************************************************************************
        End of wave player stuff
**************************************************************************/ HRESULT RenderStreamToDevice(IMultiMediaStream *pMMStream) { WAVEFORMATEX wfx; #define DATA_SIZE 5000 PBYTE pBuffer = (PBYTE)LocalAlloc(LMEM_FIXED, DATA_SIZE); IMediaStream *pStream; IAudioStreamSample *pSample; IAudioMediaStream *pAudioStream; IAudioData *pAudioData; pMMStream->GetMediaStream(MSPID_PrimaryAudio, &pStream); pStream->QueryInterface(IID_IAudioMediaStream, (void **)&pAudioStream); pAudioStream->GetFormat(&wfx); CoCreateInstance(CLSID_AMAudioData, NULL, CLSCTX_INPROC_SERVER, IID_IAudioData, (void **)&pAudioData); pAudioData->SetBuffer(DATA_SIZE, pBuffer, 0); pAudioData->SetFormat(&wfx); pAudioStream->CreateSample(pAudioData, 0, &pSample); HANDLE hEvent = CreateEvent(FALSE, NULL, NULL, FALSE); CWaveOut WaveOut(&wfx, 4, 2048); int iTimes; for (iTimes = 0; iTimes < 3; iTimes++) { DWORD dwStart = timeGetTime(); for (; ; ) { HRESULT hr = pSample->Update(0, hEvent, NULL, 0); if (FAILED(hr) || MS_S_ENDOFSTREAM == hr) { break; } WaitForSingleObject(hEvent, INFINITE); DWORD dwTimeDiff = timeGetTime() - dwStart; // We'll get bored after about 10 seconds if (dwTimeDiff > 10000) { break; } DWORD dwLength; pAudioData->GetInfo(NULL, NULL, &dwLength); WaveOut.Write(pBuffer, dwLength); } pMMStream->Seek(0); } pAudioData->Release(); pSample->Release(); pStream->Release(); pAudioStream->Release(); LocalFree((HLOCAL)pBuffer); return S_OK; } HRESULT RenderFileToMMStream(WCHAR * pszFileName, IMultiMediaStream **ppMMStream) { IAMMultiMediaStream *pAMStream; CoCreateInstance(CLSID_AMMultiMediaStream, NULL, CLSCTX_INPROC_SERVER, IID_IAMMultiMediaStream, (void **)&pAMStream); pAMStream->Initialize(STREAMTYPE_READ, AMMSF_NOGRAPHTHREAD, NULL); pAMStream->AddMediaStream(NULL, &MSPID_PrimaryAudio, 0, NULL); pAMStream->OpenFile(pszFileName, AMMSF_RUN); *ppMMStream = pAMStream; return S_OK; } int _CRTAPI1 main(int argc, char *argv[]) { IMultiMediaStream *pMMStream; CoInitialize(NULL); WCHAR wszName[1000]; 
MultiByteToWideChar(CP_ACP, 0, argv[1], -1, wszName, sizeof(wszName) / sizeof(wszName[0])); RenderFileToMMStream(wszName, &pMMStream); RenderStreamToDevice(pMMStream); pMMStream->Release(); CoUninitialize(); return 0; }© 1998 Microsoft Corporation. All rights reserved. Terms of Use.