#include "Movies.h"
#include <dshow.h>
#include <windows.h>
#include <mmsystem.h>
#include <atlbase.h>
#include <string.h> // for strcmp()
//#define NO_AUDIO_RENDERER
LPDIRECT3DTEXTURE9 m_pTexture = NULL; // our texture
CComPtr<IGraphBuilder> g_pGB; // GraphBuilder
CComPtr<IMediaControl> g_pMC; // Media Control
CComPtr<IMediaPosition> g_pMP; // Media Position
CComPtr<IMediaEvent> g_pME; // Media Event
CComPtr<IBaseFilter> g_pRenderer; // our custom renderer
D3DFORMAT g_TextureFormat; // Texture format
LPDIRECT3DDEVICE9 gD3dDevice2; // D3D device used to create and fill the video texture
//////////
// Construction/Destruction
//
CMovies::CMovies()
{
Clean();
}
LPDIRECT3DTEXTURE9 CMovies::GetTexture () { return m_pTexture; }
CMovies::~CMovies()
{
// COM is assumed to have been initialised by the host application;
// the CoInitialize() call in Initialise() is commented out.
CoUninitialize();
}
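//////////
// Initialise: build a DirectShow filter graph that decodes the given file and
// renders its video stream into a Direct3D texture via CTextureRenderer. The
// audio stream is rendered normally unless NO_AUDIO_RENDERER is defined.
//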
bool CMovies::Initialise( LPDIRECT3DDEVICE9 device, const char *szVideoFilename, LPDIRECT3DTEXTURE9 texture )
{
// If the same file is already loaded, just stop/rewind it rather than rebuilding the graph
if ( m_cVideoFilename && szVideoFilename && strcmp( m_cVideoFilename, szVideoFilename ) == 0 )
{
Stop();
return true;
}
Clean();
m_cVideoFilename = (char*)szVideoFilename; // note: stores the caller's pointer, not a copy
gD3dDevice2 = device;
//CoInitialize(NULL);
HRESULT hr = S_OK;
CComPtr<IBaseFilter> pFSrc; // Source Filter
CComPtr<IPin> pFSrcPinOut; // Source Filter Output Pin
CTextureRenderer *pCTR = 0; // DirectShow Texture renderer
// Create the filter graph
if ( FAILED( g_pGB.CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC) ) )
return false;
// Create the Texture Renderer object
pCTR = new CTextureRenderer(NULL, &hr);
if ( FAILED(hr) || !pCTR )
{
delete pCTR;
return false;
}
// Hold the renderer through a ref-counted interface pointer and add it to the graph
g_pRenderer = pCTR;
if ( FAILED( g_pGB->AddFilter(g_pRenderer, L"TEXTURERENDERER") ) )
return false;
// Convert the ANSI filename to the wide string DirectShow expects
WCHAR wFileName[MAX_PATH];
USES_CONVERSION;
wcsncpy( wFileName, A2W(szVideoFilename), NUMELMS(wFileName) - 1 );
wFileName[MAX_PATH-1] = 0;
hr = g_pGB->AddSourceFilter (wFileName, L"SOURCE", &pFSrc);
// If the media file was not found, inform the user.
if ( hr == VFW_E_NOT_FOUND )
{
// Msg(TEXT("Could not add source filter to graph! (hr==VFW_E_NOT_FOUND)\r\n\r\n")
// TEXT("This sample reads a media file from the DirectX SDK's media path.\r\n")
// TEXT("Please install the DirectX 9 SDK on this machine."));
return false;
}
else if ( FAILED( hr ) )
{
// Msg(TEXT("Could not add source filter to graph! hr=0x%x"), hr);
return false;
}
if ( FAILED( pFSrc->FindPin(L"Output", &pFSrcPinOut) ) )
{
// Msg(TEXT("Could not find output pin! hr=0x%x"), hr);
return false;
}
#ifdef NO_AUDIO_RENDERER
CComPtr<IPin> pFTRPinIn; // Texture Renderer Input Pin
// Find the source's output pin and the renderer's input pin
if ( FAILED( g_pRenderer->FindPin(L"In", &pFTRPinIn) ) )
{
// Msg(TEXT("Could not find input pin! hr=0x%x"), hr);
return false;
}
// Connect these two filters
if ( FAILED( g_pGB->Connect(pFSrcPinOut, pFTRPinIn) ) )
{
// Msg(TEXT("Could not connect pins! hr=0x%x"), hr);
return false;
}
#else
// Render the source filter's output pin. The Filter Graph Manager
// will connect the video stream to the loaded CTextureRenderer
// and will load and connect an audio renderer (if needed).
if ( FAILED( g_pGB->Render(pFSrcPinOut) ) )
{
// Msg(TEXT("Could not render source output pin! hr=0x%x"), hr);
return false;
}
#endif
// Get the graph's media control, event & position interfaces
if ( FAILED( g_pGB.QueryInterface(&g_pMC) ) ||
FAILED( g_pGB.QueryInterface(&g_pMP) ) ||
FAILED( g_pGB.QueryInterface(&g_pME) ) )
return false;
m_iVideoWidth = pCTR->m_lVidWidth;
m_iVideoHeight = pCTR->m_lVidHeight;
m_fRotation = 0.0f;
return true;
}
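//////////
// Clean: drop all graph interfaces and reset the playback state
//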
void CMovies::Clean()
{
g_pGB = NULL;
g_pMP = NULL;
g_pMC = NULL;
g_pME = NULL;
g_pRenderer = NULL;
m_cVideoFilename = NULL;
m_bPlaying = false;
m_bPaused = true;
}
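//////////
// Transport controls: thin wrappers around IMediaControl / IMediaPosition
//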
bool CMovies::Play()
{
if ( !g_pMC )
return false;
if ( FAILED( g_pMC->Run() ) )
return false;
m_bPlaying = true;
m_bPaused = false;
return true;
}
bool CMovies::Stop()
{
if ( !g_pMC || !g_pMP )
return false;
if ( FAILED ( g_pMC->Stop() ) )
return false;
// Rewind to the start so the next Play() begins from the beginning
if ( FAILED ( g_pMP->put_CurrentPosition( 0 ) ) )
return false;
m_bPlaying = false;
m_bPaused = true;
return true;
}
bool CMovies::Pause()
{
if ( !g_pMC || FAILED ( g_pMC->Pause() ) )
return false;
m_bPaused = true;
return true;
}
bool CMovies::JumpTo( double PositionInSeconds )
{
if ( !g_pMP || FAILED ( g_pMP->put_CurrentPosition( PositionInSeconds ) ) )
return false;
return true;
}
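//////////
// HasMovieEnded: poll the graph's event queue (non-blocking) and report whether
// an EC_COMPLETE event has arrived, i.e. playback reached the end of the clip
//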
bool CMovies::HasMovieEnded()
{
if ( !g_pME )
return false;
long lEventCode;
LONG_PTR lParam1, lParam2;
// Check for completion events without blocking (zero timeout)
HRESULT hr = g_pME->GetEvent(&lEventCode, &lParam1, &lParam2, 0);
if (SUCCEEDED(hr))
{
// Free any memory associated with this event before acting on it
g_pME->FreeEventParams(lEventCode, lParam1, lParam2);
if ( EC_COMPLETE == lEventCode )
return true;
}
return false;
}
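//////////
// MovieLength: duration of the loaded clip, in milliseconds
//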
long CMovies::MovieLength()
{
if ( !g_pMP )
return 0;
double length = 0.0;
g_pMP->get_Duration( &length );
// Duration is reported in seconds; return it in milliseconds
return (long)( length * 1000.0 );
}
//////////
// Construction / Destruction
//
CTextureRenderer::CTextureRenderer( LPUNKNOWN pUnk, HRESULT *phr )
: CBaseVideoRenderer(__uuidof(CLSID_TextureRenderer),
NAME("Texture Renderer"), pUnk, phr),
m_bUseDynamicTextures(FALSE)
{
// Nothing to set up here; the texture itself is created later in SetMediaType()
if (phr)
*phr = S_OK;
}
CTextureRenderer::~CTextureRenderer()
{
}
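//////////
// CheckMediaType: only accept connections that carry RGB24 video
// (FORMAT_VideoInfo), so DoRenderSample always knows the source pixel layout
//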
HRESULT CTextureRenderer::CheckMediaType(const CMediaType *pmt)
{
HRESULT hr = E_FAIL;
VIDEOINFO *pvi=0;
CheckPointer(pmt,E_POINTER);
// Reject the connection if this is not a video type
if( *pmt->FormatType() != FORMAT_VideoInfo ) {
return E_INVALIDARG;
}
// Only accept RGB24 video
pvi = (VIDEOINFO *)pmt->Format();
if ( IsEqualGUID( *pmt->Type(), MEDIATYPE_Video ) &&
IsEqualGUID( *pmt->Subtype(), MEDIASUBTYPE_RGB24 ) )
hr = S_OK;
return hr;
}
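//////////
// SetMediaType: the connection format is now agreed; record the frame size and
// pitch and create a texture large enough to hold one frame, rounding the
// dimensions up to powers of two if the device requires it
//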
HRESULT CTextureRenderer::SetMediaType(const CMediaType *pmt)
{
HRESULT hr;
UINT uintWidth = 2;
UINT uintHeight = 2;
// Retrieve the size of this media type
D3DCAPS9 caps;
VIDEOINFO *pviBmp; // Bitmap info header
pviBmp = (VIDEOINFO *)pmt->Format();
m_lVidWidth = pviBmp->bmiHeader.biWidth;
m_lVidHeight = abs(pviBmp->bmiHeader.biHeight);
m_lVidPitch = (m_lVidWidth * 3 + 3) & ~(3); // We are forcing RGB24
// here let's check if we can use dynamic textures
ZeroMemory( &caps, sizeof(D3DCAPS9));
hr = gD3dDevice2->GetDeviceCaps( &caps );
/*
if ( caps.Caps2 & D3DCAPS2_DYNAMICTEXTURES )
{
m_bUseDynamicTextures = true;
}*/
if( caps.TextureCaps & D3DPTEXTURECAPS_POW2 )
{
while( (LONG)uintWidth < m_lVidWidth )
uintWidth = uintWidth << 1;
while( (LONG)uintHeight < m_lVidHeight )
uintHeight = uintHeight << 1;
}
else
{
uintWidth = m_lVidWidth;
uintHeight = m_lVidHeight;
}
// Release any texture left over from a previous clip, then (re)create the
// texture that maps to this media type
if ( m_pTexture )
{
m_pTexture->Release();
m_pTexture = NULL;
}
hr = E_UNEXPECTED;
if( m_bUseDynamicTextures )
{
hr = gD3dDevice2->CreateTexture( uintWidth, uintHeight, 1, D3DUSAGE_DYNAMIC, D3DFMT_X8R8G8B8,D3DPOOL_DEFAULT,
&m_pTexture, NULL );
if( FAILED(hr))
m_bUseDynamicTextures = FALSE;
}
if( FALSE == m_bUseDynamicTextures )
hr = gD3dDevice2->CreateTexture(uintWidth, uintHeight, 1, 0,
D3DFMT_X8R8G8B8,D3DPOOL_MANAGED,
&m_pTexture, NULL);
if ( FAILED(hr) )
return hr;
// CreateTexture can silently change the parameters on us
D3DSURFACE_DESC ddsd;
ZeroMemory(&ddsd, sizeof(ddsd));
if ( FAILED( hr = m_pTexture->GetLevelDesc( 0, &ddsd ) ) )
return hr;
CComPtr<IDirect3DSurface9> pSurf;
if ( SUCCEEDED( hr = m_pTexture->GetSurfaceLevel(0, &pSurf) ) )
pSurf->GetDesc(&ddsd);
// Save format info
g_TextureFormat = ddsd.Format;
if ( (g_TextureFormat != D3DFMT_X8R8G8B8) && (g_TextureFormat != D3DFMT_A1R5G5B5) )
return VFW_E_TYPE_NOT_ACCEPTED;
return S_OK;
}
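//////////
// DoRenderSample: called for each decoded frame; copy the RGB24 sample into the
// locked texture, expanding to X8R8G8B8 or packing to A1R5G5B5 depending on the
// format the texture was actually created with
//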
HRESULT CTextureRenderer::DoRenderSample( IMediaSample * pSample )
{
BYTE *pBmpBuffer, *pTxtBuffer; // Bitmap buffer, texture buffer
LONG lTxtPitch; // Pitch of bitmap, texture
BYTE * pbS = NULL;
DWORD * pdwS = NULL;
DWORD * pdwD = NULL;
UINT row, col, dwordWidth;
CheckPointer( pSample,E_POINTER );
CheckPointer( m_pTexture,E_UNEXPECTED );
// Get the video bitmap buffer
pSample->GetPointer( &pBmpBuffer );
// Lock the Texture
D3DLOCKED_RECT d3dlr;
if( m_bUseDynamicTextures )
{
if( FAILED(m_pTexture->LockRect(0, &d3dlr, 0, D3DLOCK_DISCARD)))
return E_FAIL;
}
else
{
if (FAILED(m_pTexture->LockRect(0, &d3dlr, 0, 0)))
return E_FAIL;
}
// Get the texture buffer & pitch
pTxtBuffer = static_cast<BYTE *>(d3dlr.pBits);
lTxtPitch = d3dlr.Pitch;
// Copy the bits
if ( g_TextureFormat == D3DFMT_X8R8G8B8 )
{
// Instead of copying data bytewise, we use DWORD alignment here.
// We also unroll loop by copying 4 pixels at once.
//
// original BYTE array is [b0][g0][r0][b1][g1][r1][b2][g2][r2][b3][g3][r3]
//
// aligned DWORD array is [b1 r0 g0 b0][g2 b2 r1 g1][r3 g3 b3 r2]
//
// We want to transform it to [ff r0 g0 b0][ff r1 g1 b1][ff r2 g2 b2][ff r3 g3 b3]
// below, bitwise operations do exactly this.
dwordWidth = m_lVidWidth / 4; // number of 4-pixel groups per row
// (each group reads 3 source DWORDs and writes 4 destination DWORDs)
for( row = 0; row< (UINT)m_lVidHeight; row++)
{
pdwS = ( DWORD* )pBmpBuffer;
pdwD = ( DWORD* )pTxtBuffer;
for( col = 0; col < dwordWidth; col ++ )
{
pdwD[0] = pdwS[0] | 0xFF000000;
pdwD[1] = ( ( pdwS[1] << 8) | 0xFF000000) | ( pdwS[0] >> 24 );
pdwD[2] = ( ( pdwS[2] << 16) | 0xFF000000) | ( pdwS[1] >> 16 );
pdwD[3] = 0xFF000000 | ( pdwS[2] >> 8 );
pdwD += 4;
pdwS += 3;
}
// We might have remaining (misaligned) bytes here
pbS = (BYTE*) pdwS;
for( col = 0; col < (UINT)m_lVidWidth % 4; col++)
{
*pdwD = 0xFF000000 |
(pbS[2] << 16) |
(pbS[1] << 8) |
(pbS[0]);
pdwD++;
pbS += 3;
}
pBmpBuffer += m_lVidPitch;
pTxtBuffer += lTxtPitch;
}// for rows
}
else if (g_TextureFormat == D3DFMT_A1R5G5B5)
{
// Repack each RGB24 pixel into a 16-bit A1R5G5B5 WORD with the alpha bit set
for(int y = 0; y < m_lVidHeight; y++ )
{
BYTE *pBmpBufferOld = pBmpBuffer;
BYTE *pTxtBufferOld = pTxtBuffer;
for (int x = 0; x < m_lVidWidth; x++)
{
*(WORD *)pTxtBuffer = (WORD)
(0x8000 +
((pBmpBuffer[2] & 0xF8) << 7) +
((pBmpBuffer[1] & 0xF8) << 2) +
(pBmpBuffer[0] >> 3));
pTxtBuffer += 2;
pBmpBuffer += 3;
}
pBmpBuffer = pBmpBufferOld + m_lVidPitch;
pTxtBuffer = pTxtBufferOld + lTxtPitch;
}
}
// Unlock the Texture
if ( FAILED( m_pTexture->UnlockRect(0) ) )
return E_FAIL;
return S_OK;
}
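//////////
// Usage sketch (illustrative only; g_movie, g_pd3dDevice and the file name are
// assumptions, not names defined in this module):
//
//   CMovies g_movie;
//   g_movie.Initialise( g_pd3dDevice, "intro.avi", NULL );
//   g_movie.Play();
//   // each frame, bind the video texture before drawing the screen quad:
//   g_pd3dDevice->SetTexture( 0, g_movie.GetTexture() );
//   if ( g_movie.HasMovieEnded() )
//       g_movie.Stop();
//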