DirectShow过滤器开发-读视频文件过滤器(再写)
下载本过滤器DLL
本过滤器读取视频文件输出视频流和音频流。流类型由文件决定。已知可读取的文件格式有:AVI,ASF,MOV,MP4,MPG,WMV。
过滤器信息
过滤器名称:读视频文件
过滤器GUID:{29001AD7-37A5-45E0-A750-E76453B36E33}
DLL注册函数名:DllRegisterServer
删除注册函数名:DllUnregisterServer
过滤器有2个输出引脚。
输出引脚1标识:Video
输出引脚1媒体类型:
主要类型:MEDIATYPE_Video
子类型:MEDIASUBTYPE_NULL
输出引脚2标识:Audio
输出引脚2媒体类型:
主要类型:MEDIATYPE_Audio
子类型:MEDIASUBTYPE_NULL
过滤器开发信息
过滤器实现了IFileSourceFilter接口,用于指定要读取的视频文件。并在接口的Load方法中确定文件视频流,音频流媒体类型,设置给视频音频输出引脚,并获取文件的时长。过滤器运行时,创建媒体源线程,在线程中创建媒体源,并获取媒体源事件。媒体源启动后(调用媒体源的Start方法),将产生“创建了新流”事件,事件值为视频或音频流接口。获取流接口后创建视频音频工作线程(视频音频流线程)。在线程中获取流事件。请求样本后,会产生“产生新样本”事件,事件值为样本接口,其为媒体基础样本,将其转换为引脚样本,由引脚向下游发送。再请求下一个样本。过滤器停止时,调用媒体源接口的Stop方法,会产生“流停止”事件,收到此事件后,退出视频音频工作线程,但媒体源线程仍未退出。再次运行过滤器时,再次启动媒体源,此时媒体源产生“流已更新”事件,事件值为已更新的流接口。获取流接口后创建视频音频工作线程(视频音频流线程)。过滤器更改播放位置时(即定位,调用媒体源的Start方法,在参数中指定新的开始位置),媒体源也会产生“流已更新”事件,此时只更新流接口,不再创建新的视频音频工作线程,因为线程已存在。媒体源会自动寻找关键帧,从关键帧开始。要退出媒体源线程,通过销毁过滤器或指定新的要读取的文件,方式为发送“退出媒体源线程”事件信号。
IMediaSeeking接口用于调整播放的当前位置(定位)。可以在过滤器实现,也可以在任一引脚实现。如果在引脚实现,最好在音频引脚。
过滤器DLL的全部代码
DLL.h
#ifndef DLL_FILE
#define DLL_FILE
#include "strmbase10.h"//过滤器基类定义文件
// Link the matching build of the DirectShow base-class library.
// Use #ifdef, not #if: _DEBUG is conventionally tested for presence, and
// "#if _DEBUG" fails to compile if the macro is defined with an empty value.
#ifdef _DEBUG
#pragma comment(lib, "strmbasd10.lib")// debug build of the filter base-class library
#else
#pragma comment(lib, "strmbase10.lib")// release build of the filter base-class library
#endif
// {29001AD7-37A5-45E0-A750-E76453B36E33}
DEFINE_GUID(CLSID_Reader,// the filter's CLSID, also used for registration below
0x29001ad7, 0x37a5, 0x45e0, 0xa7, 0x50, 0xe7, 0x64, 0x53, 0xb3, 0x6e, 0x33);
#include "mfapi.h"
#include "mfidl.h"
#include "mferror.h"
#include "evr.h"
#pragma comment(lib, "mfplat.lib")
#pragma comment(lib, "mf.lib")
class CPin2;
class CFilter;
// Video output pin. The video worker thread (VideoThread in CFilter.cpp)
// pushes converted Media Foundation samples downstream through this pin.
class CPin1 : public CBaseOutputPin
{
friend class CPin2;
friend class CFilter;
public:
CPin1(CFilter *pFilter, HRESULT *phr, LPCWSTR pPinName);
~CPin1();
DECLARE_IUNKNOWN
STDMETHODIMP NonDelegatingQueryInterface(REFIID riid, void **ppvoid);
BOOL HasSet = FALSE;// TRUE once GetMediaType() has produced this pin's media type; reset in BreakConnect()
HRESULT CheckMediaType(const CMediaType *pmt);
HRESULT GetMediaType(int iPosition, CMediaType *pMediaType);
HRESULT SetMediaType(const CMediaType *pmt);
HRESULT BreakConnect();
HRESULT DecideBufferSize(IMemAllocator *pMemAllocator, ALLOCATOR_PROPERTIES * ppropInputRequest);
CFilter *pCFilter = NULL;// owning filter (not ref-counted; same lifetime as the pin)
// Quality-control notifications are not handled by this filter.
STDMETHODIMP Notify(IBaseFilter *pSelf, Quality q)
{
return E_FAIL;
}
};
// Audio output pin. Also implements IMediaSeeking for the whole filter
// (implemented on the audio pin; see the article text above for rationale).
class CPin2 : public CBaseOutputPin, public IMediaSeeking
{
friend class CPin1;
friend class CFilter;
public:
CPin2(CFilter *pFilter, HRESULT *phr, LPCWSTR pPinName);
~CPin2();
DECLARE_IUNKNOWN
STDMETHODIMP NonDelegatingQueryInterface(REFIID riid, void **ppvoid);
BOOL HasSet = FALSE;// TRUE once GetMediaType() has produced this pin's media type; reset in BreakConnect()
HRESULT CheckMediaType(const CMediaType *pmt);
HRESULT GetMediaType(int iPosition, CMediaType *pMediaType);
HRESULT SetMediaType(const CMediaType *pmt);
HRESULT BreakConnect();
HRESULT DecideBufferSize(IMemAllocator *pMemAllocator, ALLOCATOR_PROPERTIES * ppropInputRequest);
CFilter *pCFilter = NULL;// owning filter (not ref-counted; same lifetime as the pin)
// Advertised seeking capabilities (absolute/forward/backward seek, stop pos, duration).
DWORD m_dwSeekingCaps = AM_SEEKING_CanSeekForwards | AM_SEEKING_CanSeekBackwards | AM_SEEKING_CanSeekAbsolute | AM_SEEKING_CanGetStopPos | AM_SEEKING_CanGetDuration;
HRESULT STDMETHODCALLTYPE CheckCapabilities(DWORD *pCapabilities);
HRESULT STDMETHODCALLTYPE ConvertTimeFormat(LONGLONG *pTarget, const GUID *pTargetFormat, LONGLONG Source, const GUID *pSourceFormat);
HRESULT STDMETHODCALLTYPE GetAvailable(LONGLONG *pEarliest, LONGLONG *pLatest);
HRESULT STDMETHODCALLTYPE GetCapabilities(DWORD *pCapabilities);
HRESULT STDMETHODCALLTYPE GetCurrentPosition(LONGLONG *pCurrent);
HRESULT STDMETHODCALLTYPE GetDuration(LONGLONG *pDuration);
HRESULT STDMETHODCALLTYPE GetPositions(LONGLONG *pCurrent, LONGLONG *pStop);
HRESULT STDMETHODCALLTYPE GetPreroll(LONGLONG *pllPreroll);
HRESULT STDMETHODCALLTYPE GetRate(double *pdRate);
HRESULT STDMETHODCALLTYPE GetStopPosition(LONGLONG *pStop);
HRESULT STDMETHODCALLTYPE GetTimeFormat(GUID *pFormat);
HRESULT STDMETHODCALLTYPE IsFormatSupported(const GUID *pFormat);
HRESULT STDMETHODCALLTYPE IsUsingTimeFormat(const GUID *pFormat);
HRESULT STDMETHODCALLTYPE QueryPreferredFormat(GUID *pFormat);
HRESULT STDMETHODCALLTYPE SetPositions(LONGLONG *pCurrent, DWORD dwCurrentFlags, LONGLONG *pStop, DWORD dwStopFlags);
HRESULT STDMETHODCALLTYPE SetRate(double dRate);
HRESULT STDMETHODCALLTYPE SetTimeFormat(const GUID *pFormat);
// Quality-control notifications are not handled by this filter.
STDMETHODIMP Notify(IBaseFilter *pSelf, Quality q)
{
return E_FAIL;
}
};
// Source filter: reads a media file through Media Foundation and exposes the
// demuxed video/audio streams on two DirectShow output pins.
class CFilter : public CCritSec, public CBaseFilter, public IFileSourceFilter
{
friend class CPin1;
friend class CPin2;
public:
CFilter(TCHAR* pName, LPUNKNOWN pUnk, HRESULT* hr);
~CFilter();
CBasePin* GetPin(int n);
int GetPinCount();
static CUnknown * WINAPI CreateInstance(LPUNKNOWN pUnk, HRESULT *phr);
DECLARE_IUNKNOWN
STDMETHODIMP NonDelegatingQueryInterface(REFIID iid, void ** ppv);
STDMETHODIMP Load(LPCOLESTR lpwszFileName, const AM_MEDIA_TYPE *pmt);
STDMETHODIMP GetCurFile(LPOLESTR * ppszFileName, AM_MEDIA_TYPE *pmt);
HRESULT GetMediaType();// probe the loaded file for the video/audio stream media types
REFERENCE_TIME mStart = 0;// segment start position (updated on seek), 100-ns units
HANDLE hSourceThread = NULL;// media-source thread handle
HANDLE hVideoThread = NULL;// video stream thread handle
HANDLE hAudioThread = NULL;// audio stream thread handle
HANDLE hExit = NULL;// "exit media-source thread" event handle
HANDLE hInit = NULL;// "media source created" event handle
STDMETHODIMP Pause();
STDMETHODIMP Stop();
CPin1* pCPin1 = NULL;// video pin pointer
CPin2* pCPin2 = NULL;// audio pin pointer
WCHAR* m_pFileName = NULL;// path of the file to read
LONGLONG DUR = 0;// file duration, 100-ns units
LONGLONG CUR = 0;// current position (last audio sample time), 100-ns units
IMFMediaType* pVideoType = NULL;// video media type
IMFMediaType* pAudioType = NULL;// audio media type
IMFMediaSource *pIMFMediaSource = NULL;// media source interface
IMFPresentationDescriptor* pISourceD = NULL;// presentation descriptor
IMFMediaStream* pVideoStream = NULL;// video stream interface
IMFMediaStream* pAudioStream = NULL;// audio stream interface
};
// Release a COM-style interface pointer and null it out; no-op when already null.
template <class T> void SafeRelease(T** ppT)
{
    if (*ppT != nullptr)
    {
        (*ppT)->Release();
        *ppT = nullptr;
    }
}
#endif //DLL_FILE
DLL.cpp
#include "DLL.h"
const AMOVIESETUP_MEDIATYPE Pin1Type = // pin 1 (video) media type
{
&MEDIATYPE_Video, // major type
&MEDIASUBTYPE_NULL // subtype
};
const AMOVIESETUP_MEDIATYPE Pin2Type = // pin 2 (audio) media type
{
&MEDIATYPE_Audio, // major type
&MEDIASUBTYPE_NULL // subtype
};
const AMOVIESETUP_PIN sudPins[] = // pin registration info
{
{
L"Video", // pin name
FALSE, // not a rendered pin
TRUE, // output pin
FALSE, // cannot have zero instances
FALSE, // cannot have more than one instance
&CLSID_NULL, // class of the filter this pin connects to (none specific)
NULL, // name of the pin it connects to
1, // number of supported media types
&Pin1Type // media type info
},
{
L"Audio", // pin name
FALSE, // not a rendered pin
TRUE, // output pin
FALSE, // cannot have zero instances
FALSE, // cannot have more than one instance
&CLSID_NULL, // class of the filter this pin connects to (none specific)
NULL, // name of the pin it connects to
1, // number of supported media types
&Pin2Type // media type info
}
};
const AMOVIESETUP_FILTER Reader = // filter registration info
{
&CLSID_Reader, // filter CLSID
L"读视频文件", // filter name ("Read video file")
MERIT_DO_NOT_USE, // merit: never auto-selected by Intelligent Connect
2, // pin count
sudPins // pin info
};
CFactoryTemplate g_Templates[] = // class-factory template array
{
{
L"读视频文件", // object (filter) name
&CLSID_Reader, // pointer to the object's CLSID
CFilter::CreateInstance, // pointer to the instance-creation function
NULL, // pointer to a function called from the DLL entry point
&Reader // pointer to the AMOVIESETUP_FILTER structure
}
};
int g_cTemplates = 1;// size of the template array
// COM self-registration entry point: writes the filter's registry entries.
STDAPI DllRegisterServer()
{
    const HRESULT hr = AMovieDllRegisterServer2(TRUE);
    return hr;
}
// COM self-unregistration entry point: removes the filter's registry entries.
STDAPI DllUnregisterServer()
{
    const HRESULT hr = AMovieDllRegisterServer2(FALSE);
    return hr;
}
// Forward to the DirectShow base-class DLL entry point so the base classes
// can perform their per-process initialization.
extern "C" BOOL WINAPI DllEntryPoint(HINSTANCE, ULONG, LPVOID);
BOOL APIENTRY DllMain(HANDLE hModule, DWORD dwReason, LPVOID lpReserved)
{
return DllEntryPoint((HINSTANCE)(hModule), dwReason, lpReserved);
}
CFilter.cpp
#include "DLL.h"
#include "strsafe.h"
DWORD WINAPI MediaSourceThread(LPVOID pParam);
DWORD WINAPI VideoThread(LPVOID pParam);
DWORD WINAPI AudioThread(LPVOID pParam);
// Construct the filter: start Media Foundation, create both output pins and
// the two auto-reset events used to coordinate with the media-source thread.
// On MFStartup failure the error is now propagated through *phr so the class
// factory reports the failure (previously it was silently swallowed).
CFilter::CFilter(TCHAR *pName, LPUNKNOWN pUnk, HRESULT *phr) : CBaseFilter(NAME("读视频文件"), pUnk, this, CLSID_Reader)
{
    HRESULT hr = MFStartup(MF_VERSION);// initialize Media Foundation
    if (hr != S_OK)
    {
        MessageBox(NULL, L"初始化媒体基础失败", L"读视频文件", MB_OK);
        if (phr) *phr = hr;// report the failure to the caller (was ignored)
        return;
    }
    pCPin1 = new CPin1(this, phr, L"Video");// create the video output pin
    pCPin2 = new CPin2(this, phr, L"Audio");// create the audio output pin
    hExit = CreateEvent(NULL, FALSE, FALSE, NULL);// auto-reset, initially non-signaled
    hInit = CreateEvent(NULL, FALSE, FALSE, NULL);// auto-reset, initially non-signaled
}
// Tear down the filter. Fixes two defects: (1) MFShutdown() could run while
// the media-source thread was still using MF objects — we now wait for the
// thread to exit after signaling hExit; (2) the pins were never deleted.
CFilter::~CFilter()
{
    SetEvent(hExit);// signal "exit media-source thread"
    if (hSourceThread != NULL)
        WaitForSingleObject(hSourceThread, INFINITE);// let the thread release its MF objects first
    SafeRelease(&pVideoType); SafeRelease(&pAudioType);// release the media types
    if (m_pFileName)delete[] m_pFileName;
    delete pCPin1; delete pCPin2;// pins are owned by the filter (previously leaked)
    MFShutdown();// shut down Media Foundation
    CloseHandle(hSourceThread); CloseHandle(hVideoThread); CloseHandle(hAudioThread); CloseHandle(hExit); CloseHandle(hInit);
}
// Return the n-th pin: 0 = video, 1 = audio, anything else = none.
CBasePin *CFilter::GetPin(int n)
{
    switch (n)
    {
    case 0:  return pCPin1;
    case 1:  return pCPin2;
    default: return NULL;
    }
}
// This filter always exposes exactly two pins (video + audio).
int CFilter::GetPinCount()
{
return 2;
}
// Class-factory entry point. Reports allocation failure through *phr per the
// standard DirectShow sample pattern (the result was previously unchecked).
CUnknown * WINAPI CFilter::CreateInstance(LPUNKNOWN pUnk, HRESULT *phr)
{
    CFilter* pFilter = new CFilter(NAME("读视频文件"), pUnk, phr);
    if (pFilter == NULL && phr != NULL)
        *phr = E_OUTOFMEMORY;
    return pFilter;
}
// Expose IFileSourceFilter ourselves; every other interface is served by the base filter.
STDMETHODIMP CFilter::NonDelegatingQueryInterface(REFIID iid, void ** ppv)
{
    if (iid != IID_IFileSourceFilter)
        return CBaseFilter::NonDelegatingQueryInterface(iid, ppv);
    return GetInterface(static_cast<IFileSourceFilter*>(this), ppv);
}
// IFileSourceFilter::Load — remember the file path and probe it for its
// video/audio media types (also fills in DUR, the file duration).
// Fixes: the length check returned the raw Win32 code ERROR_FILENAME_EXCED_RANGE,
// which has the severity bit clear, so FAILED() on it is false and callers saw
// success; it is now wrapped with HRESULT_FROM_WIN32. The StringCchCopyW result
// is also checked instead of being ignored.
STDMETHODIMP CFilter::Load(LPCOLESTR lpwszFileName, const AM_MEDIA_TYPE *pmt)
{
    CheckPointer(lpwszFileName, E_POINTER);
    size_t cch = wcslen(lpwszFileName);
    if (cch > MAX_PATH || cch < 4)// too long, or too short to be "x.abc"
        return HRESULT_FROM_WIN32(ERROR_FILENAME_EXCED_RANGE);
    size_t len = 1 + cch;
    if (m_pFileName != NULL)delete[] m_pFileName;
    m_pFileName = new WCHAR[len];
    if (m_pFileName == NULL)return E_OUTOFMEMORY;
    HRESULT hr = StringCchCopyW(m_pFileName, len, lpwszFileName);
    if (FAILED(hr))
    {
        delete[] m_pFileName; m_pFileName = NULL;
        return hr;
    }
    hr = GetMediaType();// probe the file for its video/audio media types
    if (hr != S_OK)// file could not be parsed — forget the path again
    {
        delete[] m_pFileName; m_pFileName = NULL;
        return VFW_E_INVALID_FILE_FORMAT;
    }
    return S_OK;
}
// IFileSourceFilter::GetCurFile — return a CoTaskMemAlloc'd copy of the current
// file name. Fixes: allocation failure now returns E_OUTOFMEMORY instead of
// being folded into S_FALSE, and a non-null pmt is zero-initialized instead of
// being left untouched (we track no media type for the file itself).
STDMETHODIMP CFilter::GetCurFile(LPOLESTR * ppszFileName, AM_MEDIA_TYPE *pmt)
{
    CheckPointer(ppszFileName, E_POINTER);
    *ppszFileName = NULL;
    if (pmt != NULL)
        ZeroMemory(pmt, sizeof(*pmt));// zeroed GUIDs == GUID_NULL: "no specific type"
    if (m_pFileName == NULL)
        return S_FALSE;// no file loaded yet (kept as S_FALSE for existing callers)
    DWORD n = sizeof(WCHAR)*(1 + lstrlenW(m_pFileName));
    *ppszFileName = (LPOLESTR)CoTaskMemAlloc(n);
    if (*ppszFileName == NULL)
        return E_OUTOFMEMORY;// was S_FALSE, hiding the allocation failure
    CopyMemory(*ppszFileName, m_pFileName, n);
    return S_OK;
}
// Open the file with a throw-away media source and record the media type of
// each of its first two streams into pVideoType/pAudioType (whichever majors
// they turn out to be), plus the file duration into DUR. The temporary source
// is fully released before returning. If the media-source thread is already
// running (a file was loaded before), it is asked to exit first so the old
// source is torn down.
// NOTE(review): assumes the file has at least two streams — a file with only
// one stream fails at the second GetStreamDescriptorByIndex; confirm intended.
HRESULT CFilter::GetMediaType()// determine the video/audio stream media types
{
DWORD dw = WaitForSingleObject(hSourceThread, 0);
if (dw == WAIT_TIMEOUT)// the media-source thread is still running
{
SetEvent(hExit);// signal "exit media-source thread"
WaitForSingleObject(hSourceThread, INFINITE);// wait for the thread to exit
}
SafeRelease(&pVideoType); SafeRelease(&pAudioType);// drop any previous media types
IMFPresentationDescriptor* pSourceD = NULL;// presentation descriptor
IMFMediaSource *pMFMediaSource = NULL;// media source interface
IMFStreamDescriptor* pStreamD1 = NULL;// stream 1 descriptor
IMFStreamDescriptor* pStreamD2 = NULL;// stream 2 descriptor
IMFMediaTypeHandler* pHandle1 = NULL;// stream 1 media type handler
IMFMediaTypeHandler* pHandle2 = NULL;// stream 2 media type handler
IMFSourceResolver* pSourceResolver = NULL;// source resolver
HRESULT hr = MFCreateSourceResolver(&pSourceResolver);// create the source resolver
MF_OBJECT_TYPE ObjectType = MF_OBJECT_INVALID;
IUnknown* pSource = NULL;
if (SUCCEEDED(hr))
{
hr = pSourceResolver->CreateObjectFromURL(// create a media source from the URL
m_pFileName, // URL of the source
MF_RESOLUTION_MEDIASOURCE, // create a source object
NULL, // optional property store
&ObjectType, // receives the created object type
&pSource // receives a pointer to the media source
);
}
SafeRelease(&pSourceResolver);// release the source resolver
if (SUCCEEDED(hr))
{
hr = pSource->QueryInterface(IID_PPV_ARGS(&pMFMediaSource));// get the media source interface
}
SafeRelease(&pSource);// release the IUnknown interface
if (SUCCEEDED(hr))
{
hr = pMFMediaSource->CreatePresentationDescriptor(&pSourceD);// get the presentation descriptor
}
if (SUCCEEDED(hr))
{
hr = pSourceD->GetUINT64(MF_PD_DURATION, (UINT64*)&DUR);// file duration, 100-ns units
}
BOOL Selected;
if (SUCCEEDED(hr))
{
hr = pSourceD->GetStreamDescriptorByIndex(0, &Selected, &pStreamD1);// stream 1 descriptor
}
if (SUCCEEDED(hr))
{
hr = pStreamD1->GetMediaTypeHandler(&pHandle1);// stream 1 media type handler
}
SafeRelease(&pStreamD1);// release stream 1 descriptor
GUID guid;
if (SUCCEEDED(hr))
{
hr = pHandle1->GetMajorType(&guid);// stream 1 major type
}
if (SUCCEEDED(hr))
{
if (guid == MEDIATYPE_Video)// stream 1 is video
{
hr = pHandle1->GetCurrentMediaType(&pVideoType);// record the video media type
}
else if (guid == MEDIATYPE_Audio)// stream 1 is audio
{
hr = pHandle1->GetCurrentMediaType(&pAudioType);// record the audio media type
}
}
SafeRelease(&pHandle1);// release stream 1 type handler
if (SUCCEEDED(hr))
{
hr = pSourceD->GetStreamDescriptorByIndex(1, &Selected, &pStreamD2);// stream 2 descriptor
}
if (SUCCEEDED(hr))
{
hr = pStreamD2->GetMediaTypeHandler(&pHandle2);// stream 2 media type handler
}
SafeRelease(&pStreamD2);// release stream 2 descriptor
if (SUCCEEDED(hr))
{
hr = pHandle2->GetMajorType(&guid);// stream 2 major type
}
if (SUCCEEDED(hr))
{
if (guid == MEDIATYPE_Video)// stream 2 is video
{
hr = pHandle2->GetCurrentMediaType(&pVideoType);// record the video media type
}
else if (guid == MEDIATYPE_Audio)// stream 2 is audio
{
hr = pHandle2->GetCurrentMediaType(&pAudioType);// record the audio media type
}
}
SafeRelease(&pHandle2);// release stream 2 type handler
SafeRelease(&pSourceD);// release the presentation descriptor
SafeRelease(&pMFMediaSource);// release the media source
return hr;
}
// Stopped -> Paused transition: lazily create the media-source thread (which
// builds pIMFMediaSource/pISourceD and signals hInit when ready), then start
// the media source from position 0. Starting the source makes it emit
// MENewStream / MEUpdatedStream events, which the source thread turns into
// the video/audio worker threads.
STDMETHODIMP CFilter::Pause()
{
if (m_State == State_Stopped)
{
DWORD dw = WaitForSingleObject(hSourceThread, 0);
if (dw == WAIT_FAILED || dw == WAIT_OBJECT_0)// thread never created, or already exited
{
hSourceThread = CreateThread(NULL, 0, MediaSourceThread, this, 0, NULL);// create the media-source thread
WaitForSingleObject(hInit, INFINITE);// wait until the media source exists
}
mStart = 0;// (re)start always begins at the start of the file
PROPVARIANT var;
PropVariantInit(&var);
var.vt = VT_I8;
var.hVal.QuadPart = mStart;// start position, 100-ns units
HRESULT hr = pIMFMediaSource->Start(pISourceD, NULL, &var);// start the media source
PropVariantClear(&var);
}
return CBaseFilter::Pause();
}
// Stop the media source (which makes the streams emit MEStreamStopped, ending
// the worker threads) and then stop the base filter.
// Fix: pIMFMediaSource only exists after the first Pause(); guard against a
// Stop() on a graph that never ran (previously a null-pointer crash).
STDMETHODIMP CFilter::Stop()
{
    if (pIMFMediaSource != NULL)
        pIMFMediaSource->Stop();
    return CBaseFilter::Stop();
}
// Media-source thread. Creates the MF media source for the current file,
// signals hInit, then loops: polls the source's event queue and the hExit
// event. MENewStream -> remember the stream, request its first sample and
// spawn the matching worker thread. MEUpdatedStream (sent on restart/seek) ->
// refresh the stream pointer, request a sample, and only spawn a worker
// thread if the old one has exited. On hExit the source is stopped, shut
// down, released, and the thread returns.
// NOTE(review): the loop polls with MF_EVENT_FLAG_NO_WAIT and never sleeps,
// so this thread busy-spins while idle — confirm whether that is intended.
DWORD WINAPI MediaSourceThread(LPVOID pParam)// media-source thread
{
CFilter* pCFilter = (CFilter*)pParam;
HRESULT hr, hr1, hr2;
IMFMediaStream* pIMFMediaStream = NULL;
IMFStreamDescriptor* pIMFStreamDescriptor = NULL;
IMFMediaTypeHandler* pHandler = NULL;
pCFilter->pVideoStream = NULL;// video stream interface
pCFilter->pAudioStream = NULL;// audio stream interface
IMFSourceResolver* pSourceResolver = NULL;// source resolver
hr = MFCreateSourceResolver(&pSourceResolver);// create the source resolver
MF_OBJECT_TYPE ObjectType = MF_OBJECT_INVALID;
IUnknown* pSource = NULL;
if (SUCCEEDED(hr))
{
hr = pSourceResolver->CreateObjectFromURL(// create a media source from the URL
pCFilter->m_pFileName, // URL of the source
MF_RESOLUTION_MEDIASOURCE, // create a source object
NULL, // optional property store
&ObjectType, // receives the created object type
&pSource // receives a pointer to the media source
);
}
SafeRelease(&pSourceResolver);// release the source resolver
if (SUCCEEDED(hr))
{
hr = pSource->QueryInterface(IID_PPV_ARGS(&pCFilter->pIMFMediaSource));// get the media source interface
}
SafeRelease(&pSource);// release the IUnknown interface
if (SUCCEEDED(hr))
{
hr = pCFilter->pIMFMediaSource->CreatePresentationDescriptor(&pCFilter->pISourceD);// get the presentation descriptor
}
if (hr != S_OK)
{
SafeRelease(&pCFilter->pIMFMediaSource);// release the media source
return hr;
}
SetEvent(pCFilter->hInit);// signal "media source created" (unblocks Pause())
Agan:
GUID guid;
IMFMediaEvent* pSourceEvent = NULL;
hr1 = pCFilter->pIMFMediaSource->GetEvent(MF_EVENT_FLAG_NO_WAIT, &pSourceEvent);// poll for a media-source event
DWORD dw = WaitForSingleObject(pCFilter->hExit, 0);// check the "exit media-source thread" signal
if (dw == WAIT_OBJECT_0)// exit requested
{
hr = pCFilter->pIMFMediaSource->Stop();
pCFilter->pIMFMediaSource->Shutdown();
SafeRelease(&pCFilter->pISourceD);// release the presentation descriptor
SafeRelease(&pCFilter->pIMFMediaSource);// release the media source
return 1;// leave the thread
}
if (SUCCEEDED(hr1))// got a media-source event
{
MediaEventType MET;
hr2 = pSourceEvent->GetType(&MET);// event type
if (SUCCEEDED(hr2))
{
PROPVARIANT vr;
PropVariantInit(&vr);
hr = pSourceEvent->GetValue(&vr);// event value (stream interface for the cases below)
switch (MET)
{
case MENewStream:// a new stream was created (first Start after source creation)
hr = vr.punkVal->QueryInterface(&pIMFMediaStream);// get the stream interface
vr.punkVal->Release();// release the IUnknown interface
hr = pIMFMediaStream->GetStreamDescriptor(&pIMFStreamDescriptor);// get the stream descriptor
hr = pIMFStreamDescriptor->GetMediaTypeHandler(&pHandler);// get the media type handler
SafeRelease(&pIMFStreamDescriptor);// release the stream descriptor
hr = pHandler->GetMajorType(&guid);// get the major type
SafeRelease(&pHandler);// release the type handler
if (guid == MEDIATYPE_Video)// video stream
{
pCFilter->pVideoStream = pIMFMediaStream;// keep the video stream
hr = pCFilter->pVideoStream->RequestSample(NULL);// request the first video sample
pCFilter->hVideoThread = CreateThread(NULL, 0, VideoThread, pCFilter, 0, NULL);// create the video worker thread
}
if (guid == MEDIATYPE_Audio)// audio stream
{
pCFilter->pAudioStream = pIMFMediaStream;// keep the audio stream
hr = pCFilter->pAudioStream->RequestSample(NULL);// request the first audio sample
pCFilter->hAudioThread = CreateThread(NULL, 0, AudioThread, pCFilter, 0, NULL);// create the audio worker thread
}
break;
case MEUpdatedStream:// stream updated (sent on seek or on re-start)
hr = vr.punkVal->QueryInterface(&pIMFMediaStream);// get the stream interface
vr.punkVal->Release();// release the IUnknown interface
hr = pIMFMediaStream->GetStreamDescriptor(&pIMFStreamDescriptor);// get the stream descriptor
hr = pIMFStreamDescriptor->GetMediaTypeHandler(&pHandler);// get the media type handler
SafeRelease(&pIMFStreamDescriptor);// release the stream descriptor
hr = pHandler->GetMajorType(&guid);// get the major type
SafeRelease(&pHandler);// release the type handler
if (guid == MEDIATYPE_Video)// video stream
{
pCFilter->pVideoStream = pIMFMediaStream;// refresh the video stream pointer
hr = pCFilter->pVideoStream->RequestSample(NULL);// request the first video sample
DWORD dw = WaitForSingleObject(pCFilter->hVideoThread, 0);
if (dw == WAIT_OBJECT_0)// worker already exited; avoid a second thread during a seek
{
pCFilter->hVideoThread = CreateThread(NULL, 0, VideoThread, pCFilter, 0, NULL);// create the video worker thread
}
}
if (guid == MEDIATYPE_Audio)// audio stream
{
pCFilter->pAudioStream = pIMFMediaStream;// refresh the audio stream pointer
hr = pCFilter->pAudioStream->RequestSample(NULL);// request the first audio sample
DWORD dw = WaitForSingleObject(pCFilter->hAudioThread, 0);
if (dw == WAIT_OBJECT_0)// worker already exited; avoid a second thread during a seek
{
pCFilter->hAudioThread = CreateThread(NULL, 0, AudioThread, pCFilter, 0, NULL);// create the audio worker thread
}
}
break;
}
PropVariantClear(&vr);
}
SafeRelease(&pSourceEvent);// release the media-source event
}
goto Agan;
}
// Video worker thread. Flushes downstream, opens a new segment, then loops on
// the video stream's event queue: each MEMediaSample is copied into a pin
// sample (with timestamps rebased to the current segment start mStart),
// delivered downstream, and the next sample is requested. MEEndOfStream is
// forwarded; MEStreamStopped ends the thread.
DWORD WINAPI VideoThread(LPVOID pParam)// video worker thread
{
CFilter* pCFilter = (CFilter*)pParam;
HRESULT hr;
hr = pCFilter->pCPin1->DeliverBeginFlush();
Sleep(200);// give downstream time to discard queued samples — NOTE(review): fixed delay, confirm sufficient
hr = pCFilter->pCPin1->DeliverEndFlush();
hr = pCFilter->pCPin1->DeliverNewSegment(0, pCFilter->DUR - pCFilter->mStart, 1.0);
Agan:
HRESULT hr1, hr2;
IMFSample* pIMFSample = NULL;
IMFMediaEvent* pStreamEvent = NULL;
hr1 = pCFilter->pVideoStream->GetEvent(0, &pStreamEvent);// get a stream event, blocking wait
if (hr1 == S_OK)
{
MediaEventType meType = MEUnknown;
hr2 = pStreamEvent->GetType(&meType);// event type
if (hr2 == S_OK)
{
PROPVARIANT var;
PropVariantInit(&var);
hr = pStreamEvent->GetValue(&var);// event value
switch (meType)
{
case MEMediaSample:// a new sample is available
hr = var.punkVal->QueryInterface(&pIMFSample);// get the MF sample interface
if (hr == S_OK)
{
HRESULT hrA, hrB;
UINT32 CleanPoint;
hrA = pIMFSample->GetUINT32(MFSampleExtension_CleanPoint, &CleanPoint);// key frame?
UINT32 Discontinuity;
hrB = pIMFSample->GetUINT32(MFSampleExtension_Discontinuity, &Discontinuity);// discontinuity flag?
LONGLONG star, dur;
hr = pIMFSample->GetSampleTime(&star);
hr = pIMFSample->GetSampleDuration(&dur);
DWORD len;
hr = pIMFSample->GetTotalLength(&len);
DWORD Count;
hr = pIMFSample->GetBufferCount(&Count);
IMFMediaBuffer* pBuffer = NULL;
if (Count == 1)
hr = pIMFSample->GetBufferByIndex(0, &pBuffer);
else
hr = pIMFSample->ConvertToContiguousBuffer(&pBuffer);// merge a multi-buffer sample into one buffer
BYTE* pData = NULL; DWORD MLen, CLen;
hr = pBuffer->Lock(&pData, &MLen, &CLen);
IMediaSample *pOutSample = NULL;
hr = pCFilter->pCPin1->GetDeliveryBuffer(&pOutSample, NULL, NULL, 0);// get an empty output-pin sample
if (hr == S_OK)
{
BYTE* pOutBuffer = NULL;
hr = pOutSample->GetPointer(&pOutBuffer);// output-pin sample buffer
if (pOutBuffer && pData && len <= 10000000)// 10 MB cap matches CPin1::DecideBufferSize
CopyMemory(pOutBuffer, pData, len);
LONGLONG STAR = star - pCFilter->mStart, END = STAR + dur;// rebase to segment start
hr = pOutSample->SetTime(&STAR, &END);// set the sample timestamps
hr = pOutSample->SetActualDataLength(len);// set the valid data length
if (hrA == S_OK)
{
if (CleanPoint)// key frame
hr = pOutSample->SetSyncPoint(TRUE);// mark as sync point
else
hr = pOutSample->SetSyncPoint(FALSE);
}
if (hrB == S_OK)
{
if (Discontinuity)// discontinuity present
{
hr = pOutSample->SetDiscontinuity(TRUE);// mark the discontinuity
}
else
hr = pOutSample->SetDiscontinuity(FALSE);
}
hr = pCFilter->pCPin1->Deliver(pOutSample);// send the sample downstream
pOutSample->Release();// release the output-pin sample
}
pBuffer->Unlock(); SafeRelease(&pBuffer); SafeRelease(&pIMFSample);
hr = pCFilter->pVideoStream->RequestSample(NULL);// request the next sample
}
break;
case MEEndOfStream:// end of stream
hr = pCFilter->pCPin1->DeliverEndOfStream();
break;
case MEStreamStopped:// stream stopped
PropVariantClear(&var);
SafeRelease(&pStreamEvent);
return 1;// end the video worker thread
}
PropVariantClear(&var);
}
SafeRelease(&pStreamEvent);
}
goto Agan;
}
// Audio worker thread. Mirror of VideoThread for the audio pin; additionally
// records each sample's start time into CFilter::CUR, which backs
// IMediaSeeking::GetCurrentPosition on the audio pin.
DWORD WINAPI AudioThread(LPVOID pParam)// audio worker thread
{
CFilter* pCFilter = (CFilter*)pParam;
HRESULT hr, hrA, hrB;
hr = pCFilter->pCPin2->DeliverBeginFlush();
Sleep(100);// give downstream time to discard queued samples — NOTE(review): fixed delay, confirm sufficient
hr = pCFilter->pCPin2->DeliverEndFlush();
hr = pCFilter->pCPin2->DeliverNewSegment(0, pCFilter->DUR - pCFilter->mStart, 1.0);
Agan:
HRESULT hr1, hr2;
IMFSample* pIMFSample = NULL;
IMFMediaEvent* pStreamEvent = NULL;
hr1 = pCFilter->pAudioStream->GetEvent(0, &pStreamEvent);// get a stream event, blocking wait
if (hr1 == S_OK)
{
MediaEventType meType = MEUnknown;
hr2 = pStreamEvent->GetType(&meType);// event type
if (hr2 == S_OK)
{
PROPVARIANT var;
PropVariantInit(&var);
hr = pStreamEvent->GetValue(&var);// event value
switch (meType)
{
case MEMediaSample:// a new sample is available
hr = var.punkVal->QueryInterface(&pIMFSample);// get the MF sample interface
if (hr == S_OK)
{
UINT32 CleanPoint;
hrA = pIMFSample->GetUINT32(MFSampleExtension_CleanPoint, &CleanPoint);// key frame?
UINT32 Discontinuity;
hrB = pIMFSample->GetUINT32(MFSampleExtension_Discontinuity, &Discontinuity);// discontinuity flag?
LONGLONG star, dur;
hr = pIMFSample->GetSampleTime(&star);
pCFilter->CUR = star;// current audio position (read by GetCurrentPosition)
hr = pIMFSample->GetSampleDuration(&dur);
DWORD Count;
hr = pIMFSample->GetBufferCount(&Count);
IMFMediaBuffer* pBuffer = NULL;
if (Count == 1)
hr = pIMFSample->GetBufferByIndex(0, &pBuffer);
else
hr = pIMFSample->ConvertToContiguousBuffer(&pBuffer);// merge a multi-buffer sample into one buffer
BYTE* pData = NULL;
hr = pBuffer->Lock(&pData, NULL, NULL);
DWORD len;
hr = pIMFSample->GetTotalLength(&len);
IMediaSample *pOutSample = NULL;
hr = pCFilter->pCPin2->GetDeliveryBuffer(&pOutSample, NULL, NULL, 0);// get an empty output-pin sample
if (hr == S_OK)
{
BYTE* pOutBuffer = NULL;
hr = pOutSample->GetPointer(&pOutBuffer);// output-pin sample buffer
if (pOutBuffer && pData && len <= 1000000)// 1 MB cap matches CPin2::DecideBufferSize
CopyMemory(pOutBuffer, pData, len);
LONGLONG STAR = star - pCFilter->mStart, END = STAR + dur;// rebase to segment start
hr = pOutSample->SetTime(&STAR, &END);// set the sample timestamps
hr = pOutSample->SetActualDataLength(len);// set the valid data length
if (hrA == S_OK)
{
if (CleanPoint)// key frame
hr = pOutSample->SetSyncPoint(TRUE);// mark as sync point
else
hr = pOutSample->SetSyncPoint(FALSE);
}
if (hrB == S_OK)
{
if (Discontinuity)// discontinuity present
{
hr = pOutSample->SetDiscontinuity(TRUE);// mark the discontinuity
}
else
hr = pOutSample->SetDiscontinuity(FALSE);
}
hr = pCFilter->pCPin2->Deliver(pOutSample);// send the sample downstream
pOutSample->Release();// release the output-pin sample
}
pBuffer->Unlock(); SafeRelease(&pBuffer); SafeRelease(&pIMFSample);
hr = pCFilter->pAudioStream->RequestSample(NULL);// request the next sample
}
break;
case MEEndOfStream:// end of stream
hr = pCFilter->pCPin2->DeliverEndOfStream();
break;
case MEStreamStopped:// stream stopped
PropVariantClear(&var);
SafeRelease(&pStreamEvent);
return 1;// end the audio worker thread
}
PropVariantClear(&var);
}
SafeRelease(&pStreamEvent);
}
goto Agan;
}
CPin1.cpp
#include "DLL.h"
// Video pin: remember the owning filter (also used as lock and owner).
CPin1::CPin1(CFilter *pFilter, HRESULT *phr, LPCWSTR pPinName)
    : CBaseOutputPin(NAME("Video"), pFilter, pFilter, phr, pPinName)
    , pCFilter(pFilter)
{
}
CPin1::~CPin1()
{
}
// Both arms of the original if/else were identical — the IID_IQualityControl
// "special case" did nothing. Collapse to a single delegation to the base pin.
STDMETHODIMP CPin1::NonDelegatingQueryInterface(REFIID riid, void **ppv)
{
    return CBasePin::NonDelegatingQueryInterface(riid, ppv);
}
// Accept a proposed connection type iff it partially matches the file's video type.
// Fixes: guard against a null pVideoType (no file loaded, or the file has no
// video stream — previously a crash), check the GetRepresentation result, and
// fold the duplicated FreeRepresentation calls into one exit path.
HRESULT CPin1::CheckMediaType(const CMediaType *pmt)
{
    if (pCFilter->pVideoType == NULL)
        return S_FALSE;// nothing to compare against yet
    AM_MEDIA_TYPE* pMt = NULL;
    // Convert the IMFMediaType into its AM_MEDIA_TYPE representation.
    HRESULT hr = pCFilter->pVideoType->GetRepresentation(AM_MEDIA_TYPE_REPRESENTATION, (void**)&pMt);
    if (FAILED(hr))
        return S_FALSE;
    CMediaType MT(*pMt);
    HRESULT result = pmt->MatchesPartial(&MT) ? S_OK : S_FALSE;
    pCFilter->pVideoType->FreeRepresentation(AM_MEDIA_TYPE_REPRESENTATION, pMt);// free GetRepresentation's memory
    return result;
}
// Offer the file's video type as the pin's only preferred media type.
// Fixes: guard against a null pVideoType (the file may contain no video
// stream even when a file name is set — previously a crash) and check the
// GetRepresentation result before dereferencing its output.
HRESULT CPin1::GetMediaType(int iPosition, CMediaType *pmt)
{
    if (pCFilter->m_pFileName == NULL)return S_FALSE;
    if (pCFilter->pVideoType == NULL)return S_FALSE;// no video stream in this file
    if (iPosition == 0)
    {
        AM_MEDIA_TYPE* pMt = NULL;
        HRESULT hr = pCFilter->pVideoType->GetRepresentation(AM_MEDIA_TYPE_REPRESENTATION, (void**)&pMt);// IMFMediaType -> AM_MEDIA_TYPE
        if (FAILED(hr))return S_FALSE;
        pmt->Set(*pMt);
        pCFilter->pVideoType->FreeRepresentation(AM_MEDIA_TYPE_REPRESENTATION, pMt);// free GetRepresentation's memory
        HasSet = TRUE;// remember that the pin type came from us (see SetMediaType)
        return S_OK;
    }
    return S_FALSE;
}
// If the type being set did not come from our own GetMediaType() (HasSet is
// still FALSE), adopt our preferred type into m_mt instead of the proposed
// one; otherwise defer to the base-class bookkeeping.
HRESULT CPin1::SetMediaType(const CMediaType *pmt)
{
if (HasSet == FALSE)// GetMediaType() has not been called for this connection
{
GetMediaType(0, &m_mt);// install our own preferred type as the pin's media type
return S_OK;
}
return CBasePin::SetMediaType(pmt);
}
// Connection torn down: forget the negotiated type so the next connect renegotiates.
HRESULT CPin1::BreakConnect()
{
    HasSet = FALSE;
    return CBasePin::BreakConnect();
}
// Negotiate the output allocator: one 10 MB buffer. The size must stay in
// sync with the 10000000-byte copy guard in the video worker thread.
HRESULT CPin1::DecideBufferSize(IMemAllocator *pMemAllocator, ALLOCATOR_PROPERTIES * ppropInputRequest)
{
    ppropInputRequest->cBuffers = 1;// a single buffer
    ppropInputRequest->cbBuffer = 10000000;// 10 MB
    ALLOCATOR_PROPERTIES Actual;
    const HRESULT hr = pMemAllocator->SetProperties(ppropInputRequest, &Actual);
    if (FAILED(hr))
        return hr;
    if (Actual.cbBuffer < ppropInputRequest->cbBuffer)
        return E_FAIL;// the allocator could not satisfy our size request
    ASSERT(Actual.cBuffers == 1);// we expect exactly one buffer
    return S_OK;
}
CPin2.cpp
#include "DLL.h"
// Audio pin: remember the owning filter (also used as lock and owner).
CPin2::CPin2(CFilter *pFilter, HRESULT *phr, LPCWSTR pPinName)
    : CBaseOutputPin(NAME("Audio"), pFilter, pFilter, phr, pPinName)
    , pCFilter(pFilter)
{
}
CPin2::~CPin2()
{
}
// The audio pin exposes IMediaSeeking; everything else comes from the base output pin.
STDMETHODIMP CPin2::NonDelegatingQueryInterface(REFIID riid, void **ppv)
{
    if (riid != IID_IMediaSeeking)
        return CBaseOutputPin::NonDelegatingQueryInterface(riid, ppv);
    return GetInterface(static_cast<IMediaSeeking*>(this), ppv);
}
// Accept a proposed connection type iff it partially matches the file's audio type.
// Fixes (mirror of CPin1): guard against a null pAudioType (no file loaded, or
// the file has no audio stream — previously a crash), check the
// GetRepresentation result, single FreeRepresentation exit path.
HRESULT CPin2::CheckMediaType(const CMediaType *pmt)
{
    if (pCFilter->pAudioType == NULL)
        return S_FALSE;// nothing to compare against yet
    AM_MEDIA_TYPE* pMt = NULL;
    // Convert the IMFMediaType into its AM_MEDIA_TYPE representation.
    HRESULT hr = pCFilter->pAudioType->GetRepresentation(AM_MEDIA_TYPE_REPRESENTATION, (void**)&pMt);
    if (FAILED(hr))
        return S_FALSE;
    CMediaType MT(*pMt);
    HRESULT result = pmt->MatchesPartial(&MT) ? S_OK : S_FALSE;
    pCFilter->pAudioType->FreeRepresentation(AM_MEDIA_TYPE_REPRESENTATION, pMt);// free GetRepresentation's memory
    return result;
}
// Offer the file's audio type as the pin's only preferred media type.
// Fixes (mirror of CPin1): guard against a null pAudioType (the file may
// contain no audio stream — previously a crash) and check the
// GetRepresentation result before dereferencing its output.
HRESULT CPin2::GetMediaType(int iPosition, CMediaType *pmt)
{
    if (pCFilter->m_pFileName == NULL)return S_FALSE;
    if (pCFilter->pAudioType == NULL)return S_FALSE;// no audio stream in this file
    if (iPosition == 0)
    {
        AM_MEDIA_TYPE* pMt = NULL;
        HRESULT hr = pCFilter->pAudioType->GetRepresentation(AM_MEDIA_TYPE_REPRESENTATION, (void**)&pMt);// IMFMediaType -> AM_MEDIA_TYPE
        if (FAILED(hr))return S_FALSE;
        pmt->Set(*pMt);
        pCFilter->pAudioType->FreeRepresentation(AM_MEDIA_TYPE_REPRESENTATION, pMt);// free GetRepresentation's memory
        HasSet = TRUE;// remember that the pin type came from us (see SetMediaType)
        return S_OK;
    }
    return S_FALSE;
}
// If the type being set did not come from our own GetMediaType() (HasSet is
// still FALSE), adopt our preferred type into m_mt instead of the proposed
// one; otherwise defer to the base-class bookkeeping.
HRESULT CPin2::SetMediaType(const CMediaType *pmt)
{
if (HasSet == FALSE)// GetMediaType() has not been called for this connection
{
GetMediaType(0, &m_mt);// install our own preferred type as the pin's media type
return S_OK;
}
return CBasePin::SetMediaType(pmt);
}
// Connection torn down: forget the negotiated type so the next connect renegotiates.
HRESULT CPin2::BreakConnect()
{
    HasSet = FALSE;
    return CBasePin::BreakConnect();
}
// Negotiate the output allocator: one 1 MB buffer. The size must stay in
// sync with the 1000000-byte copy guard in the audio worker thread.
HRESULT CPin2::DecideBufferSize(IMemAllocator *pMemAllocator, ALLOCATOR_PROPERTIES * ppropInputRequest)
{
    ppropInputRequest->cBuffers = 1;// a single buffer
    ppropInputRequest->cbBuffer = 1000000;// 1 MB
    ALLOCATOR_PROPERTIES Actual;
    const HRESULT hr = pMemAllocator->SetProperties(ppropInputRequest, &Actual);
    if (FAILED(hr))
        return hr;
    if (Actual.cbBuffer < ppropInputRequest->cbBuffer)
        return E_FAIL;// the allocator could not satisfy our size request
    ASSERT(Actual.cBuffers == 1);// we expect exactly one buffer
    return S_OK;
}
// S_OK if every requested seeking capability is supported, S_FALSE otherwise
// (same contract as CSourceSeeking in the DirectShow base classes).
HRESULT STDMETHODCALLTYPE CPin2::CheckCapabilities(DWORD *pCapabilities)
{
    CheckPointer(pCapabilities, E_POINTER);
    const DWORD unsupported = *pCapabilities & ~m_dwSeekingCaps;
    return unsupported ? S_FALSE : S_OK;
}
// Only media time (100-ns units) is supported, so a valid "conversion" is the identity.
// A null format pointer means "the current format", which is also media time.
HRESULT STDMETHODCALLTYPE CPin2::ConvertTimeFormat(LONGLONG *pTarget, const GUID *pTargetFormat, LONGLONG Source, const GUID *pSourceFormat)
{
    CheckPointer(pTarget, E_POINTER);
    const bool targetIsMediaTime = (pTargetFormat == 0) || (*pTargetFormat == TIME_FORMAT_MEDIA_TIME);
    const bool sourceIsMediaTime = (pSourceFormat == 0) || (*pSourceFormat == TIME_FORMAT_MEDIA_TIME);
    if (!targetIsMediaTime || !sourceIsMediaTime)
        return E_INVALIDARG;
    *pTarget = Source;
    return S_OK;
}
// The whole file is seekable: [0, duration]. Either out-param may be null.
HRESULT STDMETHODCALLTYPE CPin2::GetAvailable(LONGLONG *pEarliest, LONGLONG *pLatest)
{
    if (pEarliest != NULL)
        *pEarliest = 0;
    if (pLatest != NULL)
    {
        CAutoLock lock(m_pLock);
        *pLatest = pCFilter->DUR;
    }
    return S_OK;
}
// Report every seeking capability this stream supports.
HRESULT STDMETHODCALLTYPE CPin2::GetCapabilities(DWORD *pCapabilities)
{
    CheckPointer(pCapabilities, E_POINTER);
    const DWORD caps = m_dwSeekingCaps;
    *pCapabilities = caps;
    return S_OK;
}
// Current playback position = timestamp of the last audio sample (CFilter::CUR).
// Fix: add the CheckPointer guard — this was the only IMediaSeeking getter
// that dereferenced its out-parameter without a null check.
HRESULT STDMETHODCALLTYPE CPin2::GetCurrentPosition(LONGLONG *pCurrent)
{
    CheckPointer(pCurrent, E_POINTER);
    *pCurrent = pCFilter->CUR;
    return S_OK;
}
// Total duration of the loaded file, 100-ns units (filled in by CFilter::GetMediaType).
HRESULT STDMETHODCALLTYPE CPin2::GetDuration(LONGLONG *pDuration)
{
    CheckPointer(pDuration, E_POINTER);
    CAutoLock lock(m_pLock);
    const LONGLONG duration = pCFilter->DUR;
    *pDuration = duration;
    return S_OK;
}
// Current position (last audio sample time) and stop position (always the file duration).
HRESULT STDMETHODCALLTYPE CPin2::GetPositions(LONGLONG *pCurrent, LONGLONG *pStop)
{
    CheckPointer(pCurrent, E_POINTER);
    CheckPointer(pStop, E_POINTER);
    *pCurrent = pCFilter->CUR;
    *pStop = pCFilter->DUR;
    return S_OK;
}
// No data is queued ahead of the start position: preroll is always zero.
HRESULT STDMETHODCALLTYPE CPin2::GetPreroll(LONGLONG *pllPreroll)
{
    CheckPointer(pllPreroll, E_POINTER);
    *pllPreroll = 0LL;
    return S_OK;
}
// Playback rate is fixed at 1.0 (SetRate rejects anything else).
HRESULT STDMETHODCALLTYPE CPin2::GetRate(double *pdRate)
{
    CheckPointer(pdRate, E_POINTER);
    CAutoLock lock(m_pLock);
    const double rate = 1.0;
    *pdRate = rate;
    return S_OK;
}
// Playback always stops at the end of the file, so the stop position is the duration.
HRESULT STDMETHODCALLTYPE CPin2::GetStopPosition(LONGLONG *pStop)
{
    CheckPointer(pStop, E_POINTER);
    CAutoLock lock(m_pLock);
    const LONGLONG stop = pCFilter->DUR;
    *pStop = stop;
    return S_OK;
}
// Seeking always operates in media time (100-ns reference-time units).
HRESULT STDMETHODCALLTYPE CPin2::GetTimeFormat(GUID *pFormat)
{
    CheckPointer(pFormat, E_POINTER);
    *pFormat = TIME_FORMAT_MEDIA_TIME;
    return S_OK;
}
// Only the media-time format is supported for seeking.
HRESULT STDMETHODCALLTYPE CPin2::IsFormatSupported(const GUID *pFormat)
{
    CheckPointer(pFormat, E_POINTER);
    if (*pFormat == TIME_FORMAT_MEDIA_TIME)
        return S_OK;
    return S_FALSE;
}
// Media time is the only format in use, so the check mirrors IsFormatSupported.
HRESULT STDMETHODCALLTYPE CPin2::IsUsingTimeFormat(const GUID *pFormat)
{
    CheckPointer(pFormat, E_POINTER);
    if (*pFormat == TIME_FORMAT_MEDIA_TIME)
        return S_OK;
    return S_FALSE;
}
// The preferred (and only) seeking time format is media time.
HRESULT STDMETHODCALLTYPE CPin2::QueryPreferredFormat(GUID *pFormat)
{
    CheckPointer(pFormat, E_POINTER);
    *pFormat = TIME_FORMAT_MEDIA_TIME;
    return S_OK;
}
// Seek: only absolute positioning within [0, DUR] is honored, and only while
// the graph is running/paused. The sequence (flush both pins, pause for the
// downstream to drain, new segment, then restart the source at the new
// position) is order-sensitive: restarting the source makes it emit
// MEUpdatedStream, which the media-source thread handles without spawning new
// worker threads. The media source snaps to the nearest key frame itself.
// NOTE(review): the stop position (pStop/dwStopFlags) is ignored, and seeks to
// *pCurrent == 0 or into the last second of the file return E_INVALIDARG —
// confirm both are intentional.
HRESULT STDMETHODCALLTYPE CPin2::SetPositions(LONGLONG *pCurrent, DWORD dwCurrentFlags, LONGLONG *pStop, DWORD dwStopFlags)
{
CheckPointer(pCurrent, E_POINTER);
DWORD dwCurrentPos = dwCurrentFlags & AM_SEEKING_PositioningBitsMask;
if (dwCurrentPos == AM_SEEKING_AbsolutePositioning && *pCurrent >= 0 && *pCurrent <= pCFilter->DUR)
{
HRESULT hr;
FILTER_STATE fs;
hr = pCFilter->GetState(0, &fs);
if (fs != State_Stopped && *pCurrent && *pCurrent<pCFilter->DUR - 10000000)
{
hr = pCFilter->pIMFMediaSource->Pause();// pause the media source
hr = pCFilter->pCPin1->DeliverBeginFlush();
hr = pCFilter->pCPin2->DeliverBeginFlush();
Sleep(200);// let downstream decoders drop queued samples; leftovers would make the renderer block on their old timestamps and freeze the video
hr = pCFilter->pCPin1->DeliverEndFlush();
hr = pCFilter->pCPin2->DeliverEndFlush();
hr = pCFilter->pCPin1->DeliverNewSegment(0, pCFilter->DUR - pCFilter->mStart, 1.0);
hr = pCFilter->pCPin2->DeliverNewSegment(0, pCFilter->DUR - pCFilter->mStart, 1.0);
pCFilter->mStart = *pCurrent;// new segment start; worker threads rebase timestamps against it
PROPVARIANT var;
PropVariantInit(&var);
var.vt = VT_I8;
var.hVal.QuadPart = pCFilter->mStart;
hr = pCFilter->pIMFMediaSource->Start(pCFilter->pISourceD, NULL, &var);// restart the source at the new position
PropVariantClear(&var);
return S_OK;
}
}
return E_INVALIDARG;
}
// Only normal-speed (1x) playback is supported.
HRESULT STDMETHODCALLTYPE CPin2::SetRate(double dRate)
{
    return (dRate == 1.0) ? S_OK : S_FALSE;
}
// Media time is the only settable seeking format; anything else is invalid.
HRESULT STDMETHODCALLTYPE CPin2::SetTimeFormat(const GUID *pFormat)
{
    CheckPointer(pFormat, E_POINTER);
    if (*pFormat == TIME_FORMAT_MEDIA_TIME)
        return S_OK;
    return E_INVALIDARG;
}
下载本过滤器DLL