// multimedia/client/qt_gl_/yuvgl/media/CameraCapture.cpp

#include "CameraCapture.h"
#include <iostream>
#include <cstdio>   // for the optional DEBUG_CAMERA frame dump in BufferCB
#ifdef __MINGW32__
#pragma comment(lib, "strmiids")
#endif
// Macro to safely release a COM interface pointer
#define ReleaseInterface(x) \
if ( nullptr != x ) \
{ \
x->Release( ); \
x = nullptr; \
}
// Application-defined message to notify app of filter graph events
#define WM_GRAPHNOTIFY WM_APP+100
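// DirectShow-based camera capture: builds a capture graph
// (video capture source -> Sample Grabber -> Null Renderer) and delivers
// raw frame buffers through ISampleGrabberCB::BufferCB.
//
// Illustrative usage sketch (not part of this file); it assumes the caller has
// already initialized COM (e.g. via CoInitializeEx), since this class only
// calls CoUninitialize in its destructor:
//
//   Camera cam;
//   std::vector<std::wstring> devices = cam.EnumAllCamera();
//   if (!devices.empty()) {
//       cam.SetCallBack([](double t, BYTE *data, LONG len) { /* consume frame */ });
//       if (cam.Open(devices[0])) {
//           // frames now arrive on the grabber thread via BufferCB
//           cam.Close();
//       }
//   }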
Camera::Camera():
mInitOK(false),
mVideoHeight(0),
mVideoWidth(0),
mDevFilter(nullptr),
mCaptureGB(nullptr),
mGraphBuilder(nullptr),
mMediaControl(nullptr),
mMediaEvent(nullptr),
mSampGrabber(nullptr),
mIsVideoOpened(false),
mDebug(false),
mStatus(STOP)
{
}
GUID Camera::MediaType()
{
return mMediaType;
}
Camera::Camera(wstring camera)
{
mInitOK = false;
mVideoHeight = 0;
mVideoWidth = 0;
mDevFilter = nullptr;
mCaptureGB = nullptr;
mGraphBuilder = nullptr;
mMediaControl = nullptr;
mMediaEvent = nullptr;
mSampGrabber = nullptr;
mIsVideoOpened = false;
mDebug = false;
if (!this->Open(camera)) {
mStatus = FAIL;
return; // keep the failure status; do not fall through to STOP
}
mStatus = STOP;
}
Camera::~Camera()
{
Close();
CoUninitialize();
}
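// Create the filter graph and the capture graph builder, wire them together,
// and query the media-control / media-event interfaces used to run the graph.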
HRESULT Camera::InitializeEnv() {
HRESULT hr;
//Create the filter graph
hr = CoCreateInstance(CLSID_FilterGraph, nullptr, CLSCTX_INPROC_SERVER,
IID_IGraphBuilder, (LPVOID*)&mGraphBuilder);
if (FAILED(hr))
return hr;
//Create the capture graph builder
hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, nullptr, CLSCTX_INPROC_SERVER,
IID_ICaptureGraphBuilder2, (LPVOID*)&mCaptureGB);
if (FAILED(hr))
return hr;
//Obtain interfaces for media control and Video Window
hr = mGraphBuilder->QueryInterface(IID_IMediaControl, (LPVOID*)&mMediaControl);
if (FAILED(hr))
return hr;
hr = mGraphBuilder->QueryInterface(IID_IMediaEventEx, (LPVOID*)&mMediaEvent);
if (FAILED(hr))
return hr;
hr = mCaptureGB->SetFiltergraph(mGraphBuilder);
if (FAILED(hr))
return hr;
return hr;
}
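// Enumerate all video input devices registered with the system and return
// their description or friendly names; an empty vector means none were found.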
std::vector<std::wstring> Camera::EnumAllCamera(void) {
std::vector<std::wstring> names;
IEnumMoniker *pEnum = nullptr;
// Create the System Device Enumerator.
ICreateDevEnum *pDevEnum;
HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, nullptr,
CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pDevEnum));
if (SUCCEEDED(hr))
{
// Create an enumerator for the category.
hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnum, 0);
if (hr == S_FALSE)
{
hr = VFW_E_NOT_FOUND; // The category is empty. Treat as an error.
}
pDevEnum->Release();
}
if (!SUCCEEDED(hr))
return std::vector<std::wstring>();
IMoniker *pMoniker = nullptr;
while (pEnum->Next(1, &pMoniker, nullptr) == S_OK)
{
IPropertyBag *pPropBag = nullptr;
VARIANT var;
VariantInit(&var);
HRESULT hr = pMoniker->BindToStorage(0, 0, IID_PPV_ARGS(&pPropBag));
if (FAILED(hr))
{
pMoniker->Release();
continue;
}
// Get description or friendly name.
hr = pPropBag->Read(L"Description", &var, 0);
if (FAILED(hr))
{
hr = pPropBag->Read(L"FriendlyName", &var, 0);
}
if (SUCCEEDED(hr))
{
names.push_back(var.bstrVal);
VariantClear(&var);
}
pPropBag->Release();
pMoniker->Release();
}
pEnum->Release();
return names;
}
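// Bind the moniker of the deviceID-th video input device to an IBaseFilter
// so it can be added to the graph as the capture source.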
HRESULT Camera::BindFilter(int deviceID, IBaseFilter **pBaseFilter) {
ICreateDevEnum *pDevEnum = nullptr;
IEnumMoniker *pEnumMon = nullptr;
IMoniker *pMoniker = nullptr;
HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, nullptr, CLSCTX_INPROC_SERVER,
IID_ICreateDevEnum, (LPVOID*)&pDevEnum);
if (SUCCEEDED(hr))
{
hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumMon, 0);
if (hr != S_OK)
{
// S_FALSE means the category is empty; treat it as "device not found"
if (hr == S_FALSE)
hr = VFW_E_NOT_FOUND;
pDevEnum->Release();
return hr;
}
pEnumMon->Reset();
ULONG cFetched;
int index = 0;
hr = pEnumMon->Next(1, &pMoniker, &cFetched);
while (hr == S_OK && index <= deviceID) {
IPropertyBag *pProBag = nullptr;
hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (LPVOID*)&pProBag);
if (SUCCEEDED(hr)) {
if (index == deviceID) {
pMoniker->BindToObject(0, 0, IID_IBaseFilter, (LPVOID*)pBaseFilter);
}
pProBag->Release();
}
pMoniker->Release();
index++;
hr = pEnumMon->Next(1, &pMoniker, &cFetched);
}
pEnumMon->Release();
pDevEnum->Release();
}
return hr;
return hr;
}
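// Observer registration is delegated to the sample-grabber callback,
// which owns the observer list used in BufferCB.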
int Camera::SetObserver(CameraObserver *p) {
return this->mSampleGrabberCB.SetObserver(p);
}
int Camera::RemoveObserver(CameraObserver * p) {
return this->mSampleGrabberCB.RemoveObserver(p);
}
void Camera::SetDebug(bool isDebug) {
mDebug = isDebug;
}
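// Add an observer if it is not already registered; the list is protected by
// mMux because BufferCB walks it on the streaming thread.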
int Camera::SampleGrabberCallback::SetObserver(CameraObserver *p) {
if (nullptr == p)
return -1;
mMux.lock();
for (auto itr = this->mObserver.begin(); itr != mObserver.end(); itr++) {
if (p == *itr) {
mMux.unlock();
return 0;
}
}
this->mObserver.push_back(p);
mMux.unlock();
return 0;
}
int Camera::SampleGrabberCallback::RemoveObserver(CameraObserver * p)
{
mMux.lock();
bool founded = false;
auto itrDel = this->mObserver.begin();
for (auto itr = this->mObserver.begin(); itr != mObserver.end(); itr++) {
if (p == *itr) {
itrDel = itr;
founded = true;
}
}
if (founded)
mObserver.erase(itrDel);
mMux.unlock();
return 0;
}
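// Open the named camera: build the graph
// (capture source -> Sample Grabber -> Null Renderer), request an RGB
// media type matching the desktop bit depth, read back the connected
// format, install the frame callback, and start the graph.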
bool Camera::Open(std::wstring &camera_name)
{
if (mIsVideoOpened)
return true;
HRESULT hr;
#define CHECK_HR(x) do{ hr = (x); if (FAILED(hr)){ Close(); return false;}}while(0)
CHECK_HR(InitializeEnv());
IBaseFilter *pSampleGrabberFilter = nullptr, *dest_filter = nullptr;
std::vector<std::wstring> names = EnumAllCamera();
if (names.empty())
{
Close();
return false;
}
bool found = false;
int deviceID = 0;
for (size_t i = 0; i < names.size(); i++) {
if (names[i] == camera_name) {
deviceID = static_cast<int>(i); // open the device whose name matched
found = true;
break;
}
}
if (!found) {
Close();
return false;
}
// create grabber filter instance
CHECK_HR(CoCreateInstance(CLSID_SampleGrabber, nullptr, CLSCTX_INPROC_SERVER,
IID_IBaseFilter, (LPVOID*)&pSampleGrabberFilter));
// bind source device
CHECK_HR(BindFilter(deviceID, &mDevFilter));
// add src filter
CHECK_HR(mGraphBuilder->AddFilter(mDevFilter, L"Video Filter"));
// add grabber filter and query interface
CHECK_HR(mGraphBuilder->AddFilter(pSampleGrabberFilter, L"Sample Grabber"));
CHECK_HR(pSampleGrabberFilter->QueryInterface(IID_ISampleGrabber, (LPVOID*)&mSampGrabber));
// find the current bit depth
HDC hdc = GetDC(nullptr);
mBitDepth = GetDeviceCaps(hdc, BITSPIXEL);
ReleaseDC(nullptr, hdc);
// set the media type for grabber filter
AM_MEDIA_TYPE mediaType;
ZeroMemory(&mediaType, sizeof(AM_MEDIA_TYPE));
mediaType.majortype = MEDIATYPE_Video;
switch (mBitDepth)
{
case 8:
mediaType.subtype = MEDIASUBTYPE_RGB8;
break;
case 16:
mediaType.subtype = MEDIASUBTYPE_RGB555;
break;
case 24:
mediaType.subtype = MEDIASUBTYPE_RGB24;
break;
case 32:
mediaType.subtype = MEDIASUBTYPE_RGB32;
break;
default:
Close();
return false;
}
mediaType.formattype = FORMAT_VideoInfo;
CHECK_HR(mSampGrabber->SetMediaType(&mediaType));
// The null renderer simply discards the samples at the end of the graph
CHECK_HR(CoCreateInstance(CLSID_NullRenderer, nullptr, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)(&dest_filter)));
mGraphBuilder->AddFilter(dest_filter, L"nullptrRenderer");
// connect source filter to grabber filter
CHECK_HR(mCaptureGB->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
mDevFilter, pSampleGrabberFilter, dest_filter));
// get connected media type
CHECK_HR(mSampGrabber->GetConnectedMediaType(&mediaType));
VIDEOINFOHEADER * vih = (VIDEOINFOHEADER*)mediaType.pbFormat;
mVideoWidth = vih->bmiHeader.biWidth;
mVideoHeight = vih->bmiHeader.biHeight;
mPixFmt = mediaType.subtype;
mMediaType = mediaType.subtype;
std::cout<<"guid media type is"<<mediaType.subtype.Data1<<" "<<
mediaType.subtype.Data2<<" "<<
mediaType.subtype.Data3<<" "<<
mediaType.subtype.Data4<<" "<<
mVideoWidth<<" "<<mVideoHeight;
// configure grabber filter
CHECK_HR(mSampGrabber->SetOneShot(0));
CHECK_HR(mSampGrabber->SetBufferSamples(0));
// Use the BufferCB callback method
CHECK_HR(mSampGrabber->SetCallback(&mSampleGrabberCB, 1));
mSampleGrabberCB.mNewDataCallBack = mFrameCallBack;
CHECK_HR(mMediaControl->Run());
dest_filter->Release();
pSampleGrabberFilter->Release();
// release resource
if (mediaType.cbFormat != 0)
{
CoTaskMemFree((PVOID)mediaType.pbFormat);
mediaType.cbFormat = 0;
mediaType.pbFormat = nullptr;
}
if (mediaType.pUnk != nullptr)
{
mediaType.pUnk->Release();
mediaType.pUnk = nullptr;
}
mIsVideoOpened = true;
mStatus = RUNNING;
return true;
}
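// Stop the running graph, cancel event notification, and release all
// DirectShow interfaces held by this object.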
bool Camera::Close() {
if (mMediaControl)
{
mMediaControl->Stop();
}
if (mMediaEvent)
{
mMediaEvent->SetNotifyWindow(NULL, WM_GRAPHNOTIFY, 0);
}
mIsVideoOpened = false;
//release interface
ReleaseInterface(mDevFilter);
ReleaseInterface(mCaptureGB);
ReleaseInterface(mGraphBuilder);
ReleaseInterface(mMediaControl);
ReleaseInterface(mMediaEvent);
ReleaseInterface(mSampGrabber);
return true;
}
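// Store the frame callback; it is handed to the sample-grabber callback
// when the graph is opened, so call this before Open().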
void Camera::SetCallBack(std::function<void(double, BYTE*, LONG)> f) {
mFrameCallBack = f;
}
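// The callback object lives inside Camera, so COM reference counting is a
// no-op with fixed return values, as in the usual Sample Grabber examples.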
ULONG STDMETHODCALLTYPE Camera::SampleGrabberCallback::AddRef() {
return 1;
}
ULONG STDMETHODCALLTYPE Camera::SampleGrabberCallback::Release() {
return 2;
}
HRESULT STDMETHODCALLTYPE Camera::SampleGrabberCallback::QueryInterface(REFIID riid, void** ppvObject) {
if (nullptr == ppvObject) return E_POINTER;
if (riid == __uuidof(IUnknown))
{
*ppvObject = static_cast<IUnknown*>(this);
return S_OK;
}
if (riid == IID_ISampleGrabberCB)
{
*ppvObject = static_cast<ISampleGrabberCB*>(this);
return S_OK;
}
*ppvObject = nullptr;
return E_NOINTERFACE; // required by COM rules for unsupported interfaces
}
HRESULT STDMETHODCALLTYPE Camera::SampleGrabberCallback::SampleCB(double Time, IMediaSample *pSample) {
return E_NOTIMPL;
}
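// Called by the Sample Grabber on the streaming thread for every captured
// frame; forwards the buffer to the frame callback and registered observers.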
HRESULT STDMETHODCALLTYPE Camera::SampleGrabberCallback::BufferCB(double Time, BYTE * pBuffer, long BufferLen)
{
#ifdef DEBUG_CAMERA
static FILE *p = fopen("camera_test.yuv","wb+");
fwrite(pBuffer,BufferLen,1,p);
fflush(p);
#endif
// Forward the frame to the optional std::function callback set via SetCallBack()
if (mNewDataCallBack) {
mNewDataCallBack(Time, pBuffer, BufferLen);
}
// Notify registered observers; lock before touching the list since
// observers can be added or removed from other threads
mMux.lock();
for (auto itr = this->mObserver.begin(); itr != mObserver.end(); itr++) {
CameraObserver *observer = *itr;
observer->OnCameraData(pBuffer, BufferLen);
}
mMux.unlock();
return S_OK;
}