Add OpenGL rendering for the camera's RGB32 format
parent e1cab9a1ea
commit 2bc06e7dc3
@ -4,13 +4,64 @@
#include <QMouseEvent>
#include "CPlayWidget.h"

void CPlayWidget::OnUpdateFrame()
{

// Vertex shader source
const char *vsrcyuv = "attribute vec4 vertexIn; \
    attribute vec2 textureIn; \
    varying vec2 textureOut; \
    void main(void) \
    { \
        gl_Position = vertexIn; \
        textureOut = textureIn; \
    }";
// Fragment shader source (YUV -> RGB on the GPU)
const char *fsrcyuv = "varying vec2 textureOut; \
    uniform sampler2D tex_y; \
    uniform sampler2D tex_u; \
    uniform sampler2D tex_v; \
    void main(void) \
    { \
        vec3 yuv; \
        vec3 rgb; \
        yuv.x = texture2D(tex_y, textureOut).r; \
        yuv.y = texture2D(tex_u, textureOut).r - 0.5; \
        yuv.z = texture2D(tex_v, textureOut).r - 0.5; \
        rgb = mat3( 1, 1, 1, \
                    0, -0.39465, 2.03211, \
                    1.13983, -0.58060, 0) * yuv; \
        gl_FragColor = vec4(rgb, 1); \
    }";
// RGB fragment shader source
const char *fsrcrgb = "in vec3 ourColor; \
    in vec2 TexCoord; \
    out vec4 color; \
    uniform sampler2D ourTexture1; \
    uniform sampler2D ourTexture2; \
    uniform float mixValue; \
    void main() \
    { \
        color = texture(ourTexture1, TexCoord); \
    }";

void CPlayWidget::OnUpdateFrame() {
    this->PlayOneFrame();
}

CPlayWidget::CPlayWidget(QWidget *parent):QOpenGLWidget(parent)
void CPlayWidget::OnPaintData(const uint8_t *data, uint32_t len)
{
    if(nullptr == m_pBufYuv420p)
    {
        m_pBufYuv420p = new unsigned char[len];
        qDebug("CPlayWidget::PlayOneFrame new data memory. Len=%d width=%d height=%d\n",
               len, m_nVideoW, m_nVideoH);
        memcpy(m_pBufYuv420p, data, len);
        // Refresh the widget; this triggers paintGL()
        update();
    }
}

CPlayWidget::CPlayWidget(QWidget *parent):QOpenGLWidget(parent) {
    textureUniformY = 0;
    textureUniformU = 0;
    textureUniformV = 0;
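A note on the two shader dialects above: vsrcyuv and fsrcyuv are written in the legacy attribute/varying/texture2D style, while fsrcrgb uses the modern in/out/texture() style, so the two fragment shaders cannot simply be swapped behind the same vertex shader on every driver. For comparison, a minimal sketch of an RGB fragment shader in the same legacy dialect as the YUV pair; the sampler name tex_rgb is an assumption for illustration and does not appear in this commit.

    // Sketch only: a legacy-dialect fragment shader for RGB32 frames.
    // "tex_rgb" is a hypothetical sampler name, not part of the original code.
    const char *fsrcrgb_legacy = "varying vec2 textureOut; \
        uniform sampler2D tex_rgb; \
        void main(void) \
        { \
            gl_FragColor = texture2D(tex_rgb, textureOut); \
        }";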
@ -27,15 +78,14 @@ CPlayWidget::CPlayWidget(QWidget *parent):QOpenGLWidget(parent)
    m_pYuvFile = NULL;
    m_nVideoH = 0;
    m_nVideoW = 0;
    mType = TYPE_YUV420P;
    connect(&this->tm,SIGNAL(timeout()),this,SLOT(OnUpdateFrame()));

    tm.start(1000);
}
CPlayWidget::~CPlayWidget()
{
CPlayWidget::~CPlayWidget() {
}
void CPlayWidget::PlayOneFrame()
{// Reads one YUV image from the file and displays it; each call shows one frame
void CPlayWidget::PlayOneFrame() {// Reads one YUV image from the file and displays it; each call shows one frame
    if(NULL == m_pYuvFile)
    {
        // Open the YUV video file; adjust the file path as needed

@ -66,6 +116,13 @@ void CPlayWidget::PlayOneFrame()
    update();
    return;
}

int CPlayWidget::SetDataType(CPlayWidget::IMG_TYPE type)
{
    this->mType = type;
    return 0;
}


/*

 * Y = 0.299 R + 0.587 G + 0.114 B
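The truncated comment above lists the forward weights for Y; the mat3 in the fragment shader applies the matching inverse. Written out on the CPU for reference, the same per-pixel mapping looks roughly like the sketch below (the names are illustrative and the inputs are assumed to be normalized to [0, 1]).

    // Sketch: the YUV -> RGB mapping used by the fragment shader, evaluated on the CPU.
    struct RgbF { float r, g, b; };

    static RgbF YuvToRgb(float y, float u, float v) {
        u -= 0.5f;                              // the shader subtracts 0.5 from U and V
        v -= 0.5f;
        RgbF c;
        c.r = y + 1.13983f * v;
        c.g = y - 0.39465f * u - 0.58060f * v;
        c.b = y + 2.03211f * u;
        return c;
    }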
@ -91,43 +148,21 @@ void CPlayWidget::initializeGL()
    // GLSL is the language all OpenGL shaders are written in; see the GLSL documentation for syntax details
    // Initialize the vertex shader object
    m_pVSHader = new QOpenGLShader(QOpenGLShader::Vertex, this);
    // Vertex shader source
    const char *vsrc = "attribute vec4 vertexIn; \
        attribute vec2 textureIn; \
        varying vec2 textureOut; \
        void main(void) \
        { \
            gl_Position = vertexIn; \
            textureOut = textureIn; \
        }";

    // Compile the vertex shader program
    bool bCompile = m_pVSHader->compileSourceCode(vsrc);
    bool bCompile = m_pVSHader->compileSourceCode(vsrcyuv);
    if(!bCompile)
    {
        // TODO: set an error state
    }
    // Initialize the fragment shader; it converts YUV to RGB on the GPU
    m_pFSHader = new QOpenGLShader(QOpenGLShader::Fragment, this);
    // Fragment shader source
    const char *fsrc = "varying vec2 textureOut; \
        uniform sampler2D tex_y; \
        uniform sampler2D tex_u; \
        uniform sampler2D tex_v; \
        void main(void) \
        { \
            vec3 yuv; \
            vec3 rgb; \
            yuv.x = texture2D(tex_y, textureOut).r; \
            yuv.y = texture2D(tex_u, textureOut).r - 0.5; \
            yuv.z = texture2D(tex_v, textureOut).r - 0.5; \
            rgb = mat3( 1, 1, 1, \
                        0, -0.39465, 2.03211, \
                        1.13983, -0.58060, 0) * yuv; \
            gl_FragColor = vec4(rgb, 1); \
        }";
    // Hand the GLSL source to the compiler to build the shader program
    bCompile = m_pFSHader->compileSourceCode(fsrc);

    bCompile = m_pFSHader->compileSourceCode(fsrcyuv);
    if(!bCompile)
    {
        // TODO: set an error state

    }
#define PROGRAM_VERTEX_ATTRIBUTE 0
#define PROGRAM_TEXCOORD_ATTRIBUTE 1
@ -198,52 +233,58 @@ void CPlayWidget::resizeGL(int w, int h)
}
void CPlayWidget::paintGL()
{
    // Load the Y-plane texture
    // Activate texture unit GL_TEXTURE0
    glActiveTexture(GL_TEXTURE0);
    // Generate a texture from the Y-plane data
    glBindTexture(GL_TEXTURE_2D, id_y);
    // Create the actual Y texture from the m_pBufYuv420p buffer in memory
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, m_nVideoW, m_nVideoH, 0, GL_RED, GL_UNSIGNED_BYTE, m_pBufYuv420p);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    // Load the U-plane texture
    glActiveTexture(GL_TEXTURE1); // activate texture unit GL_TEXTURE1
    glBindTexture(GL_TEXTURE_2D, id_u);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, m_nVideoW/2,
                 m_nVideoH/2,
                 0,
                 GL_RED,
                 GL_UNSIGNED_BYTE,
                 (char*)m_pBufYuv420p+m_nVideoW*m_nVideoH);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    // Load the V-plane texture
    glActiveTexture(GL_TEXTURE2); // activate texture unit GL_TEXTURE2
    glBindTexture(GL_TEXTURE_2D, id_v);
    glTexImage2D(GL_TEXTURE_2D,
                 0, GL_RED,
                 m_nVideoW/2,
                 m_nVideoH/2,
                 0, GL_RED,
                 GL_UNSIGNED_BYTE,
                 (char*)m_pBufYuv420p+m_nVideoW*m_nVideoH*5/4);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    // Point the Y sampler at its texture unit; only the bare indices 0, 1, 2 can be used here,
    // which is one of OpenGL's less friendly corners
    // 0 maps to texture unit GL_TEXTURE0, 1 to GL_TEXTURE1, 2 to GL_TEXTURE2
    glUniform1i(textureUniformY, 0);
    // Point the U sampler at its unit
    glUniform1i(textureUniformU, 1);
    // Point the V sampler at its unit
    glUniform1i(textureUniformV, 2);
    loadYuvTexture();
    // Draw the quad from the vertex arrays
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    return;
}

int CPlayWidget::loadYuvTexture()
{
    // Load the Y-plane texture
    // Activate texture unit GL_TEXTURE0
    glActiveTexture(GL_TEXTURE0);
    // Generate a texture from the Y-plane data
    glBindTexture(GL_TEXTURE_2D, id_y);
    // Create the actual Y texture from the m_pBufYuv420p buffer in memory
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, m_nVideoW, m_nVideoH, 0, GL_RED, GL_UNSIGNED_BYTE, m_pBufYuv420p);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    // Load the U-plane texture
    glActiveTexture(GL_TEXTURE1); // activate texture unit GL_TEXTURE1
    glBindTexture(GL_TEXTURE_2D, id_u);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, m_nVideoW/2,
                 m_nVideoH/2,
                 0,
                 GL_RED,
                 GL_UNSIGNED_BYTE,
                 (char*)m_pBufYuv420p+m_nVideoW*m_nVideoH);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    // Load the V-plane texture
    glActiveTexture(GL_TEXTURE2); // activate texture unit GL_TEXTURE2
    glBindTexture(GL_TEXTURE_2D, id_v);
    glTexImage2D(GL_TEXTURE_2D,
                 0, GL_RED,
                 m_nVideoW/2,
                 m_nVideoH/2,
                 0, GL_RED,
                 GL_UNSIGNED_BYTE,
                 (char*)m_pBufYuv420p+m_nVideoW*m_nVideoH*5/4);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    // Point the Y sampler at its texture unit; only the bare indices 0, 1, 2 can be used here,
    // which is one of OpenGL's less friendly corners
    // 0 maps to texture unit GL_TEXTURE0, 1 to GL_TEXTURE1, 2 to GL_TEXTURE2
    glUniform1i(textureUniformY, 0);
    // Point the U sampler at its unit
    glUniform1i(textureUniformU, 1);
    // Point the V sampler at its unit
    glUniform1i(textureUniformV, 2);
    return 0;
}
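The header below declares loadRgbTexture() next to loadYuvTexture(), but its body is not part of this diff. A minimal sketch of what the RGB32 upload could look like, assuming the frame arrives as packed 32-bit BGRA (the usual DirectShow RGB32 layout), reusing the existing frame buffer, and using the textureUnifromRGB location declared in the header; the texture id id_rgb is a hypothetical extra member.

    // Sketch only: upload one RGB32 (BGRA) frame as a single texture.
    // "id_rgb" is a hypothetical texture id; textureUnifromRGB comes from the header below.
    int CPlayWidget::loadRgbTexture()
    {
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, id_rgb);
        // RGB32 frames are 4 bytes per pixel; on desktop OpenGL GL_BGRA matches the byte order.
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, m_nVideoW, m_nVideoH,
                     0, GL_BGRA, GL_UNSIGNED_BYTE, m_pBufYuv420p);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        glUniform1i(textureUnifromRGB, 0);       // the sampler reads from texture unit 0
        return 0;
    }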
@ -5,6 +5,7 @@
#include <QOpenGLFunctions>
#include <QOpenGLTexture>
#include <QFile>
#include "media/CameraCapture.h"

#include <QTimer>
@ -15,19 +16,31 @@ class CPlayWidget:public QOpenGLWidget,protected QOpenGLFunctions
    Q_OBJECT
public slots:
    void OnUpdateFrame();
    void OnPaintData(const uint8_t *data,uint32_t len);
public:
    typedef enum{
        TYPE_YUV420P,
        TYPE_RGB32,
    }IMG_TYPE;
    CPlayWidget(QWidget* parent);
    ~CPlayWidget();
    void PlayOneFrame();
    int SetDataType(IMG_TYPE);
protected:
    QTimer tm;
    void initializeGL() Q_DECL_OVERRIDE;
    void resizeGL(int w, int h) Q_DECL_OVERRIDE;
    void paintGL() Q_DECL_OVERRIDE;
private:
    IMG_TYPE mType;            // only RGB32 and YUV420P are supported for now
    GLuint textureUniformY;    // location of the Y sampler uniform
    GLuint textureUniformU;    // location of the U sampler uniform
    GLuint textureUniformV;    // location of the V sampler uniform

    GLuint textureUnifromRGB;  // location of the RGB32 sampler uniform

    GLuint id_y;
    GLuint id_u;
    GLuint id_v;               // V texture object id

@ -41,5 +54,8 @@ private:
    int m_nVideoH;             // video height in pixels
    unsigned char* m_pBufYuv420p;
    FILE* m_pYuvFile;

    int loadYuvTexture();
    int loadRgbTexture();
};
#endif
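How paintGL() chooses between the two upload paths declared in the header above is not shown in this commit; one plausible shape, sketched only.

    // Sketch only: dispatch on the configured frame format before drawing.
    void CPlayWidget::paintGL()
    {
        if (mType == TYPE_RGB32)
            loadRgbTexture();    // hypothetical body, sketched earlier
        else
            loadYuvTexture();
        glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    }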
@ -11,10 +11,9 @@ void CameraDataCallback(double ts, BYTE *dat, LONG size){
int main(int argc, char *argv[])
{
    QApplication a(argc, argv);
    CPlayWidget gPlayer(nullptr);

    CPlayWidget x(nullptr);
    x.show();
    x.PlayOneFrame();
    gPlayer.show();

    Camera *gCam = Camera::GetInstance();
    std::vector<std::wstring> names = gCam->EnumAllCamera();
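The hunk context above shows a free function CameraDataCallback(double ts, BYTE *dat, LONG size), and CameraCapture.cpp below shows Camera::SetCallBack() accepting exactly that signature. One way the pieces might be wired together, sketched under the assumption of a file-scope widget pointer g_player that is not part of this commit.

    // Sketch only: forward DirectShow frames into the widget.
    static CPlayWidget *g_player = nullptr;      // hypothetical file-scope pointer

    void CameraDataCallback(double ts, BYTE *dat, LONG size) {
        if (g_player != nullptr)
            g_player->OnPaintData(reinterpret_cast<const uint8_t *>(dat),
                                  static_cast<uint32_t>(size));
    }

    // In main(), once the widget exists:
    //     g_player = &gPlayer;
    //     gCam->SetCallBack(CameraDataCallback);
    //     gCam->Open(names[0]);   // assuming at least one device was enumerated

Because BufferCB runs on a DirectShow worker thread, a production version would marshal the call onto the GUI thread (for example via a queued signal) before touching the widget.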
@ -2,7 +2,6 @@
#include<iostream>

#pragma comment(lib, "strmiids")

// define release macro
#define ReleaseInterface(x) \
    if ( NULL != x ) \

@ -23,7 +22,8 @@ Camera::Camera():
    mMediaControl(NULL),
    mMediaEvent(NULL),
    mSampGrabber(NULL),
    mIsVideoOpened(false)
    mIsVideoOpened(false),
    mDebug(false)
{
    //HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
    //if (SUCCEEDED(hr))

@ -39,8 +39,7 @@ Camera::~Camera()
    CoUninitialize();
}

HRESULT Camera::InitializeEnv()
{
HRESULT Camera::InitializeEnv() {
    HRESULT hr;

    //Create the filter graph

@ -71,8 +70,7 @@ HRESULT Camera::InitializeEnv()
    return hr;
}

std::vector<std::wstring> Camera::EnumAllCamera(void)
{
std::vector<std::wstring> Camera::EnumAllCamera(void) {
    if (mInitOK == false)
        return std::vector<std::wstring>();


@ -135,8 +133,7 @@ std::vector<std::wstring> Camera::EnumAllCamera(void)
}


HRESULT Camera::BindFilter(int deviceID, IBaseFilter **pBaseFilter)
{
HRESULT Camera::BindFilter(int deviceID, IBaseFilter **pBaseFilter) {
    ICreateDevEnum *pDevEnum;
    IEnumMoniker *pEnumMon;
    IMoniker *pMoniker;

@ -171,18 +168,19 @@ HRESULT Camera::BindFilter(int deviceID, IBaseFilter **pBaseFilter)
    return hr;
}

int Camera::SetObserver(CameraObserver *p)
{
int Camera::SetObserver(CameraObserver *p) {
    return this->mSampleGrabberCB.SetObserver(p);
}

int Camera::RemoveObserver(CameraObserver * p)
{
    return this->mSampleGrabberCB.RemoveObserver(p);
int Camera::RemoveObserver(CameraObserver * p) {
    return this->mSampleGrabberCB.RemoveObserver(p);
}

int Camera::SampleGrabberCallback::SetObserver(CameraObserver *p)
{
void Camera::SetDebug(bool isDebug) {
    mDebug = isDebug;
}

int Camera::SampleGrabberCallback::SetObserver(CameraObserver *p) {
    if (nullptr == p)
        return -1;
    mMux.lock();

@ -210,7 +208,6 @@ int Camera::SampleGrabberCallback::RemoveObserver(CameraObserver * p)
    mObserver.erase(itrDel);
    mMux.unlock();
    return 0;
    return 0;
}



@ -344,8 +341,7 @@ bool Camera::Open(std::wstring &camera_name)
    return true;
}

bool Camera::Close()
{
bool Camera::Close() {
    if (mMediaControl)
    {
        mMediaControl->Stop();

@ -366,23 +362,19 @@ bool Camera::Close()
    return true;
}

void Camera::SetCallBack(std::function<void(double, BYTE*, LONG)> f)
{
void Camera::SetCallBack(std::function<void(double, BYTE*, LONG)> f) {
    mFrameCallBack = f;
}

ULONG STDMETHODCALLTYPE Camera::SampleGrabberCallback::AddRef()
{
ULONG STDMETHODCALLTYPE Camera::SampleGrabberCallback::AddRef() {
    return 1;
}

ULONG STDMETHODCALLTYPE Camera::SampleGrabberCallback::Release()
{
ULONG STDMETHODCALLTYPE Camera::SampleGrabberCallback::Release() {
    return 2;
}

HRESULT STDMETHODCALLTYPE Camera::SampleGrabberCallback::QueryInterface(REFIID riid, void** ppvObject)
{
HRESULT STDMETHODCALLTYPE Camera::SampleGrabberCallback::QueryInterface(REFIID riid, void** ppvObject) {
    if (NULL == ppvObject) return E_POINTER;
    if (riid == __uuidof(IUnknown))
    {

@ -397,14 +389,18 @@ HRESULT STDMETHODCALLTYPE Camera::SampleGrabberCallback::QueryInterface(REFIID r
    return E_NOTIMPL;
}

HRESULT STDMETHODCALLTYPE Camera::SampleGrabberCallback::SampleCB(double Time, IMediaSample *pSample)
{
HRESULT STDMETHODCALLTYPE Camera::SampleGrabberCallback::SampleCB(double Time, IMediaSample *pSample) {
    return E_NOTIMPL;
}

HRESULT STDMETHODCALLTYPE Camera::SampleGrabberCallback::BufferCB(double Time, BYTE * pBuffer, long BufferLen)
{
    //Debuger::Debug(L"receieve %d \r\n", BufferLen);
#ifdef DEBUG_CAMERA
    static FILE *p = fopen("camera_test.yuv","wb+");
    fwrite(pBuffer,BufferLen,1,p);
    fflush(p);
#endif
    if (mNewDataCallBack)
    {
        mNewDataCallBack(Time, pBuffer, BufferLen);
@ -61,6 +61,7 @@ private:
    HRESULT InitializeEnv();
    HRESULT BindFilter(int deviceID, IBaseFilter **pBaseFilter);
    GUID mMediaType;
    bool mDebug;
public:
    int SetObserver(CameraObserver *);
    int RemoveObserver(CameraObserver *p);

@ -72,6 +73,7 @@ public:
        instance = new Camera();
        return instance;
    }
    void SetDebug(bool);
    std::vector<std::wstring> EnumAllCamera(void);
    GUID mPixFmt;
    bool Open(std::wstring &camera_name);
@ -0,0 +1,177 @@
#include <stdio.h>
#include <stdlib.h>
#include <string.h>   /* memset / memcpy */
#include <unistd.h>
#include <sys/stat.h>
#include <fcntl.h>

#define MAX_LEN (1*1024*1024)
#define POSITIVE_HEIGHT (1)

/* 12 bytes */
typedef struct                      /**** BMP file header structure ****/
{
    unsigned int   bfSize;          /* Size of file */
    unsigned short bfReserved1;     /* Reserved */
    unsigned short bfReserved2;     /* ... */
    unsigned int   bfOffBits;       /* Offset to bitmap data */
}BITMAPFILEHEADER;

/* 40 bytes */
typedef struct                      /**** BMP file info structure ****/
{
    unsigned int   biSize;          /* Size of info header */
    int            biWidth;         /* Width of image */
    int            biHeight;        /* Height of image */
    unsigned short biPlanes;        /* Number of color planes */
    unsigned short biBitCount;      /* Number of bits per pixel */
    unsigned int   biCompression;   /* Type of compression to use */
    unsigned int   biSizeImage;     /* Size of image data */
    int            biXPelsPerMeter; /* X pixels per meter */
    int            biYPelsPerMeter; /* Y pixels per meter */
    unsigned int   biClrUsed;       /* Number of colors used */
    unsigned int   biClrImportant;  /* Number of important colors */
}BITMAPINFOHEADER;

int simplest_rgb24_to_bmp(const char* rgb24Path, int w, int h, const char* bmpPath)
{
    int s32Ret = 0;
    int fd_ori = -1;
    int fd_bmp = -1;
    int headerSize = 0;
    int i = 0;    /* loop counter */
    int j = 0;    /* loop counter */
    unsigned char temp = 0;

    unsigned char readBuff[MAX_LEN] = {'\0'};
    memset(readBuff, 0, sizeof(readBuff));

#ifdef POSITIVE_HEIGHT
    unsigned char readBuff4Ph[MAX_LEN] = {'\0'};
    memset(readBuff4Ph, 0, sizeof(readBuff4Ph));
#endif

    char bfType[2] = {'B', 'M'};

    BITMAPFILEHEADER myHead;
    BITMAPINFOHEADER myHeadInfo;
    memset(&myHead, 0, sizeof(myHead));
    memset(&myHeadInfo, 0, sizeof(myHeadInfo));
    printf("sizeof(myHead) = %zu\n", sizeof(myHead));
    printf("sizeof(myHeadInfo) = %zu\n", sizeof(myHeadInfo));

    /* myHead */
    headerSize = sizeof(bfType) + sizeof(myHead) + sizeof(myHeadInfo);
    myHead.bfSize = headerSize + w*h*3;
    myHead.bfOffBits = headerSize;

    /* myHeadInfo */
    myHeadInfo.biSize = sizeof(myHeadInfo);
    myHeadInfo.biWidth = w;

#ifndef POSITIVE_HEIGHT
    myHeadInfo.biHeight = -1 * h;
#else
    myHeadInfo.biHeight = h;
#endif

    myHeadInfo.biPlanes = 1;
    myHeadInfo.biBitCount = 24;
    myHeadInfo.biSizeImage = w*h*3;

    /* open files */
    fd_ori = open(rgb24Path, O_RDONLY);
    if(fd_ori < 0)
    {
        printf("open rgb24 failed!\n");
        return -1;
    }
    printf("open rgb24 success!\n");

    fd_bmp = open(bmpPath, O_WRONLY|O_CREAT|O_TRUNC|O_APPEND, 0777);
    if(fd_bmp < 0)
    {
        printf("open bmp failed!\n");
        close(fd_ori);
        return -1;
    }
    printf("open bmp success!\n");

    /* read */
    memset(readBuff, 0, sizeof(readBuff));
    s32Ret = read(fd_ori, readBuff, sizeof(readBuff));
    if((s32Ret < 0) || (s32Ret != w*h*3))
    {
        printf("read RGB file failed!\n");
        close(fd_bmp);
        close(fd_ori);
        return -1;
    }
    printf("read RGB file success!\n");

    /* change R-G-B to B-G-R */
    for(i = 0; i < (w*h); i++)
    {
        temp = *(readBuff + i*3);
        *(readBuff + i*3) = *(readBuff + i*3 + 2);
        *(readBuff + i*3 + 2) = temp;
    }

    /* positive height storage sequence: left-right, bottom-up */
#ifdef POSITIVE_HEIGHT
    for(i = (h - 1), j = 0; i >= 0; i--, j++)
    {
        memcpy(readBuff4Ph + j*w*3, readBuff + i*w*3, w*3);
    }
#endif

    /* write - 4 parts */
    s32Ret = write(fd_bmp, bfType, sizeof(bfType));
    if(s32Ret < 0)
    {
        printf("write bfType failed!\n");
        close(fd_bmp);
        close(fd_ori);
        return -1;
    }
    s32Ret = write(fd_bmp, &myHead, sizeof(myHead));
    if(s32Ret < 0)
    {
        printf("write myHead failed!\n");
        close(fd_bmp);
        close(fd_ori);
        return -1;
    }
    s32Ret = write(fd_bmp, &myHeadInfo, sizeof(myHeadInfo));
    if(s32Ret < 0)
    {
        printf("write myHeadInfo failed!\n");
        close(fd_bmp);
        close(fd_ori);
        return -1;
    }
#ifdef POSITIVE_HEIGHT
    s32Ret = write(fd_bmp, readBuff4Ph, w*h*3);
    if(s32Ret < 0)
    {
        printf("write readBuff4Ph failed!\n");
        close(fd_bmp);
        close(fd_ori);
        return -1;
    }
    printf("write readBuff4Ph success!\n");
#else
    s32Ret = write(fd_bmp, readBuff, w*h*3);
    if(s32Ret < 0)
    {
        printf("write readBuff failed!\n");
        close(fd_bmp);
        close(fd_ori);
        return -1;
    }
    printf("write readBuff success!\n");
#endif

    close(fd_bmp);
    close(fd_ori);
    return 0;
}
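A short usage sketch for the converter above; the file names and the 640x480 size are placeholders, not values from this commit.

    /* Sketch: dump one captured 640x480 RGB24 frame to a BMP for inspection. */
    int main(void)
    {
        return simplest_rgb24_to_bmp("frame.rgb", 640, 480, "frame.bmp");
    }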
@ -1,8 +1,7 @@
#include "Base64.h"
#include "stdio.h"
#include "string.h"
int DecodeBase64(char * pInput, char * pOutput)
{
int DecodeBase64(char * pInput, char * pOutput) {
    int i = 0;
    int iCnt = 0;
    int iSrcLen = (int)strlen(pInput);
@ -1,21 +1,17 @@
#include "Debuger.h"


Debuger::Debuger()
{
Debuger::Debuger() {
}

Debuger::~Debuger()
{
Debuger::~Debuger() {
}

int Debuger::Debug(wstring log)
{
int Debuger::Debug(wstring log) {
    return 0;
}

int Debuger::Debug(string log)
{
int Debuger::Debug(string log) {
    return 0;
}
@ -32,6 +32,7 @@ SOURCES += \
    cplaywidget.cpp \
    media/AACAudioCoder.cpp \
    media/CameraCapture.cpp \
    media/imgutil.cpp \
    utils/Base64.cpp \
    utils/Debuger.cpp \
    utils/utils.cpp