no message

master
zcy 2023-11-13 00:13:24 +08:00
commit 833d7dc068
87 changed files with 29049 additions and 0 deletions

82
components/toast.cpp Normal file
View File

@ -0,0 +1,82 @@
#include "toast.h"
#include <QPropertyAnimation>
#include <QScreen>
#include <QGuiApplication>
#include <QPainter>
#include <QTimer>
#include <QDebug>
// Builds the toast as a frameless, translucent tool window.
ToastWidget::ToastWidget(QWidget *parent)
    : QWidget(parent)
{
    ui.setupUi(this);
    setWindowFlags(windowFlags() | Qt::FramelessWindowHint | Qt::Tool);// no border, no taskbar entry
    setAttribute(Qt::WA_TranslucentBackground, true); // transparent background; paintEvent() fills it
}
// Nothing to release explicitly: the members set up by ui.setupUi()
// are owned by the Qt widget tree.
ToastWidget::~ToastWidget()
{
}
// Sets the message displayed by the toast's label.
void ToastWidget::setText(const QString& text)
{
    ui.label->setText(text);
}
void ToastWidget::showAnimation(int timeout /*= 2000*/)
{
// 开始动画
QPropertyAnimation *animation = new QPropertyAnimation(this, "windowOpacity");
animation->setDuration(1000);
animation->setStartValue(0);
animation->setEndValue(1);
animation->start();
show();
QTimer::singleShot(timeout, [&]
{
// 结束动画
QPropertyAnimation *animation = new QPropertyAnimation(this, "windowOpacity");
animation->setDuration(1000);
animation->setStartValue(1);
animation->setEndValue(0);
animation->start();
connect(animation, &QPropertyAnimation::finished, [&]
{
close();
deleteLater();// 关闭后析构
});
});
}
void ToastWidget::showTip(const QString& text, QWidget* parent /*= nullptr*/)
{
ToastWidget* toast = new ToastWidget(parent);
toast->setWindowFlags(toast->windowFlags() | Qt::WindowStaysOnTopHint); // 置顶
toast->setText(text);
toast->setStyleSheet("font:bold;font-size:24px;color:rgb(255,255,255);");
toast->adjustSize(); //设置完文本后调整下大小
// 测试显示位于主屏的70%高度位置
qDebug()<<parent->geometry();
toast->move((parent->geometry().x() + (parent->size().width() - toast->width()) / 2),
parent->geometry().y() + (parent->size().height() * 5 / 10));
toast->showAnimation(100);
}
// Paints a faint translucent backdrop behind the label.
//
// BUG FIX: the original constructed QPainter(this) *and* then called
// paint.begin(this) — a double begin, which Qt reports as an error.
// It also called setAlpha(0.1): QColor::setAlpha takes an int 0-255,
// so 0.1 truncated to 0 and the backdrop was fully invisible. Use the
// ~10% opacity the fractional value apparently intended.
void ToastWidget::paintEvent(QPaintEvent *event)
{
    Q_UNUSED(event);
    QPainter paint(this);
    auto kBackgroundColor = QColor(255, 255, 255);
    kBackgroundColor.setAlpha(25); // ~10% of 255
    paint.setRenderHint(QPainter::Antialiasing, true);
    paint.setPen(Qt::NoPen);
    paint.setBrush(QBrush(kBackgroundColor, Qt::SolidPattern)); // solid fill brush
    paint.drawRect(0, 0, width(), height());
}

31
components/toast.h Normal file
View File

@ -0,0 +1,31 @@
#ifndef TOAST_WIDGET_H
#define TOAST_WIDGET_H
// NOTE: the previous guard name `__TOAST__` used a double underscore,
// which is reserved for the implementation in C++.
#include <QWidget>
#include "ui_toast.h"
/// A lightweight frameless "toast" notification widget: it fades in,
/// stays for a while, fades out, then destroys itself.
class ToastWidget : public QWidget
{
    Q_OBJECT
public:
    ToastWidget(QWidget *parent = Q_NULLPTR);
    ~ToastWidget();
    /// Sets the message text shown by the toast's label.
    void setText(const QString& text);
    /// Shows the toast with a fade-in; after `timeout` ms (default 2 s)
    /// it fades out and deletes itself.
    void showAnimation(int timeout = 2000);
public:
    /// Static convenience: creates a toast over `parent` (primary screen
    /// area is the intended fallback when parent is null) and shows it.
    static void showTip(const QString& text, QWidget* parent = nullptr);
protected:
    /// Draws the translucent rounded backdrop.
    virtual void paintEvent(QPaintEvent *event);
private:
    Ui::Form ui; // generated from toast.ui (label etc.)
};
#endif // TOAST_WIDGET_H

32
components/toast.ui Normal file
View File

@ -0,0 +1,32 @@
<?xml version="1.0" encoding="UTF-8"?>
<ui version="4.0">
<class>Form</class>
<widget class="QWidget" name="Form">
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>932</width>
<height>59</height>
</rect>
</property>
<property name="windowTitle">
<string>Form</string>
</property>
<widget class="QLabel" name="label">
<property name="geometry">
<rect>
<x>170</x>
<y>10</y>
<width>231</width>
<height>31</height>
</rect>
</property>
<property name="text">
<string>TextLabel</string>
</property>
</widget>
</widget>
<resources/>
<connections/>
</ui>

4
conanfile.txt Normal file
View File

@ -0,0 +1,4 @@
[requires]
ffmpeg/4.2.1
[imports]
.,* -> ./third/msvc32 @ folder=True, ignore_case=True, excludes=*.html *.jpeg

422
cplaywidget.cpp Normal file
View File

@ -0,0 +1,422 @@
#include "CPlayWidget.h"
#include <QOpenGLTexture>
#include <QOpenGLBuffer>
#include <QMouseEvent>
#include "CPlayWidget.h"
// 顶点着色器源码
// Vertex shader: passes the position through and forwards the texture
// coordinate to the fragment stage.
const char *vsrcyuv = "attribute vec4 vertexIn; \
attribute vec2 textureIn; \
varying vec2 textureOut; \
void main(void) \
{ \
gl_Position = vertexIn; \
textureOut = textureIn; \
}";
// Fragment shader: samples the three YUV planes and converts to RGB
// (BT.601-style matrix) on the GPU.
const char *fsrcyuv = "varying vec2 textureOut; \
uniform sampler2D tex_y; \
uniform sampler2D tex_u; \
uniform sampler2D tex_v; \
void main(void) \
{ \
vec3 yuv; \
vec3 rgb; \
yuv.x = texture2D(tex_y, textureOut).r; \
yuv.y = texture2D(tex_u, textureOut).r - 0.5; \
yuv.z = texture2D(tex_v, textureOut).r - 0.5; \
rgb = mat3( 1, 1, 1, \
0, -0.39465, 2.03211, \
1.13983, -0.58060, 0) * yuv; \
gl_FragColor = vec4(rgb, 1); \
}";
// RGB fragment shader. Note MEDIASUBTYPE_RGB32 is BGR-ordered; the
// upload path passes GL_BGRA so no swizzle is needed here.
// BUG FIX: this shader used `texture(...)`, a GLSL 1.30+ built-in,
// while the rest of the source is legacy GLSL (`varying`,
// `gl_FragColor`, and `texture2D` in the YUV shader). On compatibility
// or ES2 contexts that mixture fails to compile; use texture2D for
// consistency with fsrcyuv.
const char *fsrcrgb = "varying vec2 textureOut; \
uniform sampler2D rgbdata; \
void main() \
{ \
gl_FragColor = texture2D(rgbdata, textureOut); \
}";
void CPlayWidget::OnUpdateFrame() {
this->PlayOneFrame();
}
void CPlayWidget::OnPaintData(const uint8_t *data, uint32_t len)
{
if(nullptr == m_pBufYuv420p)
{
m_pBufYuv420p = new unsigned char[len];
qDebug("CPlayWidget::PlayOneFrame new data memory. Len=%d width=%d height=%d\n",
len, m_nVideoW, m_nVideoW);
memcpy(m_pBufYuv420p, data,len);
//刷新界面,触发paintGL接口
update();
}
}
// Constructs the widget with all GL handles cleared; YUV420P is the
// default input format. The repaint timer is wired up but not started.
CPlayWidget::CPlayWidget(QWidget *parent) : QOpenGLWidget(parent) {
    // Shader uniform locations and texture ids start out unset.
    textureUniformY = 0;
    textureUniformU = 0;
    textureUniformV = 0;
    id_y = 0;
    id_u = 0;
    id_v = 0;
    // GL wrapper objects are created lazily in initializeGL().
    m_pVSHader = nullptr;
    m_pFSHader = nullptr;
    m_pShaderProgram = nullptr;
    m_pTextureY = nullptr;
    m_pTextureU = nullptr;
    m_pTextureV = nullptr;
    m_pTextureRGB = nullptr;
    // Frame buffers / demo file are allocated on demand.
    m_pBufYuv420p = nullptr;
    m_pYuvFile = nullptr;
    m_nVideoH = 0;
    m_nVideoW = 0;
    mType = TYPE_YUV420P;
    connect(&this->tm, SIGNAL(timeout()), this, SLOT(OnUpdateFrame()));
    //tm.start(1000);
}
// NOTE(review): this destructor leaks m_pBufYuv420p / m_pBufRgb32 and
// never fcloses m_pYuvFile. m_pBufRgb32 in particular cannot be safely
// freed here as-is because it is never null-initialized — deleting an
// indeterminate pointer would be UB. TODO: initialize the members and
// release them here (GL textures additionally need makeCurrent()).
CPlayWidget::~CPlayWidget() {
}
void CPlayWidget::PlayOneFrame() {//函数功能读取一张yuv图像数据进行显示,每单击一次,就显示一张图片
if(NULL == m_pYuvFile)
{
//打开yuv视频文件 注意修改文件路径
// m_pYuvFile = fopen("F://OpenglYuvDemo//1920_1080.yuv", "rb");
m_pYuvFile = fopen("F://md_sample_sp420_1080p.yuv", "rb");
//根据yuv视频数据的分辨率设置宽高,demo当中是1080p这个地方要注意跟实际数据分辨率对应上
// m_nVideoW = 1920;
// m_nVideoH = 1080;
}
//申请内存存一帧yuv图像数据,其大小为分辨率的1.5倍
int nLen = m_nVideoW*m_nVideoH*3/2;
if(nullptr == m_pBufYuv420p)
{
m_pBufYuv420p = new unsigned char[nLen];
qDebug("CPlayWidget::PlayOneFrame new data memory. Len=%d width=%d height=%d\n",
nLen, m_nVideoW, m_nVideoW);
}
//将一帧yuv图像读到内存中
if(NULL == m_pYuvFile)
{
qFatal("read yuv file err.may be path is wrong!\n");
return;
}
fread(m_pBufYuv420p, 1, nLen, m_pYuvFile);
//刷新界面,触发paintGL接口
update();
return;
}
// Selects the input pixel format (must be set before initializeGL runs,
// since the fragment shader is chosen from it). Always returns 0.
int CPlayWidget::SetDataType(CPlayWidget::IMG_TYPE type) {
    mType = type;
    return 0;
}
int CPlayWidget::OnCameraData(uint8_t *dat, uint32_t size)
{
memcpy(this->m_pBufRgb32,dat,size);
update();
return 0;
}
// Stores the video resolution and (re)allocates the frame buffer that
// matches the current input type. Always returns 0.
//
// BUG FIX: repeated calls leaked the previous YUV buffer; it is now
// freed before reallocation.
int CPlayWidget::SetImgSize(uint32_t width, uint32_t height)
{
    m_nVideoH = height;
    m_nVideoW = width;
    if (mType == TYPE_RGB32) {
        // NOTE(review): m_pBufRgb32 is never null-initialized by the
        // constructor, so the old buffer cannot be safely delete[]'d
        // here; repeated RGB32 calls still leak. TODO: init the member.
        m_pBufRgb32 = new uint8_t[width * height * 4]; // 4 bytes/px
    }
    if (mType == TYPE_YUV420P) {
        delete[] m_pBufYuv420p; // safe: null-initialized in the ctor
        m_pBufYuv420p = new uint8_t[width * height * 3 / 2];
    }
    return 0;
}
/*
* Y = 0.299 R + 0.587 G + 0.114 B
U = - 0.1687 R - 0.3313 G + 0.5 B + 128
V = 0.5 R - 0.4187 G - 0.0813 B + 128
RGB YUV (256) :
R = Y + 1.402 (Cr-128)
G = Y - 0.34414 (Cb-128) - 0.71414 (Cr-128)
B = Y + 1.772 (Cb-128)
*/
// Called once by Qt before the first paintGL(): compiles the vertex
// shader plus the fragment shader matching mType, links and binds the
// program, and creates the textures for the selected format.
void CPlayWidget::initializeGL()
{
    initializeOpenGLFunctions();
    glEnable(GL_DEPTH_TEST);
    // The modern OpenGL pipeline is driven by shaders — small GLSL
    // programs run on the GPU. Create and compile the vertex shader.
    m_pVSHader = new QOpenGLShader(QOpenGLShader::Vertex, this);
    bool bCompile = m_pVSHader->compileSourceCode(vsrcyuv);
    if(!bCompile)
    {
        // todo: record the error state
    }
    // Fragment shader: converts YUV to RGB on the GPU (YUV mode) or
    // samples the RGB texture directly (RGB32 mode).
    m_pFSHader = new QOpenGLShader(QOpenGLShader::Fragment, this);
    if(mType == TYPE_RGB32){
        bCompile = m_pFSHader->compileSourceCode(fsrcrgb);
    }
    if(mType == TYPE_YUV420P){
        bCompile = m_pFSHader->compileSourceCode(fsrcyuv);
    }
    if(!bCompile)
    {
        // todo: record the error state
    }
#define PROGRAM_VERTEX_ATTRIBUTE 0
#define PROGRAM_TEXCOORD_ATTRIBUTE 1
    // Create the shader program container and attach both shaders.
    m_pShaderProgram = new QOpenGLShaderProgram;
    m_pShaderProgram->addShader(m_pFSHader);
    m_pShaderProgram->addShader(m_pVSHader);
    // Bind attribute "vertexIn" (declared in the vertex shader source)
    // to location ATTRIB_VERTEX.
    m_pShaderProgram->bindAttributeLocation("vertexIn", ATTRIB_VERTEX);
    // Bind attribute "textureIn" to location ATTRIB_TEXTURE.
    m_pShaderProgram->bindAttributeLocation("textureIn", ATTRIB_TEXTURE);
    // Link everything that was added, then activate the program.
    m_pShaderProgram->link();
    m_pShaderProgram->bind();
    // Per-format setup: uniforms, vertex/texcoord arrays, textures.
    if(this->mType == TYPE_YUV420P){
        initShaderYuv();
    }
    if(this->mType == TYPE_RGB32){
        initShaderRgb();
    }
    glClearColor(0.0,0.0,0.0,0.0);// black clear color
}
// Keeps the GL viewport in sync with the widget size; a zero height is
// clamped to 1 to guard against divide-by-zero in aspect-ratio math.
void CPlayWidget::resizeGL(int w, int h)
{
    glViewport(0, 0, w, h == 0 ? 1 : h);
}
// Uploads the current frame to the GPU (per input format) and draws the
// full-viewport quad as a triangle strip.
void CPlayWidget::paintGL()
{
    switch (mType) {
    case TYPE_YUV420P:
        loadYuvTexture();
        break;
    case TYPE_RGB32:
        loadRgbTexture();
        break;
    }
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
// YUV mode setup: resolves the sampler uniform locations, installs the
// quad's vertex/texcoord arrays, and creates one texture per plane.
void CPlayWidget::initShaderYuv()
{
    // Locations of tex_y / tex_u / tex_v declared in the fragment
    // shader source.
    textureUniformY = m_pShaderProgram->uniformLocation("tex_y");
    textureUniformU = m_pShaderProgram->uniformLocation("tex_u");
    textureUniformV = m_pShaderProgram->uniformLocation("tex_v");
    // Full-viewport quad (triangle strip order).
    static const GLfloat vertexVertices[] = {
        -1.0f, -1.0f,
        1.0f, -1.0f,
        -1.0f, 1.0f,
        1.0f, 1.0f,
    };
    // Texture coordinates (flipped vertically relative to the quad).
    static const GLfloat textureVertices[] = {
        0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 0.0f,
    };
    // Attach the vertex array to attribute ATTRIB_VERTEX.
    glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, vertexVertices);
    // Attach the texcoord array to attribute ATTRIB_TEXTURE.
    glVertexAttribPointer(ATTRIB_TEXTURE, 2, GL_FLOAT, 0, 0, textureVertices);
    // Attribute arrays are disabled by default — enable both.
    glEnableVertexAttribArray(ATTRIB_VERTEX);
    glEnableVertexAttribArray(ATTRIB_TEXTURE);
    // One 2D texture per plane: y, u and v.
    m_pTextureY = new QOpenGLTexture(QOpenGLTexture::Target2D);
    m_pTextureU = new QOpenGLTexture(QOpenGLTexture::Target2D);
    m_pTextureV = new QOpenGLTexture(QOpenGLTexture::Target2D);
    m_pTextureY->create();
    m_pTextureU->create();
    m_pTextureV->create();
    // Raw GL texture ids, used by the glBindTexture calls in
    // loadYuvTexture().
    id_y = m_pTextureY->textureId();
    id_u = m_pTextureU->textureId();
    id_v = m_pTextureV->textureId();
}
// RGB32 mode setup: resolves the sampler uniform, installs the quad's
// vertex/texcoord arrays, and creates the single RGB texture.
void CPlayWidget::initShaderRgb()
{
    // Location of the "rgbdata" sampler declared in the RGB fragment
    // shader source.
    textureUniformRGB = m_pShaderProgram->uniformLocation("rgbdata");
    // Full-viewport quad (triangle strip order).
    static const GLfloat vertexVertices[] = {
        -1.0f, -1.0f,
        1.0f, -1.0f,
        -1.0f, 1.0f,
        1.0f, 1.0f,
    };
    // Texture coordinates. NOTE(review): unlike the YUV path these are
    // not vertically flipped — presumably because the RGB32 source is
    // bottom-up; confirm against the capture code.
    static const GLfloat textureVertices[] = {
        0.0f, 0.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
    };
    // Attach the vertex array to attribute ATTRIB_VERTEX.
    glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, vertexVertices);
    // Attach the texcoord array to attribute ATTRIB_TEXTURE.
    glVertexAttribPointer(ATTRIB_TEXTURE, 2, GL_FLOAT, 0, 0, textureVertices);
    // Attribute arrays are disabled by default — enable both.
    glEnableVertexAttribArray(ATTRIB_VERTEX);
    glEnableVertexAttribArray(ATTRIB_TEXTURE);
    // Single texture holding the whole RGB frame.
    m_pTextureRGB = new QOpenGLTexture(QOpenGLTexture::Target2D);
    m_pTextureRGB->create();
    // Raw GL texture id used by loadRgbTexture().
    id_rgb = m_pTextureRGB->textureId();
}
// Uploads the three planes of the current YUV420P frame (m_pBufYuv420p)
// into the y/u/v textures and points the sampler uniforms at texture
// units 0/1/2. The u and v planes are quarter-size (w/2 x h/2).
// Always returns 0.
int CPlayWidget::loadYuvTexture()
{
    // --- Y plane: texture unit 0, full resolution ---
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, id_y);
    // Single-channel (GL_RED) upload straight from the frame buffer.
    glTexImage2D(GL_TEXTURE_2D,
                 0,
                 GL_RED,
                 m_nVideoW,
                 m_nVideoH,
                 0,
                 GL_RED,
                 GL_UNSIGNED_BYTE,
                 m_pBufYuv420p);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    // --- U plane: texture unit 1, half resolution, offset w*h ---
    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, id_u);
    glTexImage2D(GL_TEXTURE_2D,
                 0, GL_RED,
                 m_nVideoW/2,
                 m_nVideoH/2,
                 0,
                 GL_RED,
                 GL_UNSIGNED_BYTE,
                 (char*)m_pBufYuv420p+m_nVideoW*m_nVideoH);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    // --- V plane: texture unit 2, half resolution, offset w*h*5/4 ---
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, id_v);
    glTexImage2D(GL_TEXTURE_2D,
                 0, GL_RED,
                 m_nVideoW/2,
                 m_nVideoH/2,
                 0, GL_RED,
                 GL_UNSIGNED_BYTE,
                 (char*)m_pBufYuv420p+m_nVideoW*m_nVideoH*5/4);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    // Point each sampler uniform at its texture-unit index
    // (0 -> GL_TEXTURE0, 1 -> GL_TEXTURE1, 2 -> GL_TEXTURE2).
    glUniform1i(textureUniformY, 0);
    glUniform1i(textureUniformU, 1);
    glUniform1i(textureUniformV, 2);
    return 0;
}
// Uploads the current RGB32 frame (m_pBufRgb32) into the RGB texture on
// texture unit 0. The source is BGRA-ordered (GL_BGRA), stored as RGBA
// internally. Always returns 0.
int CPlayWidget::loadRgbTexture()
{
    // Texture unit 0 holds the whole frame.
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, id_rgb);
    // 4 bytes per pixel, BGRA source order (MEDIASUBTYPE_RGB32).
    glTexImage2D(GL_TEXTURE_2D,
                 0,
                 GL_RGBA,
                 m_nVideoW,
                 m_nVideoH,
                 0,
                 GL_BGRA,
                 GL_UNSIGNED_BYTE,
                 m_pBufRgb32);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    // Sampler "rgbdata" reads from texture unit 0.
    glUniform1i(textureUniformRGB, 0);
    return 0;
}

75
cplaywidget.h Normal file
View File

@ -0,0 +1,75 @@
#ifndef GLPLAYWIDGET_H
#define GLPLAYWIDGET_H
#include <QOpenGLWidget>
#include <QOpenGLShaderProgram>
#include <QOpenGLFunctions>
#include <QOpenGLTexture>
#include <QFile>
#include "media/CameraCapture.h"
#include <QTimer>
// Attribute locations shared between initializeGL and the shaders.
#define ATTRIB_VERTEX 3
#define ATTRIB_TEXTURE 4
/// OpenGL widget that renders raw video frames. Supports planar
/// YUV420P (converted to RGB in the fragment shader) and RGB32 frames
/// delivered via the Camera::CameraObserver callback.
class CPlayWidget:public QOpenGLWidget,protected QOpenGLFunctions,public Camera::CameraObserver
{
    Q_OBJECT
public slots:
    /// Timer slot: advances to the next frame of the demo YUV file.
    void OnUpdateFrame();
    /// Accepts one YUV420P frame from an external producer.
    void OnPaintData(const uint8_t *data,uint32_t len);
public:
    typedef enum{
        TYPE_YUV420P,
        TYPE_RGB32,
    }IMG_TYPE;
    CPlayWidget(QWidget* parent);
    ~CPlayWidget();
    /// Reads and displays one frame from the hard-coded demo YUV file.
    void PlayOneFrame();
    /// Selects the input format; must be called before the GL context
    /// initializes, since it decides which fragment shader is compiled.
    int SetDataType(IMG_TYPE);
    /// CameraObserver callback delivering one RGB32 frame.
    int OnCameraData(uint8_t *dat, uint32_t size) override;
    /// Stores the resolution and allocates the matching frame buffer.
    int SetImgSize(uint32_t width,uint32_t );
protected:
    QTimer tm;                       // optional repaint timer (see ctor)
    void initializeGL() override;
    void resizeGL(int w, int h) override;
    void paintGL() override;
private:
    IMG_TYPE mType;                  // only RGB32 and YUV420P supported
    GLuint textureUniformY;          // y-plane sampler uniform location
    GLuint textureUniformU;          // u-plane sampler uniform location
    GLuint textureUniformV;          // v-plane sampler uniform location
    GLuint textureUniformRGB;        // rgb sampler uniform location
    // BUG FIX: removed the unused duplicate member `textureUnifromRGB`
    // (misspelled copy of textureUniformRGB, never referenced).
    GLuint id_rgb;                   // rgb texture object id
    GLuint id_y;                     // y texture object id
    GLuint id_u;                     // u texture object id
    GLuint id_v;                     // v texture object id
    QOpenGLTexture* m_pTextureRGB;   // whole-frame RGB texture
    QOpenGLTexture* m_pTextureY;     // y-plane texture
    QOpenGLTexture* m_pTextureU;     // u-plane texture
    QOpenGLTexture* m_pTextureV;     // v-plane texture
    QOpenGLShader *m_pVSHader;       // vertex shader object
    QOpenGLShader *m_pFSHader;       // fragment shader object
    QOpenGLShaderProgram *m_pShaderProgram; // linked shader program
    int m_nVideoW;                   // video width in pixels
    int m_nVideoH;                   // video height in pixels
    unsigned char *m_pBufYuv420p;    // YUV420P frame buffer (w*h*3/2)
    // BUG FIX: this member was never initialized anywhere, making the
    // null checks in the .cpp read an indeterminate value.
    unsigned char* m_pBufRgb32 = nullptr; // RGB32 frame buffer (w*h*4)
    FILE* m_pYuvFile;                // demo playback file handle
    void initShaderYuv();            // YUV uniforms/arrays/textures
    void initShaderRgb();            // RGB uniforms/arrays/texture
    int loadYuvTexture();            // upload current YUV frame
    int loadRgbTexture();            // upload current RGB frame
};
#endif

4
depency/conanfile.txt Normal file
View File

@ -0,0 +1,4 @@
[requires]
ffmpeg/4.2.1
[imports]
.,* -> ./third/msvc32 @ folder=True, ignore_case=True, excludes=*.html *.jpeg

25
inc/Base64.h Normal file
View File

@ -0,0 +1,25 @@
#pragma once
// BUG FIX: this header had no include guard; including it twice in one
// translation unit redefined the tables and macros.

// Maps an ASCII code (0-127) to its base64 6-bit value; 255 marks
// characters outside the base64 alphabet. Layout: '+'->62, '/'->63,
// '0'-'9'->52-61, 'A'-'Z'->0-25, 'a'-'z'->26-51.
const unsigned char Base64IdxTab[128] =
{
    255,255,255,255, 255,255,255,255, 255,255,255,255, 255,255,255,255,
    255,255,255,255, 255,255,255,255, 255,255,255,255, 255,255,255,255,
    255,255,255,255, 255,255,255,255, 255,255,255,62, 255,255,255,63,
    52,53,54,55, 56,57,58,59, 60,61,255,255, 255,255,255,255,
    255,0,1,2, 3,4,5,6, 7,8,9,10, 11,12,13,14,
    15,16,17,18, 19,20,21,22, 23,24,25,255, 255,255,255,255,
    255,26,27,28, 29,30,31,32, 33,34,35,36, 37,38,39,40,
    41,42,43,44, 45,46,47,48, 49,50,51,255, 255,255,255,255
};
// Decode lookup: character -> 6-bit value (no bounds check; x < 128).
#define BVal(x) Base64IdxTab[x]
// Decodes the NUL-terminated base64 string pInput into pOutput.
int DecodeBase64(char * pInput, char * pOutput);
// Maps a 6-bit value (0-63) to its base64 character.
const char Base64ValTab[65] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
#define AVal(x) Base64ValTab[x]
// Encodes iInputLen bytes of pInput as base64 into pOutput.
int EncodeBase64(unsigned char * pInput, int iInputLen, unsigned char * pOutput);
// Chunk sizes used by the (externally defined) codec loops.
#define DCD_ONCE_LEN 400*1024
#define CDC_ONCE_LEN 300*1024

181
inc/BitmapEx.h Normal file
View File

@ -0,0 +1,181 @@
// BitmapEx.h: interface for the CBitmapEx class.
//
//////////////////////////////////////////////////////////////////////
#if !defined(AFX_BITMAPEX_H__80F20A52_B43F_42C5_B182_AC8D27BF5C0E__INCLUDED_)
#define AFX_BITMAPEX_H__80F20A52_B43F_42C5_B182_AC8D27BF5C0E__INCLUDED_
#if _MSC_VER > 1000
#pragma once
#endif // _MSC_VER > 1000
#define _PI 3.1415926f // Value of PI
#define _BITS_PER_PIXEL_32 32 // 32-bit color depth
#define _BITS_PER_PIXEL_24 24 // 24-bit color depth
#define _PIXEL DWORD // Pixel
#define _RGB(r,g,b) (((r) << 16) | ((g) << 8) | (b)) // Convert to RGB
#define _GetRValue(c) ((BYTE)(((c) & 0x00FF0000) >> 16)) // Red color component
#define _GetGValue(c) ((BYTE)(((c) & 0x0000FF00) >> 8)) // Green color component
#define _GetBValue(c) ((BYTE)((c) & 0x000000FF)) // Blue color component
typedef long fixed; // Our new fixed point type
#define itofx(x) ((x) << 8) // Integer to fixed point
#define ftofx(x) (long)((x) * 256) // Float to fixed point
#define dtofx(x) (long)((x) * 256) // Double to fixed point
#define fxtoi(x) ((x) >> 8) // Fixed point to integer
#define fxtof(x) ((float) (x) / 256) // Fixed point to float
#define fxtod(x) ((double)(x) / 256) // Fixed point to double
#define Mulfx(x,y) (((x) * (y)) >> 8) // Multiply a fixed by a fixed
#define Divfx(x,y) (((x) << 8) / (y)) // Divide a fixed by a fixed
// Integer 2D point used by the quad-mapping draw overloads.
typedef struct __POINT
{
    long x;
    long y;
} _POINT, *_LPPOINT;
// Arbitrary quadrilateral given by its four corner points.
typedef struct __QUAD
{
    _POINT p1;
    _POINT p2;
    _POINT p3;
    _POINT p4;
} _QUAD, *_LPQUAD;
// Resampling filter used by Scale/Rotate/Draw operations.
typedef enum __RESAMPLE_MODE
{
    RM_NEARESTNEIGHBOUR = 0,
    RM_BILINEAR,
    RM_BICUBIC,
} _RESAMPLE_MODE;
// Alpha-gradient direction flags for the DrawBlended overloads
// (bit flags — combinable).
typedef enum __GRADIENT_MODE
{
    GM_NONE = 0x00,
    GM_HORIZONTAL = 0x01,
    GM_VERTICAL = 0x02,
    GM_RADIAL = 0x04
} _GRADIENT_MODE;
// In-memory DIB bitmap with software drawing, scaling, rotation and
// per-pixel effects. Works on 24/32-bpp data; coordinates are in
// pixels. NOTE(review): declaration only — behavior claims below are
// limited to what the signatures show.
class CBitmapEx
{
public:
	// Public methods
	CBitmapEx();
	virtual ~CBitmapEx();
	// Creation / (de)serialization.
	void Create(long width, long height);
	void Create(CBitmapEx& bitmapEx);
	void Load(LPTSTR lpszBitmapFile);
	void Load(LPBYTE lpBitmapData);
	void Save(LPTSTR lpszBitmapFile);
	void Save(LPBYTE lpBitmapData);
	// Geometric transforms (percent-based scale; degree-based rotate).
	void Scale(long horizontalPercent=100, long verticalPercent=100);
	void Rotate(long degrees=0, _PIXEL bgColor=_RGB(0,0,0));
	void FlipHorizontal();
	void FlipVertical();
	void MirrorLeft();
	void MirrorRight();
	void MirrorTop();
	void MirrorBottom();
	// Whole-image effects.
	void Clear(_PIXEL clearColor=_RGB(0,0,0));
	void Negative();
	void Grayscale();
	void Sepia(long depth=34);
	void Emboss();
	void Engrave();
	void Pixelize(long size=4);
	// Blitting: to a DC, between bitmaps, quad-mapped, with optional
	// alpha, transparency color-key, or gradient blending.
	void Draw(HDC hDC);
	void Draw(HDC hDC, long dstX, long dstY);
	void Draw(long dstX, long dstY, long width, long height, CBitmapEx& bitmapEx, long srcX, long srcY);
	void Draw(long dstX, long dstY, long width, long height, CBitmapEx& bitmapEx, long srcX, long srcY, long alpha);
	void Draw(_QUAD dstQuad, CBitmapEx& bitmapEx);
	void Draw(_QUAD dstQuad, CBitmapEx& bitmapEx, long alpha);
	void Draw(_QUAD dstQuad, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight);
	void Draw(_QUAD dstQuad, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight, long alpha);
	void Draw(long dstX, long dstY, long dstWidth, long dstHeight, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight);
	void Draw(long dstX, long dstY, long dstWidth, long dstHeight, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight, long alpha);
	void DrawTransparent(long dstX, long dstY, long width, long height, CBitmapEx& bitmapEx, long srcX, long srcY, _PIXEL transparentColor=_RGB(0,0,0));
	void DrawTransparent(long dstX, long dstY, long width, long height, CBitmapEx& bitmapEx, long srcX, long srcY, long alpha, _PIXEL transparentColor=_RGB(0,0,0));
	void DrawTransparent(long dstX, long dstY, long dstWidth, long dstHeight, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight, _PIXEL transparentColor=_RGB(0,0,0));
	void DrawTransparent(long dstX, long dstY, long dstWidth, long dstHeight, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight, long alpha, _PIXEL transparentColor=_RGB(0,0,0));
	void DrawTransparent(_QUAD dstQuad, CBitmapEx& bitmapEx, _PIXEL transparentColor=_RGB(0,0,0));
	void DrawTransparent(_QUAD dstQuad, CBitmapEx& bitmapEx, long alpha, _PIXEL transparentColor=_RGB(0,0,0));
	void DrawTransparent(_QUAD dstQuad, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight, _PIXEL transparentColor=_RGB(0,0,0));
	void DrawTransparent(_QUAD dstQuad, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight, long alpha, _PIXEL transparentColor=_RGB(0,0,0));
	void DrawBlended(long dstX, long dstY, long width, long height, CBitmapEx& bitmapEx, long srcX, long srcY, long startAlpha, long endAlpha, DWORD mode=GM_NONE);
	void DrawBlended(long dstX, long dstY, long dstWidth, long dstHeight, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight, long startAlpha, long endAlpha, DWORD mode=GM_NONE);
	// Accessors over the DIB headers and pixel storage.
	LPBITMAPFILEHEADER GetFileInfo() {return &m_bfh;}
	LPBITMAPINFOHEADER GetInfo() {return &m_bih;}
	long GetWidth() {return m_bih.biWidth;}
	long GetHeight() {return m_bih.biHeight;}
	long GetPitch() {return m_iPitch;}
	long GetBpp() {return m_iBpp;}
	long GetPaletteEntries() {return m_iPaletteEntries;}
	LPRGBQUAD GetPalette() {return m_lpPalette;}
	DWORD GetSize() {return m_dwSize;}
	LPBYTE GetData() {return m_lpData;}
	void SetResampleMode(_RESAMPLE_MODE mode=RM_NEARESTNEIGHBOUR) {m_ResampleMode = mode;}
	_RESAMPLE_MODE GetResampleMode() {return m_ResampleMode;}
	BOOL IsValid() {return (m_dwSize > 0);}
	_PIXEL GetPixel(long x, long y);
	void SetPixel(long x, long y, _PIXEL pixel);
	//wangjun
	// Extensions: load arbitrary image formats / save as JPEG.
	void LoadImageFile(LPTSTR lpszImageFile);
	void SaveJPGFile(LPTSTR lpszImageFile);
private:
	// Private methods
	// Depth conversion helpers.
	void _ConvertTo32Bpp();
	void _ConvertTo24Bpp();
	// Per-filter implementations selected via m_ResampleMode.
	void _ScaleNearestNeighbour(long horizontalPercent, long verticalPercent);
	void _ScaleBilinear(long horizontalPercent, long verticalPercent);
	void _ScaleBicubic(long horizontalPercent, long verticalPercent);
	void _RotateNearestNeighbour(long degrees, _PIXEL bgColor);
	void _RotateBilinear(long degrees, _PIXEL bgColor);
	void _RotateBicubic(long degrees, _PIXEL bgColor);
	void _DrawNearestNeighbour(long dstX, long dstY, long dstWidth, long dstHeight, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight);
	void _DrawBilinear(long dstX, long dstY, long dstWidth, long dstHeight, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight);
	void _DrawBicubic(long dstX, long dstY, long dstWidth, long dstHeight, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight);
	void _DrawNearestNeighbour(long dstX, long dstY, long dstWidth, long dstHeight, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight, long alpha);
	void _DrawBilinear(long dstX, long dstY, long dstWidth, long dstHeight, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight, long alpha);
	void _DrawBicubic(long dstX, long dstY, long dstWidth, long dstHeight, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight, long alpha);
	void _DrawTransparentNearestNeighbour(long dstX, long dstY, long dstWidth, long dstHeight, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight, _PIXEL transparentColor=_RGB(0,0,0));
	void _DrawTransparentBilinear(long dstX, long dstY, long dstWidth, long dstHeight, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight, _PIXEL transparentColor=_RGB(0,0,0));
	void _DrawTransparentBicubic(long dstX, long dstY, long dstWidth, long dstHeight, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight, _PIXEL transparentColor=_RGB(0,0,0));
	void _DrawTransparentNearestNeighbour(long dstX, long dstY, long dstWidth, long dstHeight, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight, long alpha, _PIXEL transparentColor=_RGB(0,0,0));
	void _DrawTransparentBilinear(long dstX, long dstY, long dstWidth, long dstHeight, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight, long alpha, _PIXEL transparentColor=_RGB(0,0,0));
	void _DrawTransparentBicubic(long dstX, long dstY, long dstWidth, long dstHeight, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight, long alpha, _PIXEL transparentColor=_RGB(0,0,0));
	void _DrawBlendedNearestNeighbour(long dstX, long dstY, long dstWidth, long dstHeight, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight, long startAlpha, long endAlpha, DWORD mode=GM_NONE);
	void _DrawBlendedBilinear(long dstX, long dstY, long dstWidth, long dstHeight, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight, long startAlpha, long endAlpha, DWORD mode=GM_NONE);
	void _DrawBlendedBicubic(long dstX, long dstY, long dstWidth, long dstHeight, CBitmapEx& bitmapEx, long srcX, long srcY, long srcWidth, long srcHeight, long startAlpha, long endAlpha, DWORD mode=GM_NONE);
	// wanhjun
	// GDI/GDI+ interop helpers.
	HANDLE _dibFromBitmap(HBITMAP hBitmap);	//DDB->DIB
	int _DIBNumColors (LPBITMAPINFOHEADER lpbi);
	HBITMAP _extractBitmap(IPicture* pPicture);
	int _GetCodecClsid(const WCHAR* format, CLSID* pClsid);
private:
	// Private members
	BITMAPFILEHEADER m_bfh;          // DIB file header
	BITMAPINFOHEADER m_bih;          // DIB info header (width/height/bpp)
	long m_iPaletteEntries;          // number of used palette entries
	RGBQUAD m_lpPalette[256];        // color palette (<=8 bpp sources)
	long m_iPitch;                   // bytes per scanline
	long m_iBpp;                     // bytes per pixel
	DWORD m_dwSize;                  // pixel data size; 0 => invalid
	LPBYTE m_lpData;                 // pixel data
	_RESAMPLE_MODE m_ResampleMode;   // filter used by scale/rotate/draw
};
#endif // !defined(AFX_BITMAPEX_H__80F20A52_B43F_42C5_B182_AC8D27BF5C0E__INCLUDED_)

66
inc/qedit.h Normal file
View File

@ -0,0 +1,66 @@
#include <Unknwn.h>
#include <strmif.h>
#pragma comment(lib, "strmiids.lib")
#ifndef __qedit_h__
#define __qedit_h__
///////////////////////////////////////////////////////////////////////////////////
#pragma once
///////////////////////////////////////////////////////////////////////////////////
// Local redeclaration of the DirectShow sample-grabber callback
// interface (presumably copied here because the SDK's qedit.h is not
// shipped any more — verify against the project's SDK setup).
struct __declspec(uuid("0579154a-2b53-4994-b0d0-e773148eff85"))
ISampleGrabberCB : IUnknown
{
    //
    // Raw methods provided by interface
    //
    // Called with the full IMediaSample for each sample.
    virtual HRESULT __stdcall SampleCB(
        double SampleTime,
        struct IMediaSample * pSample) = 0;
    // Called with the raw sample bytes for each sample.
    virtual HRESULT __stdcall BufferCB(
        double SampleTime,
        unsigned char * pBuffer,
        long BufferLen) = 0;
};
// Local redeclaration of the DirectShow ISampleGrabber filter
// interface: configures buffering and the grab callback.
struct __declspec(uuid("6b652fff-11fe-4fce-92ad-0266b5d7c78f"))
ISampleGrabber : IUnknown
{
    //
    // Raw methods provided by interface
    //
    virtual HRESULT __stdcall SetOneShot(
        long OneShot) = 0;
    virtual HRESULT __stdcall SetMediaType(
        struct _AMMediaType * pType) = 0;
    virtual HRESULT __stdcall GetConnectedMediaType(
        struct _AMMediaType * pType) = 0;
    virtual HRESULT __stdcall SetBufferSamples(
        long BufferThem) = 0;
    virtual HRESULT __stdcall GetCurrentBuffer(
        /*[in,out]*/ long * pBufferSize,
        /*[out]*/ long * pBuffer) = 0;
    virtual HRESULT __stdcall GetCurrentSample(
        /*[out,retval]*/ struct IMediaSample * * ppSample) = 0;
    // WhichMethodToCallback: 0 => SampleCB, 1 => BufferCB (per the
    // DirectShow documentation — confirm for this SDK version).
    virtual HRESULT __stdcall SetCallback(
        struct ISampleGrabberCB * pCallback,
        long WhichMethodToCallback) = 0;
};
// IIDs/CLSIDs matching the interfaces above and the stock
// SampleGrabber / NullRenderer filters.
static const IID IID_ISampleGrabber = { 0x6B652FFF, 0x11FE, 0x4fce,{ 0x92, 0xAD, 0x02, 0x66, 0xB5, 0xD7, 0xC7, 0x8F } };
static const IID IID_ISampleGrabberCB = { 0x0579154A, 0x2B53, 0x4994,{ 0xB0, 0xD0, 0xE7, 0x73, 0x14, 0x8E, 0xFF, 0x85 } };
static const CLSID CLSID_SampleGrabber = { 0xC1F400A0, 0x3F08, 0x11d3,{ 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
static const CLSID CLSID_NullRenderer = { 0xC1F400A4, 0x3F08, 0x11d3,{ 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
#endif

21
inc/utils.h Normal file
View File

@ -0,0 +1,21 @@
#pragma once
#include <string>
#include <memory>
#include "guiddef.h"
#include <dshow.h>
#include <windows.h>
#include "qedit.h"
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/avutil.h"
#include "libswscale/swscale.h"
#include "libavutil/opt.h"
#include "libavutil/imgutils.h"
};
using namespace std;
// Maps a DirectShow media-subtype GUID to the corresponding FFmpeg
// pixel format (declaration only — see the implementation for the
// supported subtypes and the unknown-GUID fallback).
AVPixelFormat GUIDToAvFormat(GUID mediatype);

1843
inc/winuuids.h Normal file

File diff suppressed because it is too large Load Diff

1916
inc/zlib.h Normal file

File diff suppressed because it is too large Load Diff

504
librtmp/COPYING Normal file
View File

@ -0,0 +1,504 @@
GNU LESSER GENERAL PUBLIC LICENSE
Version 2.1, February 1999
Copyright (C) 1991, 1999 Free Software Foundation, Inc.
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
[This is the first released version of the Lesser GPL. It also counts
as the successor of the GNU Library Public License, version 2, hence
the version number 2.1.]
Preamble
The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
Licenses are intended to guarantee your freedom to share and change
free software--to make sure the software is free for all its users.
This license, the Lesser General Public License, applies to some
specially designated software packages--typically libraries--of the
Free Software Foundation and other authors who decide to use it. You
can use it too, but we suggest you first think carefully about whether
this license or the ordinary General Public License is the better
strategy to use in any particular case, based on the explanations below.
When we speak of free software, we are referring to freedom of use,
not price. Our General Public Licenses are designed to make sure that
you have the freedom to distribute copies of free software (and charge
for this service if you wish); that you receive source code or can get
it if you want it; that you can change the software and use pieces of
it in new free programs; and that you are informed that you can do
these things.
To protect your rights, we need to make restrictions that forbid
distributors to deny you these rights or to ask you to surrender these
rights. These restrictions translate to certain responsibilities for
you if you distribute copies of the library or if you modify it.
For example, if you distribute copies of the library, whether gratis
or for a fee, you must give the recipients all the rights that we gave
you. You must make sure that they, too, receive or can get the source
code. If you link other code with the library, you must provide
complete object files to the recipients, so that they can relink them
with the library after making changes to the library and recompiling
it. And you must show them these terms so they know their rights.
We protect your rights with a two-step method: (1) we copyright the
library, and (2) we offer you this license, which gives you legal
permission to copy, distribute and/or modify the library.
To protect each distributor, we want to make it very clear that
there is no warranty for the free library. Also, if the library is
modified by someone else and passed on, the recipients should know
that what they have is not the original version, so that the original
author's reputation will not be affected by problems that might be
introduced by others.
Finally, software patents pose a constant threat to the existence of
any free program. We wish to make sure that a company cannot
effectively restrict the users of a free program by obtaining a
restrictive license from a patent holder. Therefore, we insist that
any patent license obtained for a version of the library must be
consistent with the full freedom of use specified in this license.
Most GNU software, including some libraries, is covered by the
ordinary GNU General Public License. This license, the GNU Lesser
General Public License, applies to certain designated libraries, and
is quite different from the ordinary General Public License. We use
this license for certain libraries in order to permit linking those
libraries into non-free programs.
When a program is linked with a library, whether statically or using
a shared library, the combination of the two is legally speaking a
combined work, a derivative of the original library. The ordinary
General Public License therefore permits such linking only if the
entire combination fits its criteria of freedom. The Lesser General
Public License permits more lax criteria for linking other code with
the library.
We call this license the "Lesser" General Public License because it
does Less to protect the user's freedom than the ordinary General
Public License. It also provides other free software developers Less
of an advantage over competing non-free programs. These disadvantages
are the reason we use the ordinary General Public License for many
libraries. However, the Lesser license provides advantages in certain
special circumstances.
For example, on rare occasions, there may be a special need to
encourage the widest possible use of a certain library, so that it becomes
a de-facto standard. To achieve this, non-free programs must be
allowed to use the library. A more frequent case is that a free
library does the same job as widely used non-free libraries. In this
case, there is little to gain by limiting the free library to free
software only, so we use the Lesser General Public License.
In other cases, permission to use a particular library in non-free
programs enables a greater number of people to use a large body of
free software. For example, permission to use the GNU C Library in
non-free programs enables many more people to use the whole GNU
operating system, as well as its variant, the GNU/Linux operating
system.
Although the Lesser General Public License is Less protective of the
users' freedom, it does ensure that the user of a program that is
linked with the Library has the freedom and the wherewithal to run
that program using a modified version of the Library.
The precise terms and conditions for copying, distribution and
modification follow. Pay close attention to the difference between a
"work based on the library" and a "work that uses the library". The
former contains code derived from the library, whereas the latter must
be combined with the library in order to run.
GNU LESSER GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License Agreement applies to any software library or other
program which contains a notice placed by the copyright holder or
other authorized party saying it may be distributed under the terms of
this Lesser General Public License (also called "this License").
Each licensee is addressed as "you".
A "library" means a collection of software functions and/or data
prepared so as to be conveniently linked with application programs
(which use some of those functions and data) to form executables.
The "Library", below, refers to any such software library or work
which has been distributed under these terms. A "work based on the
Library" means either the Library or any derivative work under
copyright law: that is to say, a work containing the Library or a
portion of it, either verbatim or with modifications and/or translated
straightforwardly into another language. (Hereinafter, translation is
included without limitation in the term "modification".)
"Source code" for a work means the preferred form of the work for
making modifications to it. For a library, complete source code means
all the source code for all modules it contains, plus any associated
interface definition files, plus the scripts used to control compilation
and installation of the library.
Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running a program using the Library is not restricted, and output from
such a program is covered only if its contents constitute a work based
on the Library (independent of the use of the Library in a tool for
writing it). Whether that is true depends on what the Library does
and what the program that uses the Library does.
1. You may copy and distribute verbatim copies of the Library's
complete source code as you receive it, in any medium, provided that
you conspicuously and appropriately publish on each copy an
appropriate copyright notice and disclaimer of warranty; keep intact
all the notices that refer to this License and to the absence of any
warranty; and distribute a copy of this License along with the
Library.
You may charge a fee for the physical act of transferring a copy,
and you may at your option offer warranty protection in exchange for a
fee.
2. You may modify your copy or copies of the Library or any portion
of it, thus forming a work based on the Library, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:
a) The modified work must itself be a software library.
b) You must cause the files modified to carry prominent notices
stating that you changed the files and the date of any change.
c) You must cause the whole of the work to be licensed at no
charge to all third parties under the terms of this License.
d) If a facility in the modified Library refers to a function or a
table of data to be supplied by an application program that uses
the facility, other than as an argument passed when the facility
is invoked, then you must make a good faith effort to ensure that,
in the event an application does not supply such function or
table, the facility still operates, and performs whatever part of
its purpose remains meaningful.
(For example, a function in a library to compute square roots has
a purpose that is entirely well-defined independent of the
application. Therefore, Subsection 2d requires that any
application-supplied function or table used by this function must
be optional: if the application does not supply it, the square
root function must still compute square roots.)
These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Library,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Library, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote
it.
Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Library.
In addition, mere aggregation of another work not based on the Library
with the Library (or with a work based on the Library) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.
3. You may opt to apply the terms of the ordinary GNU General Public
License instead of this License to a given copy of the Library. To do
this, you must alter all the notices that refer to this License, so
that they refer to the ordinary GNU General Public License, version 2,
instead of to this License. (If a newer version than version 2 of the
ordinary GNU General Public License has appeared, then you can specify
that version instead if you wish.) Do not make any other change in
these notices.
Once this change is made in a given copy, it is irreversible for
that copy, so the ordinary GNU General Public License applies to all
subsequent copies and derivative works made from that copy.
This option is useful when you wish to copy part of the code of
the Library into a program that is not a library.
4. You may copy and distribute the Library (or a portion or
derivative of it, under Section 2) in object code or executable form
under the terms of Sections 1 and 2 above provided that you accompany
it with the complete corresponding machine-readable source code, which
must be distributed under the terms of Sections 1 and 2 above on a
medium customarily used for software interchange.
If distribution of object code is made by offering access to copy
from a designated place, then offering equivalent access to copy the
source code from the same place satisfies the requirement to
distribute the source code, even though third parties are not
compelled to copy the source along with the object code.
5. A program that contains no derivative of any portion of the
Library, but is designed to work with the Library by being compiled or
linked with it, is called a "work that uses the Library". Such a
work, in isolation, is not a derivative work of the Library, and
therefore falls outside the scope of this License.
However, linking a "work that uses the Library" with the Library
creates an executable that is a derivative of the Library (because it
contains portions of the Library), rather than a "work that uses the
library". The executable is therefore covered by this License.
Section 6 states terms for distribution of such executables.
When a "work that uses the Library" uses material from a header file
that is part of the Library, the object code for the work may be a
derivative work of the Library even though the source code is not.
Whether this is true is especially significant if the work can be
linked without the Library, or if the work is itself a library. The
threshold for this to be true is not precisely defined by law.
If such an object file uses only numerical parameters, data
structure layouts and accessors, and small macros and small inline
functions (ten lines or less in length), then the use of the object
file is unrestricted, regardless of whether it is legally a derivative
work. (Executables containing this object code plus portions of the
Library will still fall under Section 6.)
Otherwise, if the work is a derivative of the Library, you may
distribute the object code for the work under the terms of Section 6.
Any executables containing that work also fall under Section 6,
whether or not they are linked directly with the Library itself.
6. As an exception to the Sections above, you may also combine or
link a "work that uses the Library" with the Library to produce a
work containing portions of the Library, and distribute that work
under terms of your choice, provided that the terms permit
modification of the work for the customer's own use and reverse
engineering for debugging such modifications.
You must give prominent notice with each copy of the work that the
Library is used in it and that the Library and its use are covered by
this License. You must supply a copy of this License. If the work
during execution displays copyright notices, you must include the
copyright notice for the Library among them, as well as a reference
directing the user to the copy of this License. Also, you must do one
of these things:
a) Accompany the work with the complete corresponding
machine-readable source code for the Library including whatever
changes were used in the work (which must be distributed under
Sections 1 and 2 above); and, if the work is an executable linked
with the Library, with the complete machine-readable "work that
uses the Library", as object code and/or source code, so that the
user can modify the Library and then relink to produce a modified
executable containing the modified Library. (It is understood
that the user who changes the contents of definitions files in the
Library will not necessarily be able to recompile the application
to use the modified definitions.)
b) Use a suitable shared library mechanism for linking with the
Library. A suitable mechanism is one that (1) uses at run time a
copy of the library already present on the user's computer system,
rather than copying library functions into the executable, and (2)
will operate properly with a modified version of the library, if
the user installs one, as long as the modified version is
interface-compatible with the version that the work was made with.
c) Accompany the work with a written offer, valid for at
least three years, to give the same user the materials
specified in Subsection 6a, above, for a charge no more
than the cost of performing this distribution.
d) If distribution of the work is made by offering access to copy
from a designated place, offer equivalent access to copy the above
specified materials from the same place.
e) Verify that the user has already received a copy of these
materials or that you have already sent this user a copy.
For an executable, the required form of the "work that uses the
Library" must include any data and utility programs needed for
reproducing the executable from it. However, as a special exception,
the materials to be distributed need not include anything that is
normally distributed (in either source or binary form) with the major
components (compiler, kernel, and so on) of the operating system on
which the executable runs, unless that component itself accompanies
the executable.
It may happen that this requirement contradicts the license
restrictions of other proprietary libraries that do not normally
accompany the operating system. Such a contradiction means you cannot
use both them and the Library together in an executable that you
distribute.
7. You may place library facilities that are a work based on the
Library side-by-side in a single library together with other library
facilities not covered by this License, and distribute such a combined
library, provided that the separate distribution of the work based on
the Library and of the other library facilities is otherwise
permitted, and provided that you do these two things:
a) Accompany the combined library with a copy of the same work
based on the Library, uncombined with any other library
facilities. This must be distributed under the terms of the
Sections above.
b) Give prominent notice with the combined library of the fact
that part of it is a work based on the Library, and explaining
where to find the accompanying uncombined form of the same work.
8. You may not copy, modify, sublicense, link with, or distribute
the Library except as expressly provided under this License. Any
attempt otherwise to copy, modify, sublicense, link with, or
distribute the Library is void, and will automatically terminate your
rights under this License. However, parties who have received copies,
or rights, from you under this License will not have their licenses
terminated so long as such parties remain in full compliance.
9. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Library or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Library (or any work based on the
Library), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Library or works based on it.
10. Each time you redistribute the Library (or any work based on the
Library), the recipient automatically receives a license from the
original licensor to copy, distribute, link with or modify the Library
subject to these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties with
this License.
11. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Library at all. For example, if a patent
license would not permit royalty-free redistribution of the Library by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Library.
If any portion of this section is held invalid or unenforceable under any
particular circumstance, the balance of the section is intended to apply,
and the section as a whole is intended to apply in other circumstances.
It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.
This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.
12. If the distribution and/or use of the Library is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Library under this License may add
an explicit geographical distribution limitation excluding those countries,
so that distribution is permitted only in or among countries not thus
excluded. In such case, this License incorporates the limitation as if
written in the body of this License.
13. The Free Software Foundation may publish revised and/or new
versions of the Lesser General Public License from time to time.
Such new versions will be similar in spirit to the present version,
but may differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the Library
specifies a version number of this License which applies to it and
"any later version", you have the option of following the terms and
conditions either of that version or of any later version published by
the Free Software Foundation. If the Library does not specify a
license version number, you may choose any version ever published by
the Free Software Foundation.
14. If you wish to incorporate parts of the Library into other free
programs whose distribution conditions are incompatible with these,
write to the author to ask for permission. For software which is
copyrighted by the Free Software Foundation, write to the Free
Software Foundation; we sometimes make exceptions for this. Our
decision will be guided by the two goals of preserving the free status
of all derivatives of our free software and of promoting the sharing
and reuse of software generally.
NO WARRANTY
15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
DAMAGES.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Libraries
If you develop a new library, and you want it to be of the greatest
possible use to the public, we recommend making it free software that
everyone can redistribute and change. You can do so by permitting
redistribution under these terms (or, alternatively, under the terms of the
ordinary General Public License).
To apply these terms, attach the following notices to the library. It is
safest to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least the
"copyright" line and a pointer to where the full notice is found.
<one line to give the library's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Also add information on how to contact you by electronic and paper mail.
You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the library, if
necessary. Here is a sample; alter the names:
Yoyodyne, Inc., hereby disclaims all copyright interest in the
library `Frob' (a library for tweaking knobs) written by James Random Hacker.
<signature of Ty Coon>, 1 April 1990
Ty Coon, President of Vice
That's all there is to it!

121
librtmp/Makefile Normal file
View File

@ -0,0 +1,121 @@
# Makefile for librtmp (part of rtmpdump).
# Builds the static library, an optional shared library, and a pkg-config file.
VERSION=v2.4

# Install prefix and derived directories (DESTDIR supported for staging).
prefix=/usr/local
incdir=$(prefix)/include/librtmp
bindir=$(prefix)/bin
libdir=$(prefix)/lib
mandir=$(prefix)/man
BINDIR=$(DESTDIR)$(bindir)
INCDIR=$(DESTDIR)$(incdir)
LIBDIR=$(DESTDIR)$(libdir)
MANDIR=$(DESTDIR)$(mandir)

# Toolchain; honors CROSS_COMPILE for cross builds.
CC=$(CROSS_COMPILE)gcc
LD=$(CROSS_COMPILE)ld
AR=$(CROSS_COMPILE)ar

# Target system: posix, darwin, or mingw.
SYS=posix

# Crypto backend selection: OPENSSL, GNUTLS, POLARSSL, or empty for none.
CRYPTO=OPENSSL
#CRYPTO=GNUTLS
DEF_POLARSSL=-DUSE_POLARSSL
DEF_OPENSSL=-DUSE_OPENSSL
DEF_GNUTLS=-DUSE_GNUTLS
DEF_=-DNO_CRYPTO
REQ_GNUTLS=gnutls,hogweed,nettle
REQ_OPENSSL=libssl,libcrypto
PUB_GNUTLS=-lgmp
LIBZ=-lz
LIBS_posix=
LIBS_darwin=
LIBS_mingw=-lws2_32 -lwinmm -lgdi32
LIB_GNUTLS=-lgnutls -lhogweed -lnettle -lgmp $(LIBZ)
LIB_OPENSSL=-lssl -lcrypto $(LIBZ)
LIB_POLARSSL=-lpolarssl $(LIBZ)
PRIVATE_LIBS=$(LIBS_$(SYS))
CRYPTO_LIB=$(LIB_$(CRYPTO)) $(PRIVATE_LIBS)
CRYPTO_REQ=$(REQ_$(CRYPTO))
CRYPTO_DEF=$(DEF_$(CRYPTO))
PUBLIC_LIBS=$(PUB_$(CRYPTO))

# Shared-library naming conventions per platform
# (libfoo.so.1 / libfoo.1.dylib / libfoo-1.dll).
SO_VERSION=1
SOX_posix=so
SOX_darwin=dylib
SOX_mingw=dll
SOX=$(SOX_$(SYS))
SO_posix=.$(SOX).$(SO_VERSION)
SO_darwin=.$(SO_VERSION).$(SOX)
SO_mingw=-$(SO_VERSION).$(SOX)
SO_EXT=$(SO_$(SYS))

# Where the shared object is installed (Windows DLLs go to bindir).
SODIR_posix=$(LIBDIR)
SODIR_darwin=$(LIBDIR)
SODIR_mingw=$(BINDIR)
SODIR=$(SODIR_$(SYS))

SO_LDFLAGS_posix=-shared -Wl,-soname,$@
SO_LDFLAGS_darwin=-dynamiclib -twolevel_namespace -undefined dynamic_lookup \
	-fno-common -headerpad_max_install_names -install_name $(libdir)/$@
SO_LDFLAGS_mingw=-shared -Wl,--out-implib,librtmp.dll.a
SO_LDFLAGS=$(SO_LDFLAGS_$(SYS))

# mingw additionally installs the import library.
INSTALL_IMPLIB_posix=
INSTALL_IMPLIB_darwin=
INSTALL_IMPLIB_mingw=cp librtmp.dll.a $(LIBDIR)
INSTALL_IMPLIB=$(INSTALL_IMPLIB_$(SYS))

# Set SHARED to anything other than "yes" to build the static library only.
SHARED=yes
SODEF_yes=-fPIC
SOLIB_yes=librtmp$(SO_EXT)
SOINST_yes=install_so
SO_DEF=$(SODEF_$(SHARED))
SO_LIB=$(SOLIB_$(SHARED))
SO_INST=$(SOINST_$(SHARED))

DEF=-DRTMPDUMP_VERSION=\"$(VERSION)\" $(CRYPTO_DEF) $(XDEF)
OPT=-O2
CFLAGS=-Wall $(XCFLAGS) $(INC) $(DEF) $(OPT) $(SO_DEF)
LDFLAGS=$(XLDFLAGS)

OBJS=rtmp.o log.o amf.o hashswf.o parseurl.o

all:	librtmp.a $(SO_LIB)

clean:
	rm -f *.o *.a *.$(SOX) *$(SO_EXT) librtmp.pc

librtmp.a: $(OBJS)
	$(AR) rs $@ $?

# NOTE: the link line previously contained a stray "$>" token, which is not
# a valid automatic variable and broke the shared-library link; removed.
librtmp$(SO_EXT): $(OBJS)
	$(CC) $(SO_LDFLAGS) $(LDFLAGS) -o $@ $^ $(CRYPTO_LIB)
	ln -sf $@ librtmp.$(SOX)

log.o: log.c log.h Makefile
rtmp.o: rtmp.c rtmp.h rtmp_sys.h handshake.h dh.h log.h amf.h Makefile
amf.o: amf.c amf.h bytes.h log.h Makefile
hashswf.o: hashswf.c http.h rtmp.h rtmp_sys.h Makefile
parseurl.o: parseurl.c rtmp.h rtmp_sys.h log.h Makefile

# Generate the pkg-config file from its template.
librtmp.pc: librtmp.pc.in Makefile
	sed -e "s;@prefix@;$(prefix);" -e "s;@libdir@;$(libdir);" \
		-e "s;@VERSION@;$(VERSION);" \
		-e "s;@CRYPTO_REQ@;$(CRYPTO_REQ);" \
		-e "s;@PUBLIC_LIBS@;$(PUBLIC_LIBS);" \
		-e "s;@PRIVATE_LIBS@;$(PRIVATE_LIBS);" librtmp.pc.in > $@

install:	install_base $(SO_INST)

install_base:	librtmp.a librtmp.pc
	-mkdir -p $(INCDIR) $(LIBDIR)/pkgconfig $(MANDIR)/man3 $(SODIR)
	cp amf.h http.h log.h rtmp.h $(INCDIR)
	cp librtmp.a $(LIBDIR)
	cp librtmp.pc $(LIBDIR)/pkgconfig
	cp librtmp.3 $(MANDIR)/man3

install_so:	librtmp$(SO_EXT)
	cp librtmp$(SO_EXT) $(SODIR)
	$(INSTALL_IMPLIB)
	cd $(SODIR); ln -sf librtmp$(SO_EXT) librtmp.$(SOX)

1311
librtmp/amf.c Normal file

File diff suppressed because it is too large Load Diff

164
librtmp/amf.h Normal file
View File

@ -0,0 +1,164 @@
#ifndef __AMF_H__
#define __AMF_H__
/*
 * Copyright (C) 2005-2008 Team XBMC
 * http://www.xbmc.org
 * Copyright (C) 2008-2009 Andrej Stepanchuk
 * Copyright (C) 2009-2010 Howard Chu
 *
 * This file is part of librtmp.
 *
 * librtmp is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as
 * published by the Free Software Foundation; either version 2.1,
 * or (at your option) any later version.
 *
 * librtmp is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with librtmp see the file COPYING.  If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA  02110-1301, USA.
 * http://www.gnu.org/copyleft/lgpl.html
 */

/*
 * Public API for encoding and decoding AMF (Action Message Format) data,
 * the serialization format used by RTMP command/metadata messages.
 * Covers both AMF0 and (partially) AMF3.
 */

#include <stdint.h>

#ifndef TRUE
#define TRUE	1
#define FALSE	0
#endif

#ifdef __cplusplus
extern "C"
{
#endif

  /* AMF0 wire-format type markers (first byte of each encoded value). */
  typedef enum
  { AMF_NUMBER = 0, AMF_BOOLEAN, AMF_STRING, AMF_OBJECT,
    AMF_MOVIECLIP,		/* reserved, not used */
    AMF_NULL, AMF_UNDEFINED, AMF_REFERENCE, AMF_ECMA_ARRAY, AMF_OBJECT_END,
    AMF_STRICT_ARRAY, AMF_DATE, AMF_LONG_STRING, AMF_UNSUPPORTED,
    AMF_RECORDSET,		/* reserved, not used */
    AMF_XML_DOC, AMF_TYPED_OBJECT,
    AMF_AVMPLUS,		/* switch to AMF3 */
    AMF_INVALID = 0xff
  } AMFDataType;

  /* AMF3 wire-format type markers. */
  typedef enum
  { AMF3_UNDEFINED = 0, AMF3_NULL, AMF3_FALSE, AMF3_TRUE,
    AMF3_INTEGER, AMF3_DOUBLE, AMF3_STRING, AMF3_XML_DOC, AMF3_DATE,
    AMF3_ARRAY, AMF3_OBJECT, AMF3_XML, AMF3_BYTE_ARRAY
  } AMF3DataType;

  /* Counted (not NUL-terminated) string view; does not own its bytes. */
  typedef struct AVal
  {
    char *av_val;	/* pointer to the character data */
    int av_len;		/* number of bytes at av_val */
  } AVal;
/* Build an AVal from a string literal at compile time. */
#define AVC(str)	{str,sizeof(str)-1}
/* True when two AVals hold identical byte sequences. */
#define AVMATCH(a1,a2)	((a1)->av_len == (a2)->av_len && !memcmp((a1)->av_val,(a2)->av_val,(a1)->av_len))

  struct AMFObjectProperty;

  /* An AMF object: an ordered list of named properties. */
  typedef struct AMFObject
  {
    int o_num;				/* number of properties */
    struct AMFObjectProperty *o_props;	/* property array */
  } AMFObject;

  /* One property of an AMFObject: a name, a type tag, and a typed value. */
  typedef struct AMFObjectProperty
  {
    AVal p_name;	/* property name (may be empty for array elements) */
    AMFDataType p_type;	/* discriminates the union below */
    union
    {
      double p_number;		/* AMF_NUMBER / AMF_BOOLEAN / AMF_DATE */
      AVal p_aval;		/* AMF_STRING / AMF_LONG_STRING */
      AMFObject p_object;	/* AMF_OBJECT / arrays */
    } p_vu;
    int16_t p_UTCoffset;	/* timezone offset, used with AMF_DATE */
  } AMFObjectProperty;

  /* Primitive encoders: write one value at *output, never past outend.
   * Each returns a pointer just past the written bytes, or NULL on overflow. */
  char *AMF_EncodeString(char *output, char *outend, const AVal * str);
  char *AMF_EncodeNumber(char *output, char *outend, double dVal);
  char *AMF_EncodeInt16(char *output, char *outend, short nVal);
  char *AMF_EncodeInt24(char *output, char *outend, int nVal);
  char *AMF_EncodeInt32(char *output, char *outend, int nVal);
  char *AMF_EncodeBoolean(char *output, char *outend, int bVal);

  /* Shortcuts for AMFProp_Encode */
  char *AMF_EncodeNamedString(char *output, char *outend, const AVal * name, const AVal * value);
  char *AMF_EncodeNamedNumber(char *output, char *outend, const AVal * name, double dVal);
  char *AMF_EncodeNamedBoolean(char *output, char *outend, const AVal * name, int bVal);

  /* Primitive decoders: read one value from big-endian wire data. */
  unsigned short AMF_DecodeInt16(const char *data);
  unsigned int AMF_DecodeInt24(const char *data);
  unsigned int AMF_DecodeInt32(const char *data);
  void AMF_DecodeString(const char *data, AVal * str);
  void AMF_DecodeLongString(const char *data, AVal * str);
  int AMF_DecodeBoolean(const char *data);
  double AMF_DecodeNumber(const char *data);

  /* Whole-object encode/decode. Decoders return bytes consumed
   * (negative on error -- see amf.c); encoders follow the pointer
   * convention of the primitive encoders above. */
  char *AMF_Encode(AMFObject * obj, char *pBuffer, char *pBufEnd);
  char *AMF_EncodeEcmaArray(AMFObject *obj, char *pBuffer, char *pBufEnd);
  char *AMF_EncodeArray(AMFObject *obj, char *pBuffer, char *pBufEnd);
  int AMF_Decode(AMFObject * obj, const char *pBuffer, int nSize,
		 int bDecodeName);
  int AMF_DecodeArray(AMFObject * obj, const char *pBuffer, int nSize,
		      int nArrayLen, int bDecodeName);
  int AMF3_Decode(AMFObject * obj, const char *pBuffer, int nSize,
		  int bDecodeName);
  void AMF_Dump(AMFObject * obj);
  void AMF_Reset(AMFObject * obj);	/* free all properties */

  /* Property-list management. */
  void AMF_AddProp(AMFObject * obj, const AMFObjectProperty * prop);
  int AMF_CountProp(AMFObject * obj);
  AMFObjectProperty *AMF_GetProp(AMFObject * obj, const AVal * name,
				 int nIndex);

  /* Accessors and mutators for a single property. */
  AMFDataType AMFProp_GetType(AMFObjectProperty * prop);
  void AMFProp_SetNumber(AMFObjectProperty * prop, double dval);
  void AMFProp_SetBoolean(AMFObjectProperty * prop, int bflag);
  void AMFProp_SetString(AMFObjectProperty * prop, AVal * str);
  void AMFProp_SetObject(AMFObjectProperty * prop, AMFObject * obj);
  void AMFProp_GetName(AMFObjectProperty * prop, AVal * name);
  void AMFProp_SetName(AMFObjectProperty * prop, AVal * name);
  double AMFProp_GetNumber(AMFObjectProperty * prop);
  int AMFProp_GetBoolean(AMFObjectProperty * prop);
  void AMFProp_GetString(AMFObjectProperty * prop, AVal * str);
  void AMFProp_GetObject(AMFObjectProperty * prop, AMFObject * obj);
  int AMFProp_IsValid(AMFObjectProperty * prop);
  char *AMFProp_Encode(AMFObjectProperty * prop, char *pBuffer, char *pBufEnd);
  int AMF3Prop_Decode(AMFObjectProperty * prop, const char *pBuffer,
		      int nSize, int bDecodeName);
  int AMFProp_Decode(AMFObjectProperty * prop, const char *pBuffer,
		     int nSize, int bDecodeName);
  void AMFProp_Dump(AMFObjectProperty * prop);
  void AMFProp_Reset(AMFObjectProperty * prop);

  /* AMF3 class definition (traits) used when decoding AMF3 objects. */
  typedef struct AMF3ClassDef
  {
    AVal cd_name;		/* class name */
    char cd_externalizable;	/* object writes its own serialization */
    char cd_dynamic;		/* object may carry extra dynamic members */
    int cd_num;			/* number of sealed property names */
    AVal *cd_props;		/* sealed property name array */
  } AMF3ClassDef;

  void AMF3CD_AddProp(AMF3ClassDef * cd, AVal * prop);
  AVal *AMF3CD_GetProp(AMF3ClassDef * cd, int idx);

#ifdef __cplusplus
}
#endif

#endif /* __AMF_H__ */

91
librtmp/bytes.h Normal file
View File

@ -0,0 +1,91 @@
/*
 * Copyright (C) 2005-2008 Team XBMC
 * http://www.xbmc.org
 * Copyright (C) 2008-2009 Andrej Stepanchuk
 * Copyright (C) 2009-2010 Howard Chu
 *
 * This file is part of librtmp.
 *
 * librtmp is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as
 * published by the Free Software Foundation; either version 2.1,
 * or (at your option) any later version.
 *
 * librtmp is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with librtmp see the file COPYING.  If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA  02110-1301, USA.
 * http://www.gnu.org/copyleft/lgpl.html
 */

#ifndef __BYTES_H__
#define __BYTES_H__

/*
 * Portable byte-order / float-word-order detection.
 * Establishes __BYTE_ORDER and __FLOAT_WORD_ORDER as either
 * __LITTLE_ENDIAN (1234) or __BIG_ENDIAN (4321) on every platform.
 */

#include <stdint.h>

#ifdef _WIN32
/* Windows is little endian only */
#define __LITTLE_ENDIAN 1234
#define __BIG_ENDIAN    4321
#define __BYTE_ORDER    __LITTLE_ENDIAN
#define __FLOAT_WORD_ORDER __BYTE_ORDER

typedef unsigned char uint8_t;

#else /* !_WIN32 */

/* On POSIX systems <sys/param.h> usually defines BYTE_ORDER et al.;
 * mirror them into the double-underscore names if those are missing. */
#include <sys/param.h>

#if defined(BYTE_ORDER) && !defined(__BYTE_ORDER)
#define __BYTE_ORDER    BYTE_ORDER
#endif

#if defined(BIG_ENDIAN) && !defined(__BIG_ENDIAN)
#define __BIG_ENDIAN	BIG_ENDIAN
#endif

#if defined(LITTLE_ENDIAN) && !defined(__LITTLE_ENDIAN)
#define __LITTLE_ENDIAN	LITTLE_ENDIAN
#endif

#endif /* !_WIN32 */

/* define default endianness */
#ifndef __LITTLE_ENDIAN
#define __LITTLE_ENDIAN	1234
#endif

#ifndef __BIG_ENDIAN
#define __BIG_ENDIAN	4321
#endif

#ifndef __BYTE_ORDER
#warning "Byte order not defined on your system, assuming little endian!"
#define __BYTE_ORDER	__LITTLE_ENDIAN
#endif

/* ok, we assume to have the same float word order and byte order if float word order is not defined */
#ifndef __FLOAT_WORD_ORDER
#warning "Float word order not defined, assuming the same as byte order!"
#define __FLOAT_WORD_ORDER	__BYTE_ORDER
#endif

/* Fail the build outright rather than miscompile byte-swapping code. */
#if !defined(__BYTE_ORDER) || !defined(__FLOAT_WORD_ORDER)
#error "Undefined byte or float word order!"
#endif

#if __FLOAT_WORD_ORDER != __BIG_ENDIAN && __FLOAT_WORD_ORDER != __LITTLE_ENDIAN
#error "Unknown/unsupported float word order!"
#endif

#if __BYTE_ORDER != __BIG_ENDIAN && __BYTE_ORDER != __LITTLE_ENDIAN
#error "Unknown/unsupported byte order!"
#endif

#endif

376
librtmp/dh.h Normal file
View File

@ -0,0 +1,376 @@
/* RTMPDump - Diffie-Hellmann Key Exchange
* Copyright (C) 2009 Andrej Stepanchuk
* Copyright (C) 2009-2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <limits.h>
#ifdef USE_POLARSSL
#include <polarssl/dhm.h>

/*
 * PolarSSL backend for the Diffie-Hellman helpers.
 * MP_t / MP_* form a thin big-number abstraction so the DH code in
 * handshake.h can be written once and compiled against PolarSSL,
 * GnuTLS/GMP, or OpenSSL.
 */
typedef mpi * MP_t;
#define MP_new(m)	m = malloc(sizeof(mpi)); mpi_init(m)
#define MP_set_w(mpi, w)	mpi_lset(mpi, w)
#define MP_cmp(u, v)	mpi_cmp_mpi(u, v)
#define MP_set(u, v)	mpi_copy(u, v)
#define MP_sub_w(mpi, w)	mpi_sub_int(mpi, mpi, w)
#define MP_cmp_1(mpi)	mpi_cmp_int(mpi, 1)
#define MP_modexp(r, y, q, p)	mpi_exp_mod(r, y, q, p, NULL)
#define MP_free(mpi)	mpi_free(mpi); free(mpi)
/* Parse a hex string into a new big number; res becomes nonzero on success. */
#define MP_gethex(u, hex, res)	MP_new(u); res = mpi_read_string(u, 16, hex) == 0
#define MP_bytes(u)	mpi_size(u)
#define MP_setbin(u,buf,len)	mpi_write_binary(u,buf,len)
#define MP_getbin(u,buf,len)	MP_new(u); mpi_read_binary(u,buf,len)

/* DH state: prime p, generator g, and the local key pair, plus the
 * PolarSSL DHM context that does the actual arithmetic. */
typedef struct MDH {
  MP_t p;
  MP_t g;
  MP_t pub_key;
  MP_t priv_key;
  long length;
  dhm_context ctx;
} MDH;

#define MDH_new()	calloc(1,sizeof(MDH))
/* Free the DHM context and all four big numbers, then the MDH itself. */
#define MDH_free(vp)	{MDH *_dh = vp; dhm_free(&_dh->ctx); MP_free(_dh->p); MP_free(_dh->g); MP_free(_dh->pub_key); MP_free(_dh->priv_key); free(_dh);}
/* Generate a DH key pair via PolarSSL's dhm_make_public().
 * Copies p and g into the DHM context, asks PolarSSL to create the
 * key pair, then mirrors the results back into dh->pub_key/priv_key.
 * Always returns 1 (PolarSSL error codes are not checked here).
 * NOTE(review): relies on the global RTMP_TLS_ctx (declared elsewhere in
 * librtmp) as the havege entropy source -- confirm it is initialized
 * before any DH handshake. */
static int MDH_generate_key(MDH *dh)
{
  unsigned char out[2];  /* scratch output; only 1 byte is requested below */
  MP_set(&dh->ctx.P, dh->p);
  MP_set(&dh->ctx.G, dh->g);
  dh->ctx.len = 128;     /* 1024-bit modulus, in bytes */
  dhm_make_public(&dh->ctx, 1024, out, 1, havege_random, &RTMP_TLS_ctx->hs);
  MP_new(dh->pub_key);
  MP_new(dh->priv_key);
  MP_set(dh->pub_key, &dh->ctx.GX);   /* GX = g^X mod P (public part) */
  MP_set(dh->priv_key, &dh->ctx.X);   /* X = private exponent */
  return 1;
}
/* Compute the shared secret (PolarSSL backend): loads the peer public
 * value into the dhm context and derives the secret into 'secret'
 * (dhm_calc_secret may update len in place).  Always returns 0; the
 * generic caller treats the requested length separately. */
static int MDH_compute_key(uint8_t *secret, size_t len, MP_t pub, MDH *dh)
{
  MP_set(&dh->ctx.GY, pub);
  dhm_calc_secret(&dh->ctx, secret, &len);
  return 0;
}
#elif defined(USE_GNUTLS)
#include <gmp.h>
#include <nettle/bignum.h>
#include <gnutls/crypto.h>
/* Multi-precision integer abstraction layer, GnuTLS/GMP backend.
 * Same generic MP_*/MDH_* names as the PolarSSL and OpenSSL branches. */
typedef mpz_ptr MP_t;
#define MP_new(m) m = malloc(sizeof(*m)); mpz_init2(m, 1)
#define MP_set_w(mpi, w) mpz_set_ui(mpi, w)
#define MP_cmp(u, v) mpz_cmp(u, v)
#define MP_set(u, v) mpz_set(u, v)
#define MP_sub_w(mpi, w) mpz_sub_ui(mpi, mpi, w)
#define MP_cmp_1(mpi) mpz_cmp_ui(mpi, 1)
#define MP_modexp(r, y, q, p) mpz_powm(r, y, q, p)
#define MP_free(mpi) mpz_clear(mpi); free(mpi)
/* Allocates, initializes, and parses a hex string; res nonzero on success. */
#define MP_gethex(u, hex, res) u = malloc(sizeof(*u)); mpz_init2(u, 1); res = (mpz_set_str(u, hex, 16) == 0)
/* Bit count rounded up to whole bytes. */
#define MP_bytes(u) (mpz_sizeinbase(u, 2) + 7) / 8
#define MP_setbin(u,buf,len) nettle_mpz_get_str_256(len,buf,u)
#define MP_getbin(u,buf,len) u = malloc(sizeof(*u)); mpz_init2(u, 1); nettle_mpz_set_str_256_u(u,len,buf)
/* Diffie-Hellman state: prime p, generator g, and our key pair. */
typedef struct MDH {
  MP_t p;
  MP_t g;
  MP_t pub_key;
  MP_t priv_key;
  long length;
} MDH;
#define MDH_new() calloc(1,sizeof(MDH))
#define MDH_free(dh) do {MP_free(((MDH*)(dh))->p); MP_free(((MDH*)(dh))->g); MP_free(((MDH*)(dh))->pub_key); MP_free(((MDH*)(dh))->priv_key); free(dh);} while(0)
/* Generate a DH key pair (GnuTLS/GMP backend).
 * Draws a random private exponent of (bytes(p) - 1) bits using a
 * Mersenne-Twister state seeded from gnutls_rnd(), then computes
 * pub_key = g^priv_key mod p.
 * Returns 1 on success, 0 on failure (size sanity check or allocation);
 * on failure dh->priv_key / dh->pub_key are left NULL so a later
 * MDH_free() is safe.
 * NOTE(review): the MT generator is seeded with only 32 bits of entropy —
 * kept as-is here since changing it alters wire behavior; confirm whether
 * this matters for your threat model. */
static int MDH_generate_key(MDH *dh)
{
  int num_bytes;
  uint32_t seed;
  gmp_randstate_t rs;
  num_bytes = (mpz_sizeinbase(dh->p, 2) + 7) / 8 - 1;
  if (num_bytes <= 0 || num_bytes > 18000)
    return 0;
  dh->priv_key = calloc(1, sizeof(*dh->priv_key));
  if (!dh->priv_key)
    return 0;
  mpz_init2(dh->priv_key, 1);
  gnutls_rnd(GNUTLS_RND_RANDOM, &seed, sizeof(seed));
  gmp_randinit_mt(rs);
  gmp_randseed_ui(rs, seed);
  mpz_urandomb(dh->priv_key, rs, num_bytes);
  gmp_randclear(rs);
  dh->pub_key = calloc(1, sizeof(*dh->pub_key));
  if (!dh->pub_key)
    {
      /* Fix: the original returned here with dh->priv_key still set and
       * dh->pub_key NULL, leaking the private key allocation and leaving
       * the struct in a state where MDH_free() would mpz_clear(NULL). */
      mpz_clear(dh->priv_key);
      free(dh->priv_key);
      dh->priv_key = NULL;
      return 0;
    }
  /* (The original re-tested dh->pub_key for NULL after mpz_init2 — that
   * branch was unreachable and has been removed.) */
  mpz_init2(dh->pub_key, 1);
  mpz_powm(dh->pub_key, dh->g, dh->priv_key, dh->p);
  return 1;
}
/* Compute the DH shared secret (GnuTLS/GMP backend):
 * secret = pub^priv_key mod p, written into 'secret' as a big-endian,
 * zero-padded string of exactly 'len' bytes.
 * Returns len on success, -1 on a size sanity-check or allocation failure. */
static int MDH_compute_key(uint8_t *secret, size_t len, MP_t pub, MDH *dh)
{
  mpz_ptr k;
  int num_bytes;
  /* Sanity-check the modulus size before doing any work. */
  num_bytes = (mpz_sizeinbase(dh->p, 2) + 7) / 8;
  if (num_bytes <= 0 || num_bytes > 18000)
    return -1;
  k = calloc(1, sizeof(*k));
  if (!k)
    return -1;
  mpz_init2(k, 1);
  mpz_powm(k, pub, dh->priv_key, dh->p);
  /* nettle_mpz_get_str_256 emits exactly 'len' bytes (left zero-padded). */
  nettle_mpz_get_str_256(len, secret, k);
  mpz_clear(k);
  free(k);
  /* return the length of the shared secret key like DH_compute_key */
  return len;
}
#else /* USE_OPENSSL */
#include <openssl/bn.h>
#include <openssl/dh.h>
/* Multi-precision integer abstraction layer, OpenSSL backend: the MP_*
 * names map directly onto BIGNUM operations and MDH onto OpenSSL's DH. */
typedef BIGNUM * MP_t;
#define MP_new(m) m = BN_new()
#define MP_set_w(mpi, w) BN_set_word(mpi, w)
#define MP_cmp(u, v) BN_cmp(u, v)
#define MP_set(u, v) BN_copy(u, v)
#define MP_sub_w(mpi, w) BN_sub_word(mpi, w)
#define MP_cmp_1(mpi) BN_cmp(mpi, BN_value_one())
/* Modular exponentiation needs a scratch BN_CTX; created and freed per call. */
#define MP_modexp(r, y, q, p) do {BN_CTX *ctx = BN_CTX_new(); BN_mod_exp(r, y, q, p, ctx); BN_CTX_free(ctx);} while(0)
#define MP_free(mpi) BN_free(mpi)
/* BN_hex2bn allocates *u itself; res is the number of hex digits parsed
 * (0 on failure), matching the other backends' success convention. */
#define MP_gethex(u, hex, res) res = BN_hex2bn(&u, hex)
#define MP_bytes(u) BN_num_bytes(u)
#define MP_setbin(u,buf,len) BN_bn2bin(u,buf)
#define MP_getbin(u,buf,len) u = BN_bin2bn(buf,len,0)
#define MDH DH
#define MDH_new() DH_new()
#define MDH_free(dh) DH_free(dh)
#define MDH_generate_key(dh) DH_generate_key(dh)
#define MDH_compute_key(secret, seclen, pub, dh) DH_compute_key(secret, pub, dh)
#endif
#include "log.h"
#include "dhgroups.h"
/* RFC 2631, Section 2.1.5, http://www.ietf.org/rfc/rfc2631.txt */
/* Validate a peer DH public value per RFC 2631, Section 2.1.5:
 * y must lie in [2, p-2], and when the Sophie-Germain factor q is
 * supplied, y^q mod p should equal 1 (warning only, as random data
 * fails that test about half the time).
 * Returns TRUE if y is acceptable, FALSE otherwise. */
static int
isValidPublicKey(MP_t y, MP_t p, MP_t q)
{
  int ret = TRUE;
  MP_t bn;
  assert(y);
  MP_new(bn);
  assert(bn);
  /* y must lie in [2,p-2].  Fix: the original compared against 1 and
   * p-1, accepting the degenerate values y == 1 and y == p-1 that its
   * own error messages claimed to reject. */
  MP_set_w(bn, 2);
  if (MP_cmp(y, bn) < 0)
    {
      RTMP_Log(RTMP_LOGERROR, "DH public key must be at least 2");
      ret = FALSE;
      goto failed;
    }
  /* bn = p-2 */
  MP_set(bn, p);
  MP_sub_w(bn, 2);
  if (MP_cmp(y, bn) > 0)
    {
      RTMP_Log(RTMP_LOGERROR, "DH public key must be at most p-2");
      ret = FALSE;
      goto failed;
    }
  /* Verify with Sophie-Germain prime
   *
   * This is a nice test to make sure the public key position is calculated
   * correctly. This test will fail in about 50% of the cases if applied to
   * random data.
   */
  if (q)
    {
      /* y must fulfill y^q mod p = 1 */
      MP_modexp(bn, y, q, p);
      if (MP_cmp_1(bn) != 0)
	{
	  /* deliberately a warning, not a rejection — see comment above */
	  RTMP_Log(RTMP_LOGWARNING, "DH public key does not fulfill y^q mod p = 1");
	}
    }
failed:
  MP_free(bn);
  return ret;
}
/* Allocate and initialize a DH context with the well-known RFC 3526
 * 1024-bit prime (P1024) as modulus and 2 as generator.
 * nKeyBits is recorded in dh->length.  Returns the new context, or 0
 * on any allocation/parse failure (partial state is freed). */
static MDH *
DHInit(int nKeyBits)
{
  size_t parsed;
  MDH *ctx = MDH_new();

  if (ctx)
    {
      MP_new(ctx->g);
      if (ctx->g)
	{
	  MP_gethex(ctx->p, P1024, parsed);	/* prime P1024, see dhgroups.h */
	  if (parsed)
	    {
	      MP_set_w(ctx->g, 2);	/* base 2 */
	      ctx->length = nKeyBits;
	      return ctx;
	    }
	}
      /* some step failed — release whatever was built so far */
      MDH_free(ctx);
    }
  return 0;
}
/* Generate a DH key pair, retrying until the resulting public value
 * passes isValidPublicKey() against the group order factor Q1024.
 * Returns 1 on success, 0 if dh is NULL or key generation fails. */
static int
DHGenerateKey(MDH *dh)
{
  if (!dh)
    return 0;

  for (;;)
    {
      size_t parsed;
      int valid;
      MP_t q1 = NULL;

      if (!MDH_generate_key(dh))
	return 0;

      MP_gethex(q1, Q1024, parsed);
      assert(parsed);

      valid = isValidPublicKey(dh->pub_key, dh->p, q1);
      if (!valid)
	{
	  /* discard the rejected pair and loop to draw a fresh one */
	  MP_free(dh->pub_key);
	  MP_free(dh->priv_key);
	  dh->pub_key = dh->priv_key = 0;
	}
      MP_free(q1);

      if (valid)
	return 1;
    }
}
/* fill pubkey with the public key in BIG ENDIAN order
* 00 00 00 00 00 x1 x2 x3 .....
*/
/* Serialize our DH public key into pubkey as a big-endian value,
 * right-aligned in the buffer and left-padded with zero bytes
 * (00 00 ... x1 x2 x3 ...).  Returns 1 on success, 0 if dh or the key
 * is missing or the value does not fit in nPubkeyLen bytes. */
static int
DHGetPublicKey(MDH *dh, uint8_t *pubkey, size_t nPubkeyLen)
{
  int keyBytes;

  if (!dh)
    return 0;
  if (!dh->pub_key)
    return 0;

  keyBytes = MP_bytes(dh->pub_key);
  if (keyBytes <= 0)
    return 0;
  if (keyBytes > (int) nPubkeyLen)
    return 0;

  /* zero the whole buffer, then drop the value at its tail */
  memset(pubkey, 0, nPubkeyLen);
  MP_setbin(dh->pub_key, pubkey + (nPubkeyLen - keyBytes), keyBytes);
  return 1;
}
#if 0 /* unused */
/* (Dead code — compiled out by the enclosing #if 0.)
 * Mirror of DHGetPublicKey for the private key: serializes it
 * big-endian, right-aligned and zero-padded into privkey.
 * Returns 1 on success, 0 on missing key or insufficient buffer. */
static int
DHGetPrivateKey(MDH *dh, uint8_t *privkey, size_t nPrivkeyLen)
{
  if (!dh || !dh->priv_key)
    return 0;
  int len = MP_bytes(dh->priv_key);
  if (len <= 0 || len > (int) nPrivkeyLen)
    return 0;
  /* left-pad with zeros, value at the tail of the buffer */
  memset(privkey, 0, nPrivkeyLen);
  MP_setbin(dh->priv_key, privkey + (nPrivkeyLen - len), len);
  return 1;
}
#endif
/* computes the shared secret key from the private MDH value and the
* other party's public key (pubkey)
*/
/* Compute the shared secret from our private key and the peer's public
 * key (pubkey, nPubkeyLen bytes, big-endian).  The peer value is
 * validated with isValidPublicKey() before use.
 * Returns the secret length (backend-dependent) on success, -1 on bad
 * arguments, allocation failure, or an invalid peer key. */
static int
DHComputeSharedSecretKey(MDH *dh, uint8_t *pubkey, size_t nPubkeyLen,
			 uint8_t *secret)
{
  MP_t q1 = NULL, pubkeyBn = NULL;
  size_t len;
  int res;
  if (!dh || !secret || nPubkeyLen >= INT_MAX)
    return -1;
  /* import the peer's raw big-endian public value */
  MP_getbin(pubkeyBn, pubkey, nPubkeyLen);
  if (!pubkeyBn)
    return -1;
  /* group-order factor used by the Sophie-Germain sanity check */
  MP_gethex(q1, Q1024, len);
  assert(len);
  if (isValidPublicKey(pubkeyBn, dh->p, q1))
    res = MDH_compute_key(secret, nPubkeyLen, pubkeyBn, dh);
  else
    res = -1;
  MP_free(q1);
  MP_free(pubkeyBn);
  return res;
}

199
librtmp/dhgroups.h Normal file
View File

@ -0,0 +1,199 @@
/* librtmp - Diffie-Hellman Key Exchange
* Copyright (C) 2009 Andrej Stepanchuk
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
/* from RFC 3526, see http://www.ietf.org/rfc/rfc3526.txt */
/* 2^768 - 2 ^704 - 1 + 2^64 * { [2^638 pi] + 149686 } */
#define P768 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A63A3620FFFFFFFFFFFFFFFF"
/* 2^1024 - 2^960 - 1 + 2^64 * { [2^894 pi] + 129093 } */
#define P1024 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381" \
"FFFFFFFFFFFFFFFF"
/* Group order largest prime factor: */
#define Q1024 \
"7FFFFFFFFFFFFFFFE487ED5110B4611A62633145C06E0E68" \
"948127044533E63A0105DF531D89CD9128A5043CC71A026E" \
"F7CA8CD9E69D218D98158536F92F8A1BA7F09AB6B6A8E122" \
"F242DABB312F3F637A262174D31BF6B585FFAE5B7A035BF6" \
"F71C35FDAD44CFD2D74F9208BE258FF324943328F67329C0" \
"FFFFFFFFFFFFFFFF"
/* 2^1536 - 2^1472 - 1 + 2^64 * { [2^1406 pi] + 741804 } */
#define P1536 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA237327FFFFFFFFFFFFFFFF"
/* 2^2048 - 2^1984 - 1 + 2^64 * { [2^1918 pi] + 124476 } */
#define P2048 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA18217C32905E462E36CE3B" \
"E39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9" \
"DE2BCBF6955817183995497CEA956AE515D2261898FA0510" \
"15728E5A8AACAA68FFFFFFFFFFFFFFFF"
/* 2^3072 - 2^3008 - 1 + 2^64 * { [2^2942 pi] + 1690314 } */
#define P3072 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA18217C32905E462E36CE3B" \
"E39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9" \
"DE2BCBF6955817183995497CEA956AE515D2261898FA0510" \
"15728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64" \
"ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7" \
"ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6B" \
"F12FFA06D98A0864D87602733EC86A64521F2B18177B200C" \
"BBE117577A615D6C770988C0BAD946E208E24FA074E5AB31" \
"43DB5BFCE0FD108E4B82D120A93AD2CAFFFFFFFFFFFFFFFF"
/* 2^4096 - 2^4032 - 1 + 2^64 * { [2^3966 pi] + 240904 } */
#define P4096 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA18217C32905E462E36CE3B" \
"E39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9" \
"DE2BCBF6955817183995497CEA956AE515D2261898FA0510" \
"15728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64" \
"ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7" \
"ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6B" \
"F12FFA06D98A0864D87602733EC86A64521F2B18177B200C" \
"BBE117577A615D6C770988C0BAD946E208E24FA074E5AB31" \
"43DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D7" \
"88719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA" \
"2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6" \
"287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED" \
"1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA9" \
"93B4EA988D8FDDC186FFB7DC90A6C08F4DF435C934063199" \
"FFFFFFFFFFFFFFFF"
/* 2^6144 - 2^6080 - 1 + 2^64 * { [2^6014 pi] + 929484 } */
#define P6144 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA18217C32905E462E36CE3B" \
"E39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9" \
"DE2BCBF6955817183995497CEA956AE515D2261898FA0510" \
"15728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64" \
"ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7" \
"ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6B" \
"F12FFA06D98A0864D87602733EC86A64521F2B18177B200C" \
"BBE117577A615D6C770988C0BAD946E208E24FA074E5AB31" \
"43DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D7" \
"88719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA" \
"2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6" \
"287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED" \
"1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA9" \
"93B4EA988D8FDDC186FFB7DC90A6C08F4DF435C934028492" \
"36C3FAB4D27C7026C1D4DCB2602646DEC9751E763DBA37BD" \
"F8FF9406AD9E530EE5DB382F413001AEB06A53ED9027D831" \
"179727B0865A8918DA3EDBEBCF9B14ED44CE6CBACED4BB1B" \
"DB7F1447E6CC254B332051512BD7AF426FB8F401378CD2BF" \
"5983CA01C64B92ECF032EA15D1721D03F482D7CE6E74FEF6" \
"D55E702F46980C82B5A84031900B1C9E59E7C97FBEC7E8F3" \
"23A97A7E36CC88BE0F1D45B7FF585AC54BD407B22B4154AA" \
"CC8F6D7EBF48E1D814CC5ED20F8037E0A79715EEF29BE328" \
"06A1D58BB7C5DA76F550AA3D8A1FBFF0EB19CCB1A313D55C" \
"DA56C9EC2EF29632387FE8D76E3C0468043E8F663F4860EE" \
"12BF2D5B0B7474D6E694F91E6DCC4024FFFFFFFFFFFFFFFF"
/* 2^8192 - 2^8128 - 1 + 2^64 * { [2^8062 pi] + 4743158 } */
#define P8192 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA18217C32905E462E36CE3B" \
"E39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9" \
"DE2BCBF6955817183995497CEA956AE515D2261898FA0510" \
"15728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64" \
"ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7" \
"ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6B" \
"F12FFA06D98A0864D87602733EC86A64521F2B18177B200C" \
"BBE117577A615D6C770988C0BAD946E208E24FA074E5AB31" \
"43DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D7" \
"88719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA" \
"2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6" \
"287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED" \
"1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA9" \
"93B4EA988D8FDDC186FFB7DC90A6C08F4DF435C934028492" \
"36C3FAB4D27C7026C1D4DCB2602646DEC9751E763DBA37BD" \
"F8FF9406AD9E530EE5DB382F413001AEB06A53ED9027D831" \
"179727B0865A8918DA3EDBEBCF9B14ED44CE6CBACED4BB1B" \
"DB7F1447E6CC254B332051512BD7AF426FB8F401378CD2BF" \
"5983CA01C64B92ECF032EA15D1721D03F482D7CE6E74FEF6" \
"D55E702F46980C82B5A84031900B1C9E59E7C97FBEC7E8F3" \
"23A97A7E36CC88BE0F1D45B7FF585AC54BD407B22B4154AA" \
"CC8F6D7EBF48E1D814CC5ED20F8037E0A79715EEF29BE328" \
"06A1D58BB7C5DA76F550AA3D8A1FBFF0EB19CCB1A313D55C" \
"DA56C9EC2EF29632387FE8D76E3C0468043E8F663F4860EE" \
"12BF2D5B0B7474D6E694F91E6DBE115974A3926F12FEE5E4" \
"38777CB6A932DF8CD8BEC4D073B931BA3BC832B68D9DD300" \
"741FA7BF8AFC47ED2576F6936BA424663AAB639C5AE4F568" \
"3423B4742BF1C978238F16CBE39D652DE3FDB8BEFC848AD9" \
"22222E04A4037C0713EB57A81A23F0C73473FC646CEA306B" \
"4BCBC8862F8385DDFA9D4B7FA2C087E879683303ED5BDD3A" \
"062B3CF5B3A278A66D2A13F83F44F82DDF310EE074AB6A36" \
"4597E899A0255DC164F31CC50846851DF9AB48195DED7EA1" \
"B1D510BD7EE74D73FAF36BC31ECFA268359046F4EB879F92" \
"4009438B481C6CD7889A002ED5EE382BC9190DA6FC026E47" \
"9558E4475677E9AA9E3050E2765694DFC81F56E880B96E71" \
"60C980DD98EDD3DFFFFFFFFFFFFFFFFF"

1419
librtmp/handshake.h Normal file

File diff suppressed because it is too large Load Diff

665
librtmp/hashswf.c Normal file
View File

@ -0,0 +1,665 @@
/*
* Copyright (C) 2009-2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
#include <time.h>
#include "rtmp_sys.h"
#include "log.h"
#include "http.h"
#ifdef CRYPTO
#ifdef USE_POLARSSL
#include <polarssl/sha2.h>
#ifndef SHA256_DIGEST_LENGTH
#define SHA256_DIGEST_LENGTH 32
#endif
#define HMAC_CTX sha2_context
#define HMAC_setup(ctx, key, len) sha2_hmac_starts(&ctx, (unsigned char *)key, len, 0)
#define HMAC_crunch(ctx, buf, len) sha2_hmac_update(&ctx, buf, len)
#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; sha2_hmac_finish(&ctx, dig)
#define HMAC_close(ctx)
#elif defined(USE_GNUTLS)
#include <nettle/hmac.h>
#ifndef SHA256_DIGEST_LENGTH
#define SHA256_DIGEST_LENGTH 32
#endif
#undef HMAC_CTX
#define HMAC_CTX struct hmac_sha256_ctx
#define HMAC_setup(ctx, key, len) hmac_sha256_set_key(&ctx, len, key)
#define HMAC_crunch(ctx, buf, len) hmac_sha256_update(&ctx, len, buf)
#define HMAC_finish(ctx, dig, dlen) dlen = SHA256_DIGEST_LENGTH; hmac_sha256_digest(&ctx, SHA256_DIGEST_LENGTH, dig)
#define HMAC_close(ctx)
#else /* USE_OPENSSL */
#include <openssl/ssl.h>
#include <openssl/sha.h>
#include <openssl/hmac.h>
#include <openssl/rc4.h>
#define HMAC_setup(ctx, key, len) HMAC_CTX_init(&ctx); HMAC_Init_ex(&ctx, (unsigned char *)key, len, EVP_sha256(), 0)
#define HMAC_crunch(ctx, buf, len) HMAC_Update(&ctx, (unsigned char *)buf, len)
#define HMAC_finish(ctx, dig, dlen) HMAC_Final(&ctx, (unsigned char *)dig, &dlen);
#define HMAC_close(ctx) HMAC_CTX_cleanup(&ctx)
#endif
extern void RTMP_TLS_Init();
extern TLS_CTX RTMP_TLS_ctx;
#include <zlib.h>
#include "strncasecmp.h"
#endif /* CRYPTO */
#define AGENT "Mozilla/5.0"
/* Minimal blocking HTTP/1.0 GET client used to fetch SWF files for
 * hashing.  Streams the response body to 'cb' chunk by chunk
 * (accumulating http->size), honors If-Modified-Since via http->date,
 * and records the numeric status in http->status.
 * Supports https when built with CRYPTO (via the RTMP TLS context).
 * Returns an HTTPResult code; HTTPRES_OK / HTTPRES_OK_NOT_MODIFIED on
 * success. */
HTTPResult
HTTP_get(struct HTTP_ctx *http, const char *url, HTTP_read_callback *cb)
{
  char *host, *path;
  char *p1, *p2;
  char hbuf[256];
  int port = 80;
#ifdef CRYPTO
  int ssl = 0;
#endif
  int hlen, flen = 0;
  int rc, i;
  int len_known;
  HTTPResult ret = HTTPRES_OK;
  struct sockaddr_in sa;
  RTMPSockBuf sb = {0};
  http->status = -1;
  memset(&sa, 0, sizeof(struct sockaddr_in));
  sa.sin_family = AF_INET;
  /* we only handle http here */
  if (strncasecmp(url, "http", 4))
    return HTTPRES_BAD_REQUEST;
  /* "https" switches on TLS and the default port */
  if (url[4] == 's')
    {
#ifdef CRYPTO
      ssl = 1;
      port = 443;
      if (!RTMP_TLS_ctx)
	RTMP_TLS_Init();
#else
      return HTTPRES_BAD_REQUEST;
#endif
    }
  p1 = strchr(url + 4, ':');
  if (!p1 || strncmp(p1, "://", 3))
    return HTTPRES_BAD_REQUEST;
  /* split "host[:port]" out of the URL into hbuf */
  host = p1 + 3;
  path = strchr(host, '/');
  /* NOTE(review): if the URL has no '/' after the host, path is NULL and
   * the next line computes a garbage length — confirm callers always pass
   * a URL with a path.  hlen is also not checked against sizeof(hbuf). */
  hlen = path - host;
  strncpy(hbuf, host, hlen);
  hbuf[hlen] = '\0';
  host = hbuf;
  p1 = strrchr(host, ':');
  if (p1)
    {
      *p1++ = '\0';
      port = atoi(p1);
    }
  /* resolve: try dotted-quad first, then DNS */
  sa.sin_addr.s_addr = inet_addr(host);
  if (sa.sin_addr.s_addr == INADDR_NONE)
    {
      struct hostent *hp = gethostbyname(host);
      if (!hp || !hp->h_addr)
	return HTTPRES_LOST_CONNECTION;
      sa.sin_addr = *(struct in_addr *)hp->h_addr;
    }
  sa.sin_port = htons(port);
  sb.sb_socket = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
  if (sb.sb_socket == -1)
    return HTTPRES_LOST_CONNECTION;
  /* build the request; Referer is the URL up to (and including) the
   * first path character */
  i =
    sprintf(sb.sb_buf,
	    "GET %s HTTP/1.0\r\nUser-Agent: %s\r\nHost: %s\r\nReferer: %.*s\r\n",
	    path, AGENT, host, (int)(path - url + 1), url);
  if (http->date[0])
    i += sprintf(sb.sb_buf + i, "If-Modified-Since: %s\r\n", http->date);
  i += sprintf(sb.sb_buf + i, "\r\n");
  if (connect
      (sb.sb_socket, (struct sockaddr *)&sa, sizeof(struct sockaddr)) < 0)
    {
      ret = HTTPRES_LOST_CONNECTION;
      goto leave;
    }
#ifdef CRYPTO
  if (ssl)
    {
#ifdef NO_SSL
      RTMP_Log(RTMP_LOGERROR, "%s, No SSL/TLS support", __FUNCTION__);
      ret = HTTPRES_BAD_REQUEST;
      goto leave;
#else
      TLS_client(RTMP_TLS_ctx, sb.sb_ssl);
      TLS_setfd(sb.sb_ssl, sb.sb_socket);
      if (TLS_connect(sb.sb_ssl) < 0)
	{
	  RTMP_Log(RTMP_LOGERROR, "%s, TLS_Connect failed", __FUNCTION__);
	  ret = HTTPRES_LOST_CONNECTION;
	  goto leave;
	}
#endif
    }
#endif
  RTMPSockBuf_Send(&sb, sb.sb_buf, i);
  /* set timeout */
#define HTTP_TIMEOUT 5
  {
    SET_RCVTIMEO(tv, HTTP_TIMEOUT);
    if (setsockopt
	(sb.sb_socket, SOL_SOCKET, SO_RCVTIMEO, (char *)&tv, sizeof(tv)))
      {
	RTMP_Log(RTMP_LOGERROR, "%s, Setting socket timeout to %ds failed!",
		 __FUNCTION__, HTTP_TIMEOUT);
      }
  }
  sb.sb_size = 0;
  sb.sb_timedout = FALSE;
  if (RTMPSockBuf_Fill(&sb) < 1)
    {
      ret = HTTPRES_LOST_CONNECTION;
      goto leave;
    }
  if (strncmp(sb.sb_buf, "HTTP/1", 6))
    {
      ret = HTTPRES_BAD_REQUEST;
      goto leave;
    }
  /* status code follows the first space of the status line */
  p1 = strchr(sb.sb_buf, ' ');
  rc = atoi(p1 + 1);
  http->status = rc;
  if (rc >= 300)
    {
      if (rc == 304)
	{
	  ret = HTTPRES_OK_NOT_MODIFIED;
	  goto leave;
	}
      else if (rc == 404)
	ret = HTTPRES_NOT_FOUND;
      else if (rc >= 500)
	ret = HTTPRES_SERVER_ERROR;
      else if (rc >= 400)
	ret = HTTPRES_BAD_REQUEST;
      else
	ret = HTTPRES_REDIRECTED;
    }
  /* skip past the status line */
  p1 = memchr(sb.sb_buf, '\n', sb.sb_size);
  if (!p1)
    {
      ret = HTTPRES_BAD_REQUEST;
      goto leave;
    }
  sb.sb_start = p1 + 1;
  sb.sb_size -= sb.sb_start - sb.sb_buf;
  /* header loop: a line starting with '\r' is the blank line that ends
   * the headers; otherwise pick out Content-Length and Last-Modified */
  while ((p2 = memchr(sb.sb_start, '\r', sb.sb_size)))
    {
      if (*sb.sb_start == '\r')
	{
	  sb.sb_start += 2;
	  sb.sb_size -= 2;
	  break;
	}
      else
	if (!strncasecmp
	    (sb.sb_start, "Content-Length: ", sizeof("Content-Length: ") - 1))
	{
	  flen = atoi(sb.sb_start + sizeof("Content-Length: ") - 1);
	}
      else
	if (!strncasecmp
	    (sb.sb_start, "Last-Modified: ", sizeof("Last-Modified: ") - 1))
	{
	  *p2 = '\0';
	  strcpy(http->date, sb.sb_start + sizeof("Last-Modified: ") - 1);
	}
      p2 += 2;
      sb.sb_size -= p2 - sb.sb_start;
      sb.sb_start = p2;
      if (sb.sb_size < 1)
	{
	  if (RTMPSockBuf_Fill(&sb) < 1)
	    {
	      ret = HTTPRES_LOST_CONNECTION;
	      goto leave;
	    }
	}
    }
  /* body loop: with a known Content-Length, stop when flen is consumed;
   * otherwise read until the peer closes the connection */
  len_known = flen > 0;
  while ((!len_known || flen > 0) &&
	 (sb.sb_size > 0 || RTMPSockBuf_Fill(&sb) > 0))
    {
      cb(sb.sb_start, 1, sb.sb_size, http->data);
      if (len_known)
	flen -= sb.sb_size;
      http->size += sb.sb_size;
      sb.sb_size = 0;
    }
  /* connection dropped before the advertised length arrived */
  if (flen > 0)
    ret = HTTPRES_LOST_CONNECTION;
leave:
  RTMPSockBuf_Close(&sb);
  return ret;
}
#ifdef CRYPTO
#define CHUNK 16384
/* Per-download state threaded through swfcrunch() while hashing a SWF. */
struct info
{
  z_stream *zs;		/* zlib stream, used when the SWF is compressed */
  HMAC_CTX ctx;		/* running HMAC-SHA256 over the (decompressed) data */
  int first;		/* nonzero until the first chunk has been seen */
  int zlib;		/* set when the SWF header was "CWS" (compressed) */
  int size;		/* decompressed byte count hashed so far */
};
/* HTTP_read_callback that HMACs a SWF file as it downloads.
 * On the first chunk it inspects the 8-byte header: a "CWS" signature
 * marks a zlib-compressed body (rewritten to 'F' so the hash covers the
 * uncompressed form); the header itself is hashed verbatim.  Compressed
 * bodies are inflated through i->zs and the inflated bytes hashed.
 * Always reports the full input length as consumed. */
static size_t
swfcrunch(void *ptr, size_t size, size_t nmemb, void *stream)
{
  struct info *i = stream;
  char *p = ptr;
  size_t len = size * nmemb;
  if (i->first)
    {
      i->first = 0;
      /* compressed? */
      if (!strncmp(p, "CWS", 3))
	{
	  *p = 'F';
	  i->zlib = 1;
	}
      /* hash the 8-byte header as-is, then skip past it */
      HMAC_crunch(i->ctx, (unsigned char *)p, 8);
      p += 8;
      len -= 8;
      i->size = 8;
    }
  if (i->zlib)
    {
      unsigned char out[CHUNK];
      i->zs->next_in = (unsigned char *)p;
      i->zs->avail_in = len;
      /* inflate the chunk in CHUNK-sized pieces, hashing each piece.
       * NOTE(review): the inflate() return code is ignored — a corrupt
       * stream would silently produce a short/garbage hash. */
      do
	{
	  i->zs->avail_out = CHUNK;
	  i->zs->next_out = out;
	  inflate(i->zs, Z_NO_FLUSH);
	  len = CHUNK - i->zs->avail_out;
	  i->size += len;
	  HMAC_crunch(i->ctx, out, len);
	}
      while (i->zs->avail_out == 0);
    }
  else
    {
      i->size += len;
      HMAC_crunch(i->ctx, (unsigned char *)p, len);
    }
  return size * nmemb;
}
static int tzoff;
static int tzchecked;
#define JAN02_1980 318340800
static const char *monthtab[12] = { "Jan", "Feb", "Mar",
"Apr", "May", "Jun",
"Jul", "Aug", "Sep",
"Oct", "Nov", "Dec"
};
static const char *days[] =
{ "Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat" };
/* Parse an HTTP datestamp into Unix time */
static time_t
make_unix_time(char *s)
{
struct tm time;
int i, ysub = 1900, fmt = 0;
char *month;
char *n;
time_t res;
if (s[3] != ' ')
{
fmt = 1;
if (s[3] != ',')
ysub = 0;
}
for (n = s; *n; ++n)
if (*n == '-' || *n == ':')
*n = ' ';
time.tm_mon = 0;
n = strchr(s, ' ');
if (fmt)
{
/* Day, DD-MMM-YYYY HH:MM:SS GMT */
time.tm_mday = strtol(n + 1, &n, 0);
month = n + 1;
n = strchr(month, ' ');
time.tm_year = strtol(n + 1, &n, 0);
time.tm_hour = strtol(n + 1, &n, 0);
time.tm_min = strtol(n + 1, &n, 0);
time.tm_sec = strtol(n + 1, NULL, 0);
}
else
{
/* Unix ctime() format. Does not conform to HTTP spec. */
/* Day MMM DD HH:MM:SS YYYY */
month = n + 1;
n = strchr(month, ' ');
while (isspace(*n))
n++;
time.tm_mday = strtol(n, &n, 0);
time.tm_hour = strtol(n + 1, &n, 0);
time.tm_min = strtol(n + 1, &n, 0);
time.tm_sec = strtol(n + 1, &n, 0);
time.tm_year = strtol(n + 1, NULL, 0);
}
if (time.tm_year > 100)
time.tm_year -= ysub;
for (i = 0; i < 12; i++)
if (!strncasecmp(month, monthtab[i], 3))
{
time.tm_mon = i;
break;
}
time.tm_isdst = 0; /* daylight saving is never in effect in GMT */
/* this is normally the value of extern int timezone, but some
* braindead C libraries don't provide it.
*/
if (!tzchecked)
{
struct tm *tc;
time_t then = JAN02_1980;
tc = localtime(&then);
tzoff = (12 - tc->tm_hour) * 3600 + tc->tm_min * 60 + tc->tm_sec;
tzchecked = 1;
}
res = mktime(&time);
/* Unfortunately, mktime() assumes the input is in local time,
* not GMT, so we have to correct it here.
*/
if (res != -1)
res += tzoff;
return res;
}
/* Convert a Unix time to a network time string
* Weekday, DD-MMM-YYYY HH:MM:SS GMT
*/
/* Format *t as a network (HTTP) datestamp into s:
 * "Weekday, DD MMM YYYY HH:MM:SS GMT".  Caller supplies the buffer. */
static void
strtime(time_t * t, char *s)
{
  const struct tm *g = gmtime(t);

  sprintf(s, "%s, %02d %s %d %02d:%02d:%02d GMT",
	  days[g->tm_wday], g->tm_mday, monthtab[g->tm_mon],
	  g->tm_year + 1900, g->tm_hour, g->tm_min, g->tm_sec);
}
#define HEX2BIN(a) (((a)&0x40)?((a)&0xf)+9:((a)&0xf))
/* Compute (or fetch from cache) the SHA256-HMAC of the SWF at 'url'.
 * On success writes the decompressed size to *size and the 32-byte
 * digest to 'hash', and returns 0; returns -1 on download/file errors.
 * Results are cached in $HOME/.swfinfo; 'age' is the cache validity in
 * days (0 forces a re-check via If-Modified-Since). */
int
RTMP_HashSWF(const char *url, unsigned int *size, unsigned char *hash,
	     int age)
{
  FILE *f = NULL;
  char *path, date[64], cctim[64];
  long pos = 0;
  time_t ctim = -1, cnow;
  int i, got = 0, ret = 0;
  unsigned int hlen;
  struct info in = { 0 };
  struct HTTP_ctx http = { 0 };
  HTTPResult httpres;
  z_stream zs = { 0 };
  AVal home, hpre;
  date[0] = '\0';
  /* locate the user's home directory for the cache file */
#ifdef _WIN32
#ifdef XBMC4XBOX
  hpre.av_val = "Q:";
  hpre.av_len = 2;
  home.av_val = "\\UserData";
#else
  hpre.av_val = getenv("HOMEDRIVE");
  hpre.av_len = strlen(hpre.av_val);
  home.av_val = getenv("HOMEPATH");
#endif
#define DIRSEP "\\"
#else /* !_WIN32 */
  hpre.av_val = "";
  hpre.av_len = 0;
  home.av_val = getenv("HOME");
#define DIRSEP "/"
#endif
  if (!home.av_val)
    home.av_val = ".";
  home.av_len = strlen(home.av_val);
  /* SWF hash info is cached in a fixed-format file.
   * url: <url of SWF file>
   * ctim: HTTP datestamp of when we last checked it.
   * date: HTTP datestamp of the SWF's last modification.
   * size: SWF size in hex
   * hash: SWF hash in hex
   *
   * These fields must be present in this order. All fields
   * besides URL are fixed size.
   */
  /* NOTE(review): malloc/sprintf results are not checked here — a failed
   * allocation would crash in sprintf. */
  path = malloc(hpre.av_len + home.av_len + sizeof(DIRSEP ".swfinfo"));
  sprintf(path, "%s%s" DIRSEP ".swfinfo", hpre.av_val, home.av_val);
  f = fopen(path, "r+");
  /* single-pass "while" used as a breakable block: look for a cached
   * record whose url prefix and trailing filename both match */
  while (f)
    {
      char buf[4096], *file, *p;
      file = strchr(url, '/');
      if (!file)
	break;
      file += 2;
      file = strchr(file, '/');
      if (!file)
	break;
      file++;
      hlen = file - url;
      p = strrchr(file, '/');
      if (p)
	file = p;
      else
	file--;
      while (fgets(buf, sizeof(buf), f))
	{
	  char *r1;
	  got = 0;
	  if (strncmp(buf, "url: ", 5))
	    continue;
	  if (strncmp(buf + 5, url, hlen))
	    continue;
	  r1 = strrchr(buf, '/');
	  i = strlen(r1);
	  r1[--i] = '\0';
	  if (strncmp(r1, file, i))
	    continue;
	  /* matching record found: remember where its fields start and
	   * read up to the four fixed fields that follow */
	  pos = ftell(f);
	  while (got < 4 && fgets(buf, sizeof(buf), f))
	    {
	      if (!strncmp(buf, "size: ", 6))
		{
		  *size = strtol(buf + 6, NULL, 16);
		  got++;
		}
	      else if (!strncmp(buf, "hash: ", 6))
		{
		  /* decode the hex digest (note: 'in' here shadows the
		   * outer struct info variable) */
		  unsigned char *ptr = hash, *in = (unsigned char *)buf + 6;
		  int l = strlen((char *)in) - 1;
		  for (i = 0; i < l; i += 2)
		    *ptr++ = (HEX2BIN(in[i]) << 4) | HEX2BIN(in[i + 1]);
		  got++;
		}
	      else if (!strncmp(buf, "date: ", 6))
		{
		  buf[strlen(buf) - 1] = '\0';
		  /* NOTE(review): strncpy with sizeof(date) may leave
		   * 'date' unterminated if the field is >= 64 chars */
		  strncpy(date, buf + 6, sizeof(date));
		  got++;
		}
	      else if (!strncmp(buf, "ctim: ", 6))
		{
		  buf[strlen(buf) - 1] = '\0';
		  ctim = make_unix_time(buf + 6);
		  got++;
		}
	      else if (!strncmp(buf, "url: ", 5))
		break;
	    }
	  break;
	}
      break;
    }
  cnow = time(NULL);
  /* If we got a cache time, see if it's young enough to use directly */
  if (age && ctim > 0)
    {
      ctim = cnow - ctim;
      ctim /= 3600 * 24;	/* seconds to days */
      if (ctim < age)		/* ok, it's new enough */
	goto out;
    }
  /* download (or revalidate) and hash the SWF via swfcrunch() */
  in.first = 1;
  HMAC_setup(in.ctx, "Genuine Adobe Flash Player 001", 30);
  inflateInit(&zs);
  in.zs = &zs;
  http.date = date;
  http.data = &in;
  httpres = HTTP_get(&http, url, swfcrunch);
  inflateEnd(&zs);
  if (httpres != HTTPRES_OK && httpres != HTTPRES_OK_NOT_MODIFIED)
    {
      ret = -1;
      if (httpres == HTTPRES_LOST_CONNECTION)
	RTMP_Log(RTMP_LOGERROR, "%s: connection lost while downloading swfurl %s",
		 __FUNCTION__, url);
      else if (httpres == HTTPRES_NOT_FOUND)
	RTMP_Log(RTMP_LOGERROR, "%s: swfurl %s not found", __FUNCTION__, url);
      else
	RTMP_Log(RTMP_LOGERROR, "%s: couldn't contact swfurl %s (HTTP error %d)",
		 __FUNCTION__, url, http.status);
    }
  else
    {
      /* rewrite the record in place when found, else append a new one */
      if (got && pos)
	fseek(f, pos, SEEK_SET);
      else
	{
	  char *q;
	  if (!f)
	    f = fopen(path, "w");
	  if (!f)
	    {
	      int err = errno;
	      RTMP_Log(RTMP_LOGERROR,
		       "%s: couldn't open %s for writing, errno %d (%s)",
		       __FUNCTION__, path, err, strerror(err));
	      ret = -1;
	      goto out;
	    }
	  fseek(f, 0, SEEK_END);
	  /* strip any query string from the recorded URL */
	  q = strchr(url, '?');
	  if (q)
	    i = q - url;
	  else
	    i = strlen(url);
	  fprintf(f, "url: %.*s\n", i, url);
	}
      strtime(&cnow, cctim);
      fprintf(f, "ctim: %s\n", cctim);
      /* in.first still set means a 304: nothing new was hashed */
      if (!in.first)
	{
	  HMAC_finish(in.ctx, hash, hlen);
	  *size = in.size;
	  fprintf(f, "date: %s\n", date);
	  fprintf(f, "size: %08x\n", in.size);
	  fprintf(f, "hash: ");
	  for (i = 0; i < SHA256_DIGEST_LENGTH; i++)
	    fprintf(f, "%02x", hash[i]);
	  fprintf(f, "\n");
	}
    }
  HMAC_close(in.ctx);
out:
  free(path);
  if (f)
    fclose(f);
  return ret;
}
#else
/* Stub used when built without CRYPTO: SWF hashing is unavailable,
 * so always report failure (-1). */
int
RTMP_HashSWF(const char *url, unsigned int *size, unsigned char *hash,
	     int age)
{
  return -1;
}
#endif

47
librtmp/http.h Normal file
View File

@ -0,0 +1,47 @@
#ifndef __RTMP_HTTP_H__
#define __RTMP_HTTP_H__
/*
* Copyright (C) 2010 Howard Chu
* Copyright (C) 2010 Antti Ajanki
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
/* Outcome codes returned by HTTP_get(). */
typedef enum {
  HTTPRES_OK,			/* result OK */
  HTTPRES_OK_NOT_MODIFIED,	/* not modified since last request */
  HTTPRES_NOT_FOUND,		/* not found */
  HTTPRES_BAD_REQUEST,		/* client error */
  HTTPRES_SERVER_ERROR,		/* server reported an error */
  HTTPRES_REDIRECTED,		/* resource has been moved */
  HTTPRES_LOST_CONNECTION	/* connection lost while waiting for data */
} HTTPResult;
/* State passed into and out of HTTP_get(). */
struct HTTP_ctx {
  char *date;	/* in: If-Modified-Since value; out: Last-Modified header */
  int size;	/* out: total body bytes delivered to the callback */
  int status;	/* out: numeric HTTP status code (-1 before a response) */
  void *data;	/* opaque pointer handed to the read callback */
};
typedef size_t (HTTP_read_callback)(void *ptr, size_t size, size_t nmemb, void *stream);
HTTPResult HTTP_get(struct HTTP_ctx *http, const char *url, HTTP_read_callback *cb);
#endif

210
librtmp/librtmp.3 Normal file
View File

@ -0,0 +1,210 @@
.TH LIBRTMP 3 "2011-07-20" "RTMPDump v2.4"
.\" Copyright 2011 Howard Chu.
.\" Copying permitted according to the GNU General Public License V2.
.SH NAME
librtmp \- RTMPDump Real-Time Messaging Protocol API
.SH LIBRARY
RTMPDump RTMP (librtmp, -lrtmp)
.SH SYNOPSIS
.B #include <librtmp/rtmp.h>
.SH DESCRIPTION
The Real-Time Messaging Protocol (RTMP) is used for streaming
multimedia content across a TCP/IP network. This API provides most client
functions and a few server functions needed to support RTMP, RTMP tunneled
in HTTP (RTMPT), encrypted RTMP (RTMPE), RTMP over SSL/TLS (RTMPS) and
tunneled variants of these encrypted types (RTMPTE, RTMPTS). The basic
RTMP specification has been published by Adobe but this API was
reverse-engineered without use of the Adobe specification. As such, it may
deviate from any published specifications but it usually duplicates the
actual behavior of the original Adobe clients.
The RTMPDump software package includes a basic client utility program
in
.BR rtmpdump (1),
some sample servers, and a library used to provide programmatic access
to the RTMP protocol. This man page gives an overview of the RTMP
library routines. These routines are found in the -lrtmp library. Many
other routines are also available, but they are not documented yet.
The basic interaction is as follows. A session handle is created using
.BR RTMP_Alloc ()
and initialized using
.BR RTMP_Init ().
All session parameters are provided using
.BR RTMP_SetupURL ().
The network connection is established using
.BR RTMP_Connect (),
and then the RTMP session is established using
.BR RTMP_ConnectStream ().
The stream is read using
.BR RTMP_Read ().
A client can publish a stream by calling
.BR RTMP_EnableWrite ()
before the
.BR RTMP_Connect ()
call, and then using
.BR RTMP_Write ()
after the session is established.
While a stream is playing it may be paused and unpaused using
.BR RTMP_Pause ().
The stream playback position can be moved using
.BR RTMP_Seek ().
When
.BR RTMP_Read ()
returns 0 bytes, the stream is complete and may be closed using
.BR RTMP_Close ().
The session handle is freed using
.BR RTMP_Free ().
All data is transferred using FLV format. The basic session requires
an RTMP URL. The RTMP URL format is of the form
.nf
rtmp[t][e|s]://hostname[:port][/app[/playpath]]
.fi
Plain rtmp, as well as tunneled and encrypted sessions are supported.
Additional options may be specified by appending space-separated
key=value pairs to the URL. Special characters in values may need
to be escaped to prevent misinterpretation by the option parser.
The escape encoding uses a backslash followed by two hexadecimal digits
representing the ASCII value of the character. E.g., spaces must
be escaped as \fB\\20\fP and backslashes must be escaped as \fB\\5c\fP.
.SH OPTIONS
.SS "Network Parameters"
These options define how to connect to the media server.
.TP
.BI socks= host:port
Use the specified SOCKS4 proxy.
.SS "Connection Parameters"
These options define the content of the RTMP Connect request packet.
If correct values are not provided, the media server will reject the
connection attempt.
.TP
.BI app= name
Name of application to connect to on the RTMP server. Overrides
the app in the RTMP URL. Sometimes the librtmp URL parser cannot
determine the app name automatically, so it must be given explicitly
using this option.
.TP
.BI tcUrl= url
URL of the target stream. Defaults to rtmp[t][e|s]://host[:port]/app.
.TP
.BI pageUrl= url
URL of the web page in which the media was embedded. By default no
value will be sent.
.TP
.BI swfUrl= url
URL of the SWF player for the media. By default no value will be sent.
.TP
.BI flashVer= version
Version of the Flash plugin used to run the SWF player. The
default is "LNX 10,0,32,18".
.TP
.BI conn= type:data
Append arbitrary AMF data to the Connect message. The type
must be B for Boolean, N for number, S for string, O for object, or Z
for null. For Booleans the data must be either 0 or 1 for FALSE or TRUE,
respectively. Likewise for Objects the data must be 0 or 1 to end or
begin an object, respectively. Data items in subobjects may be named, by
prefixing the type with 'N' and specifying the name before the value, e.g.
NB:myFlag:1. This option may be used multiple times to construct arbitrary
AMF sequences. E.g.
.nf
conn=B:1 conn=S:authMe conn=O:1 conn=NN:code:1.23 conn=NS:flag:ok conn=O:0
.fi
.SS "Session Parameters"
These options take effect after the Connect request has succeeded.
.TP
.BI playpath= path
Overrides the playpath parsed from the RTMP URL. Sometimes the
rtmpdump URL parser cannot determine the correct playpath
automatically, so it must be given explicitly using this option.
.TP
.BI playlist= 0|1
If the value is 1 or TRUE, issue a set_playlist command before sending the
play command. The playlist will just contain the current playpath. If the
value is 0 or FALSE, the set_playlist command will not be sent. The
default is FALSE.
.TP
.BI live= 0|1
Specify that the media is a live stream. No resuming or seeking in
live streams is possible.
.TP
.BI subscribe= path
Name of live stream to subscribe to. Defaults to
.IR playpath .
.TP
.BI start= num
Start at
.I num
seconds into the stream. Not valid for live streams.
.TP
.BI stop= num
Stop at
.I num
seconds into the stream.
.TP
.BI buffer= num
Set buffer time to
.I num
milliseconds. The default is 30000.
.TP
.BI timeout= num
Timeout the session after
.I num
seconds without receiving any data from the server. The default is 120.
.SS "Security Parameters"
These options handle additional authentication requests from the server.
.TP
.BI token= key
Key for SecureToken response, used if the server requires SecureToken
authentication.
.TP
.BI jtv= JSON
JSON token used by legacy Justin.tv servers. Invokes NetStream.Authenticate.UsherToken
.TP
.BI swfVfy= 0|1
If the value is 1 or TRUE, the SWF player is retrieved from the
specified
.I swfUrl
for performing SWF Verification. The SWF hash and size (used in the
verification step) are computed automatically. Also the SWF information is
cached in a
.I .swfinfo
file in the user's home directory, so that it doesn't need to be retrieved
and recalculated every time. The .swfinfo file records
the SWF URL, the time it was fetched, the modification timestamp of the SWF
file, its size, and its hash. By default, the cached info will be used
for 30 days before re-checking.
.TP
.BI swfAge= days
Specify how many days to use the cached SWF info before re-checking. Use
0 to always check the SWF URL. Note that if the check shows that the
SWF file has the same modification timestamp as before, it will not be
retrieved again.
.SH EXAMPLES
An example character string suitable for use with
.BR RTMP_SetupURL ():
.nf
"rtmp://flashserver:1935/ondemand/thefile swfUrl=http://flashserver/player.swf swfVfy=1"
.fi
.SH ENVIRONMENT
.TP
.B HOME
The value of
.RB $ HOME
is used as the location for the
.I .swfinfo
file.
.SH FILES
.TP
.I $HOME/.swfinfo
Cache of SWF Verification information
.SH "SEE ALSO"
.BR rtmpdump (1),
.BR rtmpgw (8)
.SH AUTHORS
Andrej Stepanchuk, Howard Chu, The Flvstreamer Team
.br
<http://rtmpdump.mplayerhq.hu>

312
librtmp/librtmp.3.html Normal file
View File

@ -0,0 +1,312 @@
<HTML>
<HEAD>
<title>LIBRTMP(3): </title></head>
<table>
<thead>
<tr><td>LIBRTMP(3)<td align="center"><td align="right">LIBRTMP(3)
</thead>
<tfoot>
<tr><td>RTMPDump v2.4<td align="center">2011-07-20<td align="right">LIBRTMP(3)
</tfoot>
<tbody><tr><td colspan="3"><br><br><ul>
<!-- Copyright 2011 Howard Chu.
Copying permitted according to the GNU General Public License V2.-->
</ul>
<h3>NAME</h3><ul>
librtmp &minus; RTMPDump Real-Time Messaging Protocol API
</ul>
<h3>LIBRARY</h3><ul>
RTMPDump RTMP (librtmp, -lrtmp)
</ul>
<h3>SYNOPSIS</h3><ul>
<b>#include &lt;librtmp/rtmp.h&gt;</b>
</ul>
<h3>DESCRIPTION</h3><ul>
The Real-Time Messaging Protocol (RTMP) is used for streaming
multimedia content across a TCP/IP network. This API provides most client
functions and a few server functions needed to support RTMP, RTMP tunneled
in HTTP (RTMPT), encrypted RTMP (RTMPE), RTMP over SSL/TLS (RTMPS) and
tunneled variants of these encrypted types (RTMPTE, RTMPTS). The basic
RTMP specification has been published by Adobe but this API was
reverse-engineered without use of the Adobe specification. As such, it may
deviate from any published specifications but it usually duplicates the
actual behavior of the original Adobe clients.
<p>
The RTMPDump software package includes a basic client utility program
in
<a href="../man1/rtmpdump.1"><b>rtmpdump</b></a>(1),
some sample servers, and a library used to provide programmatic access
to the RTMP protocol. This man page gives an overview of the RTMP
library routines. These routines are found in the -lrtmp library. Many
other routines are also available, but they are not documented yet.
<p>
The basic interaction is as follows. A session handle is created using
<b>RTMP_Alloc</b>()
and initialized using
<b>RTMP_Init</b>().
All session parameters are provided using
<b>RTMP_SetupURL</b>().
The network connection is established using
<b>RTMP_Connect</b>(),
and then the RTMP session is established using
<b>RTMP_ConnectStream</b>().
The stream is read using
<b>RTMP_Read</b>().
A client can publish a stream by calling
<b>RTMP_EnableWrite</b>()
before the
<b>RTMP_Connect</b>()
call, and then using
<b>RTMP_Write</b>()
after the session is established.
While a stream is playing it may be paused and unpaused using
<b>RTMP_Pause</b>().
The stream playback position can be moved using
<b>RTMP_Seek</b>().
When
<b>RTMP_Read</b>()
returns 0 bytes, the stream is complete and may be closed using
<b>RTMP_Close</b>().
The session handle is freed using
<b>RTMP_Free</b>().
<p>
All data is transferred using FLV format. The basic session requires
an RTMP URL. The RTMP URL format is of the form
<pre>
rtmp[t][e|s]://hostname[:port][/app[/playpath]]
</pre>
<p>
Plain rtmp, as well as tunneled and encrypted sessions are supported.
<p>
Additional options may be specified by appending space-separated
key=value pairs to the URL. Special characters in values may need
to be escaped to prevent misinterpretation by the option parser.
The escape encoding uses a backslash followed by two hexadecimal digits
representing the ASCII value of the character. E.g., spaces must
be escaped as <b>\20</b> and backslashes must be escaped as <b>\5c</b>.
</ul>
<h3>OPTIONS</h3><ul>
</ul>
<h4>Network Parameters</h4><ul>
These options define how to connect to the media server.
<p>
<dl compact><dt>
<b>socks=</b><i>host:port</i>
<dd>
Use the specified SOCKS4 proxy.
</dl>
</ul>
<h4>Connection Parameters</h4><ul>
These options define the content of the RTMP Connect request packet.
If correct values are not provided, the media server will reject the
connection attempt.
<p>
<dl compact><dt>
<b>app=</b><i>name</i>
<dd>
Name of application to connect to on the RTMP server. Overrides
the app in the RTMP URL. Sometimes the librtmp URL parser cannot
determine the app name automatically, so it must be given explicitly
using this option.
</dl>
<p>
<dl compact><dt>
<b>tcUrl=</b><i>url</i>
<dd>
URL of the target stream. Defaults to rtmp[t][e|s]://host[:port]/app.
</dl>
<p>
<dl compact><dt>
<b>pageUrl=</b><i>url</i>
<dd>
URL of the web page in which the media was embedded. By default no
value will be sent.
</dl>
<p>
<dl compact><dt>
<b>swfUrl=</b><i>url</i>
<dd>
URL of the SWF player for the media. By default no value will be sent.
</dl>
<p>
<dl compact><dt>
<b>flashVer=</b><i>version</i>
<dd>
Version of the Flash plugin used to run the SWF player. The
default is "LNX 10,0,32,18".
</dl>
<p>
<dl compact><dt>
<b>conn=</b><i>type:data</i>
<dd>
Append arbitrary AMF data to the Connect message. The type
must be B for Boolean, N for number, S for string, O for object, or Z
for null. For Booleans the data must be either 0 or 1 for FALSE or TRUE,
respectively. Likewise for Objects the data must be 0 or 1 to end or
begin an object, respectively. Data items in subobjects may be named, by
prefixing the type with 'N' and specifying the name before the value, e.g.
NB:myFlag:1. This option may be used multiple times to construct arbitrary
AMF sequences. E.g.
<pre>
conn=B:1 conn=S:authMe conn=O:1 conn=NN:code:1.23 conn=NS:flag:ok conn=O:0
</pre>
</dl>
</ul>
<h4>Session Parameters</h4><ul>
These options take effect after the Connect request has succeeded.
<p>
<dl compact><dt>
<b>playpath=</b><i>path</i>
<dd>
Overrides the playpath parsed from the RTMP URL. Sometimes the
rtmpdump URL parser cannot determine the correct playpath
automatically, so it must be given explicitly using this option.
</dl>
<p>
<dl compact><dt>
<b>playlist=</b><i>0|1</i>
<dd>
If the value is 1 or TRUE, issue a set_playlist command before sending the
play command. The playlist will just contain the current playpath. If the
value is 0 or FALSE, the set_playlist command will not be sent. The
default is FALSE.
</dl>
<p>
<dl compact><dt>
<b>live=</b><i>0|1</i>
<dd>
Specify that the media is a live stream. No resuming or seeking in
live streams is possible.
</dl>
<p>
<dl compact><dt>
<b>subscribe=</b><i>path</i>
<dd>
Name of live stream to subscribe to. Defaults to
<i>playpath</i>.
</dl>
<p>
<dl compact><dt>
<b>start=</b><i>num</i>
<dd>
Start at
<i>num</i>
seconds into the stream. Not valid for live streams.
</dl>
<p>
<dl compact><dt>
<b>stop=</b><i>num</i>
<dd>
Stop at
<i>num</i>
seconds into the stream.
</dl>
<p>
<dl compact><dt>
<b>buffer=</b><i>num</i>
<dd>
Set buffer time to
<i>num</i>
milliseconds. The default is 30000.
</dl>
<p>
<dl compact><dt>
<b>timeout=</b><i>num</i>
<dd>
Timeout the session after
<i>num</i>
seconds without receiving any data from the server. The default is 120.
</dl>
</ul>
<h4>Security Parameters</h4><ul>
These options handle additional authentication requests from the server.
<p>
<dl compact><dt>
<b>token=</b><i>key</i>
<dd>
Key for SecureToken response, used if the server requires SecureToken
authentication.
</dl>
<p>
<dl compact><dt>
<b>jtv=</b><i>JSON</i>
<dd>
JSON token used by legacy Justin.tv servers. Invokes NetStream.Authenticate.UsherToken
</dl>
<p>
<dl compact><dt>
<b>swfVfy=</b><i>0|1</i>
<dd>
If the value is 1 or TRUE, the SWF player is retrieved from the
specified
<i>swfUrl</i>
for performing SWF Verification. The SWF hash and size (used in the
verification step) are computed automatically. Also the SWF information is
cached in a
<i>.swfinfo</i>
file in the user's home directory, so that it doesn't need to be retrieved
and recalculated every time. The .swfinfo file records
the SWF URL, the time it was fetched, the modification timestamp of the SWF
file, its size, and its hash. By default, the cached info will be used
for 30 days before re-checking.
</dl>
<p>
<dl compact><dt>
<b>swfAge=</b><i>days</i>
<dd>
Specify how many days to use the cached SWF info before re-checking. Use
0 to always check the SWF URL. Note that if the check shows that the
SWF file has the same modification timestamp as before, it will not be
retrieved again.
</dl>
</ul>
<h3>EXAMPLES</h3><ul>
An example character string suitable for use with
<b>RTMP_SetupURL</b>():
<pre>
"rtmp://flashserver:1935/ondemand/thefile swfUrl=<a href="http://flashserver/player.swf">http://flashserver/player.swf</a> swfVfy=1"
</pre>
</ul>
<h3>ENVIRONMENT</h3><ul>
<p>
<dl compact><dt>
<b>HOME</b>
<dd>
The value of
$<b>HOME</b>
is used as the location for the
<i>.swfinfo</i>
file.
</dl>
</ul>
<h3>FILES</h3><ul>
<p>
<dl compact><dt>
<i>$HOME/.swfinfo</i>
<dd>
Cache of SWF Verification information
</dl>
</ul>
<h3>SEE ALSO</h3><ul>
<a href="../man1/rtmpdump.1"><b>rtmpdump</b></a>(1),
<a href="../man8/rtmpgw.8"><b>rtmpgw</b></a>(8)
</ul>
<h3>AUTHORS</h3><ul>
Andrej Stepanchuk, Howard Chu, The Flvstreamer Team
<br>
&lt;<a href="http://rtmpdump.mplayerhq.hu">http://rtmpdump.mplayerhq.hu</a>&gt;
</ul></tbody></table></html>

13
librtmp/librtmp.pc.in Normal file
View File

@ -0,0 +1,13 @@
prefix=@prefix@
exec_prefix=${prefix}
libdir=@libdir@
incdir=${prefix}/include
Name: librtmp
Description: RTMP implementation
Version: @VERSION@
Requires: @CRYPTO_REQ@
URL: http://rtmpdump.mplayerhq.hu
Libs: -L${libdir} -lrtmp -lz @PUBLIC_LIBS@
Libs.private: @PRIVATE_LIBS@
Cflags: -I${incdir}

223
librtmp/log.c Normal file
View File

@ -0,0 +1,223 @@
/*
* Copyright (C) 2008-2009 Andrej Stepanchuk
* Copyright (C) 2009-2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#include <stdarg.h>
#include <string.h>
#include <assert.h>
#include <ctype.h>
#include "rtmp_sys.h"
#include "log.h"
#define MAX_PRINT_LEN 2048
RTMP_LogLevel RTMP_debuglevel = RTMP_LOGERROR;
static int neednl;
static FILE *fmsg;
static RTMP_LogCallback rtmp_log_default, *cb = rtmp_log_default;
static const char *levels[] = {
"CRIT", "ERROR", "WARNING", "INFO",
"DEBUG", "DEBUG2"
};
/* Built-in log callback: formats the message into a bounded buffer and
 * writes "<LEVEL>: <message>\n" to fmsg (stderr unless RTMP_LogSetOutput
 * was called).  Messages above the current RTMP_debuglevel are dropped. */
static void rtmp_log_default(int level, const char *format, va_list vl)
{
  char str[MAX_PRINT_LEN]="";

  vsnprintf(str, MAX_PRINT_LEN-1, format, vl);

  /* Filter out 'no-name' (noise message) unless logging absolutely
   * everything via RTMP_LOGALL */
  if ( RTMP_debuglevel<RTMP_LOGALL && strstr(str, "no-name" ) != NULL )
    return;

  if ( !fmsg ) fmsg = stderr;

  if ( level <= RTMP_debuglevel ) {
    /* a pending status line (RTMP_LogStatus) has no trailing newline;
     * emit one so this message starts on a fresh line */
    if (neednl) {
      putc('\n', fmsg);
      neednl = 0;
    }
    fprintf(fmsg, "%s: %s\n", levels[level], str);
#ifdef _DEBUG
    fflush(fmsg);
#endif
  }
}
/* Redirect log output to the given stream (default is stderr). */
void RTMP_LogSetOutput(FILE *file)
{
  fmsg = file;
}

/* Set the verbosity threshold; messages above this level are dropped. */
void RTMP_LogSetLevel(RTMP_LogLevel level)
{
  RTMP_debuglevel = level;
}

/* Install a custom log callback, replacing the built-in stderr writer. */
void RTMP_LogSetCallback(RTMP_LogCallback *cbp)
{
  cb = cbp;
}

/* Return the current verbosity threshold. */
RTMP_LogLevel RTMP_LogGetLevel()
{
  return RTMP_debuglevel;
}

/* Core logging entry point: cheap level check, then forward the
 * printf-style arguments to the installed callback. */
void RTMP_Log(int level, const char *format, ...)
{
  va_list args;

  if ( level > RTMP_debuglevel )
    return;

  va_start(args, format);
  cb(level, format, args);
  va_end(args);
}
static const char hexdig[] = "0123456789abcdef";
/* Log a buffer as space-separated hex byte pairs, 16 bytes per line,
 * via RTMP_Log().  Suppressed when level exceeds RTMP_debuglevel. */
void RTMP_LogHex(int level, const uint8_t *data, unsigned long len)
{
  char buf[50];
  unsigned long idx;
  size_t pos = 0;

  if (level > RTMP_debuglevel)
    return;

  for (idx = 0; idx < len; idx++) {
    buf[pos++] = hexdig[(data[idx] >> 4) & 0x0f];
    buf[pos++] = hexdig[data[idx] & 0x0f];
    if ((idx & 0x0f) == 0x0f) {
      /* 16th byte on this line: terminate and emit it */
      buf[pos] = '\0';
      RTMP_Log(level, "%s", buf);
      pos = 0;
    } else {
      buf[pos++] = ' ';
    }
  }
  /* emit any trailing partial line */
  if (idx & 0x0f) {
    buf[pos] = '\0';
    RTMP_Log(level, "%s", buf);
  }
}
/* Log a buffer as a classic hexdump: a 4-digit hex offset column,
 * 16 hex bytes (with an extra gap after 8), and a printable-ASCII
 * graph column, one line per 16 input bytes.  Output goes through
 * RTMP_Log(); does nothing if data is NULL or level is filtered. */
void RTMP_LogHexString(int level, const uint8_t *data, unsigned long len)
{
#define BP_OFFSET 9
#define BP_GRAPH 60
#define BP_LEN	80
  char line[BP_LEN];
  unsigned long i;

  if ( !data || level > RTMP_debuglevel )
    return;

  /* in case len is zero */
  line[0] = '\0';

  for ( i = 0 ; i < len ; i++ ) {
    int n = i % 16;
    unsigned off;

    if( !n ) {
      if( i ) RTMP_Log( level, "%s", line );
      memset( line, ' ', sizeof(line)-2 );
      line[sizeof(line)-2] = '\0';

      /* offset column: low 16 bits of i as 4 hex digits.
       * Was "i % 0x0ffffU" (mod 65535), which drifts off by one per
       * 64 KiB for large dumps; the intent is a bit mask. */
      off = i & 0x0ffffU;

      line[2] = hexdig[0x0f & (off >> 12)];
      line[3] = hexdig[0x0f & (off >> 8)];
      line[4] = hexdig[0x0f & (off >> 4)];
      line[5] = hexdig[0x0f & off];
      line[6] = ':';
    }

    /* hex column: two digits per byte, extra space after 8 bytes */
    off = BP_OFFSET + n*3 + ((n >= 8)?1:0);
    line[off] = hexdig[0x0f & ( data[i] >> 4 )];
    line[off+1] = hexdig[0x0f & data[i]];

    /* graph column: printable bytes verbatim, '.' otherwise
     * (a dead re-assignment of 'off' that was never used here
     * has been removed) */
    if ( isprint( data[i] )) {
      line[BP_GRAPH + n] = data[i];
    } else {
      line[BP_GRAPH + n] = '.';
    }
  }

  RTMP_Log( level, "%s", line );
}
/* These should only be used by apps, never by the library itself */
/* printf-style logging straight to the log stream, without a level
 * prefix.  For applications only, never the library itself.
 * Suppressed entirely when the level is RTMP_LOGCRIT.  Flushes the
 * stream when the message ends in a newline. */
void RTMP_LogPrintf(const char *format, ...)
{
  char str[MAX_PRINT_LEN]="";
  int len;
  va_list args;
  va_start(args, format);
  len = vsnprintf(str, MAX_PRINT_LEN-1, format, args);
  va_end(args);

  if ( RTMP_debuglevel==RTMP_LOGCRIT )
    return;
  if ( !fmsg ) fmsg = stderr;

  /* finish a pending status line before a regular message */
  if (neednl) {
    putc('\n', fmsg);
    neednl = 0;
  }

  /* vsnprintf returns the would-be length (possibly > buffer) or a
   * negative value on encoding error; clamp into [0, MAX_PRINT_LEN-1]
   * so the trailing-newline check below cannot read str[-1]. */
  if (len > MAX_PRINT_LEN-1)
    len = MAX_PRINT_LEN-1;
  if (len < 0)
    len = 0;

  fprintf(fmsg, "%s", str);
  if (len > 0 && str[len-1] == '\n')
    fflush(fmsg);
}
/* Write a status/progress message to the log stream with no level
 * prefix and no trailing newline; sets neednl so the next regular
 * log message starts on a fresh line.  For applications only.
 * Suppressed entirely when the level is RTMP_LOGCRIT. */
void RTMP_LogStatus(const char *format, ...)
{
  char str[MAX_PRINT_LEN]="";
  va_list args;

  va_start(args, format);
  vsnprintf(str, MAX_PRINT_LEN-1, format, args);
  va_end(args);

  if ( RTMP_debuglevel==RTMP_LOGCRIT )
    return;

  if ( !fmsg ) fmsg = stderr;

  fprintf(fmsg, "%s", str);
  fflush(fmsg);
  neednl = 1;
}

69
librtmp/log.h Normal file
View File

@ -0,0 +1,69 @@
/*
 * Copyright (C) 2008-2009 Andrej Stepanchuk
 * Copyright (C) 2009-2010 Howard Chu
 *
 * This file is part of librtmp.
 *
 * librtmp is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as
 * published by the Free Software Foundation; either version 2.1,
 * or (at your option) any later version.
 *
 * librtmp is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with librtmp see the file COPYING.  If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA  02110-1301, USA.
 * http://www.gnu.org/copyleft/lgpl.html
 */

#ifndef __RTMP_LOG_H__
#define __RTMP_LOG_H__

#include <stdio.h>
#include <stdarg.h>
#include <stdint.h>

#ifdef __cplusplus
extern "C" {
#endif

/* Enable this to get full debugging output */
/* #define _DEBUG */

#ifdef _DEBUG
#undef NODEBUG
#endif

/* Severity levels, most severe first.  RTMP_LOGALL additionally
 * disables the default callback's message filtering. */
typedef enum
{ RTMP_LOGCRIT=0, RTMP_LOGERROR, RTMP_LOGWARNING, RTMP_LOGINFO,
  RTMP_LOGDEBUG, RTMP_LOGDEBUG2, RTMP_LOGALL
} RTMP_LogLevel;

/* Current threshold: messages with level > RTMP_debuglevel are dropped. */
extern RTMP_LogLevel RTMP_debuglevel;

/* Callback type receiving each log message (level, printf format, args). */
typedef void (RTMP_LogCallback)(int level, const char *fmt, va_list);
void RTMP_LogSetCallback(RTMP_LogCallback *cb);
void RTMP_LogSetOutput(FILE *file);
#ifdef __GNUC__
void RTMP_LogPrintf(const char *format, ...) __attribute__ ((__format__ (__printf__, 1, 2)));
void RTMP_LogStatus(const char *format, ...) __attribute__ ((__format__ (__printf__, 1, 2)));
void RTMP_Log(int level, const char *format, ...) __attribute__ ((__format__ (__printf__, 2, 3)));
#else
void RTMP_LogPrintf(const char *format, ...);
void RTMP_LogStatus(const char *format, ...);
void RTMP_Log(int level, const char *format, ...);
#endif
void RTMP_LogHex(int level, const uint8_t *data, unsigned long len);
void RTMP_LogHexString(int level, const uint8_t *data, unsigned long len);
void RTMP_LogSetLevel(RTMP_LogLevel lvl);
RTMP_LogLevel RTMP_LogGetLevel(void);

#ifdef __cplusplus
}
#endif

#endif

290
librtmp/parseurl.c Normal file
View File

@ -0,0 +1,290 @@
/*
* Copyright (C) 2009 Andrej Stepanchuk
* Copyright (C) 2009-2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <ctype.h>
#include "rtmp_sys.h"
#include "log.h"
#include "strncasecmp.h"
/*
 * Parse an RTMP URL of the general form
 *
 *   rtmp[t][e|s]://host[:port]/app[/appinstance][/playpath]
 *
 * protocol, host, port, playpath and app are all outputs; host and app
 * point into the caller's url string, playpath is allocated by
 * RTMP_ParsePlaypath.  A missing port is reported as 0 (the caller is
 * expected to apply the protocol default).  Returns TRUE on success,
 * FALSE if the "://" separator or the hostname is missing.
 */
int RTMP_ParseURL(const char *url, int *protocol, AVal *host, unsigned int *port,
	AVal *playpath, AVal *app)
{
  char *p, *end, *col, *ques, *slash;

  RTMP_Log(RTMP_LOGDEBUG, "Parsing...");

  *protocol = RTMP_PROTOCOL_RTMP;
  *port = 0;
  playpath->av_len = 0;
  playpath->av_val = NULL;
  app->av_len = 0;
  app->av_val = NULL;

  /* Old School Parsing */

  /* look for usual :// pattern */
  p = strstr(url, "://");
  if(!p) {
    RTMP_Log(RTMP_LOGERROR, "RTMP URL: No :// in url!");
    return FALSE;
  }
  {
    /* scheme is everything before "://"; unknown schemes fall back to
     * plain RTMP with a warning */
    int len = (int)(p-url);

    if(len == 4 && strncasecmp(url, "rtmp", 4)==0)
      *protocol = RTMP_PROTOCOL_RTMP;
    else if(len == 5 && strncasecmp(url, "rtmpt", 5)==0)
      *protocol = RTMP_PROTOCOL_RTMPT;
    else if(len == 5 && strncasecmp(url, "rtmps", 5)==0)
      *protocol = RTMP_PROTOCOL_RTMPS;
    else if(len == 5 && strncasecmp(url, "rtmpe", 5)==0)
      *protocol = RTMP_PROTOCOL_RTMPE;
    else if(len == 5 && strncasecmp(url, "rtmfp", 5)==0)
      *protocol = RTMP_PROTOCOL_RTMFP;
    else if(len == 6 && strncasecmp(url, "rtmpte", 6)==0)
      *protocol = RTMP_PROTOCOL_RTMPTE;
    else if(len == 6 && strncasecmp(url, "rtmpts", 6)==0)
      *protocol = RTMP_PROTOCOL_RTMPTS;
    else {
      RTMP_Log(RTMP_LOGWARNING, "Unknown protocol!\n");
      goto parsehost;
    }
  }

  RTMP_Log(RTMP_LOGDEBUG, "Parsed protocol: %d", *protocol);

parsehost:
  /* let's get the hostname */
  p+=3;

  /* check for sudden death */
  if(*p==0) {
    RTMP_Log(RTMP_LOGWARNING, "No hostname in URL!");
    return FALSE;
  }

  end   = p + strlen(p);
  col   = strchr(p, ':');
  ques  = strchr(p, '?');
  slash = strchr(p, '/');

  {
    /* hostname ends at the first '/' (or end of string), but an
     * earlier ':' (port separator) takes precedence */
    int hostlen;
    if(slash)
      hostlen = slash - p;
    else
      hostlen = end - p;
    if(col && col -p < hostlen)
      hostlen = col - p;

    if(hostlen < 256) {
      host->av_val = p;
      host->av_len = hostlen;
      RTMP_Log(RTMP_LOGDEBUG, "Parsed host    : %.*s", hostlen, host->av_val);
    } else {
      RTMP_Log(RTMP_LOGWARNING, "Hostname exceeds 255 characters!");
    }

    p+=hostlen;
  }

  /* get the port number if available */
  if(*p == ':') {
    unsigned int p2;
    p++;
    p2 = atoi(p);
    if(p2 > 65535) {
      RTMP_Log(RTMP_LOGWARNING, "Invalid port number!");
    } else {
      *port = p2;
    }
  }

  if(!slash) {
    RTMP_Log(RTMP_LOGWARNING, "No application or playpath in URL!");
    return TRUE;
  }
  p = slash+1;

  {
    /* parse application
     *
     * rtmp://host[:port]/app[/appinstance][/...]
     * application = app[/appinstance]
     */
    char *slash2, *slash3 = NULL, *slash4 = NULL;
    int applen, appnamelen;

    slash2 = strchr(p, '/');
    if(slash2)
      slash3 = strchr(slash2+1, '/');
    if(slash3)
      slash4 = strchr(slash3+1, '/');

    applen = end-p; /* ondemand, pass all parameters as app */
    appnamelen = applen; /* ondemand length */

    if(ques && strstr(p, "slist=")) { /* whatever it is, the '?' and slist= means we need to use everything as app and parse plapath from slist= */
      appnamelen = ques-p;
    }
    else if(strncmp(p, "ondemand/", 9)==0) {
      /* app = ondemand/foobar, only pass app=ondemand */
      applen = 8;
      appnamelen = 8;
    }
    else { /* app!=ondemand, so app is app[/appinstance] */
      if(slash4)
        appnamelen = slash4-p;
      else if(slash3)
        appnamelen = slash3-p;
      else if(slash2)
        appnamelen = slash2-p;

      applen = appnamelen;
    }

    app->av_val = p;
    app->av_len = applen;
    RTMP_Log(RTMP_LOGDEBUG, "Parsed app     : %.*s", applen, p);

    p += appnamelen;
  }

  if (*p == '/')
    p++;

  /* everything after the app (if anything) is the playpath */
  if (end-p) {
    AVal av = {p, end-p};
    RTMP_ParsePlaypath(&av, playpath);
  }

  return TRUE;
}
/*
* Extracts playpath from RTMP URL. playpath is the file part of the
* URL, i.e. the part that comes after rtmp://host:port/app/
*
* Returns the stream name in a format understood by FMS. The name is
* the playpath part of the URL with formatting depending on the stream
* type:
*
* mp4 streams: prepend "mp4:", remove extension
* mp3 streams: prepend "mp3:", remove extension
* flv streams: remove extension
*/
/*
 * Extracts playpath from RTMP URL. playpath is the file part of the
 * URL, i.e. the part that comes after rtmp://host:port/app/
 *
 * Returns the stream name in a format understood by FMS. The name is
 * the playpath part of the URL with formatting depending on the stream
 * type:
 *
 * mp4 streams: prepend "mp4:", remove extension
 * mp3 streams: prepend "mp3:", remove extension
 * flv streams: remove extension
 *
 * On success out->av_val is a malloc'd NUL-terminated string (caller
 * frees); on empty input or allocation failure out stays {NULL, 0}.
 */
void RTMP_ParsePlaypath(AVal *in, AVal *out) {
  int addMP4 = 0;
  int addMP3 = 0;
  int subExt = 0;
  const char *playpath = in->av_val;
  const char *temp, *q, *ext = NULL;
  const char *ppstart = playpath;
  char *streamname, *destptr, *p;

  int pplen = in->av_len;

  out->av_val = NULL;
  out->av_len = 0;

  /* guard against NULL/empty input (was dereferenced unconditionally) */
  if (!playpath || pplen <= 0)
    return;

  /* a leading query string means the real path is in the slist= param */
  if ((*ppstart == '?') &&
      (temp=strstr(ppstart, "slist=")) != 0) {
    ppstart = temp+6;
    pplen = strlen(ppstart);

    temp = strchr(ppstart, '&');
    if (temp) {
      pplen = temp-ppstart;
    }
  }

  q = strchr(ppstart, '?');
  if (pplen >= 4) {
    if (q) {
      /* extension is the 4 chars before '?'; if '?' occurs within the
       * first 4 chars, "q-4" would point before the buffer -- skip */
      if (q - ppstart >= 4)
        ext = q-4;
    } else {
      ext = &ppstart[pplen-4];
    }
  }
  if (ext) {
    if ((strncmp(ext, ".f4v", 4) == 0) ||
        (strncmp(ext, ".mp4", 4) == 0)) {
      addMP4 = 1;
      subExt = 1;
    /* Only remove .flv from rtmp URL, not slist params */
    } else if ((ppstart == playpath) &&
               (strncmp(ext, ".flv", 4) == 0)) {
      subExt = 1;
    } else if (strncmp(ext, ".mp3", 4) == 0) {
      addMP3 = 1;
      subExt = 1;
    }
  }

  /* worst case: "mp4:"/"mp3:" prefix + playpath + NUL */
  streamname = (char *)malloc((pplen+4+1)*sizeof(char));
  if (!streamname)
    return;

  destptr = streamname;
  if (addMP4) {
    if (strncmp(ppstart, "mp4:", 4)) {
      strcpy(destptr, "mp4:");
      destptr += 4;
    } else {
      subExt = 0;
    }
  } else if (addMP3) {
    if (strncmp(ppstart, "mp3:", 4)) {
      strcpy(destptr, "mp3:");
      destptr += 4;
    } else {
      subExt = 0;
    }
  }

  for (p=(char *)ppstart; pplen >0;) {
    /* skip extension */
    if (subExt && p == ext) {
      p += 4;
      pplen -= 4;
      continue;
    }
    if (*p == '%' && pplen >= 3 &&
        isxdigit((unsigned char)p[1]) && isxdigit((unsigned char)p[2])) {
      /* decode a well-formed %XX escape.  Malformed or truncated
       * escapes are now copied through verbatim instead of emitting
       * an uninitialized byte from a failed sscanf and walking past
       * the logical end of the playpath. */
      unsigned int c;
      sscanf(p+1, "%02x", &c);
      *destptr++ = c;
      pplen -= 3;
      p += 3;
    } else {
      *destptr++ = *p++;
      pplen--;
    }
  }
  *destptr = '\0';

  out->av_val = streamname;
  out->av_len = destptr - streamname;
}

5350
librtmp/rtmp.c Normal file

File diff suppressed because it is too large Load Diff

426
librtmp/rtmp.h Normal file
View File

@ -0,0 +1,426 @@
#ifndef __RTMP_H__
#define __RTMP_H__
/*
* Copyright (C) 2005-2008 Team XBMC
* http://www.xbmc.org
* Copyright (C) 2008-2009 Andrej Stepanchuk
* Copyright (C) 2009-2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#if !defined(NO_CRYPTO) && !defined(CRYPTO)
#define CRYPTO
#endif
#include <errno.h>
#include <stdint.h>
#include <stddef.h>
#include "amf.h"
#ifdef __cplusplus
extern "C"
{
#endif
#define RTMP_LIB_VERSION 0x020300 /* 2.3 */
#define RTMP_FEATURE_HTTP 0x01
#define RTMP_FEATURE_ENC 0x02
#define RTMP_FEATURE_SSL 0x04
#define RTMP_FEATURE_MFP 0x08 /* not yet supported */
#define RTMP_FEATURE_WRITE 0x10 /* publish, not play */
#define RTMP_FEATURE_HTTP2 0x20 /* server-side rtmpt */
#define RTMP_PROTOCOL_UNDEFINED -1
#define RTMP_PROTOCOL_RTMP 0
#define RTMP_PROTOCOL_RTMPE RTMP_FEATURE_ENC
#define RTMP_PROTOCOL_RTMPT RTMP_FEATURE_HTTP
#define RTMP_PROTOCOL_RTMPS RTMP_FEATURE_SSL
#define RTMP_PROTOCOL_RTMPTE (RTMP_FEATURE_HTTP|RTMP_FEATURE_ENC)
#define RTMP_PROTOCOL_RTMPTS (RTMP_FEATURE_HTTP|RTMP_FEATURE_SSL)
#define RTMP_PROTOCOL_RTMFP RTMP_FEATURE_MFP
#define RTMP_DEFAULT_CHUNKSIZE 128
/* needs to fit largest number of bytes recv() may return */
#define RTMP_BUFFER_CACHE_SIZE (16*1024)
#define RTMP_CHANNELS 65600
extern const char RTMPProtocolStringsLower[][7];
extern const AVal RTMP_DefaultFlashVer;
extern int RTMP_ctrlC;
uint32_t RTMP_GetTime(void);
/* RTMP_PACKET_TYPE_... 0x00 */
#define RTMP_PACKET_TYPE_CHUNK_SIZE 0x01
/* RTMP_PACKET_TYPE_... 0x02 */
#define RTMP_PACKET_TYPE_BYTES_READ_REPORT 0x03
#define RTMP_PACKET_TYPE_CONTROL 0x04
#define RTMP_PACKET_TYPE_SERVER_BW 0x05
#define RTMP_PACKET_TYPE_CLIENT_BW 0x06
/* RTMP_PACKET_TYPE_... 0x07 */
#define RTMP_PACKET_TYPE_AUDIO 0x08
#define RTMP_PACKET_TYPE_VIDEO 0x09
/* RTMP_PACKET_TYPE_... 0x0A */
/* RTMP_PACKET_TYPE_... 0x0B */
/* RTMP_PACKET_TYPE_... 0x0C */
/* RTMP_PACKET_TYPE_... 0x0D */
/* RTMP_PACKET_TYPE_... 0x0E */
#define RTMP_PACKET_TYPE_FLEX_STREAM_SEND 0x0F
#define RTMP_PACKET_TYPE_FLEX_SHARED_OBJECT 0x10
#define RTMP_PACKET_TYPE_FLEX_MESSAGE 0x11
#define RTMP_PACKET_TYPE_INFO 0x12
#define RTMP_PACKET_TYPE_SHARED_OBJECT 0x13
#define RTMP_PACKET_TYPE_INVOKE 0x14
/* RTMP_PACKET_TYPE_... 0x15 */
#define RTMP_PACKET_TYPE_FLASH_VIDEO 0x16
#define RTMP_MAX_HEADER_SIZE 18
#define RTMP_PACKET_SIZE_LARGE 0
#define RTMP_PACKET_SIZE_MEDIUM 1
#define RTMP_PACKET_SIZE_SMALL 2
#define RTMP_PACKET_SIZE_MINIMUM 3
/* One chunk of an RTMP message as written to / read from the wire. */
typedef struct RTMPChunk
{
    int c_headerSize;                    /* bytes of c_header actually used */
    int c_chunkSize;                     /* payload bytes pointed to by c_chunk */
    char *c_chunk;                       /* chunk payload */
    char c_header[RTMP_MAX_HEADER_SIZE]; /* encoded chunk header */
} RTMPChunk;
/* A complete (possibly multi-chunk) RTMP message. */
typedef struct RTMPPacket
{
    uint8_t m_headerType;      /* presumably one of RTMP_PACKET_SIZE_* -- confirm */
    uint8_t m_packetType;      /* presumably one of RTMP_PACKET_TYPE_* -- confirm */
    uint8_t m_hasAbsTimestamp; /* timestamp absolute or relative? */
    int m_nChannel;
    uint32_t m_nTimeStamp;     /* timestamp */
    int32_t m_nInfoField2;     /* last 4 bytes in a long header */
    uint32_t m_nBodySize;
    uint32_t m_nBytesRead;     /* bytes received so far; packet is complete when
                                * equal to m_nBodySize (see RTMPPacket_IsReady) */
    RTMPChunk *m_chunk;
    char *m_body;
} RTMPPacket;
/* Buffered socket wrapper used for all network I/O. */
typedef struct RTMPSockBuf
{
    int sb_socket;
    int sb_size;     /* number of unprocessed bytes in buffer */
    char *sb_start;  /* pointer into sb_pBuffer of next byte to process */
    char sb_buf[RTMP_BUFFER_CACHE_SIZE]; /* data read from socket */
    int sb_timedout;
    void *sb_ssl;    /* TLS session handle; presumably NULL for plain TCP -- confirm */
} RTMPSockBuf;
void RTMPPacket_Reset(RTMPPacket *p);
void RTMPPacket_Dump(RTMPPacket *p);
int RTMPPacket_Alloc(RTMPPacket *p, uint32_t nSize);
void RTMPPacket_Free(RTMPPacket *p);
#define RTMPPacket_IsReady(a) ((a)->m_nBytesRead == (a)->m_nBodySize)
/* Connection parameters: everything parsed from the RTMP URL or set
 * through options before/while connecting. */
typedef struct RTMP_LNK
{
    AVal hostname;
    AVal sockshost;      /* SOCKS proxy host, if any */
    AVal playpath0;      /* parsed from URL */
    AVal playpath;       /* passed in explicitly */
    AVal tcUrl;
    AVal swfUrl;
    AVal pageUrl;
    AVal app;
    AVal auth;
    AVal flashVer;
    AVal subscribepath;
    AVal usherToken;
    AVal token;
    AVal pubUser;        /* publisher credentials */
    AVal pubPasswd;
    AMFObject extras;
    int edepth;
    int seekTime;
    int stopTime;
    /* bit values for lFlags below */
#define RTMP_LF_AUTH 0x0001 /* using auth param */
#define RTMP_LF_LIVE 0x0002 /* stream is live */
#define RTMP_LF_SWFV 0x0004 /* do SWF verification */
#define RTMP_LF_PLST 0x0008 /* send playlist before play */
#define RTMP_LF_BUFX 0x0010 /* toggle stream on BufferEmpty msg */
#define RTMP_LF_FTCU 0x0020 /* free tcUrl on close */
#define RTMP_LF_FAPU 0x0040 /* free app on close */
    int lFlags;
    int swfAge;
    int protocol;        /* one of the RTMP_PROTOCOL_* values */
    int timeout;         /* connection timeout in seconds */
    int pFlags;          /* unused, but kept to avoid breaking ABI */
    unsigned short socksport;
    unsigned short port;
#ifdef CRYPTO
#define RTMP_SWF_HASHLEN 32
    void *dh;            /* for encryption */
    void *rc4keyIn;
    void *rc4keyOut;
    uint32_t SWFSize;
    uint8_t SWFHash[RTMP_SWF_HASHLEN];
    char SWFVerificationResponse[RTMP_SWF_HASHLEN+10];
#endif
} RTMP_LNK;
/* state for read() wrapper */
/* Bookkeeping for the RTMP_Read() FLV-producing wrapper, including
 * resume/seek state. */
typedef struct RTMP_READ
{
    char *buf;
    char *bufpos;
    unsigned int buflen;
    uint32_t timestamp;
    uint8_t dataType;
    uint8_t flags;       /* combination of the RTMP_READ_* bits below */
#define RTMP_READ_HEADER 0x01
#define RTMP_READ_RESUME 0x02
#define RTMP_READ_NO_IGNORE 0x04
#define RTMP_READ_GOTKF 0x08
#define RTMP_READ_GOTFLVK 0x10
#define RTMP_READ_SEEKING 0x20
    int8_t status;       /* one of the RTMP_READ_* status codes below */
#define RTMP_READ_COMPLETE -3
#define RTMP_READ_ERROR -2
#define RTMP_READ_EOF -1
#define RTMP_READ_IGNORE 0
    /* if bResume == TRUE */
    uint8_t initialFrameType;
    uint32_t nResumeTS;
    char *metaHeader;
    char *initialFrame;
    uint32_t nMetaHeaderSize;
    uint32_t nInitialFrameSize;
    uint32_t nIgnoredFrameCounter;
    uint32_t nIgnoredFlvFrameCounter;
} RTMP_READ;
/* One outstanding remote method call (name + transaction number),
 * queued in RTMP.m_methodCalls. */
typedef struct RTMP_METHOD
{
    AVal name;
    int num;
} RTMP_METHOD;
/* The main session object: chunking state, bandwidth accounting,
 * per-channel packet reassembly, read/write state and link parameters. */
typedef struct RTMP
{
    int m_inChunkSize;
    int m_outChunkSize;
    int m_nBWCheckCounter;
    int m_nBytesIn;
    int m_nBytesInSent;
    int m_nBufferMS;
    int m_stream_id;          /* returned in _result from createStream */
    int m_mediaChannel;
    uint32_t m_mediaStamp;
    uint32_t m_pauseStamp;
    int m_pausing;
    int m_nServerBW;
    int m_nClientBW;
    uint8_t m_nClientBW2;
    uint8_t m_bPlaying;
    uint8_t m_bSendEncoding;
    uint8_t m_bSendCounter;
    int m_numInvokes;
    int m_numCalls;
    RTMP_METHOD *m_methodCalls;   /* remote method calls queue */
    int m_channelsAllocatedIn;
    int m_channelsAllocatedOut;
    RTMPPacket **m_vecChannelsIn;
    RTMPPacket **m_vecChannelsOut;
    int *m_channelTimestamp;      /* abs timestamp of last packet */
    double m_fAudioCodecs;        /* audioCodecs for the connect packet */
    double m_fVideoCodecs;        /* videoCodecs for the connect packet */
    double m_fEncoding;           /* AMF0 or AMF3 */
    double m_fDuration;           /* duration of stream in seconds */
    int m_msgCounter;             /* RTMPT stuff */
    int m_polling;
    int m_resplen;
    int m_unackd;
    AVal m_clientID;
    RTMP_READ m_read;
    RTMPPacket m_write;
    RTMPSockBuf m_sb;
    RTMP_LNK Link;
} RTMP;
int RTMP_ParseURL(const char *url, int *protocol, AVal *host,
unsigned int *port, AVal *playpath, AVal *app);
void RTMP_ParsePlaypath(AVal *in, AVal *out);
void RTMP_SetBufferMS(RTMP *r, int size);
void RTMP_UpdateBufferMS(RTMP *r);
int RTMP_SetOpt(RTMP *r, const AVal *opt, AVal *arg);
int RTMP_SetupURL(RTMP *r, char *url);
void RTMP_SetupStream(RTMP *r, int protocol,
AVal *hostname,
unsigned int port,
AVal *sockshost,
AVal *playpath,
AVal *tcUrl,
AVal *swfUrl,
AVal *pageUrl,
AVal *app,
AVal *auth,
AVal *swfSHA256Hash,
uint32_t swfSize,
AVal *flashVer,
AVal *subscribepath,
AVal *usherToken,
int dStart,
int dStop, int bLiveStream, long int timeout);
int RTMP_Connect(RTMP *r, RTMPPacket *cp);
struct sockaddr;
int RTMP_Connect0(RTMP *r, struct sockaddr *svc);
int RTMP_Connect1(RTMP *r, RTMPPacket *cp);
int RTMP_Serve(RTMP *r);
int RTMP_TLS_Accept(RTMP *r, void *ctx);
int RTMP_ReadPacket(RTMP *r, RTMPPacket *packet);
int RTMP_SendPacket(RTMP *r, RTMPPacket *packet, int queue);
int RTMP_SendChunk(RTMP *r, RTMPChunk *chunk);
int RTMP_IsConnected(RTMP *r);
int RTMP_Socket(RTMP *r);
int RTMP_IsTimedout(RTMP *r);
double RTMP_GetDuration(RTMP *r);
int RTMP_ToggleStream(RTMP *r);
int RTMP_ConnectStream(RTMP *r, int seekTime);
int RTMP_ReconnectStream(RTMP *r, int seekTime);
void RTMP_DeleteStream(RTMP *r);
int RTMP_GetNextMediaPacket(RTMP *r, RTMPPacket *packet);
int RTMP_ClientPacket(RTMP *r, RTMPPacket *packet);
void RTMP_Init(RTMP *r);
void RTMP_Close(RTMP *r);
RTMP *RTMP_Alloc(void);
void RTMP_Free(RTMP *r);
void RTMP_EnableWrite(RTMP *r);
void *RTMP_TLS_AllocServerContext(const char* cert, const char* key);
void RTMP_TLS_FreeServerContext(void *ctx);
int RTMP_LibVersion(void);
void RTMP_UserInterrupt(void); /* user typed Ctrl-C */
int RTMP_SendCtrl(RTMP *r, short nType, unsigned int nObject,
unsigned int nTime);
/* caller probably doesn't know current timestamp, should
* just use RTMP_Pause instead
*/
int RTMP_SendPause(RTMP *r, int DoPause, int dTime);
int RTMP_Pause(RTMP *r, int DoPause);
int RTMP_FindFirstMatchingProperty(AMFObject *obj, const AVal *name,
AMFObjectProperty * p);
int RTMPSockBuf_Fill(RTMPSockBuf *sb);
int RTMPSockBuf_Send(RTMPSockBuf *sb, const char *buf, int len);
int RTMPSockBuf_Close(RTMPSockBuf *sb);
int RTMP_SendCreateStream(RTMP *r);
int RTMP_SendSeek(RTMP *r, int dTime);
int RTMP_SendServerBW(RTMP *r);
int RTMP_SendClientBW(RTMP *r);
void RTMP_DropRequest(RTMP *r, int i, int freeit);
int RTMP_Read(RTMP *r, char *buf, int size);
int RTMP_Write(RTMP *r, const char *buf, int size);
/* hashswf.c */
int RTMP_HashSWF(const char *url, unsigned int *size, unsigned char *hash,
int age);
/*
***********************************************************************
* Introduced by SRS, export the ip/pid/cid of BMS
***********************************************************************
*/
/*
* The exported ip of server, for example, we use DNS to connect to server,
* but the ip resolved by DNS system maybe virtual ip, that is, the "real ip"
* only known by server itself and return by the rtmp connect result or flv
* metadata.
*/
extern char* _srs_ip;
/*
* The pid of BMS, used to query the detail log of client.
* A BMS server may restart and the pid changed.
*/
extern int _srs_pid;
/*
* The cid of BMS, used to query the detail log of client.
* A connection of a process(identify by pid) is unique and its id(cid) is
* unique also. The cid generally is a thread or connection or logic unit,
* for example, cid of rtmp client is the rtmp connection, while cid of hls+
* is a virtual connection which merge many http connections.
*/
extern int _srs_cid;
/*
***********************************************************************
* Introduced by SRS, other useful data.
***********************************************************************
*/
/*
* The received bytes from server. user can use to stat the kbps by:
* rkbps = rbytes * 8 / 1000 / (diff seconds)
*/
extern unsigned long _srs_rbytes;
/*
* The sent bytes from server. user can use to stat the kbps by:
* skbps = sbytes * 8 / 1000 / (diff seconds)
*/
extern unsigned long _srs_sbytes;
/*
* The current state of client.
* 0,init 1,idle 2,connected 3,working 4,closed
*/
extern int _srs_state;
#ifdef __cplusplus
};
#endif
#endif

138
librtmp/rtmp_sys.h Normal file
View File

@ -0,0 +1,138 @@
#ifndef __RTMP_SYS_H__
#define __RTMP_SYS_H__
/*
* Copyright (C) 2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#ifdef _WIN32
#include <winsock2.h>
#include <ws2tcpip.h>
#if _MSC_VER < 1500 /* MSVC */
#define snprintf _snprintf
#define strcasecmp stricmp
#define strncasecmp strnicmp
#define vsnprintf _vsnprintf
#endif
#define GetSockError() WSAGetLastError()
#define SetSockError(e) WSASetLastError(e)
#define setsockopt(a,b,c,d,e) (setsockopt)(a,b,c,(const char *)d,(int)e)
#define EWOULDBLOCK WSAETIMEDOUT /* we don't use nonblocking, but we do use timeouts */
#define msleep(n) Sleep(n)
#define SET_RCVTIMEO(tv,s) int tv = s*1000
#else /* !_WIN32 */
#include <sys/types.h>
#include <sys/socket.h>
#include <sys/times.h>
#include <netdb.h>
#include <unistd.h>
#include <netinet/in.h>
#include <netinet/tcp.h>
#include <arpa/inet.h>
#define GetSockError() errno
#define SetSockError(e) errno = e
#undef closesocket
#define closesocket(s) close(s)
#define msleep(n) usleep(n*1000)
#define SET_RCVTIMEO(tv,s) struct timeval tv = {s,0}
#endif
#include "rtmp.h"
#ifdef USE_POLARSSL
#include <polarssl/version.h>
#include <polarssl/net.h>
#include <polarssl/ssl.h>
#include <polarssl/havege.h>
#if POLARSSL_VERSION_NUMBER < 0x01010000
#define havege_random havege_rand
#endif
#if POLARSSL_VERSION_NUMBER >= 0x01020000
#define SSL_SET_SESSION(S,resume,timeout,ctx) ssl_set_session(S,ctx)
#else
#define SSL_SET_SESSION(S,resume,timeout,ctx) ssl_set_session(S,resume,timeout,ctx)
#endif
/* Client-side TLS state for the PolarSSL backend. */
typedef struct tls_ctx {
    havege_state hs;  /* entropy/PRNG state fed to ssl_set_rng */
    ssl_session ssn;  /* session cache used for resumption */
} tls_ctx;
/* Server-side TLS state: certificate, private key and DH parameters. */
typedef struct tls_server_ctx {
    havege_state *hs;
    x509_cert cert;
    rsa_context key;
    ssl_session ssn;
    const char *dhm_P, *dhm_G; /* Diffie-Hellman prime and generator */
} tls_server_ctx;
#define TLS_CTX tls_ctx *
#define TLS_client(ctx,s) s = malloc(sizeof(ssl_context)); ssl_init(s);\
ssl_set_endpoint(s, SSL_IS_CLIENT); ssl_set_authmode(s, SSL_VERIFY_NONE);\
ssl_set_rng(s, havege_random, &ctx->hs);\
ssl_set_ciphersuites(s, ssl_default_ciphersuites);\
SSL_SET_SESSION(s, 1, 600, &ctx->ssn)
#define TLS_server(ctx,s) s = malloc(sizeof(ssl_context)); ssl_init(s);\
ssl_set_endpoint(s, SSL_IS_SERVER); ssl_set_authmode(s, SSL_VERIFY_NONE);\
ssl_set_rng(s, havege_random, ((tls_server_ctx*)ctx)->hs);\
ssl_set_ciphersuites(s, ssl_default_ciphersuites);\
SSL_SET_SESSION(s, 1, 600, &((tls_server_ctx*)ctx)->ssn);\
ssl_set_own_cert(s, &((tls_server_ctx*)ctx)->cert, &((tls_server_ctx*)ctx)->key);\
ssl_set_dh_param(s, ((tls_server_ctx*)ctx)->dhm_P, ((tls_server_ctx*)ctx)->dhm_G)
#define TLS_setfd(s,fd) ssl_set_bio(s, net_recv, &fd, net_send, &fd)
#define TLS_connect(s) ssl_handshake(s)
#define TLS_accept(s) ssl_handshake(s)
#define TLS_read(s,b,l) ssl_read(s,(unsigned char *)b,l)
#define TLS_write(s,b,l) ssl_write(s,(unsigned char *)b,l)
#define TLS_shutdown(s) ssl_close_notify(s)
#define TLS_close(s) ssl_free(s); free(s)
#elif defined(USE_GNUTLS)
#include <gnutls/gnutls.h>
/* Client credentials and cipher priorities for the GnuTLS backend. */
typedef struct tls_ctx {
    gnutls_certificate_credentials_t cred;
    gnutls_priority_t prios;
} tls_ctx;
#define TLS_CTX tls_ctx *
#define TLS_client(ctx,s) gnutls_init((gnutls_session_t *)(&s), GNUTLS_CLIENT); gnutls_priority_set(s, ctx->prios); gnutls_credentials_set(s, GNUTLS_CRD_CERTIFICATE, ctx->cred)
#define TLS_server(ctx,s) gnutls_init((gnutls_session_t *)(&s), GNUTLS_SERVER); gnutls_priority_set_direct(s, "NORMAL", NULL); gnutls_credentials_set(s, GNUTLS_CRD_CERTIFICATE, ctx)
#define TLS_setfd(s,fd) gnutls_transport_set_ptr(s, (gnutls_transport_ptr_t)(long)fd)
#define TLS_connect(s) gnutls_handshake(s)
#define TLS_accept(s) gnutls_handshake(s)
#define TLS_read(s,b,l) gnutls_record_recv(s,b,l)
#define TLS_write(s,b,l) gnutls_record_send(s,b,l)
#define TLS_shutdown(s) gnutls_bye(s, GNUTLS_SHUT_RDWR)
#define TLS_close(s) gnutls_deinit(s)
#else /* USE_OPENSSL */
#define TLS_CTX SSL_CTX *
#define TLS_client(ctx,s) s = SSL_new(ctx)
#define TLS_server(ctx,s) s = SSL_new(ctx)
#define TLS_setfd(s,fd) SSL_set_fd(s,fd)
#define TLS_connect(s) SSL_connect(s)
#define TLS_accept(s) SSL_accept(s)
#define TLS_read(s,b,l) SSL_read(s,b,l)
#define TLS_write(s,b,l) SSL_write(s,b,l)
#define TLS_shutdown(s) SSL_shutdown(s)
#define TLS_close(s) SSL_free(s)
#endif
#endif

17
librtmp/strncasecmp.h Normal file
View File

@ -0,0 +1,17 @@
#ifndef STRNCASECMP_H
#define STRNCASECMP_H
/* Map the POSIX case-insensitive string comparisons onto their MSVC
 * equivalents when building for Windows. */
#ifndef WINSHIT_INCLUDED
#define WINSHIT_INCLUDED
#if defined(WIN32) || defined(WIN64)
#define strcasecmp _stricmp
#define strncasecmp(x,y,z) _strnicmp(x,y,z)
#endif /* Def WIN32 or Def WIN64 */
#endif /* Ndef WINSHIT_INCLUDED */
#endif // STRNCASECMP_H

1916
librtmp/zlib.h Normal file

File diff suppressed because it is too large Load Diff

47
main.cpp Normal file
View File

@ -0,0 +1,47 @@
#include "mainwindow.h"
#include <QApplication>
#include "cplaywidget.h"
#include <QTimer>
#include "CameraCapture.h"
#include "mainwindow.h"
#include <qlibrary.h>
#include <qsysinfo.h>
#include <qt_windows.h>
#include "media/screen_capture.h"
#include "media/DXGICapture.h"
#include <QVector>
#include <stdio.h>
#include <tchar.h>
#include <shlobj.h>
#include <D3D9.h>
#if _MSC_VER >= 1600
#pragma execution_character_set("utf-8")
#endif
#ifdef __MINGW32__
#include <Tlhelp32.h>
#include "winuser.h"
#endif
// Currently does nothing and returns 0.
// NOTE(review): name contains a typo ("Regiester"); kept unchanged so any
// external callers keep linking.
int RegiesterOwnType(){
    return 0;
}
// Application entry point: runs a couple of screen-capture probes, then
// starts the Qt event loop with the main streaming-test window.
int main(int argc, char *argv[])
{
    setbuf(stdout, NULL); // unbuffered stdout so printf output appears immediately
    // NOTE(review): these capture/screenshot calls look like leftover
    // debugging of the capture path -- confirm they are still wanted at startup.
    ScreenCapture p;
    p.EnumScreen();
    Direct3D9TakeScreenshots(0,4);
    QssEventFilter filter; // NOTE(review): never installed or referenced below -- confirm
    QApplication app(argc, argv);
    MainWindow main;
    main.setWindowTitle("流媒体测试工具");
    main.setFixedSize(1920,1080);
    main.show();
    return app.exec();
}

140
mainwindow.cpp Normal file
View File

@ -0,0 +1,140 @@
#include "mainwindow.h"
#include "ui_mainwindow.h"
#include <QDesktopWidget>
#include <QPaintDevice>
#if _MSC_VER >= 1600
#pragma execution_character_set("utf-8")
#endif
// Build the main window: log screen geometry, populate the camera and
// microphone combo boxes, and start the 100ms DPI-polling timer.
MainWindow::MainWindow(QWidget *parent) :
    QssMainWindow(parent, 0, 1.5),
    ui(new Ui::MainWindow),
    m_bCameraOpen(false),
    mCamera(nullptr),
    m_bRtmpPushing(false),
    mPlayerWidget(nullptr),
    mVideoCoder(nullptr),
    mPusher(nullptr),
    mAudioCapture(nullptr),
    mTimer(nullptr)
{
    ui->setupUi(this);
    this->move(50, 50);
    // (removed an unused local `int i(0);` left over from earlier code)
    // Trace window/screen geometry for layout debugging.
    QDesktopWidget* desktopWidget = QApplication::desktop();
    QRect clientRect = desktopWidget->availableGeometry();
    QRect applicationRect = desktopWidget->screenGeometry();
    qDebug() << this->pos() << clientRect << applicationRect
             << this->m_frame->geometry() << this->centralWidget()->geometry();
    // Offer every capture device on the system in the camera combo box.
    std::vector<std::wstring> cameras = Camera::EnumAllCamera();
    for (std::wstring x : cameras) {
        ui->comboBox->addItem(QString::fromWCharArray(x.c_str(), x.size()),
                              QString::fromWCharArray(x.c_str(), x.size()));
    }
    // Enumerate audio capture devices at 44.1 kHz stereo.
    mAudioCapture = new CaptureAudioFfmpeg(44100, 2);
    mMic = mAudioCapture->EnumSpeakers();
    qDebug() << "capture " << mMic.size() << "mic";
    for (vector<CaptureAudioFfmpeg::MICInfo>::iterator itr = mMic.begin(); itr != mMic.end(); itr++) {
        qDebug() << QString::fromStdWString(itr->name) << itr->index;
    }
    // Poll logical DPI every 100 ms.
    mTimer = new QTimer(this);
    connect(mTimer, SIGNAL(timeout()), this, SLOT(DetectDpi()));
    mTimer->start(100);
}
// Destroys the generated UI. Qt parent-ownership releases mTimer; the other
// heap members (mCamera, mPlayerWidget, mVideoCoder, mAudioCapture) are not
// freed here -- NOTE(review): confirm whether that leak is intentional.
MainWindow::~MainWindow(){
    delete ui;
}
// Toggle the camera preview. First click opens the selected camera, routes
// its frames to the preview widget and relabels the button; second click
// only flips the flag/label (the capture itself is not stopped here).
void MainWindow::on_pushButton_clicked(){
    if (nullptr == mPlayerWidget) {
        mPlayerWidget = new CPlayWidget(nullptr);
    }
    if (!m_bCameraOpen) {
        mPlayerWidget->SetDataType(CPlayWidget::IMG_TYPE::TYPE_RGB32);
        mPlayerWidget->SetImgSize(640, 480);
        qDebug() << ui->comboBox->currentText().size() << ui->comboBox->currentText();
        // BUG FIX: the previous code new[]'d a wchar_t buffer that was never
        // deleted; toStdWString() builds the same wide string without leaking.
        wstring ss = ui->comboBox->currentText().toStdWString();
        if (nullptr == mCamera) {
            this->mCamera = new Camera(ss);
        }
        this->mCamera->SetObserver(mPlayerWidget);
        qDebug() << ui->comboBox->currentText();
        ui->pushButton->setText("关闭摄像头");
        m_bCameraOpen = true;
        mPlayerWidget->show();
        ui->verticalLayout->addWidget(mPlayerWidget);
        qDebug() << ui->verticalLayout->layout();
        ui->verticalLayout->setStretch(0, 1);
        ui->verticalLayout->setStretch(1, 0);
        ui->verticalLayout->setStretch(2, 9);
    } else {
        m_bCameraOpen = false;
        ui->pushButton->setText("打开摄像头");
    }
}
void MainWindow::on_pushButton_2_clicked()
{
if(!m_bRtmpPushing){
if(!m_bCameraOpen){
ToastWidget::showTip("请打开摄像头",this);
return;
}else{
//
if(nullptr == mVideoCoder){
mVideoCoder = new VideoCoder(mCamera->GetWidth(),
mCamera->GetHeight(),
GUIDToAvFormat(mCamera->MediaType()));
}
mCamera->SetObserver(mVideoCoder);
// todo 根据返回结果判断是否推流
qDebug()<<"连接RTMP服务器"<<ui->lineEdit->text();
if (!mPusher->IfConnect()) {
const char* address = ui->lineEdit->text().toLocal8Bit().data();
qDebug()<<address;
if (0 == mPusher->RTMP264_Connect("rtmp://127.0.0.1:1939/live/1")) {
ToastWidget::showTip("已经连接上RTMP服务器",this->parentWidget());
mVideoCoder->SetOberver(mPusher);
mPusher->StartPush();
ui->pushButton_2->setText("关闭推流");
/*
if (nullptr != this->mAudioCoder) {
this->mAudioCoder->SetObserver(mPusher);
//音频流先不推流
}*/
}
else {
ToastWidget::showTip("连接RTMP服务器失败请检查服务器地址",this->parentWidget());
}
}else{
ToastWidget::showTip("正在推流,请先关闭",this->parentWidget());
}
}
}
}
// Microphone button handler: currently only logs the selected device name.
void MainWindow::on_pushButton_3_clicked()
{
    qDebug() << ui->comboBox_2->currentText();
}
// Periodic slot (driven by mTimer every 100 ms): samples the widget's
// logical DPI. The trace output is commented out, so the sampled values
// are currently unused.
void MainWindow::DetectDpi()
{
    // qDebug()<<"detect dpi";
    int horizontalDPI = logicalDpiX();
    int verticalDPI = logicalDpiY();
    // qDebug()<<horizontalDPI<<verticalDPI<<physicalDpiX()<<physicalDpiY();
}

48
mainwindow.h Normal file
View File

@ -0,0 +1,48 @@
#ifndef MAINWINDOW_H
#define MAINWINDOW_H
#include <QMainWindow>
#include "media/CameraCapture.h"
#include "cplaywidget.h"
#include "media/VideoCoder.h"
#include "media/RtmpPusher.h"
#include "components/toast.h"
#include "utils.h"
#include "Qss.h"
#include "media/audiocaptureff.h"
#include <vector>
using namespace std;
namespace Ui {
class MainWindow;
}
// Main application window: hosts the camera preview, the RTMP push
// controls and the audio-device list of the streaming test tool.
class MainWindow : public QssMainWindow
{
    Q_OBJECT
public:
    explicit MainWindow(QWidget *parent = nullptr);
    ~MainWindow();
private slots:
    void on_pushButton_clicked();   // toggle the camera preview
    void on_pushButton_2_clicked(); // start RTMP publishing
    void on_pushButton_3_clicked(); // log the selected microphone (debug)
    void DetectDpi();               // periodic DPI sample driven by mTimer
private:
    Ui::MainWindow *ui;
    Camera *mCamera;                 // camera capture source, lazily created
    QStringList mCameraList;         // NOTE(review): appears unused in this file -- confirm
    bool m_bCameraOpen;              // true while the preview is running
    CPlayWidget *mPlayerWidget;      // video preview widget
    VideoCoder *mVideoCoder;         // H.264 encoder fed by the camera
    bool m_bRtmpPushing;             // true while publishing to the server
    H264RtmpPuser *mPusher;          // RTMP publisher
    CaptureAudioFfmpeg *mAudioCapture; // audio capture backend
    vector<CaptureAudioFfmpeg::MICInfo> mMic; // enumerated capture devices
    QTimer *mTimer;                  // drives DetectDpi()
};
#endif // MAINWINDOW_H

185
mainwindow.ui Normal file
View File

@ -0,0 +1,185 @@
<?xml version="1.0" encoding="UTF-8"?>
<ui version="4.0">
<class>MainWindow</class>
<widget class="QMainWindow" name="MainWindow">
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>1383</width>
<height>1116</height>
</rect>
</property>
<property name="sizePolicy">
<sizepolicy hsizetype="Expanding" vsizetype="Preferred">
<horstretch>0</horstretch>
<verstretch>0</verstretch>
</sizepolicy>
</property>
<property name="minimumSize">
<size>
<width>600</width>
<height>800</height>
</size>
</property>
<property name="windowTitle">
<string>MainWindow</string>
</property>
<widget class="QWidget" name="centralWidget">
<property name="sizePolicy">
<sizepolicy hsizetype="Expanding" vsizetype="Expanding">
<horstretch>0</horstretch>
<verstretch>0</verstretch>
</sizepolicy>
</property>
<layout class="QGridLayout" name="gridLayout">
<item row="0" column="0">
<layout class="QVBoxLayout" name="verticalLayout" stretch="1,9">
<item>
<layout class="QHBoxLayout" name="horizontalLayout" stretch="1,2,0,0,0,0,0,13">
<property name="leftMargin">
<number>2</number>
</property>
<property name="topMargin">
<number>2</number>
</property>
<property name="rightMargin">
<number>2</number>
</property>
<property name="bottomMargin">
<number>1</number>
</property>
<item>
<widget class="QPushButton" name="pushButton">
<property name="minimumSize">
<size>
<width>100</width>
<height>50</height>
</size>
</property>
<property name="sizeIncrement">
<size>
<width>0</width>
<height>6</height>
</size>
</property>
<property name="baseSize">
<size>
<width>0</width>
<height>50</height>
</size>
</property>
<property name="text">
<string>打开摄像头</string>
</property>
</widget>
</item>
<item>
<widget class="QComboBox" name="comboBox">
<property name="minimumSize">
<size>
<width>200</width>
<height>35</height>
</size>
</property>
<property name="baseSize">
<size>
<width>0</width>
<height>50</height>
</size>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="pushButton_3">
<property name="minimumSize">
<size>
<width>100</width>
<height>50</height>
</size>
</property>
<property name="text">
<string>??????</string>
</property>
</widget>
</item>
<item>
<widget class="QComboBox" name="comboBox_2">
<property name="minimumSize">
<size>
<width>200</width>
<height>35</height>
</size>
</property>
</widget>
</item>
<item>
<widget class="QLabel" name="label">
<property name="text">
<string>rtmp???????</string>
</property>
</widget>
</item>
<item>
<widget class="QLineEdit" name="lineEdit">
<property name="minimumSize">
<size>
<width>300</width>
<height>30</height>
</size>
</property>
<property name="text">
<string>rtmp://127.0.0.1:1935/live/1</string>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="pushButton_2">
<property name="minimumSize">
<size>
<width>60</width>
<height>50</height>
</size>
</property>
<property name="text">
<string>????</string>
</property>
</widget>
</item>
<item>
<spacer name="horizontalSpacer_2">
<property name="orientation">
<enum>Qt::Horizontal</enum>
</property>
<property name="sizeHint" stdset="0">
<size>
<width>40</width>
<height>20</height>
</size>
</property>
</spacer>
</item>
</layout>
</item>
<item>
<spacer name="verticalSpacer">
<property name="orientation">
<enum>Qt::Vertical</enum>
</property>
<property name="sizeHint" stdset="0">
<size>
<width>20</width>
<height>40</height>
</size>
</property>
</spacer>
</item>
</layout>
</item>
</layout>
</widget>
</widget>
<layoutdefault spacing="6" margin="11"/>
<resources/>
<connections/>
</ui>

145
media/AACAudioCoder.cpp Normal file
View File

@ -0,0 +1,145 @@
#include "AACAudioCoder.h"
//#include "Debuger.h"
using namespace AAC_CODER;
// Destructor. NOTE(review): the codec context, frame and frame buffer
// allocated in the constructor are not released here -- potential leak,
// confirm ownership before adding cleanup.
AACAudioCoder::~AACAudioCoder() {
}
void AACAudioCoder::OnAudioData(const void *frameaddress, uint32_t framelen)
{
this->Encode((unsigned char *)frameaddress, framelen * 4);
}
// Set up a libfdk_aac encoder for 16-bit stereo PCM at 44.1 kHz.
// NOTE(review): the smprate/channel parameters are ignored -- the codec
// context is hard-coded to 44100 Hz stereo; confirm before generalizing.
// On any failure mStatus is set to FAIL and construction stops early.
AACAudioCoder::AACAudioCoder(unsigned int smprate, unsigned int channel) {
    this->mObserver = nullptr;
    mPts = 0;
    AVCodecID codec_id = AV_CODEC_ID_AAC;
    pCodec = (AVCodec *)avcodec_find_encoder_by_name("libfdk_aac");
    if (!pCodec) {
        printf("Codec not found\n");
        this->mStatus = FAIL;
        return; // BUG FIX: previously fell through and dereferenced pCodec below
    }
    mCodecCtx = avcodec_alloc_context3(pCodec);
    if (!mCodecCtx) {
        printf("Could not allocate video codec context\n");
        this->mStatus = FAIL;
        return; // BUG FIX: previously fell through and dereferenced mCodecCtx below
    }
    // 16-bit interleaved stereo in, ~23ms (1024-sample) frames out.
    mCodecCtx->codec_id = pCodec->id;
    mCodecCtx->codec_type = AVMEDIA_TYPE_AUDIO;
    mCodecCtx->sample_fmt = AV_SAMPLE_FMT_S16;
    mCodecCtx->sample_rate = 44100;
    mCodecCtx->channel_layout = AV_CH_LAYOUT_STEREO;
    mCodecCtx->channels = 2;
    mCodecCtx->bit_rate = 640000;
    mCodecCtx->time_base.den = 1;
    mCodecCtx->time_base.num = 23;
    mCodecCtx->frame_size = 1024;
    if (avcodec_open2(mCodecCtx, pCodec, NULL) < 0) {
        this->mStatus = FAIL;
        return;
    }
    // Pre-allocate the reusable input frame and its sample buffer.
    mFrame = av_frame_alloc();
    mFrame->nb_samples = mCodecCtx->frame_size;
    mFrame->format = mCodecCtx->sample_fmt;
    int size = av_samples_get_buffer_size(NULL, mCodecCtx->channels, mCodecCtx->frame_size, mCodecCtx->sample_fmt, 1);
    mFrameBuf = (uint8_t *)av_malloc(size);
    avcodec_fill_audio_frame(mFrame, mCodecCtx->channels, mCodecCtx->sample_fmt, (const uint8_t*)mFrameBuf, size, 1);
}
/* ADTS sampling-frequency table, indexed by the 4-bit
 * sampling_frequency_index field of the ADTS header (ISO/IEC 13818-7).
 * BUG FIX: the table contained typos 882000 and 441000 for 88200 and 44100
 * (compare the avpriv_mpeg4audio_sample_rates table quoted below in
 * MakeAdtsHeader), so lookups for those rates always failed.
 * Trailing zeros are reserved/escape entries. */
int adts_sample_rates[]={96000,88200,64000,48000,44100,32000,24000,22050,16000,12000,11025,8000,7350,0,0,0};
/* Return the ADTS sampling_frequency_index for `samplerate`, or 15
 * (the last, reserved index) when the rate is not in the table. */
int FindAdstSRIndex(int samplerate)
{
    int i;
    for (i = 0; i < 16; i++)
    {
        if (samplerate == adts_sample_rates[i])
            return i;
    }
    return 16 - 1;
}
#define ADTS_HEAD_LEN 7
// Write a 7-byte ADTS header (no CRC) for one raw AAC frame into `data`.
// NOTE(review): `samplerate` is unused -- freqIdx is hard-coded for 44.1 kHz;
// also confirm whether callers pass iFrameLen including these 7 header bytes,
// as the ADTS frame-length field requires.
void MakeAdtsHeader(unsigned char *data, int samplerate, int channels, int iFrameLen)
{
    int profile = 2; // AAC LC (MediaCodecInfo.CodecProfileLevel.AACObjectLC)
    int freqIdx = 4; // index 4 of avpriv_mpeg4audio_sample_rates below = 44100 Hz
                     // (the original comment claimed 32K, but 32000 is index 5)
    int chanCfg = channels; // channel_configuration; 2 = stereo, see table below
    /*int avpriv_mpeg4audio_sample_rates[] = {
    96000, 88200, 64000, 48000, 44100, 32000,
    24000, 22050, 16000, 12000, 11025, 8000, 7350
    };
    channel_configuration: chanCfg
    0: Defined in AOT Specifc Config
    1: 1 channel: front-center
    2: 2 channels: front-left, front-right
    3: 3 channels: front-center, front-left, front-right
    4: 4 channels: front-center, front-left, front-right, back-center
    5: 5 channels: front-center, front-left, front-right, back-left, back-right
    6: 6 channels: front-center, front-left, front-right, back-left, back-right, LFE-channel
    7: 8 channels: front-center, front-left, front-right, side-left, side-right, back-left, back-right, LFE-channel
    8-15: Reserved
    */
    // fill in ADTS data
    data[0] = (uint8_t)0xFF;
    data[1] = (uint8_t)0xF9;
    data[2] = (uint8_t)(((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2));
    data[3] = (uint8_t)(((chanCfg & 3) << 6) + (iFrameLen >> 11));
    data[4] = (uint8_t)((iFrameLen & 0x7FF) >> 3);
    data[5] = (uint8_t)(((iFrameLen & 7) << 5) + 0x1F);
    data[6] = (uint8_t)0xFC;
}
// Debug-only: file handle used by Encode() to dump the raw AAC stream to "dst.aac".
FILE *ptest = nullptr;
int once = 1; // NOTE(review): appears unused -- confirm before removing
// Encode `num` bytes of interleaved S16 stereo PCM into one AAC frame.
// Encoded output goes to the observer (if any) and is also appended to the
// debug dump file "dst.aac". Returns 0 on success, -1 on error.
int AACAudioCoder::Encode( unsigned char *input, unsigned int num) {
    // BUG FIX: the null check used to run *after* `input` had already been
    // handed to avcodec_fill_audio_frame; validate it first.
    if (nullptr == input) {
        return -1;
    }
    mFrame->nb_samples = mCodecCtx->frame_size;
    mFrame->format = mCodecCtx->sample_fmt;
    avcodec_fill_audio_frame(mFrame, mCodecCtx->channels,
                             mCodecCtx->sample_fmt, input,
                             num, 1);
    if (nullptr == ptest) {
        ptest = fopen("dst.aac", "wb"); // lazily open the debug dump
    }
    av_init_packet(&pkt);
    pkt.data = NULL; // packet data will be allocated by the encoder
    pkt.size = 0;
    int got_output = 0;
    mFrame->pts = mPts += 23; // ~23 ms per 1024-sample frame at 44.1 kHz
    int ret = avcodec_encode_audio2(mCodecCtx, &pkt, mFrame, &got_output);
    if (ret < 0) {
        printf("Error encoding frame\n");
        return -1;
    }
    if (got_output) {
        if (nullptr != mObserver) {
            mObserver->OnAudioEncode(pkt.data, pkt.size, mFrame->pts);
        }
        fwrite(pkt.data, 1, pkt.size, ptest);
        av_free_packet(&pkt);
    }
    return 0;
}
// Register the encoded-frame observer. Only the first registration takes
// effect: once an observer is set, later calls are ignored. Always returns 0.
int AAC_CODER::AACAudioCoder::SetObserver(EncodeAudioObserver *p)
{
    if (this->mObserver != nullptr) {
        return 0;
    }
    this->mObserver = p;
    return 0;
}

52
media/AACAudioCoder.h Normal file
View File

@ -0,0 +1,52 @@
#pragma once
#include "AudioCapture.h"
#ifdef __cplusplus
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/avutil.h"
#include "libswscale/swscale.h"
#include "libavutil/opt.h"
#include "libavutil/imgutils.h"
};
#endif
namespace AAC_CODER {
    // AAC encoder built on FFmpeg (libfdk_aac). Consumes raw PCM delivered
    // through the CaptureAudioObserver callback and forwards each encoded
    // frame to an EncodeAudioObserver.
    class AACAudioCoder :public CaptureAudio::CaptureAudioObserver {
    public:
        // Receives every encoded AAC frame together with its pts.
        class EncodeAudioObserver {
        public:
            virtual void OnAudioEncode(const void *frameaddress, uint32_t framelen,uint16_t pts) {};
        };
        enum CAP_STATUS {
            RUNNING = 1,
            STOP = 2,
            PAUSE = 3,
            READY = 4,
            UNREADY = 5,
            FAIL = 6,
        };
        void OnAudioData(const void *frameaddress, uint32_t framelen); // capture hook
        AACAudioCoder(unsigned int smprate, unsigned int channel);
        ~AACAudioCoder();
        int Encode(unsigned char *input, unsigned int num);
        int SetObserver(EncodeAudioObserver *); // first registration wins
    private:
        unsigned int mpts;             // NOTE(review): shadowed by mPts below -- confirm which is live
        CAP_STATUS mStatus;            // FAIL when construction/encoding failed
        unsigned long mSampleRate = 44100;
        unsigned int mChannels = 2;
        unsigned int mPCMBitSize = 16;
        uint8_t* mAACBuffer;
        unsigned long nMaxOutputBytes;
        uintptr_t mFablaAacenc;
        AVCodec *mCodec;
        AVCodec *pCodec;               // encoder found by name ("libfdk_aac")
        AVCodecContext *mCodecCtx = NULL;
        AVFrame *mFrame;               // reusable input frame
        AVPacket pkt;                  // reusable output packet
        uint8_t* mFrameBuf;            // backing buffer for mFrame
        uint16_t mPts;                 // running pts, incremented per frame
        EncodeAudioObserver *mObserver;
    };
}

138
media/AACDecoder.cpp Normal file
View File

@ -0,0 +1,138 @@
#include "AACDecoder.h"
#include "Debuger.h"
void AACDecoder::OnRtmpFrame(void * dat, uint32_t size)
{
this->Decode((uint8_t *)dat, size);
}
// Default constructor: configure a standalone FFmpeg AAC decoder
// (44.1 kHz stereo, planar-float output) plus a libswresample context that
// converts decoded FLTP samples to interleaved S16 for the observer.
// On any failure mStatus is left as FAIL and construction stops early.
AACDecoder::AACDecoder() :mObserver(nullptr)
{
    mStatus = RUNNING;
    this->mObserverType = Observer_Audio;
    mSwrCtx = nullptr;
    mU16Data = nullptr;
    mCodec = avcodec_find_decoder(AV_CODEC_ID_AAC);
    if (mCodec == NULL) {
        Debuger::Debug(L"find codec fail\r\n");
        mStatus = FAIL;
        return; // BUG FIX: continuing would configure a null codec context
    }
    mCodecCtx = avcodec_alloc_context3(mCodec);
    if (nullptr == mCodecCtx) {
        Debuger::Debug(L"find codec ctx fail\r\n");
        mStatus = FAIL;
        return; // BUG FIX: the assignments below would dereference nullptr
    }
    mCodecCtx->codec = mCodec;
    mCodecCtx->codec_type = AVMEDIA_TYPE_AUDIO;
    mCodecCtx->sample_rate = 44100;
    mCodecCtx->channels = 2;
    mCodecCtx->channel_layout = AV_CH_LAYOUT_STEREO;
    mCodecCtx->sample_fmt = AV_SAMPLE_FMT_FLTP;
    mCodecCtx->frame_size = 2048;
#if LIBSWRESAMPLE_VERSION_MINOR >= 17 // pick the API matching the linked libswresample
    mSwrCtx = swr_alloc();
    av_opt_set_int(mSwrCtx, "in_channel_layout", AV_CH_LAYOUT_STEREO, 0);
    av_opt_set_int(mSwrCtx, "out_channel_layout", AV_CH_LAYOUT_STEREO, 0);
    av_opt_set_int(mSwrCtx, "in_sample_rate", 44100, 0);
    av_opt_set_int(mSwrCtx, "out_sample_rate", 44100, 0);
    av_opt_set_sample_fmt(mSwrCtx, "in_sample_fmt", AV_SAMPLE_FMT_FLTP, 0);
    av_opt_set_sample_fmt(mSwrCtx, "out_sample_fmt", AV_SAMPLE_FMT_S16, 0);
    swr_init(mSwrCtx);
#else
    mSwrCtx = swr_alloc();
    mSwrCtx = swr_alloc_set_opts(mSwrCtx,
                                 AV_CH_LAYOUT_STEREO, //output
                                 AV_SAMPLE_FMT_S16,
                                 44100,
                                 AV_CH_LAYOUT_STEREO, // input
                                 AV_SAMPLE_FMT_FLTP,
                                 44100,
                                 0, NULL);
    swr_init(mSwrCtx);
#endif
    if (avcodec_open2(mCodecCtx, mCodec, NULL) < 0) {
        Debuger::Debug(L"can't open codec\r\n");
        mStatus = FAIL;
        return; // BUG FIX: mStatus used to be reset to RUNNING below,
                // silently masking this failure
    }
    mSampleRate = 44100;
    mChannel = 2;
    mChannelLayout = AV_CH_LAYOUT_STEREO;
    mSampleFmt = AV_SAMPLE_FMT_FLTP;
    // BUG FIX: removed the unconditional trailing `mStatus = RUNNING;`.
    mU16Data = (uint8_t *)av_malloc(192000);
}
#define MAX_AUDIO_FRAME_SIZE 192000
// Construct a decoder around an existing demuxer stream, reusing the
// stream's codec context instead of configuring one by hand.
// NOTE(review): unlike the default constructor, this path initializes
// neither mSwrCtx nor mU16Data, both of which Decode() uses -- confirm that
// callers of this constructor never reach Decode(), or initialize them here.
AACDecoder::AACDecoder(AVStream * p):mObserver(nullptr)
{
    mStatus = RUNNING;
    this->mObserverType = Observer_Audio;
    if (nullptr == p) {
        Debuger::Debug(L"find codec fail\r\n");
        mStatus = FAIL;
    }
    mCodecCtx = p->codec;
    mCodec = avcodec_find_decoder(mCodecCtx->codec_id);
    if (mCodec == NULL) {
        Debuger::Debug(L"find codec fail\r\n");
        mStatus = FAIL;
    }
    if (avcodec_open2(mCodecCtx, mCodec, NULL) < 0) {
        Debuger::Debug(L"can't open codec\r\n");
        mStatus = FAIL;
    }
    // Mirror the stream's actual audio parameters.
    mSampleRate = mCodecCtx->sample_rate;
    mChannel = mCodecCtx->channels;
    mChannelLayout = mCodecCtx->channel_layout;
    mSampleFmt = mCodecCtx->sample_fmt;
}
// Decode one AAC frame and deliver interleaved S16 PCM to the observer.
// Returns 0 on success (including "decoder produced no output yet"),
// -1 on decode error or when the decoder is not RUNNING.
int AACDecoder::Decode(uint8_t * dat, uint16_t size)
{
    AVPacket pkt;
    int got_pcm = 0;
    int len = 0;
    if (mStatus != RUNNING) {
        // BUG FIX: the original fell off the end of this non-void function
        // when mStatus != RUNNING (undefined behavior).
        return -1;
    }
    mPcmDat = av_frame_alloc();
    av_init_packet(&pkt);
    pkt.data = dat;
    pkt.size = size;
    len = avcodec_decode_audio4(this->mCodecCtx, mPcmDat, &got_pcm, &pkt);
    if (len < 0) {
        printf("Error while decoding a frame.\n");
        av_frame_free(&mPcmDat); // BUG FIX: the frame used to leak here
        return -1;
    }
    if (got_pcm == 0) {
        av_frame_free(&mPcmDat); // BUG FIX: the frame used to leak here
        return 0;
    }
    // BUG FIX: av_samples_get_buffer_size() takes a channel *count*; the
    // original passed the layout mask AV_CH_LAYOUT_STEREO (value 3), so the
    // size reported to the observer was computed for three channels.
    int buffer_size = av_samples_get_buffer_size(NULL, 2,
                                                 mPcmDat->nb_samples,
                                                 AV_SAMPLE_FMT_S16, 1);
    // NOTE(review): swr_convert's out_count argument is in samples; passing
    // the byte count is over-large but tolerated since at most nb_samples
    // are produced -- consider tightening to mPcmDat->nb_samples.
    swr_convert(mSwrCtx, &mU16Data, buffer_size, (const uint8_t **)mPcmDat->data,
                mPcmDat->nb_samples);
    if (nullptr != this->mObserver) {
        this->mObserver->OnAudioDecode(mU16Data, buffer_size);
    }
    av_frame_free(&mPcmDat);
    return 0;
}
// Registers the sink for decoded PCM; a null argument keeps the current one.
int AACDecoder::SetObserver(AACDecoderObserver *p)
{
	if (p != nullptr)
		this->mObserver = p;
	return 0;
}

48
media/AACDecoder.h Normal file
View File

@ -0,0 +1,48 @@
#pragma once
#include "RtmpPuller2.h"
// FFmpeg is a C library: wrap its headers so the symbols get C linkage.
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/avutil.h"
#include "libswscale/swscale.h"
#include "libavutil/opt.h"
#include "libavutil/imgutils.h"
// NOTE(review): the backslash path separator below is Windows-only;
// "libswresample/swresample.h" would be portable — left unchanged here.
#include "libswresample\swresample.h"
};
// Decodes AAC packets pulled from an RTMP stream into interleaved 16-bit
// PCM and hands each decoded buffer to a registered observer.
class AACDecoder :public RtmpPuller2::RtmpPullObserver {
public:
	// Decoder lifecycle state.
	enum DECODE_STATUS {
		RUNNING = 1,
		STOP = 2,
		PAUSE = 3,
		FAIL = 4,
		NOSOURCE = 6,
	};
	// Receives decoded PCM: `dat` points at `size` bytes of audio.
	class AACDecoderObserver {
	public:
		virtual int OnAudioDecode(uint8_t *dat, uint16_t size) { return 0; };
	};
	void OnRtmpFrame(void * dat, uint32_t size);
	AACDecoder();
	AACDecoder(AVStream *p);
	int Decode(uint8_t *dat,uint16_t);
	int SetObserver(AACDecoderObserver *);
private:
	AVFormatContext *mFormatCtx = nullptr;
	AVCodecContext *mCodecCtx = nullptr;   // codec context (borrowed from the stream)
	AVCodec *mCodec = nullptr;
	AVPacket *mPacket = nullptr;
	uint16_t mSampleCnt;                   // sample counter — purpose not visible here, TODO confirm
	enum AVSampleFormat mSampleFmt;        // source sample format
	uint16_t mSampleRate;                  // source sample rate (Hz)
	uint16_t mChannel;                     // source channel count
	uint64_t mChannelLayout;               // FFmpeg channel-layout mask
	AVFrame *mPcmDat;                      // scratch frame for decoded audio
	uint8_t *mU16Data;                     // resampled S16 output buffer
	AACDecoderObserver *mObserver;         // decoded-PCM sink (may be null)
	DECODE_STATUS mStatus;
	SwrContext * mSwrCtx;                  // FLTP -> interleaved S16 resampler
};

133
media/AudioCapture.cpp Normal file
View File

@ -0,0 +1,133 @@
#include "AudioCapture.h"
#include "Debuger.h"
// File-scope PortAudio output stream handle; appears unused within this
// file — TODO confirm whether anything else references it.
PaStream *gStreamOut = nullptr;
// Records the requested format and puts the object in a safe idle state.
// InitCapture() must still be called to actually open a stream.
CaptureAudio::CaptureAudio(uint16_t rate, uint8_t channel) {
	this->mChanel = channel;
	this->mSampleRate = rate;
	this->mSize = 0;
	this->mStatus = FAIL;
	this->observer = nullptr;
	// Streams must start null: the destructor closes mInStream when it is
	// non-null, and it was previously read uninitialized (UB) whenever
	// InitCapture() had never run.
	this->mInStream = nullptr;
	this->mOutStream = nullptr;
}
/**
 * @brief Register the observer that receives captured audio buffers.
 *
 * @param ob Observer notified from the PortAudio callback thread; it must
 *           outlive the capture session.
 * @retval -1 ob was null; the current observer is unchanged
 * @retval 0  observer registered
 */
int CaptureAudio::SetObserver(CaptureAudioObserver* ob) {
	// Reject null observers; otherwise replace the current one.
	if (ob == nullptr)
		return -1;
	observer = ob;
	return 0;
}
int paOutStreamBkss(const void* input, void* output, unsigned long frameCount,
const PaStreamCallbackTimeInfo* timeInfo, PaStreamCallbackFlags statusFlags, void * userData)
{
CaptureAudio *pCap;
Debuger::Debug(L"%d\r\n", frameCount);
if (userData != nullptr) {
pCap = (CaptureAudio *)userData;
pCap->OnCallBack(input,output,frameCount);
}
pCap->AddCnt(4 * frameCount);
return 0;
}
// Hands the captured frames to the registered observer, if any.
int CaptureAudio::OnCallBack(const void* input, void* output, unsigned long frameCount) {
	if (this->observer != nullptr)
		this->observer->OnAudioData(input, frameCount);
	return 0;
}
// Closes the PortAudio input stream if one was ever opened.
CaptureAudio::~CaptureAudio() {
	if (nullptr != mInStream)
		Pa_CloseStream(mInStream);
}
// Starts the stream prepared by InitCapture().
// Returns 0 on success, -1 if not initialized or the stream fails to start.
int CaptureAudio::StartCapture()
{
	if (this->mStatus != RUNNING)
		return -1; // InitCapture() has not succeeded
	PaError err = Pa_StartStream(mInStream);
	if (err != paNoError) {
		this->mStatus = FAIL;
		return -1; // was: reported success even when the stream failed to start
	}
	return 0;
}
// Enumerates every PortAudio device and returns those that can capture
// input (despite the method name, these are microphones, not speakers).
vector<CaptureAudio::MICInfo> CaptureAudio::EnumSpeakers()
{
	vector<CaptureAudio::MICInfo> devices;
	if (Pa_Initialize() != paNoError) {
		Debuger::Debug(L"init stream error\r\n");
		mStatus = FAIL;
	}
	const PaDeviceIndex deviceCount = Pa_GetDeviceCount();
	if (deviceCount <= 0)
		return devices;
	for (int idx = 0; idx < deviceCount; ++idx) {
		const PaDeviceInfo *info = Pa_GetDeviceInfo(idx);
		if (info == nullptr || info->maxInputChannels <= 0)
			continue;
		MICInfo entry;
		entry.index = idx;
		entry.name = info->name;
		devices.push_back(entry);
	}
	return devices;
}
int CaptureAudio::InitCapture(int index,uint16_t rate, uint8_t channel) {
PaStreamParameters intputParameters;
PaError err = paNoError;
err = Pa_Initialize();
if (err != paNoError) goto error;
if (index < 0)
{
index = Pa_GetDefaultInputDevice();
}
if (paNoDevice == index) {
mStatus = FAIL;
return -1;
}
intputParameters.device = index;
intputParameters.channelCount = 2;
intputParameters.sampleFormat = paInt16;
intputParameters.suggestedLatency = Pa_GetDeviceInfo(intputParameters.device)->defaultLowInputLatency;
intputParameters.hostApiSpecificStreamInfo = NULL;
err = Pa_OpenStream(&mInStream, &intputParameters, NULL, 44100, 1024,
paFramesPerBufferUnspecified, paOutStreamBkss, this);
if (err != paNoError) {
this->mStatus = FAIL;
return -1;
}
this->mStatus = RUNNING;
return 0;
error:
Pa_Terminate();
return -1;
}
// Stops a running stream; a no-op in any other state.
void CaptureAudio::StopCapture()
{
	if (RUNNING != this->mStatus)
		return;
	Pa_StopStream(mInStream);
	this->mStatus = STOP;
}

73
media/AudioCapture.h Normal file
View File

@ -0,0 +1,73 @@
#ifndef __CAPTUREAUDIO_H__
#define __CAPTUREAUDIO_H__
#include "stdint.h"
#include "../third/portaudio/portaudio.h"
#include <vector>
#include <string>
//Windows
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/avutil.h"
#include "libswscale/swscale.h"
#include "libavutil/opt.h"
#include "libavutil/imgutils.h"
#include "libavdevice/avdevice.h"
#include "libavfilter/avfilter.h"
};
#include <functional>
#include <dshow.h>
#include <windows.h>
#include "qedit.h"
#include <mutex>
#include <vector>
#include <thread>
#include "guiddef.h"
using namespace std;
typedef int (CbAudio)(const void* input, void* output, unsigned long frameCount,
const PaStreamCallbackTimeInfo* timeInfo, PaStreamCallbackFlags statusFlags, void * userData);
// Captures microphone audio through a PortAudio callback stream and
// forwards raw frames to a registered observer.
class CaptureAudio {
public:
	class CaptureAudioObserver {
	public:
		// Invoked on the PortAudio callback thread with `framelen` frames.
		virtual void OnAudioData(const void *frameaddress, uint32_t framelen) {};
	};
	// One enumerated input (microphone) device.
	typedef struct MICInfo
	{
		string name;
		int index;
	}MICInfo;
	enum CAP_STATUS {
		RUNNING = 1,
		STOP = 2,
		PAUSE = 3,
		FAIL = 4,
	};
	vector<CaptureAudio::MICInfo> EnumSpeakers();
	CaptureAudio(uint16_t rate, uint8_t channel);
	~CaptureAudio();
	int StartCapture();
	int InitCapture(int index,uint16_t rate,uint8_t channel);
	void StopCapture();
	int SetObserver(CaptureAudioObserver*);
	int OnCallBack(const void* input, void* output, unsigned long frameCount);
	// Accumulates the captured byte count (called from the stream callback).
	void AddCnt(unsigned int x) {this->mSize += x;};
private:
	uint16_t mSampleRate; // sample rate (Hz)
	uint16_t mChanel; // channel count
	PaStream *mInStream;
	PaStream *mOutStream;
	unsigned long mSize; // total bytes captured so far
	CAP_STATUS mStatus;
	CaptureAudioObserver *observer;
};
#endif //__CAPTUREAUDIO_H__

91
media/AudioPlayer.cpp Normal file
View File

@ -0,0 +1,91 @@
#include "AudioPlayer.h"
#include "Debuger.h"
#include "utils.h"
// Opens a blocking-mode stereo S16 output stream at 44.1 kHz on PortAudio
// device `index` and starts it; mStatus reflects the outcome (RUNNING/FAIL).
AudioPlayer::AudioPlayer(int index)
{
	mStatus = RUNNING;
	PaError err = Pa_Initialize();
	if (err != paNoError) {
		Debuger::Debug(L"init stream error\r\n");
		mStatus = FAIL;
	}
	// Dump every device's channel counts for diagnostics.
	PaDeviceIndex iNumDevices = Pa_GetDeviceCount();
	for (int i = 0; i < iNumDevices; i++)
	{
		const PaDeviceInfo *deviceInfo = Pa_GetDeviceInfo(i);
		if (nullptr == deviceInfo)
			continue; // was: dereferenced without a null check
		Debuger::Debug(L"index %d %d %d \r\n", i,
			deviceInfo->maxInputChannels, deviceInfo->maxOutputChannels);
	}
	mOutputParameters.device = index;
	mOutputParameters.channelCount = 2; // interleaved stereo, left channel first
	mOutputParameters.sampleFormat = paInt16;
	const PaDeviceInfo *outInfo = Pa_GetDeviceInfo(mOutputParameters.device);
	if (nullptr == outInfo) {
		// was: null result dereferenced when `index` named no device
		Debuger::Debug(L"open output stream error\r\n");
		mStatus = FAIL;
		return;
	}
	mOutputParameters.suggestedLatency = outInfo->defaultLowOutputLatency;
	mOutputParameters.hostApiSpecificStreamInfo = NULL;
	// No callback: blocking-write mode, fed by Play().
	err = Pa_OpenStream(&mOutStream, NULL, &mOutputParameters, 44100, 1024,
		paFramesPerBufferUnspecified, NULL, NULL);
	if (err != paNoError) {
		Debuger::Debug(L"open output stream error\r\n");
		mStatus = FAIL;
		return;
	}
	err = Pa_StartStream(mOutStream);
	if (err != paNoError) {
		Debuger::Debug(L"start stream error\r\n");
		mStatus = FAIL;
	}
}
// Lists every PortAudio device that exposes at least one output channel.
vector<AudioPlayer::SpeakerInfo> AudioPlayer::EnumSpeakers()
{
	vector<AudioPlayer::SpeakerInfo> speakers;
	if (Pa_Initialize() != paNoError) {
		Debuger::Debug(L"init stream error\r\n");
		mStatus = FAIL;
	}
	const PaDeviceIndex deviceCount = Pa_GetDeviceCount();
	if (deviceCount <= 0)
		return speakers;
	for (int idx = 0; idx < deviceCount; ++idx) {
		const PaDeviceInfo *info = Pa_GetDeviceInfo(idx);
		if (info == nullptr || info->maxOutputChannels <= 0)
			continue;
		SpeakerInfo entry;
		entry.index = idx;
		// `name` stays empty, matching the original (conversion was disabled).
		speakers.push_back(entry);
	}
	return speakers;
}
int AudioPlayer::Play(uint8_t * data, uint16_t num)
{
PaError err;
if (mStatus == RUNNING) {
err = Pa_WriteStream(mOutStream, data, num);
if (paNoError != err) {
return -1;
}
}
else {
return -1;
}
return 0;
}
// AACDecoder observer hook: plays one decoded buffer.
// NOTE(review): `size` (bytes) is ignored; one AAC frame yields 1024
// samples per channel, which is what the hard-coded frame count relies
// on — confirm before feeding other sources.
int AudioPlayer::OnAudioDecode(uint8_t * dat, uint16_t size)
{
	return this->Play(dat, 1024); // was followed by an unreachable `return 0;`
}

31
media/AudioPlayer.h Normal file
View File

@ -0,0 +1,31 @@
#pragma once
#include "stdint.h"
#include "../third/portaudio/portaudio.h"
#include "AACDecoder.h"
// Plays PCM through PortAudio in blocking-write mode; can be registered as
// an AACDecoder observer so decoded audio is played directly (OnAudioDecode).
class AudioPlayer :public AACDecoder::AACDecoderObserver{
public:
	// Reserved playback-progress callback interface; unused in this file.
	class AudioPlayerObserver{
	public:
		virtual int OnAudioPlay();
	};
	// One enumerated output device.
	typedef struct {
		int index;    // PortAudio device index
		wstring name; // friendly name (left empty by EnumSpeakers)
	}SpeakerInfo;
	enum PLAY_STATUS {
		RUNNING = 1,
		STOP = 2,
		PAUSE = 3,
		FAIL = 4,
	};
	AudioPlayer(int index);
	vector<SpeakerInfo> EnumSpeakers();
	int Play(uint8_t *data,uint16_t num);
	int OnAudioDecode(uint8_t *dat, uint16_t size);
private:
	PLAY_STATUS mStatus;
	PaStreamParameters mOutputParameters;
	PaStream *mOutStream; // blocking-mode output stream
};

431
media/CameraCapture.cpp Normal file
View File

@ -0,0 +1,431 @@
#include "CameraCapture.h"
#include<iostream>
#ifdef __MINGW32__
#pragma comment(lib, "strmiids")
#endif
// Release a COM interface pointer and null it out (no-op when already null).
#define ReleaseInterface(x) \
if ( nullptr != x ) \
{ \
x->Release( ); \
x = nullptr; \
}
// Application-defined message to notify app of filter graph events
#define WM_GRAPHNOTIFY WM_APP+100
// Default-construct with every COM interface null and no video opened.
// NOTE(review): mStatus and mBitDepth are not initialized here — confirm
// callers always go through Camera(wstring)/Open() before reading them.
Camera::Camera():
	mInitOK(false),
	mVideoHeight(0),
	mVideoWidth(0),
	mDevFilter(nullptr),
	mCaptureGB(nullptr),
	mGraphBuilder(nullptr),
	mMediaControl(nullptr),
	mMediaEvent(nullptr),
	mSampGrabber(nullptr),
	mIsVideoOpened(false),
	mDebug(false)
{
}
// Media subtype GUID negotiated when the capture graph was built.
GUID Camera::MediaType()
{
	return this->mMediaType;
}
// Constructs and immediately opens the named camera.
// mStatus ends up FAIL when Open() fails, STOP otherwise; previously the
// unconditional `mStatus = STOP;` overwrote the FAIL result.
Camera::Camera(wstring camera)
{
	mInitOK = false;
	mVideoHeight = 0;
	mVideoWidth = 0;
	mDevFilter = nullptr;
	mCaptureGB = nullptr;
	mGraphBuilder = nullptr;
	mMediaControl = nullptr;
	mMediaEvent = nullptr;
	mSampGrabber = nullptr;
	mIsVideoOpened = false;
	if (!this->Open(camera)) {
		mStatus = FAIL;
		return;
	}
	mStatus = STOP;
}
// Tears down the filter graph, then uninitializes COM.
// NOTE(review): no matching CoInitialize() is visible in this file —
// verify COM is initialized elsewhere on this thread.
Camera::~Camera()
{
	Close();
	CoUninitialize();
}
// Creates the filter graph, capture graph builder and the media control /
// event interfaces, and wires the builder to the graph.
// Returns the first failing HRESULT, or S_OK.
HRESULT Camera::InitializeEnv() {
	HRESULT hr;
	//Create the filter graph
	hr = CoCreateInstance(CLSID_FilterGraph, nullptr, CLSCTX_INPROC_SERVER,
		IID_IGraphBuilder, (LPVOID*)&mGraphBuilder);
	if (FAILED(hr))
		return hr;
	//Create the capture graph builder
	hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, nullptr, CLSCTX_INPROC_SERVER,
		IID_ICaptureGraphBuilder2, (LPVOID*)&mCaptureGB);
	if (FAILED(hr))
		return hr;
	//Obtain interfaces for media control and Video Window
	hr = mGraphBuilder->QueryInterface(IID_IMediaControl, (LPVOID*)&mMediaControl);
	if (FAILED(hr))
		return hr;
	hr = mGraphBuilder->QueryInterface(IID_IMediaEventEx, (LPVOID*)&mMediaEvent);
	if (FAILED(hr))
		return hr;
	// was: the return value of SetFiltergraph was discarded and a stale hr
	// was tested, so a failure here went unnoticed.
	hr = mCaptureGB->SetFiltergraph(mGraphBuilder);
	if (FAILED(hr))
		return hr;
	return hr;
}
// Enumerates all DirectShow video-input devices and returns their
// descriptions (falling back to friendly names). Empty vector on failure.
std::vector<std::wstring> Camera::EnumAllCamera(void) {
	std::vector<std::wstring> names;
	IEnumMoniker *pEnum = nullptr;
	// Create the System Device Enumerator.
	ICreateDevEnum *pDevEnum;
	HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, nullptr,
		CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pDevEnum));
	if (SUCCEEDED(hr))
	{
		// Create an enumerator for the category.
		hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnum, 0);
		if (hr == S_FALSE)
		{
			hr = VFW_E_NOT_FOUND; // The category is empty. Treat as an error.
		}
		pDevEnum->Release();
	}
	if (!SUCCEEDED(hr))
		return std::vector<std::wstring>();
	IMoniker *pMoniker = nullptr;
	while (pEnum->Next(1, &pMoniker, nullptr) == S_OK)
	{
		IPropertyBag *pPropBag;
		VARIANT var;
		VariantInit(&var);
		HRESULT hr = pMoniker->BindToStorage(0, 0, IID_PPV_ARGS(&pPropBag));
		if (FAILED(hr))
		{
			pMoniker->Release();
			continue;
		}
		// Get description or friendly name.
		hr = pPropBag->Read(L"Description", &var, 0);
		if (FAILED(hr))
		{
			hr = pPropBag->Read(L"FriendlyName", &var, 0);
		}
		if (SUCCEEDED(hr))
		{
			names.push_back(var.bstrVal);
			VariantClear(&var);
		}
		pPropBag->Release();
		pMoniker->Release();
	}
	pEnum->Release();
	return names;
}
// Binds the video-input device at position `deviceID` in the enumeration
// order to *pBaseFilter. The enumeration order matches EnumAllCamera().
HRESULT Camera::BindFilter(int deviceID, IBaseFilter **pBaseFilter) {
	ICreateDevEnum *pDevEnum;
	IEnumMoniker *pEnumMon;
	IMoniker *pMoniker;
	HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, nullptr, CLSCTX_INPROC_SERVER,
		IID_ICreateDevEnum, (LPVOID*)&pDevEnum);
	if (SUCCEEDED(hr))
	{
		hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumMon, 0);
		if (hr == S_FALSE)
		{
			pDevEnum->Release(); // was leaked on this early return
			return VFW_E_NOT_FOUND;
		}
		pEnumMon->Reset();
		ULONG cFetched;
		int index = 0;
		hr = pEnumMon->Next(1, &pMoniker, &cFetched);
		while (hr == S_OK && index <= deviceID) {
			IPropertyBag *pProBag;
			HRESULT hrBag = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (LPVOID*)&pProBag);
			if (SUCCEEDED(hrBag)) {
				if (index == deviceID) {
					pMoniker->BindToObject(0, 0, IID_IBaseFilter, (LPVOID*)pBaseFilter);
				}
				pProBag->Release(); // was leaked every iteration
			}
			pMoniker->Release();
			index++;
			hr = pEnumMon->Next(1, &pMoniker, &cFetched);
		}
		pEnumMon->Release();
		pDevEnum->Release(); // was leaked
	}
	return hr;
}
// Delegates registration to the grabber callback, which owns the list.
int Camera::SetObserver(CameraObserver *p) {
	return mSampleGrabberCB.SetObserver(p);
}
// Delegates deregistration to the grabber callback.
int Camera::RemoveObserver(CameraObserver * p) {
	return mSampleGrabberCB.RemoveObserver(p);
}
void Camera::SetDebug(bool isDebug) {
mDebug = isDebug;
}
// Adds an observer unless it is null or already registered.
int Camera::SampleGrabberCallback::SetObserver(CameraObserver *p) {
	if (nullptr == p)
		return -1;
	// RAII lock covers both the duplicate scan and the insertion.
	std::lock_guard<std::mutex> guard(mMux);
	for (CameraObserver *existing : mObserver) {
		if (existing == p)
			return 0; // already registered
	}
	mObserver.push_back(p);
	return 0;
}
// Removes an observer; like the original, the LAST matching entry is erased.
int Camera::SampleGrabberCallback::RemoveObserver(CameraObserver * p)
{
	std::lock_guard<std::mutex> guard(mMux);
	auto match = mObserver.end();
	for (auto it = mObserver.begin(); it != mObserver.end(); ++it) {
		if (*it == p)
			match = it;
	}
	if (match != mObserver.end())
		mObserver.erase(match);
	return 0;
}
// Builds a DirectShow capture graph (source -> sample grabber -> null
// renderer) for the camera whose name matches `camera_name`, then runs it.
// Returns false (after tearing the graph down) on any failure.
bool Camera::Open(std::wstring &camera_name)
{
	if (mIsVideoOpened)
		return true;
	HRESULT hr;
#define CHECK_HR(x) do{ hr = (x); if (FAILED(hr)){ Close(); return false;}}while(0)
	CHECK_HR(InitializeEnv());
	IBaseFilter *pSampleGrabberFilter , *dest_filter;
	std::vector<std::wstring> names = EnumAllCamera();
	if (names.empty())
	{
		Close();
		return false;
	}
	// Locate the requested device and remember ITS index.
	// was: `deviceID` stayed 0 no matter which name matched, so the first
	// enumerated camera was always opened.
	bool founded = false;
	int deviceID = 0;
	for (size_t i = 0; i < names.size(); i++) {
		if (names[i] == camera_name) {
			founded = true;
			deviceID = (int)i;
			break;
		}
	}
	if (!founded){
		return false;
	}
	// create grabber filter instance
	CHECK_HR(CoCreateInstance(CLSID_SampleGrabber, nullptr, CLSCTX_INPROC_SERVER,
		IID_IBaseFilter, (LPVOID*)&pSampleGrabberFilter));
	// bind source device
	CHECK_HR(BindFilter(deviceID, &mDevFilter));
	// add src filter
	CHECK_HR(mGraphBuilder->AddFilter(mDevFilter, L"Video Filter"));
	// add grabber filter and query interface
	CHECK_HR(mGraphBuilder->AddFilter(pSampleGrabberFilter, L"Sample Grabber"));
	CHECK_HR(pSampleGrabberFilter->QueryInterface(IID_ISampleGrabber, (LPVOID*)&mSampGrabber));
	// find the current bit depth
	HDC hdc = GetDC(nullptr);
	mBitDepth = GetDeviceCaps(hdc, BITSPIXEL);
	ReleaseDC(nullptr, hdc);
	// set the media type for grabber filter to match the desktop bit depth
	AM_MEDIA_TYPE mediaType;
	ZeroMemory(&mediaType, sizeof(AM_MEDIA_TYPE));
	mediaType.majortype = MEDIATYPE_Video;
	switch (mBitDepth)
	{
	case 8:
		mediaType.subtype = MEDIASUBTYPE_RGB8;
		break;
	case 16:
		mediaType.subtype = MEDIASUBTYPE_RGB555;
		break;
	case 24:
		mediaType.subtype = MEDIASUBTYPE_RGB24;
		break;
	case 32:
		mediaType.subtype = MEDIASUBTYPE_RGB32;
		break;
	default:
		Close();
		return false;
	}
	mediaType.formattype = FORMAT_VideoInfo;
	hr = mSampGrabber->SetMediaType(&mediaType);
	// null renderer: the samples are consumed by the grabber callback only
	CHECK_HR(CoCreateInstance(CLSID_NullRenderer, nullptr, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)(&dest_filter)));
	mGraphBuilder->AddFilter(dest_filter, L"nullptrRenderer");
	// connect source filter to grabber filter
	CHECK_HR(mCaptureGB->RenderStream(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
		mDevFilter, pSampleGrabberFilter, dest_filter));
	// get connected media type
	CHECK_HR(mSampGrabber->GetConnectedMediaType(&mediaType));
	VIDEOINFOHEADER * vih = (VIDEOINFOHEADER*)mediaType.pbFormat;
	mVideoWidth = vih->bmiHeader.biWidth;
	mVideoHeight = vih->bmiHeader.biHeight;
	mPixFmt = mediaType.subtype;
	mMediaType = mediaType.subtype;
	std::cout<<"guid media type is"<<mediaType.subtype.Data1<<" "<<
		mediaType.subtype.Data2<<" "<<
		mediaType.subtype.Data3<<" "<<
		mediaType.subtype.Data4<<" "<<
		mVideoWidth<<" "<<mVideoHeight;
	// configure grabber filter: continuous grabbing, no buffering,
	// BufferCB (callback method 1) delivers the frames
	CHECK_HR(mSampGrabber->SetOneShot(0));
	CHECK_HR(mSampGrabber->SetBufferSamples(0));
	CHECK_HR(mSampGrabber->SetCallback(&mSampleGrabberCB, 1));
	mSampleGrabberCB.mNewDataCallBack = mFrameCallBack;
	mMediaControl->Run();
	dest_filter->Release();
	pSampleGrabberFilter->Release();
	// release media-type resources returned by GetConnectedMediaType
	if (mediaType.cbFormat != 0)
	{
		CoTaskMemFree((PVOID)mediaType.pbFormat);
		mediaType.cbFormat = 0;
		mediaType.pbFormat = nullptr;
	}
	if (mediaType.pUnk != nullptr)
	{
		mediaType.pUnk->Release();
		mediaType.pUnk = nullptr;
	}
	mIsVideoOpened = TRUE;
	mStatus = RUNNING;
	return true;
}
// Stops the running graph, detaches event notification and releases every
// COM interface this object owns. Always returns true.
bool Camera::Close() {
	if (mMediaControl)
		mMediaControl->Stop();
	if (mMediaEvent)
		mMediaEvent->SetNotifyWindow(NULL, WM_GRAPHNOTIFY, 0);
	mIsVideoOpened = false;
	// The macro also nulls each pointer after Release().
	ReleaseInterface(mDevFilter);
	ReleaseInterface(mCaptureGB);
	ReleaseInterface(mGraphBuilder);
	ReleaseInterface(mMediaControl);
	ReleaseInterface(mMediaEvent);
	ReleaseInterface(mSampGrabber);
	return true;
}
// Stores the per-frame callback handed to the grabber callback object.
void Camera::SetCallBack(std::function<void(double, BYTE*, LONG)> f) {
	mFrameCallBack = f;
}
// Stub reference counting: the callback lives as a member of Camera, so
// COM lifetime management is intentionally disabled (constant return).
ULONG STDMETHODCALLTYPE Camera::SampleGrabberCallback::AddRef() {
	return 1;
}
// Stub release matching AddRef above: never deletes (member lifetime).
ULONG STDMETHODCALLTYPE Camera::SampleGrabberCallback::Release() {
	return 2;
}
// COM QueryInterface supporting IUnknown and ISampleGrabberCB.
HRESULT STDMETHODCALLTYPE Camera::SampleGrabberCallback::QueryInterface(REFIID riid, void** ppvObject) {
	if (nullptr == ppvObject) return E_POINTER;
	if (riid == __uuidof(IUnknown))
	{
		*ppvObject = static_cast<IUnknown*>(this);
		return S_OK;
	}
	if (riid == IID_ISampleGrabberCB)
	{
		*ppvObject = static_cast<ISampleGrabberCB*>(this);
		return S_OK;
	}
	// COM contract: an unsupported IID must null the out pointer and return
	// E_NOINTERFACE (the old E_NOTIMPL is reserved for unimplemented methods).
	*ppvObject = nullptr;
	return E_NOINTERFACE;
}
// Sample-based callback path; unused because Open() selects the BufferCB
// path via SetCallback(..., 1).
HRESULT STDMETHODCALLTYPE Camera::SampleGrabberCallback::SampleCB(double Time, IMediaSample *pSample) {
	return E_NOTIMPL;
}
// Grabber callback (graph streaming thread): fans one captured frame out
// to every registered observer.
HRESULT STDMETHODCALLTYPE Camera::SampleGrabberCallback::BufferCB(double Time, BYTE * pBuffer, long BufferLen)
{
#ifdef DEBUG_CAMERA
	static FILE *p = fopen("camera_test.yuv","wb+");
	fwrite(pBuffer,BufferLen,1,p);
	fflush(p);
#endif
	// Lock BEFORE touching the list: the old code read mObserver.size()
	// unlocked, racing with SetObserver/RemoveObserver on other threads.
	mMux.lock();
	for (auto itr = this->mObserver.begin(); itr != mObserver.end(); itr++) {
		CameraObserver *ob = *itr;
		ob->OnCameraData(pBuffer, BufferLen);
	}
	mMux.unlock();
	return S_OK;
}

95
media/CameraCapture.h Normal file
View File

@ -0,0 +1,95 @@
#pragma once
#include <vector>
#include <functional>
#include <dshow.h>
#include <windows.h>
#include "qedit.h"
#include <mutex>
#include <vector>
#include "guiddef.h"
using namespace std;
// DirectShow webcam capture: builds a source -> sample-grabber -> null
// renderer graph and pushes every grabbed frame to registered observers.
class Camera
{
public:
	enum CAP_STATUS {
		RUNNING = 1,
		STOP = 2,
		PAUSE = 3,
		FAIL = 4,
	};
	// Receives raw frame bytes on the graph's streaming thread.
	class CameraObserver {
	public:
		virtual int OnCameraData(uint8_t *dat, uint32_t size) { return 0; };
	};
	// ISampleGrabberCB implementation that fans frames out to observers.
	// Reference counting is stubbed because it lives as a Camera member.
	class SampleGrabberCallback : public ISampleGrabberCB
	{
	public:
		ULONG STDMETHODCALLTYPE AddRef();
		ULONG STDMETHODCALLTYPE Release();
		HRESULT STDMETHODCALLTYPE QueryInterface(REFIID riid, void** ppvObject);
		HRESULT STDMETHODCALLTYPE SampleCB(double Time, IMediaSample *pSample);
		HRESULT STDMETHODCALLTYPE BufferCB(double Time, BYTE *pBuffer, long BufferLen);
		std::function<void(double, BYTE *, LONG)> mNewDataCallBack;
		mutex mMux; // guards mObserver against concurrent (de)registration
		int SetObserver(CameraObserver *);
		int RemoveObserver(CameraObserver *p);
	private:
		vector<CameraObserver*> mObserver;
	};
	Camera(wstring camera);
	Camera(const Camera &) = delete;
	Camera& operator =(const Camera&) = delete;
	~Camera();
private:
	Camera();
	bool mInitOK;
	bool mIsVideoOpened;
	int mVideoWidth, mVideoHeight, mBitDepth; // negotiated frame geometry / desktop depth
	std::function<void(double, BYTE *, LONG)> mFrameCallBack;
	IGraphBuilder *mGraphBuilder;
	ICaptureGraphBuilder2 *mCaptureGB;
	IMediaControl *mMediaControl;
	IBaseFilter *mDevFilter;
	ISampleGrabber *mSampGrabber;
	IMediaEventEx *mMediaEvent;
	SampleGrabberCallback mSampleGrabberCB;
	HRESULT InitializeEnv();
	HRESULT BindFilter(int deviceID, IBaseFilter **pBaseFilter);
	GUID mMediaType; // connected media subtype GUID
	bool mDebug;
public:
	int SetObserver(CameraObserver *);
	int RemoveObserver(CameraObserver *p);
	CAP_STATUS mStatus;
	void SetDebug(bool);
	static std::vector<std::wstring> EnumAllCamera(void);
	GUID mPixFmt;
	bool Open(std::wstring &camera_name);
	bool Close(void);
	/*!
	* @param time : Starting time of the sample, in seconds.
	* @param buff : Pointer to a buffer that contains the sample data.
	* @param len : Length of the buffer pointed to by pBuffer, in bytes.
	*/
	void SetCallBack(std::function<void(double time, BYTE *buff, LONG len)>);
	int GetHeight() { return mVideoHeight; }
	int GetWidth() { return mVideoWidth; }
	int GetBitDepth() { return mBitDepth; }
	GUID MediaType();
};

684
media/DXGICapture.cpp Normal file
View File

@ -0,0 +1,684 @@
/*****************************************************************************
* DXGICapture.cpp
*
* Copyright (C) 2020 Gokhan Erdogdu <gokhan_erdogdu - at - yahoo - dot - com>
*
* DXGICapture is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* DXGICapture is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*
******************************************************************************/
#include "DXGICapture.h"
#include "DXGICaptureHelper.h"
#include <chrono>
#pragma comment(lib, "D3D11.lib")
#pragma comment(lib, "d2d1.lib")
#pragma comment(lib, "windowscodecs.lib")
#pragma comment(lib, "shcore.lib") // SetProcessDpiAwareness
// Driver types supported — tried in this order until D3D11CreateDevice succeeds.
const D3D_DRIVER_TYPE g_DriverTypes[] =
{
	D3D_DRIVER_TYPE_HARDWARE,
	D3D_DRIVER_TYPE_WARP,
	D3D_DRIVER_TYPE_REFERENCE,
};
const UINT g_NumDriverTypes = ARRAYSIZE(g_DriverTypes);
// Feature levels supported, highest preference first.
const D3D_FEATURE_LEVEL g_FeatureLevels[] =
{
	D3D_FEATURE_LEVEL_11_0,
	D3D_FEATURE_LEVEL_10_1,
	D3D_FEATURE_LEVEL_10_0,
	D3D_FEATURE_LEVEL_9_1
};
const UINT g_NumFeatureLevels = ARRAYSIZE(g_FeatureLevels);
// Serializes public methods on the instance's critical section (m_csLock).
#define AUTOLOCK() ATL::CComCritSecLock<ATL::CComAutoCriticalSection> auto_lock((ATL::CComAutoCriticalSection&)(m_csLock))
//
// class CDXGICapture
//
// Construct uninitialized: all renderer/mouse/output descriptors zeroed.
// Initialize() must be called before any capture.
CDXGICapture::CDXGICapture()
	: m_csLock()
	, m_bInitialized(FALSE)
	, m_lD3DFeatureLevel(D3D_FEATURE_LEVEL_INVALID)
{
	RtlZeroMemory(&m_rendererInfo, sizeof(m_rendererInfo));
	RtlZeroMemory(&m_mouseInfo, sizeof(m_mouseInfo));
	RtlZeroMemory(&m_tempMouseBuffer, sizeof(m_tempMouseBuffer));
	RtlZeroMemory(&m_desktopOutputDesc, sizeof(m_desktopOutputDesc));
}
// Releases every device resource and monitor record on destruction.
CDXGICapture::~CDXGICapture()
{
	Terminate();
}
// Walks every output (monitor) of the adapter owning `pDevice` and stores
// one heap-allocated tagDublicatorMonitorInfo per output in m_monitorInfos
// (freed later by freeMonitorInfos()).
HRESULT CDXGICapture::loadMonitorInfos(ID3D11Device *pDevice)
{
	CHECK_POINTER(pDevice);
	HRESULT hr = S_OK;
	CComPtr<ID3D11Device> ipDevice(pDevice);
	// Get DXGI device
	CComPtr<IDXGIDevice> ipDxgiDevice;
	hr = ipDevice->QueryInterface(IID_PPV_ARGS(&ipDxgiDevice));
	if (FAILED(hr)) {
		return hr;
	}
	// Get DXGI adapter
	CComPtr<IDXGIAdapter> ipDxgiAdapter;
	hr = ipDxgiDevice->GetParent(IID_PPV_ARGS(&ipDxgiAdapter));
	if (FAILED(hr)) {
		return hr;
	}
	ipDxgiDevice = nullptr;
	CComPtr<IDXGIOutput> ipDxgiOutput;
	// Loop ends when EnumOutputs fails (typically DXGI_ERROR_NOT_FOUND
	// past the last output) because the condition tests hr.
	for (UINT i = 0; SUCCEEDED(hr); ++i)
	{
		ipDxgiOutput = nullptr;
		hr = ipDxgiAdapter->EnumOutputs(i, &ipDxgiOutput);
		if ((nullptr != ipDxgiOutput) && (hr != DXGI_ERROR_NOT_FOUND))
		{
			DXGI_OUTPUT_DESC DesktopDesc;
			hr = ipDxgiOutput->GetDesc(&DesktopDesc);
			if (FAILED(hr)) {
				// NOTE(review): with hr failed this `continue` also ends the
				// for loop (condition is SUCCEEDED(hr)) — confirm intended.
				continue;
			}
			tagDublicatorMonitorInfo *pInfo;
			pInfo = new (std::nothrow) tagDublicatorMonitorInfo;
			if (nullptr == pInfo) {
				return E_OUTOFMEMORY;
			}
			hr = DXGICaptureHelper::ConvertDxgiOutputToMonitorInfo(&DesktopDesc, i, pInfo);
			if (FAILED(hr)) {
				delete pInfo;
				continue;
			}
			m_monitorInfos.push_back(pInfo);
		}
	}
	ipDxgiOutput = nullptr;
	ipDxgiAdapter = nullptr;
	return S_OK;
}
void CDXGICapture::freeMonitorInfos()
{
size_t nCount = m_monitorInfos.size();
if (nCount == 0) {
return;
}
DublicatorMonitorInfoVec::iterator it = m_monitorInfos.begin();
DublicatorMonitorInfoVec::iterator end = m_monitorInfos.end();
for (size_t i = 0; (i < nCount) && (it != end); i++, it++) {
tagDublicatorMonitorInfo *pInfo = *it;
if (nullptr != pInfo) {
delete pInfo;
}
}
m_monitorInfos.clear();
}
// Builds all per-config resources: the desktop-duplication interface, a CPU
// staging texture, and the D2D1/WIC pipeline used for rendering/rescaling.
// Members are only updated when everything succeeded (the CComPtr locals
// release partial work automatically on failure).
HRESULT CDXGICapture::createDeviceResource(
	const tagScreenCaptureFilterConfig *pConfig,
	const tagDublicatorMonitorInfo *pSelectedMonitorInfo
	)
{
	HRESULT hr = S_OK;
	CComPtr<IDXGIOutputDuplication> ipDxgiOutputDuplication;
	CComPtr<ID3D11Texture2D> ipCopyTexture2D;
	CComPtr<ID2D1Device> ipD2D1Device;
	CComPtr<ID2D1DeviceContext> ipD2D1DeviceContext;
	CComPtr<ID2D1Factory> ipD2D1Factory;
	CComPtr<IWICImagingFactory> ipWICImageFactory;
	CComPtr<IWICBitmap> ipWICOutputBitmap;
	CComPtr<ID2D1RenderTarget> ipD2D1RenderTarget;
	DXGI_OUTPUT_DESC dgixOutputDesc;
	tagRendererInfo rendererInfo;
	RtlZeroMemory(&dgixOutputDesc, sizeof(dgixOutputDesc));
	RtlZeroMemory(&rendererInfo, sizeof(rendererInfo));
	// copy configuration to renderer info
	rendererInfo.MonitorIdx = pConfig->MonitorIdx;
	rendererInfo.ShowCursor = pConfig->ShowCursor;
	rendererInfo.RotationMode = pConfig->RotationMode;
	rendererInfo.SizeMode = pConfig->SizeMode;
	rendererInfo.OutputSize = pConfig->OutputSize;
	// default
	rendererInfo.ScaleX = 1.0f;
	rendererInfo.ScaleY = 1.0f;
	do
	{
		// Get DXGI factory
		CComPtr<IDXGIDevice> ipDxgiDevice;
		hr = m_ipD3D11Device->QueryInterface(IID_PPV_ARGS(&ipDxgiDevice));
		CHECK_HR_BREAK(hr);
		CComPtr<IDXGIAdapter> ipDxgiAdapter;
		hr = ipDxgiDevice->GetParent(IID_PPV_ARGS(&ipDxgiAdapter));
		CHECK_HR_BREAK(hr);
		// Get output
		CComPtr<IDXGIOutput> ipDxgiOutput;
		hr = ipDxgiAdapter->EnumOutputs(rendererInfo.MonitorIdx, &ipDxgiOutput);
		CHECK_HR_BREAK(hr);
		// Get output description
		hr = ipDxgiOutput->GetDesc(&dgixOutputDesc);
		CHECK_HR_BREAK(hr);
		tagDublicatorMonitorInfo curMonInfo;
		hr = DXGICaptureHelper::ConvertDxgiOutputToMonitorInfo(&dgixOutputDesc, rendererInfo.MonitorIdx, &curMonInfo);
		CHECK_HR_BREAK(hr);
		if (!DXGICaptureHelper::IsEqualMonitorInfo(pSelectedMonitorInfo, &curMonInfo)) {
			hr = E_INVALIDARG; // Monitor settings have changed ???
			break;
		}
		// QI for Output 1
		CComPtr<IDXGIOutput1> ipDxgiOutput1;
		hr = ipDxgiOutput->QueryInterface(IID_PPV_ARGS(&ipDxgiOutput1));
		CHECK_HR_BREAK(hr);
		// Create desktop duplication
		hr = ipDxgiOutput1->DuplicateOutput(m_ipD3D11Device, &ipDxgiOutputDuplication);
		CHECK_HR_BREAK(hr);
		DXGI_OUTDUPL_DESC dxgiOutputDuplDesc;
		ipDxgiOutputDuplication->GetDesc(&dxgiOutputDuplDesc);
		hr = DXGICaptureHelper::CalculateRendererInfo(&dxgiOutputDuplDesc, &rendererInfo);
		CHECK_HR_BREAK(hr);
		// Create CPU access texture
		D3D11_TEXTURE2D_DESC desc;
		desc.Width = rendererInfo.SrcBounds.Width;
		desc.Height = rendererInfo.SrcBounds.Height;
		desc.Format = rendererInfo.SrcFormat;
		desc.ArraySize = 1;
		desc.BindFlags = 0;
		desc.MiscFlags = 0;
		desc.SampleDesc.Count = 1;
		desc.SampleDesc.Quality = 0;
		desc.MipLevels = 1;
		desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE;
		desc.Usage = D3D11_USAGE_STAGING;
		hr = m_ipD3D11Device->CreateTexture2D(&desc, NULL, &ipCopyTexture2D);
		CHECK_HR_BREAK(hr);
		if (nullptr == ipCopyTexture2D)
		{
			hr = E_OUTOFMEMORY;
			break;
		}
#pragma region <For_2D_operations>
		// Create D2D1 device
		UINT uiFlags = m_ipD3D11Device->GetCreationFlags();
		D2D1_CREATION_PROPERTIES d2d1Props = D2D1::CreationProperties
			(
				(uiFlags & D3D11_CREATE_DEVICE_SINGLETHREADED)
				? D2D1_THREADING_MODE_SINGLE_THREADED
				: D2D1_THREADING_MODE_MULTI_THREADED,
				D2D1_DEBUG_LEVEL_NONE,
				(uiFlags & D3D11_CREATE_DEVICE_SINGLETHREADED)
				? D2D1_DEVICE_CONTEXT_OPTIONS_NONE
				: D2D1_DEVICE_CONTEXT_OPTIONS_ENABLE_MULTITHREADED_OPTIMIZATIONS
			);
		hr = D2D1CreateDevice(ipDxgiDevice, d2d1Props, &ipD2D1Device);
		CHECK_HR_BREAK(hr);
		// Get D2D1 factory
		ipD2D1Device->GetFactory(&ipD2D1Factory);
		if (nullptr == ipD2D1Factory)
		{
			hr = D2DERR_INVALID_CALL;
			break;
		}
		//create WIC factory
		hr = CoCreateInstance(
			CLSID_WICImagingFactory,
			NULL,
			CLSCTX_INPROC_SERVER,
			IID_IWICImagingFactory,
			reinterpret_cast<void **>(&ipWICImageFactory)
		);
		CHECK_HR_BREAK(hr);
		// create D2D1 target bitmap for render
		hr = ipWICImageFactory->CreateBitmap(
			(UINT)rendererInfo.OutputSize.Width,
			(UINT)rendererInfo.OutputSize.Height,
			GUID_WICPixelFormat32bppPBGRA,
			WICBitmapCacheOnDemand,
			&ipWICOutputBitmap);
		CHECK_HR_BREAK(hr);
		if (nullptr == ipWICOutputBitmap)
		{
			hr = E_OUTOFMEMORY;
			break;
		}
		// create a D2D1 render target (for D2D1 drawing)
		D2D1_RENDER_TARGET_PROPERTIES d2d1RenderTargetProp = D2D1::RenderTargetProperties
			(
				D2D1_RENDER_TARGET_TYPE_DEFAULT,
				D2D1::PixelFormat(DXGI_FORMAT_B8G8R8A8_UNORM, D2D1_ALPHA_MODE_PREMULTIPLIED),
				0.0f, // default dpi
				0.0f, // default dpi
				D2D1_RENDER_TARGET_USAGE_GDI_COMPATIBLE
			);
		hr = ipD2D1Factory->CreateWicBitmapRenderTarget(
			ipWICOutputBitmap,
			d2d1RenderTargetProp,
			&ipD2D1RenderTarget
		);
		CHECK_HR_BREAK(hr);
#pragma endregion </For_2D_operations>
	} while (false);
	if (SUCCEEDED(hr))
	{
		// copy output parameters
		memcpy_s((void*)&m_rendererInfo, sizeof(m_rendererInfo), (const void*)&rendererInfo, sizeof(m_rendererInfo));
		// set parameters
		m_desktopOutputDesc = dgixOutputDesc;
		m_ipDxgiOutputDuplication = ipDxgiOutputDuplication;
		m_ipCopyTexture2D = ipCopyTexture2D;
		m_ipD2D1Device = ipD2D1Device;
		m_ipD2D1Factory = ipD2D1Factory;
		m_ipWICImageFactory = ipWICImageFactory;
		m_ipWICOutputBitmap = ipWICOutputBitmap;
		m_ipD2D1RenderTarget = ipD2D1RenderTarget;
	}
	// was: `return S_OK;` unconditionally, which hid every failure from the
	// caller (SetConfig checks FAILED(hr) on this function's result).
	return hr;
}
// Releases every per-config resource created by createDeviceResource() and
// zeroes the associated state, returning the object to "configured-less".
void CDXGICapture::terminateDeviceResource()
{
	m_ipDxgiOutputDuplication = nullptr;
	m_ipCopyTexture2D = nullptr;
	m_ipD2D1Device = nullptr;
	m_ipD2D1Factory = nullptr;
	m_ipWICImageFactory = nullptr;
	m_ipWICOutputBitmap = nullptr;
	m_ipD2D1RenderTarget = nullptr;
	// clear config parameters
	RtlZeroMemory(&m_rendererInfo, sizeof(m_rendererInfo));
	// clear mouse information parameters (shape buffer is heap-allocated)
	if (m_mouseInfo.PtrShapeBuffer != nullptr) {
		delete[] m_mouseInfo.PtrShapeBuffer;
		m_mouseInfo.PtrShapeBuffer = nullptr;
	}
	RtlZeroMemory(&m_mouseInfo, sizeof(m_mouseInfo));
	// clear temp mouse buffer
	if (m_tempMouseBuffer.Buffer != nullptr) {
		delete[] m_tempMouseBuffer.Buffer;
		m_tempMouseBuffer.Buffer = nullptr;
	}
	RtlZeroMemory(&m_tempMouseBuffer, sizeof(m_tempMouseBuffer));
	// clear desktop output desc
	RtlZeroMemory(&m_desktopOutputDesc, sizeof(m_desktopOutputDesc));
}
// One-time setup: creates a D3D11 device (trying driver types in priority
// order) and loads the monitor list. Idempotent guard against re-init.
HRESULT CDXGICapture::Initialize()
{
	AUTOLOCK();
	if (m_bInitialized) {
		return HRESULT_FROM_WIN32(ERROR_ALREADY_INITIALIZED); // already initialized
	}
	HRESULT hr = S_OK;
	D3D_FEATURE_LEVEL lFeatureLevel;
	CComPtr<ID3D11Device> ipDevice;
	CComPtr<ID3D11DeviceContext> ipDeviceContext;
	// required for monitor dpi problem (???)
	SetProcessDpiAwareness(PROCESS_PER_MONITOR_DPI_AWARE);
	// Create device: hardware first, then WARP, then reference rasterizer.
	for (UINT i = 0; i < g_NumDriverTypes; ++i)
	{
		hr = D3D11CreateDevice(
			nullptr,
			g_DriverTypes[i],
			nullptr,
			/* D3D11_CREATE_DEVICE_BGRA_SUPPORT
			* This flag adds support for surfaces with a different
			* color channel ordering than the API default.
			* You need it for compatibility with Direct2D. */
			D3D11_CREATE_DEVICE_BGRA_SUPPORT,
			g_FeatureLevels,
			g_NumFeatureLevels,
			D3D11_SDK_VERSION,
			&ipDevice,
			&lFeatureLevel,
			&ipDeviceContext);
		if (SUCCEEDED(hr))
		{
			// Device creation success, no need to loop anymore
			break;
		}
		ipDevice = nullptr;
		ipDeviceContext = nullptr;
	}
	if (FAILED(hr)) {
		return hr;
	}
	if (nullptr == ipDevice) {
		return E_UNEXPECTED;
	}
	// load all monitor informations
	hr = loadMonitorInfos(ipDevice);
	if (FAILED(hr)) {
		return hr;
	}
	// set common fields
	m_lD3DFeatureLevel = lFeatureLevel;
	m_ipD3D11Device = ipDevice;
	m_ipD3D11DeviceContext = ipDeviceContext;
	m_bInitialized = TRUE;
	return S_OK;
}
// Full teardown; calling it twice is harmless (S_FALSE the second time).
HRESULT CDXGICapture::Terminate()
{
	AUTOLOCK();
	if (!m_bInitialized)
		return S_FALSE; // already terminated
	terminateDeviceResource();
	m_ipD3D11Device = nullptr;
	m_ipD3D11DeviceContext = nullptr;
	m_lD3DFeatureLevel = D3D_FEATURE_LEVEL_INVALID;
	freeMonitorInfos();
	m_bInitialized = FALSE;
	return S_OK;
}
// Applies a capture configuration, rebuilding the device resources.
// Validation now happens BEFORE the old resources are torn down, so an
// invalid config no longer destroys a previously working setup.
HRESULT CDXGICapture::SetConfig(const tagScreenCaptureFilterConfig *pConfig)
{
	AUTOLOCK();
	if (!m_bInitialized) {
		return D2DERR_NOT_INITIALIZED;
	}
	if (nullptr == pConfig) {
		return E_INVALIDARG;
	}
	const tagDublicatorMonitorInfo *pSelectedMonitorInfo =
		this->FindDublicatorMonitorInfo(pConfig->MonitorIdx);
	if (nullptr == pSelectedMonitorInfo) {
		return E_INVALIDARG;
	}
	// terminate old resources only once the new config is known to be valid
	this->terminateDeviceResource();
	return this->createDeviceResource(pConfig, pSelectedMonitorInfo);
}
// Reference-taking convenience overload; delegates to the pointer version.
HRESULT CDXGICapture::SetConfig(const tagScreenCaptureFilterConfig &config)
{
	return SetConfig(&config);
}
// Returns TRUE between a successful Initialize() and the matching Terminate().
BOOL CDXGICapture::IsInitialized() const
{
	AUTOLOCK();
	return m_bInitialized;
}
// Returns the feature level of the D3D11 device created by Initialize(),
// or D3D_FEATURE_LEVEL_INVALID when not initialized / already terminated.
D3D_FEATURE_LEVEL CDXGICapture::GetD3DFeatureLevel() const
{
	AUTOLOCK();
	return m_lD3DFeatureLevel;
}
// Returns the number of monitors enumerated by Initialize().
int CDXGICapture::GetDublicatorMonitorInfoCount() const
{
	AUTOLOCK();
	// static_cast instead of a C-style cast; the size_t -> int narrowing is
	// safe here because monitor counts are tiny.
	return static_cast<int>(m_monitorInfos.size());
}
// Returns the monitor info at the given position in the enumeration order,
// or nullptr when the index is out of range. The pointer stays valid until
// Terminate() frees the list.
const tagDublicatorMonitorInfo* CDXGICapture::GetDublicatorMonitorInfo(int index) const
{
	AUTOLOCK();
	// Validate the index before touching the vector.
	const int count = static_cast<int>(m_monitorInfos.size());
	if (index < 0 || index >= count) {
		return nullptr;
	}
	return m_monitorInfos[index];
} // GetDublicatorMonitorInfo
// Looks up a monitor by its Idx field (the identifier used in the capture
// config), as opposed to its position in the list. Returns nullptr when no
// monitor carries that index.
const tagDublicatorMonitorInfo* CDXGICapture::FindDublicatorMonitorInfo(int monitorIdx) const
{
	AUTOLOCK();
	// Linear scan is fine: the list holds one entry per attached monitor.
	for (tagDublicatorMonitorInfo *pInfo : m_monitorInfos) {
		if (pInfo->Idx == monitorIdx) {
			return pInfo;
		}
	}
	return nullptr;
} // FindDublicatorMonitorInfo
//
// CaptureToFile
//
// Captures one desktop frame, optionally composites the mouse cursor,
// renders it through D2D1 (rotation/scaling per m_rendererInfo) and saves
// the result to lpcwOutputFileName (format chosen by file extension).
//
// pRetIsTimeout      - optional; set TRUE (with S_FALSE return) when no
//                      desktop update arrived within 1 second.
// pRetRenderDuration - optional; receives the render time in milliseconds,
//                      excluding the file save.
HRESULT CDXGICapture::CaptureToFile(_In_ LPCWSTR lpcwOutputFileName, _Out_opt_ BOOL *pRetIsTimeout /*= NULL*/, _Out_opt_ UINT *pRetRenderDuration /*= NULL*/)
{
	AUTOLOCK();
	// Reset the optional output parameters up front.
	if (nullptr != pRetIsTimeout) {
		*pRetIsTimeout = FALSE;
	}
	if (nullptr != pRetRenderDuration) {
		*pRetRenderDuration = 0xFFFFFFFF;
	}
	if (!m_bInitialized) {
		return D2DERR_NOT_INITIALIZED;
	}
	// SetConfig() must have created the duplication interface already.
	CHECK_POINTER_EX(m_ipDxgiOutputDuplication, E_INVALIDARG);
	CHECK_POINTER_EX(lpcwOutputFileName, E_INVALIDARG);
	HRESULT hr = S_OK;
	hr = DXGICaptureHelper::IsRendererInfoValid(&m_rendererInfo);
	if (FAILED(hr)) {
		return hr;
	}
	// Reject unsupported file extensions before acquiring a frame.
	hr = DXGICaptureHelper::GetContainerFormatByFileName(lpcwOutputFileName);
	if (FAILED(hr)) {
		return hr;
	}
	DXGI_OUTDUPL_FRAME_INFO FrameInfo;
	CComPtr<IDXGIResource> ipDesktopResource;
	CComPtr<ID3D11Texture2D> ipAcquiredDesktopImage;
	CComPtr<ID2D1Bitmap> ipD2D1SourceBitmap;
	std::chrono::system_clock::time_point startTick;
	if (nullptr != pRetRenderDuration) {
		startTick = std::chrono::system_clock::now();
	}
	// Get new frame; wait up to 1 second for a desktop change.
	hr = m_ipDxgiOutputDuplication->AcquireNextFrame(1000, &FrameInfo, &ipDesktopResource);
	if (hr == DXGI_ERROR_WAIT_TIMEOUT)
	{
		// No update within the timeout: report as non-fatal timeout.
		if (nullptr != pRetIsTimeout) {
			*pRetIsTimeout = TRUE;
		}
		return S_FALSE;
	}
	else if (FAILED(hr))
	{
		return hr;
	}
	// QI for ID3D11Texture2D
	hr = ipDesktopResource->QueryInterface(IID_PPV_ARGS(&ipAcquiredDesktopImage));
	ipDesktopResource = nullptr;
	if (FAILED(hr))
	{
		// BUGFIX: release the acquired frame before bailing out; otherwise
		// the duplication interface keeps the frame and every subsequent
		// AcquireNextFrame call fails.
		m_ipDxgiOutputDuplication->ReleaseFrame();
		return hr;
	}
	if (nullptr == ipAcquiredDesktopImage)
	{
		// release frame
		m_ipDxgiOutputDuplication->ReleaseFrame();
		return E_OUTOFMEMORY;
	}
	// Copy the acquired desktop image into our CPU-mappable staging texture.
	m_ipD3D11DeviceContext->CopyResource(m_ipCopyTexture2D, ipAcquiredDesktopImage);
	if (m_rendererInfo.ShowCursor) {
		// Update cached cursor state/shape from this frame ...
		hr = DXGICaptureHelper::GetMouse(m_ipDxgiOutputDuplication, &m_mouseInfo, &FrameInfo, (UINT)m_rendererInfo.MonitorIdx, m_desktopOutputDesc.DesktopCoordinates.left, m_desktopOutputDesc.DesktopCoordinates.top);
		if (SUCCEEDED(hr) && m_mouseInfo.Visible) {
			// ... and alpha-blend it into the staging texture.
			hr = DXGICaptureHelper::DrawMouse(&m_mouseInfo, &m_desktopOutputDesc, &m_tempMouseBuffer, m_ipCopyTexture2D);
		}
		if (FAILED(hr)) {
			// release frame
			m_ipDxgiOutputDuplication->ReleaseFrame();
			return hr;
		}
	}
	// Release the frame as soon as the copy is done.
	hr = m_ipDxgiOutputDuplication->ReleaseFrame();
	CHECK_HR_RETURN(hr);
	// Create a D2D1 bitmap from the staging texture.
	hr = DXGICaptureHelper::CreateBitmap(m_ipD2D1RenderTarget, m_ipCopyTexture2D, &ipD2D1SourceBitmap);
	CHECK_HR_RETURN(hr);
	D2D1_RECT_F rcSource = D2D1::RectF(
		(FLOAT)m_rendererInfo.SrcBounds.X,
		(FLOAT)m_rendererInfo.SrcBounds.Y,
		(FLOAT)(m_rendererInfo.SrcBounds.X + m_rendererInfo.SrcBounds.Width),
		(FLOAT)(m_rendererInfo.SrcBounds.Y + m_rendererInfo.SrcBounds.Height));
	D2D1_RECT_F rcTarget = D2D1::RectF(
		(FLOAT)m_rendererInfo.DstBounds.X,
		(FLOAT)m_rendererInfo.DstBounds.Y,
		(FLOAT)(m_rendererInfo.DstBounds.X + m_rendererInfo.DstBounds.Width),
		(FLOAT)(m_rendererInfo.DstBounds.Y + m_rendererInfo.DstBounds.Height));
	D2D1_POINT_2F ptTransformCenter = D2D1::Point2F(m_rendererInfo.OutputSize.Width / 2.0f, m_rendererInfo.OutputSize.Height / 2.0f);
	// Apply the rotation transform to the render target.
	D2D1::Matrix3x2F rotate = D2D1::Matrix3x2F::Rotation(
		m_rendererInfo.RotationDegrees,
		ptTransformCenter
		);
	D2D1::Matrix3x2F scale = D2D1::Matrix3x2F::Scale(
		D2D1::SizeF(m_rendererInfo.ScaleX, m_rendererInfo.ScaleY),
		ptTransformCenter
		);
	// Priority: first rotate, after scale...
	m_ipD2D1RenderTarget->SetTransform(rotate * scale);
	m_ipD2D1RenderTarget->BeginDraw();
	// clear background color
	m_ipD2D1RenderTarget->Clear(D2D1::ColorF(D2D1::ColorF::Black, 1.0f));
	m_ipD2D1RenderTarget->DrawBitmap(ipD2D1SourceBitmap, rcTarget, 1.0f,
		D2D1_BITMAP_INTERPOLATION_MODE_LINEAR, rcSource);
	// Reset transform
	//m_ipD2D1RenderTarget->SetTransform(D2D1::Matrix3x2F::Identity());
	// Logo draw sample
	//m_ipD2D1RenderTarget->DrawBitmap(ipBmpLogo, D2D1::RectF(0, 0, 2 * 200, 2 * 46));
	hr = m_ipD2D1RenderTarget->EndDraw();
	if (FAILED(hr)) {
		return hr;
	}
	// Report the render time, excluding the file save below.
	if (nullptr != pRetRenderDuration) {
		// system_clock ticks are 100 ns on Windows; /10000 converts to ms.
		*pRetRenderDuration = (UINT)((std::chrono::system_clock::now() - startTick).count() / 10000);
	}
	hr = DXGICaptureHelper::SaveImageToFile(m_ipWICImageFactory,
		m_ipWICOutputBitmap, lpcwOutputFileName);
	if (FAILED(hr)) {
		return hr;
	}
	return S_OK;
} // CaptureToFile
#undef AUTOLOCK

92
media/DXGICapture.h Normal file
View File

@ -0,0 +1,92 @@
/*****************************************************************************
* DXGICapture.h
*
* Copyright (C) 2020 Gokhan Erdogdu <gokhan_erdogdu - at - yahoo - dot - com>
*
* DXGICapture is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* DXGICapture is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*
******************************************************************************/
#pragma once
#ifndef __DXGICAPTURE_H__
#define __DXGICAPTURE_H__
#include <atlbase.h>
#include <ShellScalingAPI.h>
#include <dxgi1_2.h>
#include <d3d11.h>
#include <d2d1.h>
#include <d2d1_1.h> // for ID2D1Effect
#include <wincodec.h>
#include "DXGICaptureTypes.h"
#define D3D_FEATURE_LEVEL_INVALID ((D3D_FEATURE_LEVEL)0x0)
// Desktop-duplication screen grabber.
// Lifecycle: Initialize() -> SetConfig() -> CaptureToFile()* -> Terminate().
// All public methods serialize on an internal critical section, so a single
// instance may be driven from multiple threads.
class CDXGICapture
{
private:
	ATL::CComAutoCriticalSection m_csLock;            // guards all state below (via AUTOLOCK)
	BOOL m_bInitialized;                              // TRUE between Initialize() and Terminate()
	DublicatorMonitorInfoVec m_monitorInfos;          // owned; one entry per attached monitor
	tagRendererInfo m_rendererInfo;                   // active render settings (set by SetConfig)
	tagMouseInfo m_mouseInfo;                         // cached cursor state/shape across frames
	tagFrameBufferInfo m_tempMouseBuffer;             // scratch buffer for cursor compositing
	DXGI_OUTPUT_DESC m_desktopOutputDesc;             // description of the captured output
	D3D_FEATURE_LEVEL m_lD3DFeatureLevel;             // level of the created device, or D3D_FEATURE_LEVEL_INVALID
	// Device-lifetime objects (created in Initialize).
	CComPtr<ID3D11Device> m_ipD3D11Device;
	CComPtr<ID3D11DeviceContext> m_ipD3D11DeviceContext;
	// Config-lifetime objects (created in createDeviceResource).
	CComPtr<IDXGIOutputDuplication> m_ipDxgiOutputDuplication;
	CComPtr<ID3D11Texture2D> m_ipCopyTexture2D;       // CPU-accessible staging copy of the frame
	CComPtr<ID2D1Device> m_ipD2D1Device;
	CComPtr<ID2D1Factory> m_ipD2D1Factory;
	CComPtr<IWICImagingFactory> m_ipWICImageFactory;
	CComPtr<IWICBitmap> m_ipWICOutputBitmap;          // render destination; source for file save
	CComPtr<ID2D1RenderTarget> m_ipD2D1RenderTarget;  // targets m_ipWICOutputBitmap
public:
	CDXGICapture();
	~CDXGICapture();
private:
	// Enumerates DXGI outputs of pDevice into m_monitorInfos.
	HRESULT loadMonitorInfos(ID3D11Device *pDevice);
	// Frees and clears m_monitorInfos.
	void freeMonitorInfos();
	// Builds the config-lifetime objects for the selected monitor.
	HRESULT createDeviceResource(
		const tagScreenCaptureFilterConfig *pConfig,
		const tagDublicatorMonitorInfo *pSelectedMonitorInfo);
	// Releases the config-lifetime objects (safe to call repeatedly).
	void terminateDeviceResource();
public:
	HRESULT Initialize();
	HRESULT Terminate();
	HRESULT SetConfig(const tagScreenCaptureFilterConfig *pConfig);
	HRESULT SetConfig(const tagScreenCaptureFilterConfig &config);
	BOOL IsInitialized() const;
	D3D_FEATURE_LEVEL GetD3DFeatureLevel() const;
	int GetDublicatorMonitorInfoCount() const;
	// Lookup by list position vs. by monitor Idx field; both return pointers
	// owned by this object (valid until Terminate()).
	const tagDublicatorMonitorInfo* GetDublicatorMonitorInfo(int index) const;
	const tagDublicatorMonitorInfo* FindDublicatorMonitorInfo(int monitorIdx) const;
	// Grabs one frame and writes it to the given file (extension selects the
	// container format). See the .cpp for the out-parameter semantics.
	HRESULT CaptureToFile(_In_ LPCWSTR lpcwOutputFileName, _Out_opt_ BOOL *pRetIsTimeout = NULL, _Out_opt_ UINT *pRetRenderDuration = NULL);
};
#endif // __DXGICAPTURE_H__

960
media/DXGICaptureHelper.h Normal file
View File

@ -0,0 +1,960 @@
/*****************************************************************************
* DXGICaptureHelper.h
*
* Copyright (C) 2020 Gokhan Erdogdu <gokhan_erdogdu - at - yahoo - dot - com>
*
* DXGICapture is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* DXGICapture is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*
******************************************************************************/
#pragma once
#ifndef __DXGICAPTUREHELPER_H__
#define __DXGICAPTUREHELPER_H__
#include <atlbase.h>
#include <Shlwapi.h>
#include <dxgi1_2.h>
#include <d3d11.h>
#include <d2d1.h>
#include <wincodec.h>
#include "DXGICaptureTypes.h"
#pragma comment (lib, "Shlwapi.lib")
//
// class DXGICaptureHelper
//
class DXGICaptureHelper
{
public:
// Fills a tagDublicatorMonitorInfo from a DXGI output description:
// rotation in degrees, desktop-coordinate bounds and a human-readable
// display name. The output struct is always zeroed first.
static
COM_DECLSPEC_NOTHROW
inline
HRESULT
ConvertDxgiOutputToMonitorInfo(
	_In_ const DXGI_OUTPUT_DESC *pDxgiOutput,
	_In_ int monitorIdx,
	_Out_ tagDublicatorMonitorInfo *pOutVal
	)
{
	CHECK_POINTER(pOutVal);
	// Start from a clean record so all fields are defined.
	RtlZeroMemory(pOutVal, sizeof(tagDublicatorMonitorInfo));
	CHECK_POINTER_EX(pDxgiOutput, E_INVALIDARG);

	// Map the DXGI rotation enum onto plain degrees.
	switch (pDxgiOutput->Rotation)
	{
	case DXGI_MODE_ROTATION_ROTATE90:
		pOutVal->RotationDegrees = 90;
		break;
	case DXGI_MODE_ROTATION_ROTATE180:
		pOutVal->RotationDegrees = 180;
		break;
	case DXGI_MODE_ROTATION_ROTATE270:
		pOutVal->RotationDegrees = 270;
		break;
	default: // DXGI_MODE_ROTATION_UNSPECIFIED / DXGI_MODE_ROTATION_IDENTITY
		pOutVal->RotationDegrees = 0;
		break;
	}

	pOutVal->Idx = monitorIdx;
	// DesktopCoordinates is a RECT in virtual-desktop space.
	pOutVal->Bounds.X = pDxgiOutput->DesktopCoordinates.left;
	pOutVal->Bounds.Y = pDxgiOutput->DesktopCoordinates.top;
	pOutVal->Bounds.Width = pDxgiOutput->DesktopCoordinates.right - pDxgiOutput->DesktopCoordinates.left;
	pOutVal->Bounds.Height = pDxgiOutput->DesktopCoordinates.bottom - pDxgiOutput->DesktopCoordinates.top;

	// Build a 1-based, user-facing name, e.g. "Display 1: 1920x1080 @ 0,0".
	wsprintfW(pOutVal->DisplayName, L"Display %d: %ldx%ld @ %ld,%ld"
		, monitorIdx + 1
		, pOutVal->Bounds.Width, pOutVal->Bounds.Height
		, pOutVal->Bounds.X, pOutVal->Bounds.Y);

	return S_OK;
} // ConvertDxgiOutputToMonitorInfo
// Structural comparison of two monitor-info records.
// Two null pointers compare equal; null vs. non-null compares unequal.
static
COM_DECLSPEC_NOTHROW
inline
BOOL
IsEqualMonitorInfo(
	_In_ const tagDublicatorMonitorInfo *p1,
	_In_ const tagDublicatorMonitorInfo *p2
	)
{
	// If either side is null, they are equal only when both are null.
	if ((nullptr == p1) || (nullptr == p2)) {
		return (p1 == p2);
	}
	// Byte-wise compare; valid because the struct is filled via RtlZeroMemory
	// first, so padding bytes are deterministic.
	return 0 == memcmp((const void*)p1, (const void*)p2, sizeof(tagDublicatorMonitorInfo));
} // IsEqualMonitorInfo
// Validates a renderer configuration before capture:
//  - only 32-bit BGRA source frames are supported,
//  - non-"Normal" size modes need an explicit positive output size,
//  - source and destination rectangles must have positive area.
// Returns a distinct D2DERR_* code per violated rule.
static
COM_DECLSPEC_NOTHROW
inline
HRESULT
IsRendererInfoValid(
	_In_ const tagRendererInfo *pRendererInfo
	)
{
	CHECK_POINTER_EX(pRendererInfo, E_INVALIDARG);

	if (DXGI_FORMAT_B8G8R8A8_UNORM != pRendererInfo->SrcFormat) {
		return D2DERR_UNSUPPORTED_PIXEL_FORMAT;
	}
	if ((pRendererInfo->SizeMode != tagFrameSizeMode_Normal) &&
		((pRendererInfo->OutputSize.Width <= 0) || (pRendererInfo->OutputSize.Height <= 0)))
	{
		return D2DERR_BITMAP_BOUND_AS_TARGET;
	}
	if ((pRendererInfo->SrcBounds.Width <= 0) || (pRendererInfo->SrcBounds.Height <= 0) ||
		(pRendererInfo->DstBounds.Width <= 0) || (pRendererInfo->DstBounds.Height <= 0))
	{
		return D2DERR_ORIGINAL_TARGET_NOT_BOUND;
	}
	return S_OK;
}
// Derives the full render geometry (source bounds, destination bounds,
// rotation and scale) from the duplication description plus the caller's
// SizeMode/RotationMode, writing the results back into pRendererInfo.
// The order of the sections below matters: source bounds first, then forced
// rotation, then the per-SizeMode destination/scale computation.
static
COM_DECLSPEC_NOTHROW
inline
HRESULT
CalculateRendererInfo(
	_In_ const DXGI_OUTDUPL_DESC *pDxgiOutputDuplDesc,
	_Inout_ tagRendererInfo *pRendererInfo
	)
{
	CHECK_POINTER_EX(pDxgiOutputDuplDesc, E_INVALIDARG);
	CHECK_POINTER_EX(pRendererInfo, E_INVALIDARG);
	pRendererInfo->SrcFormat = pDxgiOutputDuplDesc->ModeDesc.Format;
	// get rotate state
	// For 90/270-degree outputs the source width/height are swapped relative
	// to the mode description.
	switch (pDxgiOutputDuplDesc->Rotation)
	{
	case DXGI_MODE_ROTATION_ROTATE90:
		pRendererInfo->RotationDegrees = 90.0f;
		pRendererInfo->SrcBounds.X = 0;
		pRendererInfo->SrcBounds.Y = 0;
		pRendererInfo->SrcBounds.Width = pDxgiOutputDuplDesc->ModeDesc.Height;
		pRendererInfo->SrcBounds.Height = pDxgiOutputDuplDesc->ModeDesc.Width;
		break;
	case DXGI_MODE_ROTATION_ROTATE180:
		// NOTE(review): written as 180.0 (double) while the other arms use
		// float literals; harmless after conversion but inconsistent.
		pRendererInfo->RotationDegrees = 180.0;
		pRendererInfo->SrcBounds.X = 0;
		pRendererInfo->SrcBounds.Y = 0;
		pRendererInfo->SrcBounds.Width = pDxgiOutputDuplDesc->ModeDesc.Width;
		pRendererInfo->SrcBounds.Height = pDxgiOutputDuplDesc->ModeDesc.Height;
		break;
	case DXGI_MODE_ROTATION_ROTATE270:
		pRendererInfo->RotationDegrees = 270.0f;
		pRendererInfo->SrcBounds.X = 0;
		pRendererInfo->SrcBounds.Y = 0;
		pRendererInfo->SrcBounds.Width = pDxgiOutputDuplDesc->ModeDesc.Height;
		pRendererInfo->SrcBounds.Height = pDxgiOutputDuplDesc->ModeDesc.Width;
		break;
	default: // OR DXGI_MODE_ROTATION_IDENTITY:
		pRendererInfo->RotationDegrees = 0.0f;
		pRendererInfo->SrcBounds.X = 0;
		pRendererInfo->SrcBounds.Y = 0;
		pRendererInfo->SrcBounds.Width = pDxgiOutputDuplDesc->ModeDesc.Width;
		pRendererInfo->SrcBounds.Height = pDxgiOutputDuplDesc->ModeDesc.Height;
		break;
	}
	// force rotate
	// An explicit RotationMode overrides the rotation detected above.
	switch (pRendererInfo->RotationMode)
	{
	case tagFrameRotationMode::tagFrameRotationMode_Identity:
		pRendererInfo->RotationDegrees = 0.0f;
		break;
	case tagFrameRotationMode::tagFrameRotationMode_90:
		pRendererInfo->RotationDegrees = 90.0f;
		break;
	case tagFrameRotationMode::tagFrameRotationMode_180:
		pRendererInfo->RotationDegrees = 180.0f;
		break;
	case tagFrameRotationMode::tagFrameRotationMode_270:
		pRendererInfo->RotationDegrees = 270.0f;
		break;
	default: // tagFrameRotationMode::tagFrameRotationMode_Auto
		break;
	}
	if (pRendererInfo->SizeMode == tagFrameSizeMode_Zoom)
	{
		// Zoom: center the source, then scale uniformly so the (rotated)
		// image fits the output while preserving aspect ratio.
		FLOAT fSrcAspect, fOutAspect, fScaleFactor;
		// center for output
		pRendererInfo->DstBounds.Width = pRendererInfo->SrcBounds.Width;
		pRendererInfo->DstBounds.Height = pRendererInfo->SrcBounds.Height;
		pRendererInfo->DstBounds.X = (pRendererInfo->OutputSize.Width - pRendererInfo->SrcBounds.Width) >> 1;
		pRendererInfo->DstBounds.Y = (pRendererInfo->OutputSize.Height - pRendererInfo->SrcBounds.Height) >> 1;
		fOutAspect = (FLOAT)pRendererInfo->OutputSize.Width / pRendererInfo->OutputSize.Height;
		if ((pRendererInfo->RotationDegrees == 0.0f) || (pRendererInfo->RotationDegrees == 180.0f))
		{
			fSrcAspect = (FLOAT)pRendererInfo->SrcBounds.Width / pRendererInfo->SrcBounds.Height;
			// Fit the limiting dimension: width when the source is wider
			// than the output, height otherwise.
			if (fSrcAspect > fOutAspect)
			{
				fScaleFactor = (FLOAT)pRendererInfo->OutputSize.Width / pRendererInfo->SrcBounds.Width;
			}
			else
			{
				fScaleFactor = (FLOAT)pRendererInfo->OutputSize.Height / pRendererInfo->SrcBounds.Height;
			}
		}
		else // 90 or 270 degree
		{
			// Width/height trade places after rotation.
			fSrcAspect = (FLOAT)pRendererInfo->SrcBounds.Height / pRendererInfo->SrcBounds.Width;
			if (fSrcAspect > fOutAspect)
			{
				fScaleFactor = (FLOAT)pRendererInfo->OutputSize.Width / pRendererInfo->SrcBounds.Height;
			}
			else
			{
				fScaleFactor = (FLOAT)pRendererInfo->OutputSize.Height / pRendererInfo->SrcBounds.Width;
			}
		}
		pRendererInfo->ScaleX = fScaleFactor;
		pRendererInfo->ScaleY = fScaleFactor;
	}
	else if (pRendererInfo->SizeMode == tagFrameSizeMode_CenterImage)
	{
		// CenterImage: unscaled, centered in the output.
		// center for output
		pRendererInfo->DstBounds.Width = pRendererInfo->SrcBounds.Width;
		pRendererInfo->DstBounds.Height = pRendererInfo->SrcBounds.Height;
		pRendererInfo->DstBounds.X = (pRendererInfo->OutputSize.Width - pRendererInfo->SrcBounds.Width) >> 1;
		pRendererInfo->DstBounds.Y = (pRendererInfo->OutputSize.Height - pRendererInfo->SrcBounds.Height) >> 1;
	}
	else if (pRendererInfo->SizeMode == tagFrameSizeMode_AutoSize)
	{
		// AutoSize: the output adopts the (rotated) source dimensions.
		// set the destination bounds
		pRendererInfo->DstBounds.Width = pRendererInfo->SrcBounds.Width;
		pRendererInfo->DstBounds.Height = pRendererInfo->SrcBounds.Height;
		if ((pRendererInfo->RotationDegrees == 0.0f) || (pRendererInfo->RotationDegrees == 180.0f))
		{
			// same as the source size
			pRendererInfo->OutputSize.Width = pRendererInfo->SrcBounds.Width;
			pRendererInfo->OutputSize.Height = pRendererInfo->SrcBounds.Height;
		}
		else // 90 or 270 degree
		{
			// same as the source size (axes swapped by the rotation)
			pRendererInfo->OutputSize.Width = pRendererInfo->SrcBounds.Height;
			pRendererInfo->OutputSize.Height = pRendererInfo->SrcBounds.Width;
			// center for output
			pRendererInfo->DstBounds.X = (pRendererInfo->OutputSize.Width - pRendererInfo->SrcBounds.Width) >> 1;
			pRendererInfo->DstBounds.Y = (pRendererInfo->OutputSize.Height - pRendererInfo->SrcBounds.Height) >> 1;
		}
	}
	else if (pRendererInfo->SizeMode == tagFrameSizeMode_StretchImage)
	{
		// StretchImage: fill the whole output, allowing non-uniform scale.
		// center for output
		pRendererInfo->DstBounds.Width = pRendererInfo->SrcBounds.Width;
		pRendererInfo->DstBounds.Height = pRendererInfo->SrcBounds.Height;
		pRendererInfo->DstBounds.X = (pRendererInfo->OutputSize.Width - pRendererInfo->SrcBounds.Width) >> 1;
		pRendererInfo->DstBounds.Y = (pRendererInfo->OutputSize.Height - pRendererInfo->SrcBounds.Height) >> 1;
		if ((pRendererInfo->RotationDegrees == 0.0f) || (pRendererInfo->RotationDegrees == 180.0f))
		{
			pRendererInfo->ScaleX = (FLOAT)pRendererInfo->OutputSize.Width / pRendererInfo->DstBounds.Width;
			pRendererInfo->ScaleY = (FLOAT)pRendererInfo->OutputSize.Height / pRendererInfo->DstBounds.Height;
		}
		else // 90 or 270 degree
		{
			pRendererInfo->ScaleX = (FLOAT)pRendererInfo->OutputSize.Width / pRendererInfo->DstBounds.Height;
			pRendererInfo->ScaleY = (FLOAT)pRendererInfo->OutputSize.Height / pRendererInfo->DstBounds.Width;
		}
	}
	else // tagFrameSizeMode_Normal
	{
		// Normal: unscaled; anchor the image per rotation so it stays inside
		// the output (bottom-left / bottom-right / top-right origins).
		pRendererInfo->DstBounds.Width = pRendererInfo->SrcBounds.Width;
		pRendererInfo->DstBounds.Height = pRendererInfo->SrcBounds.Height;
		// NOTE(review): the 90/270 comparisons use integer literals while the
		// 180 case uses 180.0f; all compare against a float field, so the
		// behavior is the same, but the style is inconsistent.
		if (pRendererInfo->RotationDegrees == 90)
		{
			// set destination origin (bottom-left)
			pRendererInfo->DstBounds.X = (pRendererInfo->OutputSize.Width - pRendererInfo->OutputSize.Height) >> 1;
			pRendererInfo->DstBounds.Y = ((pRendererInfo->OutputSize.Width + pRendererInfo->OutputSize.Height) >> 1) - pRendererInfo->DstBounds.Height;
		}
		else if (pRendererInfo->RotationDegrees == 180.0f)
		{
			// set destination origin (bottom-right)
			pRendererInfo->DstBounds.X = pRendererInfo->OutputSize.Width - pRendererInfo->DstBounds.Width;
			pRendererInfo->DstBounds.Y = pRendererInfo->OutputSize.Height - pRendererInfo->DstBounds.Height;
		}
		else if (pRendererInfo->RotationDegrees == 270)
		{
			// set destination origin (top-right)
			pRendererInfo->DstBounds.Y = (pRendererInfo->OutputSize.Height - pRendererInfo->OutputSize.Width) >> 1;
			pRendererInfo->DstBounds.X = pRendererInfo->OutputSize.Width - pRendererInfo->DstBounds.Width - ((pRendererInfo->OutputSize.Width - pRendererInfo->OutputSize.Height) >> 1);
		}
	}
	return S_OK;
}
// Ensures pBufferInfo->Buffer can hold at least uiNewSize bytes.
// Grow-only: returns S_FALSE (no change) when the current capacity already
// suffices. A grow DISCARDS the previous contents; on allocation failure the
// buffer is left empty and E_OUTOFMEMORY is returned.
static
COM_DECLSPEC_NOTHROW
inline
HRESULT
ResizeFrameBuffer(
	_Inout_ tagFrameBufferInfo *pBufferInfo,
	_In_ UINT uiNewSize
	)
{
	CHECK_POINTER(pBufferInfo);

	// Existing capacity is already big enough; keep the current buffer.
	if (pBufferInfo->BufferSize >= uiNewSize) {
		return S_FALSE; // no change
	}

	// Discard the old block (delete[] on nullptr is a no-op) and allocate.
	delete[] pBufferInfo->Buffer;
	pBufferInfo->Buffer = new (std::nothrow) BYTE[uiNewSize];
	if (nullptr == pBufferInfo->Buffer)
	{
		pBufferInfo->BufferSize = 0;
		return E_OUTOFMEMORY;
	}
	pBufferInfo->BufferSize = uiNewSize;
	return S_OK;
} // ResizeFrameBuffer
// Merges the duplication frame's mouse data into the cached PtrInfo.
// Handles the multi-monitor ownership rules (an invisible report from a
// monitor that does not own the pointer must not clobber newer state) and
// copies the pointer shape into PtrInfo's buffer when the OS sends one.
static
COM_DECLSPEC_NOTHROW
inline
HRESULT
GetMouse(
	_In_ IDXGIOutputDuplication *pOutputDuplication,
	_Inout_ tagMouseInfo *PtrInfo,
	_In_ DXGI_OUTDUPL_FRAME_INFO *FrameInfo,
	UINT MonitorIdx,
	INT OffsetX,
	INT OffsetY
	)
{
	CHECK_POINTER_EX(pOutputDuplication, E_INVALIDARG);
	CHECK_POINTER_EX(PtrInfo, E_INVALIDARG);
	CHECK_POINTER_EX(FrameInfo, E_INVALIDARG);
	// A non-zero mouse update timestamp indicates that there is a mouse position update and optionally a shape change
	if (FrameInfo->LastMouseUpdateTime.QuadPart == 0)
	{
		return S_OK;
	}
	bool UpdatePosition = true;
	// Make sure we don't update pointer position wrongly
	// If pointer is invisible, make sure we did not get an update from another output that the last time that said pointer
	// was visible, if so, don't set it to invisible or update.
	if (!FrameInfo->PointerPosition.Visible && (PtrInfo->WhoUpdatedPositionLast != MonitorIdx))
	{
		UpdatePosition = false;
	}
	// If two outputs both say they have a visible, only update if new update has newer timestamp
	if (FrameInfo->PointerPosition.Visible && PtrInfo->Visible && (PtrInfo->WhoUpdatedPositionLast != MonitorIdx) && (PtrInfo->LastTimeStamp.QuadPart > FrameInfo->LastMouseUpdateTime.QuadPart))
	{
		UpdatePosition = false;
	}
	// Update position
	if (UpdatePosition)
	{
		// OffsetX/OffsetY are the monitor's desktop origin, so Position
		// becomes monitor-relative.
		PtrInfo->Position.x = FrameInfo->PointerPosition.Position.x - OffsetX;
		PtrInfo->Position.y = FrameInfo->PointerPosition.Position.y - OffsetY;
		PtrInfo->WhoUpdatedPositionLast = MonitorIdx;
		PtrInfo->LastTimeStamp = FrameInfo->LastMouseUpdateTime;
		PtrInfo->Visible = FrameInfo->PointerPosition.Visible != 0;
	}
	// No new shape
	if (FrameInfo->PointerShapeBufferSize == 0)
	{
		return S_OK;
	}
	// Old buffer too small
	if (FrameInfo->PointerShapeBufferSize > PtrInfo->ShapeBufferSize)
	{
		if (PtrInfo->PtrShapeBuffer != nullptr)
		{
			delete[] PtrInfo->PtrShapeBuffer;
			PtrInfo->PtrShapeBuffer = nullptr;
		}
		PtrInfo->PtrShapeBuffer = new (std::nothrow) BYTE[FrameInfo->PointerShapeBufferSize];
		if (PtrInfo->PtrShapeBuffer == nullptr)
		{
			PtrInfo->ShapeBufferSize = 0;
			return E_OUTOFMEMORY;
		}
		// Update buffer size
		PtrInfo->ShapeBufferSize = FrameInfo->PointerShapeBufferSize;
	}
	// Get shape
	UINT BufferSizeRequired;
	HRESULT hr = pOutputDuplication->GetFramePointerShape(
		FrameInfo->PointerShapeBufferSize,
		reinterpret_cast<VOID*>(PtrInfo->PtrShapeBuffer),
		&BufferSizeRequired,
		&(PtrInfo->ShapeInfo)
		);
	if (FAILED(hr))
	{
		// Drop the (now possibly inconsistent) shape buffer entirely.
		delete[] PtrInfo->PtrShapeBuffer;
		PtrInfo->PtrShapeBuffer = nullptr;
		PtrInfo->ShapeBufferSize = 0;
		return hr;
	}
	return S_OK;
} // GetMouse
// Converts the cached cursor shape into a 32-bit BGRA bitmap in pBufferInfo
// (handling color, monochrome and masked-color shape types), then rotates
// the bitmap in place and translates its bounds to match the desktop
// rotation. Returns S_FALSE when the cursor is not visible, E_INVALIDARG
// for an unknown shape type.
static
COM_DECLSPEC_NOTHROW
inline
HRESULT
ProcessMouseMask(
	_In_ const tagMouseInfo *PtrInfo,
	_In_ const DXGI_OUTPUT_DESC *DesktopDesc,
	_Inout_ tagFrameBufferInfo *pBufferInfo
	)
{
	CHECK_POINTER_EX(PtrInfo, E_INVALIDARG);
	CHECK_POINTER_EX(DesktopDesc, E_INVALIDARG);
	CHECK_POINTER_EX(pBufferInfo, E_INVALIDARG);
	if (!PtrInfo->Visible) {
		return S_FALSE;
	}
	HRESULT hr = S_OK;
	INT DesktopWidth = (INT)(DesktopDesc->DesktopCoordinates.right - DesktopDesc->DesktopCoordinates.left);
	INT DesktopHeight = (INT)(DesktopDesc->DesktopCoordinates.bottom - DesktopDesc->DesktopCoordinates.top);
	// Cursor bounds in (unrotated) monitor coordinates.
	pBufferInfo->Bounds.X = PtrInfo->Position.x;
	pBufferInfo->Bounds.Y = PtrInfo->Position.y;
	pBufferInfo->Bounds.Width = PtrInfo->ShapeInfo.Width;
	// Monochrome shapes stack the AND and XOR masks vertically, so the
	// visible height is half the reported shape height.
	pBufferInfo->Bounds.Height = (PtrInfo->ShapeInfo.Type == DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MONOCHROME)
		? (INT)(PtrInfo->ShapeInfo.Height / 2)
		: (INT)PtrInfo->ShapeInfo.Height;
	pBufferInfo->Pitch = pBufferInfo->Bounds.Width * 4; // 4 bytes per BGRA pixel
	switch (PtrInfo->ShapeInfo.Type)
	{
	case DXGI_OUTDUPL_POINTER_SHAPE_TYPE_COLOR:
	{
		// Color shape: already BGRA with alpha; copy it verbatim.
		// Resize mouseshape buffer (if necessary)
		hr = DXGICaptureHelper::ResizeFrameBuffer(pBufferInfo, PtrInfo->ShapeBufferSize);
		if (FAILED(hr)) {
			return hr;
		}
		// use current mouseshape buffer
		// Copy mouseshape buffer
		memcpy_s((void*)pBufferInfo->Buffer, pBufferInfo->BufferSize, (const void*)PtrInfo->PtrShapeBuffer, PtrInfo->ShapeBufferSize);
		break;
	}
	case DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MONOCHROME:
	{
		// Monochrome shape: expand the 1-bit XOR mask (lower half of the
		// shape) to opaque white / transparent black pixels.
		// Resize mouseshape buffer (if necessary)
		hr = DXGICaptureHelper::ResizeFrameBuffer(pBufferInfo, pBufferInfo->Bounds.Height * pBufferInfo->Pitch);
		if (FAILED(hr)) {
			return hr;
		}
		UINT* InitBuffer32 = reinterpret_cast<UINT*>(pBufferInfo->Buffer);
		for (INT Row = 0; Row < pBufferInfo->Bounds.Height; ++Row)
		{
			// Set mask
			BYTE Mask = 0x80;
			for (INT Col = 0; Col < pBufferInfo->Bounds.Width; ++Col)
			{
				// XOR mask rows start at ShapeInfo.Height / 2.
				BYTE XorMask = PtrInfo->PtrShapeBuffer[(Col / 8) + ((Row + (PtrInfo->ShapeInfo.Height / 2)) * (PtrInfo->ShapeInfo.Pitch))] & Mask;
				// Set new pixel
				InitBuffer32[(Row * pBufferInfo->Bounds.Width) + Col] = (XorMask) ? 0xFFFFFFFF : 0x00000000;
				// Adjust mask
				if (Mask == 0x01)
				{
					Mask = 0x80;
				}
				else
				{
					Mask = Mask >> 1;
				}
			}
		}
		break;
	}
	case DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MASKED_COLOR:
	{
		// Masked-color shape: copy the color data, forcing alpha opaque.
		// Resize mouseshape buffer (if necessary)
		hr = DXGICaptureHelper::ResizeFrameBuffer(pBufferInfo, pBufferInfo->Bounds.Height * pBufferInfo->Pitch);
		if (FAILED(hr)) {
			return hr;
		}
		UINT* InitBuffer32 = reinterpret_cast<UINT*>(pBufferInfo->Buffer);
		UINT* ShapeBuffer32 = reinterpret_cast<UINT*>(PtrInfo->PtrShapeBuffer);
		for (INT Row = 0; Row < pBufferInfo->Bounds.Height; ++Row)
		{
			for (INT Col = 0; Col < pBufferInfo->Bounds.Width; ++Col)
			{
				InitBuffer32[(Row * pBufferInfo->Bounds.Width) + Col] = ShapeBuffer32[Col + (Row * (PtrInfo->ShapeInfo.Pitch / sizeof(UINT)))] | 0xFF000000;
			}
		}
		break;
	}
	default:
		return E_INVALIDARG;
	}
	// Rotate the produced bitmap in place (cycle-following permutation, no
	// temporary buffer) and translate the bounds into rotated desktop space.
	UINT* InitBuffer32 = reinterpret_cast<UINT*>(pBufferInfo->Buffer);
	UINT width = (UINT)pBufferInfo->Bounds.Width;
	UINT height = (UINT)pBufferInfo->Bounds.Height;
	switch (DesktopDesc->Rotation)
	{
	case DXGI_MODE_ROTATION_ROTATE90:
	{
		// Rotate -90 or +270
		for (UINT i = 0; i < width; i++)
		{
			for (UINT j = 0; j < height; j++)
			{
				// Follow the permutation cycle until we reach a cell that
				// was not already swapped into place.
				UINT I = j;
				UINT J = width - 1 - i;
				while ((i*height + j) >(I*width + J))
				{
					UINT p = I*width + J;
					UINT tmp_i = p / height;
					UINT tmp_j = p % height;
					I = tmp_j;
					J = width - 1 - tmp_i;
				}
				std::swap(*(InitBuffer32 + (i*height + j)), *(InitBuffer32 + (I*width + J)));
			}
		}
		// translate bounds
		std::swap(pBufferInfo->Bounds.Width, pBufferInfo->Bounds.Height);
		INT nX = pBufferInfo->Bounds.Y;
		INT nY = DesktopWidth - (INT)(pBufferInfo->Bounds.X + pBufferInfo->Bounds.Height);
		pBufferInfo->Bounds.X = nX;
		pBufferInfo->Bounds.Y = nY;
		pBufferInfo->Pitch = pBufferInfo->Bounds.Width * 4;
	} break;
	case DXGI_MODE_ROTATION_ROTATE180:
	{
		// Rotate -180 or +180
		if (height % 2 != 0)
		{
			//If N is odd reverse the middle row in the matrix
			UINT j = height >> 1;
			for (UINT i = 0; i < (width >> 1); i++)
			{
				std::swap(InitBuffer32[j * width + i], InitBuffer32[j * width + width - i - 1]);
			}
		}
		// Mirror every remaining row pair around the center.
		for (UINT j = 0; j < (height >> 1); j++)
		{
			for (UINT i = 0; i < width; i++)
			{
				std::swap(InitBuffer32[j * width + i], InitBuffer32[(height - j - 1) * width + width - i - 1]);
			}
		}
		// translate position
		INT nX = DesktopWidth - (INT)(pBufferInfo->Bounds.X + pBufferInfo->Bounds.Width);
		INT nY = DesktopHeight - (INT)(pBufferInfo->Bounds.Y + pBufferInfo->Bounds.Height);
		pBufferInfo->Bounds.X = nX;
		pBufferInfo->Bounds.Y = nY;
	} break;
	case DXGI_MODE_ROTATION_ROTATE270:
	{
		// Rotate -270 or +90
		for (UINT i = 0; i < width; i++)
		{
			for (UINT j = 0; j < height; j++)
			{
				// Same cycle-following scheme as the 90-degree case, with
				// the inverse index mapping.
				UINT I = height - 1 - j;
				UINT J = i;
				while ((i*height + j) >(I*width + J))
				{
					int p = I*width + J;
					int tmp_i = p / height;
					int tmp_j = p % height;
					I = height - 1 - tmp_j;
					J = tmp_i;
				}
				std::swap(*(InitBuffer32 + (i*height + j)), *(InitBuffer32 + (I*width + J)));
			}
		}
		// translate bounds
		std::swap(pBufferInfo->Bounds.Width, pBufferInfo->Bounds.Height);
		INT nX = DesktopHeight - (pBufferInfo->Bounds.Y + pBufferInfo->Bounds.Width);
		INT nY = pBufferInfo->Bounds.X;
		pBufferInfo->Bounds.X = nX;
		pBufferInfo->Bounds.Y = nY;
		pBufferInfo->Pitch = pBufferInfo->Bounds.Width * 4;
	} break;
	}
	return S_OK;
} // ProcessMouseMask
//
// Draw mouse provided in buffer to backbuffer
//
// Alpha-blends the cursor bitmap (built by ProcessMouseMask) onto the
// CPU-mappable surface pSharedSurf, clipping the cursor against the surface
// edges first.
// NOTE(review): this function returns S_OK even when the QueryInterface or
// Map call fails (hr is computed but discarded) — presumably a deliberate
// best-effort "draw the cursor if we can"; confirm before changing.
static
COM_DECLSPEC_NOTHROW
inline
HRESULT
DrawMouse(
	_In_ tagMouseInfo *PtrInfo,
	_In_ const DXGI_OUTPUT_DESC *DesktopDesc,
	_Inout_ tagFrameBufferInfo *pTempMouseBuffer,
	_Inout_ ID3D11Texture2D *pSharedSurf
	)
{
	CHECK_POINTER_EX(PtrInfo, E_INVALIDARG);
	CHECK_POINTER_EX(DesktopDesc, E_INVALIDARG);
	CHECK_POINTER_EX(pTempMouseBuffer, E_INVALIDARG);
	CHECK_POINTER_EX(pSharedSurf, E_INVALIDARG);
	HRESULT hr = S_OK;
	D3D11_TEXTURE2D_DESC FullDesc;
	pSharedSurf->GetDesc(&FullDesc);
	INT SurfWidth = FullDesc.Width;
	INT SurfHeight = FullDesc.Height;
	INT SurfPitch = FullDesc.Width * 4;
	// Convert/rotate the raw shape into a BGRA bitmap in pTempMouseBuffer.
	hr = DXGICaptureHelper::ProcessMouseMask(PtrInfo, DesktopDesc, pTempMouseBuffer);
	if (FAILED(hr)) {
		return hr;
	}
	// Buffer used if necessary (in case of monochrome or masked pointer)
	BYTE* InitBuffer = pTempMouseBuffer->Buffer;
	// Clipping adjusted coordinates / dimensions
	INT PtrWidth = (INT)pTempMouseBuffer->Bounds.Width;
	INT PtrHeight = (INT)pTempMouseBuffer->Bounds.Height;
	INT PtrLeft = (INT)pTempMouseBuffer->Bounds.X;
	INT PtrTop = (INT)pTempMouseBuffer->Bounds.Y;
	INT PtrPitch = (INT)pTempMouseBuffer->Pitch;
	INT SrcLeft = 0;
	INT SrcTop = 0;
	INT SrcWidth = PtrWidth;
	INT SrcHeight = PtrHeight;
	// Clip the cursor rectangle against each surface edge.
	if (PtrLeft < 0)
	{
		// crop mouseshape left
		SrcLeft = -PtrLeft;
		// new mouse x position for drawing
		PtrLeft = 0;
	}
	else if (PtrLeft + PtrWidth > SurfWidth)
	{
		// crop mouseshape width
		SrcWidth = SurfWidth - PtrLeft;
	}
	if (PtrTop < 0)
	{
		// crop mouseshape top
		SrcTop = -PtrTop;
		// new mouse y position for drawing
		PtrTop = 0;
	}
	else if (PtrTop + PtrHeight > SurfHeight)
	{
		// crop mouseshape height
		SrcHeight = SurfHeight - PtrTop;
	}
	// QI for IDXGISurface
	CComPtr<IDXGISurface> ipCopySurface;
	hr = pSharedSurf->QueryInterface(__uuidof(IDXGISurface), (void **)&ipCopySurface);
	if (SUCCEEDED(hr)) {
		// Map pixels
		DXGI_MAPPED_RECT MappedSurface;
		hr = ipCopySurface->Map(&MappedSurface, DXGI_MAP_READ | DXGI_MAP_WRITE);
		if (SUCCEEDED(hr))
		{
			// 0xAARRGGBB
			UINT* SrcBuffer32 = reinterpret_cast<UINT*>(InitBuffer);
			// Destination pointer pre-offset to the cursor's top-left pixel.
			UINT* DstBuffer32 = reinterpret_cast<UINT*>(MappedSurface.pBits) + PtrTop * SurfWidth + PtrLeft;
			// Alpha blending masks
			const UINT AMask = 0xFF000000;
			const UINT RBMask = 0x00FF00FF;
			const UINT GMask = 0x0000FF00;
			const UINT AGMask = AMask | GMask;
			const UINT OneAlpha = 0x01000000;
			UINT uiPixel1;
			UINT uiPixel2;
			UINT uiAlpha;
			UINT uiNAlpha;
			UINT uiRedBlue;
			UINT uiAlphaGreen;
			for (INT Row = SrcTop; Row < SrcHeight; ++Row)
			{
				for (INT Col = SrcLeft; Col < SrcWidth; ++Col)
				{
					// Alpha blending
					// Blend R/B and A/G channel pairs in parallel within one
					// 32-bit word (classic packed alpha-blend trick).
					uiPixel1 = DstBuffer32[((Row - SrcTop) * SurfWidth) + (Col - SrcLeft)];
					uiPixel2 = SrcBuffer32[(Row * PtrWidth) + Col];
					uiAlpha = (uiPixel2 & AMask) >> 24;
					uiNAlpha = 255 - uiAlpha;
					uiRedBlue = ((uiNAlpha * (uiPixel1 & RBMask)) + (uiAlpha * (uiPixel2 & RBMask))) >> 8;
					uiAlphaGreen = (uiNAlpha * ((uiPixel1 & AGMask) >> 8)) + (uiAlpha * (OneAlpha | ((uiPixel2 & GMask) >> 8)));
					DstBuffer32[((Row - SrcTop) * SurfWidth) + (Col - SrcLeft)] = ((uiRedBlue & RBMask) | (uiAlphaGreen & AGMask));
				}
			}
		}
		// Done with resource
		// NOTE(review): Unmap is also reached when Map failed above — verify
		// that is harmless for this surface implementation.
		hr = ipCopySurface->Unmap();
	}
	return S_OK;
} // DrawMouse
// Wraps a CPU-mappable BGRA texture in a new ID2D1Bitmap compatible with
// pRenderTarget. On success *ppOutBitmap receives an owned reference; on any
// failure it stays null and the failing HRESULT is returned.
static
COM_DECLSPEC_NOTHROW
inline
HRESULT
CreateBitmap(
	_In_ ID2D1RenderTarget *pRenderTarget,
	_In_ ID3D11Texture2D *pSourceTexture,
	_Outptr_ ID2D1Bitmap **ppOutBitmap
	)
{
	CHECK_POINTER(ppOutBitmap);
	*ppOutBitmap = nullptr;
	CHECK_POINTER_EX(pRenderTarget, E_INVALIDARG);
	CHECK_POINTER_EX(pSourceTexture, E_INVALIDARG);
	HRESULT hr = S_OK;
	CComPtr<ID3D11Texture2D> ipSourceTexture(pSourceTexture);
	CComPtr<IDXGISurface> ipCopySurface;
	CComPtr<ID2D1Bitmap> ipD2D1SourceBitmap;
	// QI for IDXGISurface so the texture's bits can be mapped.
	hr = ipSourceTexture->QueryInterface(__uuidof(IDXGISurface), (void **)&ipCopySurface);
	CHECK_HR_RETURN(hr);
	// Map pixels
	DXGI_MAPPED_RECT MappedSurface;
	hr = ipCopySurface->Map(&MappedSurface, DXGI_MAP_READ);
	CHECK_HR_RETURN(hr);
	D3D11_TEXTURE2D_DESC destImageDesc;
	ipSourceTexture->GetDesc(&destImageDesc);
	const HRESULT hrCreate = pRenderTarget->CreateBitmap(
		D2D1::SizeU(destImageDesc.Width, destImageDesc.Height),
		(const void*)MappedSurface.pBits,
		MappedSurface.Pitch,
		D2D1::BitmapProperties(D2D1::PixelFormat(DXGI_FORMAT_B8G8R8A8_UNORM, D2D1_ALPHA_MODE_PREMULTIPLIED)),
		&ipD2D1SourceBitmap);
	// Always unmap, whether or not the bitmap was created.
	hr = ipCopySurface->Unmap();
	if (FAILED(hrCreate))
	{
		// BUGFIX: propagate the CreateBitmap failure. The old code returned
		// Unmap()'s result here, which is normally S_OK and so masked the
		// failure while leaving *ppOutBitmap null for the caller.
		return hrCreate;
	}
	CHECK_HR_RETURN(hr);
	// set return value
	*ppOutBitmap = ipD2D1SourceBitmap.Detach();
	return S_OK;
} // CreateBitmap
//
// GetContainerFormatByFileName
// Map a file extension (.bmp/.tif/.tiff/.png/.jpg/.jpeg) to the matching
// WIC container-format GUID.
// pRetVal is optional and is reset to GUID_NULL until a match is found.
// Returns S_OK on success, E_INVALIDARG for a null/empty name,
// MK_E_INVALIDEXTENSION when the name has no extension, and a failure
// HRESULT for an unsupported extension.
//
static
inline
COM_DECLSPEC_NOTHROW
HRESULT
GetContainerFormatByFileName(
_In_ LPCWSTR lpcwFileName,
_Out_opt_ GUID *pRetVal = NULL
)
{
RESET_POINTER_EX(pRetVal, GUID_NULL);
CHECK_POINTER_EX(lpcwFileName, E_INVALIDARG);
if (lstrlenW(lpcwFileName) == 0) {
return E_INVALIDARG;
}
LPCWSTR lpcwExtension = ::PathFindExtensionW(lpcwFileName);
if (lstrlenW(lpcwExtension) == 0) {
return MK_E_INVALIDEXTENSION; // file name carries no extension
}
if (lstrcmpiW(lpcwExtension, L".bmp") == 0)
{
RESET_POINTER_EX(pRetVal, GUID_ContainerFormatBmp);
}
else if ((lstrcmpiW(lpcwExtension, L".tif") == 0) ||
(lstrcmpiW(lpcwExtension, L".tiff") == 0))
{
RESET_POINTER_EX(pRetVal, GUID_ContainerFormatTiff);
}
else if (lstrcmpiW(lpcwExtension, L".png") == 0)
{
RESET_POINTER_EX(pRetVal, GUID_ContainerFormatPng);
}
else if ((lstrcmpiW(lpcwExtension, L".jpg") == 0) ||
(lstrcmpiW(lpcwExtension, L".jpeg") == 0))
{
RESET_POINTER_EX(pRetVal, GUID_ContainerFormatJpeg);
}
else
{
// Fixed: returning the raw Win32 code ERROR_MRM_INVALID_FILE_TYPE
// (a positive value) made SUCCEEDED(hr) evaluate to true in callers
// such as SaveImageToFile; wrap it into a proper failure HRESULT.
return HRESULT_FROM_WIN32(ERROR_MRM_INVALID_FILE_TYPE);
}
return S_OK;
}
//
// SaveImageToFile
// Encode a WIC bitmap source to disk, selecting the container format
// (BMP/TIFF/PNG/JPEG) from the target file's extension.
// Returns S_OK on success or the first failing step's HRESULT.
//
static
COM_DECLSPEC_NOTHROW
inline
HRESULT
SaveImageToFile(
_In_ IWICImagingFactory *pWICImagingFactory,
_In_ IWICBitmapSource *pWICBitmapSource,
_In_ LPCWSTR lpcwFileName
)
{
CHECK_POINTER_EX(pWICImagingFactory, E_INVALIDARG);
CHECK_POINTER_EX(pWICBitmapSource, E_INVALIDARG);
HRESULT hr = S_OK;
GUID guidContainerFormat;
hr = GetContainerFormatByFileName(lpcwFileName, &guidContainerFormat);
if (FAILED(hr)) {
return hr;
}
// DontCare lets the frame encoder pick the closest pixel format it supports.
WICPixelFormatGUID format = GUID_WICPixelFormatDontCare;
CComPtr<IWICImagingFactory> ipWICImagingFactory(pWICImagingFactory);
CComPtr<IWICBitmapSource> ipWICBitmapSource(pWICBitmapSource);
CComPtr<IWICStream> ipStream;
CComPtr<IWICBitmapEncoder> ipEncoder;
CComPtr<IWICBitmapFrameEncode> ipFrameEncode;
unsigned int uiWidth = 0;
unsigned int uiHeight = 0;
// SUCCEEDED-chain: each step runs only if all previous steps succeeded,
// so hr always holds the first failure.
hr = ipWICImagingFactory->CreateStream(&ipStream);
if (SUCCEEDED(hr)) {
hr = ipStream->InitializeFromFilename(lpcwFileName, GENERIC_WRITE);
}
if (SUCCEEDED(hr)) {
hr = ipWICImagingFactory->CreateEncoder(guidContainerFormat, NULL, &ipEncoder);
}
if (SUCCEEDED(hr))
{
hr = ipEncoder->Initialize(ipStream, WICBitmapEncoderNoCache);
}
if (SUCCEEDED(hr))
{
hr = ipEncoder->CreateNewFrame(&ipFrameEncode, NULL);
}
if (SUCCEEDED(hr))
{
hr = ipFrameEncode->Initialize(NULL);
}
if (SUCCEEDED(hr))
{
hr = ipWICBitmapSource->GetSize(&uiWidth, &uiHeight);
}
if (SUCCEEDED(hr))
{
hr = ipFrameEncode->SetSize(uiWidth, uiHeight);
}
if (SUCCEEDED(hr))
{
hr = ipFrameEncode->SetPixelFormat(&format);
}
if (SUCCEEDED(hr))
{
hr = ipFrameEncode->WriteSource(ipWICBitmapSource, NULL);
}
// Commit the frame first, then the encoder, to finalize the file.
if (SUCCEEDED(hr))
{
hr = ipFrameEncode->Commit();
}
if (SUCCEEDED(hr))
{
hr = ipEncoder->Commit();
}
return hr;
} // SaveImageToFile
}; // end class DXGICaptureHelper
#endif // __DXGICAPTUREHELPER_H__

150
media/DXGICaptureTypes.h Normal file
View File

@ -0,0 +1,150 @@
/*****************************************************************************
* DXGICaptureTypes.h
*
* Copyright (C) 2020 Gokhan Erdogdu <gokhan_erdogdu - at - yahoo - dot - com>
*
* DXGICapture is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* DXGICapture is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*
******************************************************************************/
#pragma once
#ifndef __DXGICAPTURETYPES_H__
#define __DXGICAPTURETYPES_H__
#include <dxgi1_2.h>
#include <windef.h>
#include <sal.h>
#include <vector>
//
// enum tagFrameSizeMode_e
// How a captured frame is fitted into the destination surface.
//
typedef enum tagFrameSizeMode_e : UINT
{
tagFrameSizeMode_Normal = 0x0,
tagFrameSizeMode_StretchImage = 0x1,
tagFrameSizeMode_AutoSize = 0x2,
tagFrameSizeMode_CenterImage = 0x3,
tagFrameSizeMode_Zoom = 0x4,
} tagFrameSizeMode;
//
// enum tagFrameRotationMode_e
// Output rotation applied to the captured frame; Auto derives it from the
// monitor's current rotation.
//
typedef enum tagFrameRotationMode_e : UINT
{
tagFrameRotationMode_Auto = 0x0,
tagFrameRotationMode_Identity = 0x1,
tagFrameRotationMode_90 = 0x2,
tagFrameRotationMode_180 = 0x3,
tagFrameRotationMode_270 = 0x4,
} tagFrameRotationMode;
//
// Holds info about the pointer/cursor
// struct tagMouseInfo_s
//
typedef struct tagMouseInfo_s
{
UINT ShapeBufferSize; // valid byte count in PtrShapeBuffer
_Field_size_bytes_(ShapeBufferSize) BYTE* PtrShapeBuffer;
DXGI_OUTDUPL_POINTER_SHAPE_INFO ShapeInfo;
POINT Position; // desktop coordinates
bool Visible;
UINT WhoUpdatedPositionLast; // output index that last reported the pointer
LARGE_INTEGER LastTimeStamp;
} tagMouseInfo;
//
// struct tagFrameSize_s
// Width/height pair in pixels.
//
typedef struct tagFrameSize_s
{
LONG Width;
LONG Height;
} tagFrameSize;
//
// struct tagBounds_s
// Rectangle expressed as origin plus extent.
//
typedef struct tagFrameBounds_s
{
LONG X;
LONG Y;
LONG Width;
LONG Height;
} tagFrameBounds;
//
// struct tagFrameBufferInfo_s
// Describes a CPU-side pixel buffer of a captured frame.
//
typedef struct tagFrameBufferInfo_s
{
UINT BufferSize; // allocated byte count of Buffer
_Field_size_bytes_(BufferSize) BYTE* Buffer;
INT BytesPerPixel;
tagFrameBounds Bounds;
INT Pitch; // bytes per scanline
} tagFrameBufferInfo;
//
// struct tagDublicatorMonitorInfo_s
// Per-monitor info gathered for the desktop duplication session.
//
typedef struct tagDublicatorMonitorInfo_s
{
INT Idx;
WCHAR DisplayName[64];
INT RotationDegrees;
tagFrameBounds Bounds;
} tagDublicatorMonitorInfo;
typedef std::vector<tagDublicatorMonitorInfo*> DublicatorMonitorInfoVec;
//
// struct tagScreenCaptureFilterConfig_s
// User-facing capture configuration.
//
typedef struct tagScreenCaptureFilterConfig_s
{
public:
INT MonitorIdx;
INT ShowCursor;
tagFrameRotationMode RotationMode;
tagFrameSizeMode SizeMode;
tagFrameSize OutputSize; /* Discard for tagFrameSizeMode_AutoSize */
} tagScreenCaptureFilterConfig;
//
// struct tagRendererInfo_s
// Derived rendering parameters computed from the filter config plus the
// source monitor's format and bounds.
//
typedef struct tagRendererInfo_s
{
INT MonitorIdx;
INT ShowCursor;
tagFrameRotationMode RotationMode;
tagFrameSizeMode SizeMode;
tagFrameSize OutputSize;
FLOAT RotationDegrees;
FLOAT ScaleX;
FLOAT ScaleY;
DXGI_FORMAT SrcFormat;
tagFrameBounds SrcBounds;
tagFrameBounds DstBounds;
} tagRendererInfo;
// macros
// Pointer helpers: RESET_* write through optional out-pointers only when
// non-null; CHECK_* early-return on null. CHECK_HR_RETURN evaluates its
// argument exactly once via a uniquely-named local.
#define RESET_POINTER_EX(p, v) if (nullptr != (p)) { *(p) = (v); }
#define RESET_POINTER(p) RESET_POINTER_EX(p, nullptr)
#define CHECK_POINTER_EX(p, hr) if (nullptr == (p)) { return (hr); }
#define CHECK_POINTER(p) CHECK_POINTER_EX(p, E_POINTER)
#define CHECK_HR_BREAK(hr) if (FAILED(hr)) { break; }
#define CHECK_HR_RETURN(hr) { HRESULT hr_379f4648 = hr; if (FAILED(hr_379f4648)) { return hr_379f4648; } }
#endif // __DXGICAPTURETYPES_H__

15
media/Debuger.h Normal file
View File

@ -0,0 +1,15 @@
#pragma once
#include <string>
using namespace std;
// Static logging helper (implementation in Debuger.cpp, not shown here).
// NOTE(review): this header relies on a file-scope `using namespace std;`
// above; consider qualifying std:: types instead.
class Debuger
{
public:
Debuger();
~Debuger();
// Log a wide string.
static int Debug(wstring log);
// Log with a printf-style wide format string and varargs.
static int Debug(const wchar_t *format, ...);
// Log a narrow string.
static int Debug(string log);
};

100
media/H264Docoder.cpp Normal file
View File

@ -0,0 +1,100 @@
#include "H264Docoder.h"
#include "Debuger.h"
extern "C" {
#include "libswscale/swscale.h"
#include "libavformat/avformat.h"
#include "libavcodec/avcodec.h"
#include "libswscale/swscale.h"
#include "libavutil/pixfmt.h"
}
// Construct and open an FFmpeg H.264 software decoder.
// NOTE(review): exit(1) on failure is harsh for a library class -- consider
// reporting an error state instead of terminating the process.
H264decoder::H264decoder()
:mObserver(nullptr){
this->mObserverType = Observer_Video;
avcodec_register_all(); // deprecated no-op in FFmpeg >= 4.0, harmless
mCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
if (!mCodec) {
cout << "could not found 264 decoder" << endl;
exit(1);
}
mCtx = avcodec_alloc_context3(mCodec);
picture = av_frame_alloc();
// Allow feeding the decoder partial/chunked bitstream data when supported.
if ((mCodec->capabilities)&AV_CODEC_CAP_TRUNCATED)
(mCtx->flags) |= AV_CODEC_FLAG2_CHUNKS;
// Initial geometry hint only; the decoder updates it from the SPS.
mCtx->height = 720;
mCtx->width = 1280;
if (avcodec_open2(mCtx, mCodec, NULL) < 0) {
cout << "could not open codec\n";
exit(1);
}
}
// Release decoder resources allocated in the constructor.
// Fixed: the destructor used to be empty, leaking the codec context and
// the constructor-allocated AVFrame.
H264decoder::~H264decoder()
{
if (nullptr != picture) {
av_frame_free(&picture); // no-op safe if Decodes() already freed it
}
if (nullptr != mCtx) {
avcodec_free_context(&mCtx); // closes and frees the context
}
}
//
// Created by 29019 on 2019/5/7.
//
const int width = 640;
const int height = 480;
const int framesize = width * height * 3 / 2; // pixel count of one YUV420 frame
// Decode one H.264 access unit and hand the resulting AVFrame to the
// registered observer.
// dat/size: input bitstream buffer.
// Always returns nullptr: decoded frames are delivered through
// H264DecodeObserver::OnRecieveData(), not through the return value.
// Fixed: the function previously fell off the end of a non-void function
// (UB) and leaked the freshly allocated AVFrame on both early-return paths.
VData *H264decoder::Decodes(void *dat,uint32_t size) {
AVPacket pkt;
int got_picture = 0;
int len = 0;
// One frame per call; released on every exit path below.
picture = av_frame_alloc();
av_init_packet(&pkt);
pkt.data = (uint8_t *)dat;
pkt.size = size;
len = avcodec_decode_video2(this->mCtx, picture, &got_picture, &pkt);
if (len < 0) {
printf("Error while decoding a frame.\n");
av_frame_free(&picture);
return nullptr;
}
if (got_picture == 0) {
// Decoder needs more input before it can emit a frame.
av_frame_free(&picture);
return nullptr;
}
++frame;
if (nullptr != mObserver) {
this->mObserver->OnRecieveData(picture);
}
av_frame_free(&picture);
return nullptr;
}
void H264decoder::OnRtmpFrame(void * dat, uint32_t size)
{
//Debuger::Debug(L"get data\r\n");
this->Decodes(dat, size);
}
// Register the sink that receives decoded AVFrames.
// Returns 0 on success, -1 when p is null.
int H264decoder::SetObserver(H264DecodeObserver *p)
{
if (p == nullptr) {
return -1;
}
mObserver = p;
return 0;
}

58
media/H264Docoder.h Normal file
View File

@ -0,0 +1,58 @@
#pragma once
#include <ctype.h>
#include <vector>
#include <list>
#include <iostream>
#include <time.h>
#include "RtmpPuller.h"
#include "RtmpPuller2.h"
using namespace std;
extern "C" {
#include "libavutil/pixfmt.h"
#include "libavcodec/avcodec.h"
#include "sdl/SDL.h"
}
#define INBUF_SIZE 4096
// Byte buffer used to pass raw/decoded data around.
typedef vector<char> VData;
// Minimal decoder interface; the default Decode() is a stub returning -1.
class Decoder {
private:
list<VData> mDecodeData;
public:
virtual int Decode(VData &dat) { return -1; };
};
// H.264 software decoder fed by an RTMP pull source; decoded AVFrames are
// pushed to a registered H264DecodeObserver.
typedef class H264decoder :public Decoder, public RtmpPuller2::RtmpPullObserver {
public:
// Callback interface for consumers of decoded frames.
class H264DecodeObserver {
public:
virtual int OnRecieveData(AVFrame *frame) { return 0; };
};
enum CAP_STATUS {
RUNNING = 1,
STOP = 2,
PAUSE = 3,
FAIL = 4,
};
H264decoder();
~H264decoder();
// Decode one access unit; frames are delivered via the observer,
// the return value is unused (always nullptr).
VData *Decodes(void *dat, uint32_t len);
// RtmpPullObserver hook: forwards pulled video data into Decodes().
void OnRtmpFrame(void * dat, uint32_t size);
// Register the decoded-frame sink; returns 0 on success, -1 for null.
int SetObserver(H264DecodeObserver *);
private:
AVCodec *mCodec;
AVCodecContext *mCtx = NULL;
int frame, got_picture, len;
AVFrame *picture;
AVPacket avpkt;
H264DecodeObserver *mObserver;
//SDL---------------------------
int screen_w = 0, screen_h = 0;
SDL_Window *screen;
SDL_Renderer* sdlRenderer;
SDL_Texture* sdlTexture;
SDL_Rect sdlRect;
}CH264Decoder;

36
media/ImageUtil.cpp Normal file
View File

@ -0,0 +1,36 @@
#pragma once
#include "ImageUtil.h"
// Compare two GUIDs for equality.
// Fixed: the original ignored Data4, so GUIDs differing only in their last
// 8 bytes compared as equal.
bool GuidCompare(GUID g1, GUID g2) {
if (g1.Data1 != g2.Data1) {
return false;
}
if (g1.Data2 != g2.Data2) {
return false;
}
if (g1.Data3 != g2.Data3) {
return false;
}
for (int i = 0; i < 8; ++i) {
if (g1.Data4[i] != g2.Data4[i]) {
return false;
}
}
return true;
}
// Map a DirectShow media-subtype GUID to the matching FFmpeg pixel format.
// Returns AV_PIX_FMT_NONE for unknown subtypes.
AVPixelFormat GetFormatFromGuid(GUID g)
{
if (GuidCompare(g, MEDIASUBTYPE_YUY2)) {
return AV_PIX_FMT_YUYV422;
}
if (GuidCompare(g, MEDIASUBTYPE_RGB24)) {
return AV_PIX_FMT_RGB24;
}
if (GuidCompare(g, MEDIASUBTYPE_RGB32)) {
return AV_PIX_FMT_RGB32;
}
if (GuidCompare(g, MEDIASUBTYPE_MJPG)) {
return AV_PIX_FMT_YUVJ420P;
}
if (GuidCompare(g, MEDIASUBTYPE_IYUV)) {
// Fixed: IYUV (I420) is planar 4:2:0; it was wrongly mapped to the
// packed 4:2:2 format AV_PIX_FMT_YUYV422.
return AV_PIX_FMT_YUV420P;
}
return AV_PIX_FMT_NONE;
}

12
media/ImageUtil.h Normal file
View File

@ -0,0 +1,12 @@
#pragma once
#include "guiddef.h"
#include "uuids.h"
extern "C" {
#include "libswscale/swscale.h"
#include "libavformat/avformat.h"
#include "libavcodec/avcodec.h"
#include "libswscale/swscale.h"
#include "libavutil/pixfmt.h"
}
AVPixelFormat GetFormatFromGuid(GUID g);

132
media/RtmpPuller.cpp Normal file
View File

@ -0,0 +1,132 @@
#include "RtmpPuller.h"
// Initialize FFmpeg networking; stream indices start at -1 (unknown).
RtmpPuller::RtmpPuller()
:mFrameIndex(0), mAudioIndex(-1),mVideoIndex(-1)
, mAudioStream(nullptr),mVideoStream(nullptr)
{
av_register_all(); // deprecated no-op in FFmpeg >= 4.0, harmless
//Network
avformat_network_init();
//Input
}
// Open the input URL, probe its streams, remember the audio/video stream
// indices, and prepare the h264_mp4toannexb bitstream filter.
// Returns 0 on success, -1 on open/probe failure. mStatus becomes RUNNING,
// or NOSOURCE when neither an audio nor a video stream was found.
int RtmpPuller::ConnectServer(const char *p)
{
int ret = 0;
if ((ret = avformat_open_input(&mIfmtCtx, p, 0, 0)) < 0) {
printf("Could not open input file.");
return -1;
}
if ((ret = avformat_find_stream_info(mIfmtCtx, 0)) < 0) {
printf("Failed to retrieve input stream information");
return -1;
}
// NOTE(review): AVStream::codec is deprecated in newer FFmpeg releases;
// migrate to codecpar when upgrading.
for (int i = 0; i < mIfmtCtx->nb_streams; i++) {
if (mIfmtCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
mVideoIndex = i;
}
if (mIfmtCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
mAudioIndex = i;
}
}
if(mAudioIndex > -1)
this->mAudioStream = mIfmtCtx->streams[mAudioIndex];
if(mVideoIndex > -1)
this->mVideoStream = mIfmtCtx->streams[mVideoIndex];
av_dump_format(mIfmtCtx, 0, p, 0);
// Converts AVCC (length-prefixed) H.264 packets to annex-b start codes.
mH264bsfc = av_bitstream_filter_init("h264_mp4toannexb");
mStatus = RUNNING;
if((mAudioIndex == -1 ) &&(mVideoIndex == -1))
mStatus = NOSOURCE;
return 0;
}
// Worker-thread entry point: keep pumping packets while the puller is RUNNING.
int ThreadPull(RtmpPuller*p) {
while (RtmpPuller::CAP_STATUS::RUNNING == p->Status()) {
p->PullData();
}
return 0;
}
// Launch the pull loop on a worker thread.
// Fixed: mStatus is now set to RUNNING *before* the thread is spawned; the
// worker's loop condition reads Status(), so the old order raced and could
// let the thread exit immediately. The pointless get_id() call was removed.
// NOTE(review): the thread object is never joined/deleted by this class.
int RtmpPuller::StartPull()
{
mStatus = RUNNING;
this->mThread = new std::thread(ThreadPull, this);
return 0;
}
// Pump one packet from the demuxer and fan it out to the observers.
// The first 100 packets are deliberately dropped (warm-up after connect).
// Video packets are converted to annex-b by the h264_mp4toannexb filter
// before delivery.
// Returns 0 on success, -1 when av_read_frame() fails (EOF/error).
int RtmpPuller::PullData()
{
static int drop = 0;
AVStream *in_stream;
//Get an AVPacket
int ret = av_read_frame(mIfmtCtx, &pkt);
if (ret < 0)
return -1;
in_stream = mIfmtCtx->streams[pkt.stream_index];
/* copy packet */
// NOTE(review): rescaling from in_stream->time_base to itself is an
// identity transform; kept for behavior parity, but the destination was
// probably meant to be an output time base.
pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, in_stream->time_base,
(AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, in_stream->time_base,
(AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, in_stream->time_base);
pkt.pos = -1;
// Warm-up: discard the first 100 packets.
if (drop < 100) {
drop++;
goto end;
}
if (pkt.stream_index == mVideoIndex) {
printf("Receive %8d video frames from input URL\n", mFrameIndex);
mFrameIndex++;
av_bitstream_filter_filter(mH264bsfc, in_stream->codec, NULL,
&pkt.data, &pkt.size, pkt.data, pkt.size, 0);
if (mObserver.size() > 0) {
for (auto itr = this->mObserver.begin(); itr != mObserver.end(); itr++) {
RtmpPullObserver *p = (RtmpPullObserver *)*itr;
if (p->mObserverType == RtmpPullObserver::Observer_Video) {
p->OnRtmpFrame(pkt.data, pkt.size);
}
}
}
}
if (pkt.stream_index == mAudioIndex) {
if (mObserver.size() > 0) {
for (auto itr = this->mObserver.begin(); itr != mObserver.end(); itr++) {
RtmpPullObserver *p = (RtmpPullObserver *)*itr;
if (p->mObserverType == RtmpPullObserver::Observer_Audio) {
p->OnRtmpFrame(pkt.data, pkt.size);
}
}
}
}
end:
av_free_packet(&pkt);
return 0; // fixed: a non-void function previously fell off the end (UB)
}
// Register an observer exactly once; duplicate registrations are ignored.
// Returns 0 on success (or duplicate), -1 for null input.
int RtmpPuller::SetObserver(RtmpPullObserver *p)
{
if (nullptr == p)
return -1;
// lock_guard releases mMux on every return path.
// Fixed: the original returned early on a duplicate while still holding
// the lock, deadlocking the next caller.
std::lock_guard<std::mutex> lock(mMux);
for (auto itr = this->mObserver.begin(); itr != mObserver.end(); itr++) {
if (p == *itr) return 0;
}
this->mObserver.push_back(p);
return 0;
}
// Current lifecycle state of the puller.
RtmpPuller::CAP_STATUS RtmpPuller::Status()
{
return mStatus;
}
// The demuxed audio stream, or nullptr when the source has none.
AVStream * RtmpPuller::AudioStream()
{
return mAudioStream;
}

64
media/RtmpPuller.h Normal file
View File

@ -0,0 +1,64 @@
#pragma once
//Windows
#include <string>
#include <thread>
#include <vector>
#include <mutex>
using namespace std;
extern "C"
{
#include "libavformat/avformat.h"
#include "libavutil/mathematics.h"
#include "libavutil/time.h"
};
#pragma comment (lib, "ws2_32.lib")
#pragma comment (lib, "Secur32.lib")
#pragma comment (lib, "Bcrypt.lib")
// Pulls an RTMP/flv stream via FFmpeg's demuxer on a worker thread and
// fans out raw audio/video payloads to registered observers.
class RtmpPuller {
public:
// Observer interface; mObserverType selects which payloads it receives.
class RtmpPullObserver {
public :
enum ObserverType {
Observer_Video = 0,
Observer_Audio = 1,
};
virtual void OnRtmpFrame(void * dat, uint32_t size) {};
ObserverType mObserverType;
};
enum CAP_STATUS {
RUNNING = 1,
STOP = 2,
PAUSE = 3,
FAIL = 4,
NOSOURCE = 6,
};
RtmpPuller();
// Open the URL and probe streams; 0 on success, -1 on failure.
int ConnectServer(const char *);
// Spawn the worker thread running ThreadPull().
int StartPull();
// Pump a single packet; -1 on read failure.
int PullData();
// Register an observer once; -1 for null input.
int SetObserver(RtmpPullObserver *);
CAP_STATUS Status();
AVStream *AudioStream();
private:
CAP_STATUS mStatus;
AVOutputFormat *mOutFormat = NULL;
//Input AVFormatContext and Output AVFormatContext
AVFormatContext *mIfmtCtx = NULL;
AVPacket pkt;
string mRtmpUrl;
int mVideoIndex;
int mAudioIndex;
int mFrameIndex;
AVBitStreamFilterContext* mH264bsfc; // AVCC -> annex-b converter
std::thread *mThread;
vector<RtmpPullObserver*> mObserver;
AVStream *mAudioStream;
AVStream *mVideoStream;
mutex mMux; // guards mObserver
};
// Worker-thread entry: loops PullData() while Status() == RUNNING.
int ThreadPull(RtmpPuller*p);

251
media/RtmpPuller2.cpp Normal file
View File

@ -0,0 +1,251 @@
#include "RtmpPuller2.h"
#include "Debuger.h"
// Construct an idle puller.
// Fixed: mThread/mRtmp/mStatus were left uninitialized, so StopPull()
// before StartPull() dereferenced garbage.
// NOTE(review): mAccBuffer is a fixed 3000-byte scratch area for
// re-assembled ADTS frames; PullData() copies m_nBodySize-2+7 bytes into
// it, so AAC packets larger than ~3 KB would overflow -- verify upstream.
RtmpPuller2::RtmpPuller2()
: mThread(nullptr)
, mRtmp(nullptr)
, mStatus(STOP)
{
mAccBuffer = new uint8_t[3000];
}
// Release the ADTS scratch buffer.
// Fixed: the destructor used to be empty, leaking mAccBuffer.
RtmpPuller2::~RtmpPuller2()
{
delete[] mAccBuffer;
mAccBuffer = nullptr;
}
// Worker-thread entry point: pump packets every 10 ms while RUNNING.
int ThreadPull(RtmpPuller2*p) {
while (RtmpPuller2::CAP_STATUS::RUNNING == p->Status()) {
p->PullData();
Sleep(10);
}
return 0;
}
// 关闭拉流
int RtmpPuller2::StopPull()
{
mStatus = STOP;
this->mThread->join();
RTMP_Close(mRtmp);
RTMP_Free(mRtmp);
return 0;
}
// Launch the pull loop on a worker thread. Only effective after a
// successful ConnectServer(); otherwise this is a no-op.
// Cleaned up: removed the discarded get_id() call and the empty else branch.
int RtmpPuller2::StartPull()
{
if(this->mStatus == CONNECTED) {
// RUNNING must be set before the thread starts: ThreadPull()'s loop
// condition reads Status().
mStatus = RUNNING;
this->mThread = new std::thread(ThreadPull, this);
}
return 0;
}
// Debug dump of the re-assembled AAC stream (opened lazily, never closed).
FILE *fp = nullptr;
// Pump one RTMPPacket, de-packetize it, and deliver elementary-stream data
// to the observers:
// - video: AVC sequence headers are split into SPS/PPS, and every NALU's
//   4-byte length prefix is rewritten in place into an annex-b start code
//   (00 00 00 01) before delivery;
// - audio: an ADTS header is synthesized from the cached AudioSpecificConfig
//   and prepended to each raw AAC frame via mAccBuffer.
// Returns 0 normally, or RTMP_ReadPacket's failure result.
// NOTE(review): RTMP_ReadPacket() reports failure as FALSE (0), so the
// `ret < 0` check may never trigger -- verify against the librtmp version.
// NOTE(review): mAccBuffer holds 3000 bytes; a larger AAC payload would
// overflow it -- verify upstream framing.
int RtmpPuller2::PullData()
{
RTMPPacket packet = { 0 };
// Parse rtmp stream to h264 and aac
uint8_t nalu_header[4] = { 0x00, 0x00, 0x00, 0x01 };
int ret = RTMP_ReadPacket(mRtmp, &packet);
if (ret < 0)
return ret;
if (nullptr == fp) {
fp = fopen("src.aac", "wb");
}
if (RTMPPacket_IsReady(&packet)) {
// Process packet, eg: set chunk size, set bw, ...
RTMP_ClientPacket(mRtmp, &packet);
if (packet.m_packetType == RTMP_PACKET_TYPE_VIDEO) {
// FLV VideoTagHeader: 0x17 = keyframe + AVC, body[1] == 0 = sequence header.
bool keyframe = 0x17 == packet.m_body[0] ? true : false;
bool sequence = 0x00 == packet.m_body[1];
printf("keyframe=%s, sequence=%s\n", keyframe ? "true" : "false", sequence ? "true" : "false");
// SPS/PPS sequence
if (sequence) {
// Skip to the sps count field of AVCDecoderConfigurationRecord.
uint32_t offset = 10;
uint32_t sps_num = packet.m_body[offset++] & 0x1f;
for (int i = 0; i < sps_num; i++) {
uint8_t ch0 = packet.m_body[offset];
uint8_t ch1 = packet.m_body[offset + 1];
uint32_t sps_len = ((ch0 << 8) | ch1);
offset += 2;
// Overwrite the bytes before the SPS with an annex-b start code.
packet.m_body[offset - 1] = 0x01;
packet.m_body[offset - 2] = 0x00;
packet.m_body[offset - 3] = 0x00;
packet.m_body[offset - 4] = 0x00;
if (mObserver.size() > 0) {
for (auto itr = this->mObserver.begin(); itr != mObserver.end(); itr++) {
RtmpPullObserver *p = (RtmpPullObserver *)*itr;
if (p->mObserverType == RtmpPullObserver::Observer_Video) {
p->OnRtmpFrame(packet.m_body + offset - 4, sps_len + 4);
}
}
}
// Write sps data
//fwrite(nalu_header, sizeof(uint8_t), 4, _file_ptr);
//fwrite(packet.m_body + offset, sizeof(uint8_t), sps_len, _file_ptr);
offset += sps_len;
}
uint32_t pps_num = packet.m_body[offset++] & 0x1f;
for (int i = 0; i < pps_num; i++) {
uint8_t ch0 = packet.m_body[offset];
uint8_t ch1 = packet.m_body[offset + 1];
uint32_t pps_len = ((ch0 << 8) | ch1);
offset += 2;
packet.m_body[offset - 1] = 0x01;
packet.m_body[offset - 2] = 0x00;
packet.m_body[offset - 3] = 0x00;
packet.m_body[offset - 4] = 0x00;
if (mObserver.size() > 0) {
for (auto itr = this->mObserver.begin(); itr != mObserver.end(); itr++) {
RtmpPullObserver *p = (RtmpPullObserver *)*itr;
if (p->mObserverType == RtmpPullObserver::Observer_Video) {
p->OnRtmpFrame(packet.m_body + offset - 4, pps_len + 4);
}
}
}
// Write pps data
offset += pps_len;
}
}
// Nalu frames
else {
// Skip the 5-byte tag header, then read the 4-byte NALU length.
uint32_t offset = 5;
uint8_t ch0 = packet.m_body[offset];
uint8_t ch1 = packet.m_body[offset + 1];
uint8_t ch2 = packet.m_body[offset + 2];
uint8_t ch3 = packet.m_body[offset + 3];
uint32_t data_len = ((ch0 << 24) | (ch1 << 16) | (ch2 << 8) | ch3);
offset += 4;
// Replace the length prefix with an annex-b start code in place.
packet.m_body[offset - 1] = 0x01;
packet.m_body[offset - 2] = 0x00;
packet.m_body[offset - 3] = 0x00;
packet.m_body[offset - 4] = 0x00;
if (mObserver.size() > 0) {
for (auto itr = this->mObserver.begin(); itr != mObserver.end(); itr++) {
RtmpPullObserver *p = (RtmpPullObserver *)*itr;
if (p->mObserverType == RtmpPullObserver::Observer_Video) {
p->OnRtmpFrame(packet.m_body + offset - 4, data_len + 4);
}
}
}
// Write nalu data(already started with '0x00,0x00,0x00,0x01')
//fwrite(nalu_header, sizeof(uint8_t), 4, _file_ptr);
offset += data_len;
}
}
else if (packet.m_packetType == RTMP_PACKET_TYPE_AUDIO) {
bool sequence = 0x00 == packet.m_body[1];
printf("sequence=%s\n", sequence ? "true" : "false");
// AAC sequence
if (sequence) {
// FLV AudioTagHeader bit fields.
uint8_t format = (packet.m_body[0] & 0xf0) >> 4;
uint8_t samplerate = (packet.m_body[0] & 0x0c) >> 2;
uint8_t sampledepth = (packet.m_body[0] & 0x02) >> 1;
uint8_t type = packet.m_body[0] & 0x01;
// sequence = packet.m_body[1];
// AAC(AudioSpecificConfig)
// Cache the config fields (class members) for ADTS synthesis below.
if (format == 10) {
ch0 = packet.m_body[2];
ch1 = packet.m_body[3];
config = ((ch0 << 8) | ch1);
object_type = (config & 0xF800) >> 11;
sample_frequency_index = (config & 0x0780) >> 7;
channels = (config & 0x78) >> 3;
frame_length_flag = (config & 0x04) >> 2;
depend_on_core_coder = (config & 0x02) >> 1;
extension_flag = config & 0x01;
}
// Speex(Fix data here, so no need to parse...)
else if (format == 11) {
// 16 KHz, mono, 16bit/sample
type = 0;
sampledepth = 1;
samplerate = 4;
}
}
// Audio frames
else {
// ADTS(7 bytes) + AAC data
uint32_t data_len = packet.m_nBodySize - 2 + 7;
uint8_t adts[7];
adts[0] = 0xff;
adts[1] = 0xf1;
adts[2] = ((object_type - 1) << 6) | (sample_frequency_index << 2)
| (channels >> 2);
adts[3] = ((channels & 3) << 6) + (data_len >> 11);
adts[4] = (data_len & 0x7FF) >> 3;
adts[5] = ((data_len & 7) << 5) + 0x1F;
adts[6] = 0xfc;
// Write audio frames
fwrite(adts, sizeof(uint8_t), 7, fp);
fwrite(packet.m_body + 2, sizeof(uint8_t), packet.m_nBodySize - 2, fp);
fflush(fp);
// Deliver ADTS header + raw AAC payload as one contiguous buffer.
memcpy(mAccBuffer, adts, 7);
memcpy(mAccBuffer + 7, packet.m_body + 2, packet.m_nBodySize - 2);
if (mObserver.size() > 0) {
for (auto itr = this->mObserver.begin(); itr != mObserver.end(); itr++) {
RtmpPullObserver *p = (RtmpPullObserver *)*itr;
if (p->mObserverType == RtmpPullObserver::Observer_Audio) {
p->OnRtmpFrame(mAccBuffer, packet.m_nBodySize - 2 + 7);
}
}
}
}
}
RTMPPacket_Free(&packet);
}
return 0;
}
// Register an observer exactly once; duplicate registrations are ignored.
// Returns 0 on success (or duplicate), -1 for null input.
int RtmpPuller2::SetObserver(RtmpPuller2::RtmpPullObserver *p)
{
if (nullptr == p)
return -1;
// lock_guard releases mMux on every return path.
// Fixed: the original returned early on a duplicate while still holding
// the lock, deadlocking the next caller.
std::lock_guard<std::mutex> lock(mMux);
for (auto itr = this->mObserver.begin(); itr != mObserver.end(); itr++) {
if (p == *itr) return 0;
}
this->mObserver.push_back(p);
return 0;
}
// Current lifecycle state of the puller.
RtmpPuller2::CAP_STATUS RtmpPuller2::Status()
{
return this->mStatus;
}
// Allocate an RTMP session, connect to the server, and open the stream.
// On any failure the session is freed and mStatus becomes FAIL; on success
// mStatus becomes CONNECTED. Returns 0 on success, -1 on failure.
int RtmpPuller2::ConnectServer(string url)
{
mRtmp = RTMP_Alloc();
RTMP_Init(mRtmp);
if (RTMP_SetupURL(mRtmp, (char*)url.c_str()) == FALSE)
{
RTMP_Free(mRtmp);
mStatus = FAIL;
return -1;
}
/* connect to the server */
if (RTMP_Connect(mRtmp, NULL) == FALSE)
{
RTMP_Free(mRtmp);
mStatus = FAIL;
return -1;
}
/* connect to the stream */
if (RTMP_ConnectStream(mRtmp, 0) == FALSE)
{
RTMP_Close(mRtmp);
RTMP_Free(mRtmp);
mStatus = FAIL;
return -1;
}
mStatus = CONNECTED;
return 0;
}

78
media/RtmpPuller2.h Normal file
View File

@ -0,0 +1,78 @@
#pragma once
#ifdef __cplusplus
extern "C"{
#endif
extern "C" {
#include "librtmp\rtmp.h"
#include "librtmp\rtmp_sys.h"
#include "librtmp\amf.h"
}
#ifdef __cplusplus
}
#endif
#include <windows.h>
#include <string>
#include <thread>
#include <vector>
#include <mutex>
using namespace std;
#ifdef WIN32
#include <windows.h>
#pragma comment(lib,"WS2_32.lib")
#pragma comment(lib,"winmm.lib")
#endif
// Pulls an RTMP stream directly with librtmp on a worker thread,
// de-packetizes FLV tags into H.264 annex-b NALUs and ADTS-framed AAC,
// and fans the payloads out to registered observers.
class RtmpPuller2
{
public:
// Observer interface; mObserverType selects which payloads it receives.
class RtmpPullObserver {
public:
enum ObserverType {
Observer_Video = 0,
Observer_Audio = 1,
};
virtual void OnRtmpFrame(void * dat, uint32_t size) {};
ObserverType mObserverType;
};
enum CAP_STATUS {
CONNECTED = 0,
RUNNING = 1,
STOP = 2,
PAUSE = 3,
FAIL = 4,
NOSOURCE = 6,
};
RtmpPuller2();
~RtmpPuller2();
// Stop the worker loop and close the session.
int StopPull();
// Spawn the worker thread; requires Status() == CONNECTED.
int StartPull();
// Pump and de-packetize one RTMPPacket.
int PullData();
// Register an observer once; -1 for null input.
int SetObserver(RtmpPuller2::RtmpPullObserver *);
CAP_STATUS Status();
// Connect to an rtmp:// URL; 0 on success, -1 on failure.
int ConnectServer(string url);
private:
std::thread *mThread;
RTMP *mRtmp;
string mUrl;
CAP_STATUS mStatus;
vector<RtmpPuller2::RtmpPullObserver*> mObserver;
mutex mMux; // guards mObserver
uint8_t *mAccBuffer; // scratch: ADTS header + AAC payload
// Cached ADTS header fields: the AAC AudioSpecificConfig is only sent in
// the stream's first (sequence) packet, so it is parsed once and reused.
uint8_t ch0 = 0;
uint8_t ch1 = 0;
uint16_t config = 0;
uint16_t object_type = 0;
uint16_t sample_frequency_index = 0;
uint16_t channels = 0;
uint16_t frame_length_flag = 0;
uint16_t depend_on_core_coder = 0;
uint16_t extension_flag = 0;
};

573
media/RtmpPusher.cpp Normal file
View File

@ -0,0 +1,573 @@
#include "RtmpPusher.h"
/**
 * Initialize Winsock (Windows only).
 *
 * @return nonzero on success, 0 on failure.
 */
// Bring up Winsock 1.1 on Windows; a no-op success elsewhere.
int InitSockets()
{
#ifdef WIN32
WSADATA wsaData;
return (WSAStartup(MAKEWORD(1, 1), &wsaData) == 0);
#else
return TRUE;
#endif
}
bool RtmpPusher::IfConnect()
{
return mIfConnected;
}
// Initialize Winsock, allocate an RTMP session, enable publish (write)
// mode, then connect to the server and stream.
// Returns 0 on success, -1 on failure.
// NOTE(review): failure paths free the session but never call
// CleanupSockets() -- verify whether that is intentional.
int RtmpPusher::RTMP264_Connect(const char* url)
{
InitSockets();
m_pRtmp = RTMP_Alloc();
RTMP_Init(m_pRtmp);
/* set the target URL */
if (RTMP_SetupURL(m_pRtmp, (char*)url) == FALSE)
{
RTMP_Free(m_pRtmp);
return -1;
}
/* enable write (publish); must be called before connecting or it has no effect */
RTMP_EnableWrite(m_pRtmp);
/* connect to the server */
if (RTMP_Connect(m_pRtmp, NULL) == FALSE)
{
RTMP_Free(m_pRtmp);
return -1;
}
/* connect to the stream */
if (RTMP_ConnectStream(m_pRtmp, 0) == FALSE)
{
RTMP_Close(m_pRtmp);
RTMP_Free(m_pRtmp);
return -1;
}
this->mUrl = string(url);
this->mIfConnected = true;
return 0;
}
/**
 * Tear down Winsock (Windows only); pairs with InitSockets().
 */
// Tear down Winsock on Windows; no-op elsewhere.
inline void CleanupSockets()
{
#ifdef WIN32
WSACleanup();
#endif
}
// Close and free the RTMP session (safe to call when already closed) and
// tear down Winsock. mMux guards against concurrent senders.
void RtmpPusher::RTMP264_Close()
{
mMux.lock();
if (m_pRtmp)
{
RTMP_Close(m_pRtmp);
RTMP_Free(m_pRtmp);
m_pRtmp = NULL;
}
mMux.unlock();
CleanupSockets();
}
// Process-lifetime scratch RTMPPacket, reused across calls (deliberately
// never freed; see the commented-out free() below).
RTMPPacket* gPacket = nullptr;
// Wrap `data` into an RTMPPacket and send it immediately (not queued).
// Returns RTMP_SendPacket's result, or 0 when not connected.
// NOTE(review): the buffer is sized as 640*720*3 + size using the FIRST
// call's size; a later, larger payload would overflow it. Also, only
// RTMP_HEAD_SIZE bytes are sized for the header while the rest backs
// m_body -- verify both assumptions.
int RtmpPusher::SendPacket(unsigned int nPacketType, unsigned char * data,
unsigned int size, unsigned int nTimestamp)
{
static bool once = true;
/* allocate and initialize the packet; size is the body length */
if(nullptr == gPacket)
gPacket = (RTMPPacket *)malloc(640*720*3 + size);
memset(gPacket, 0, RTMP_HEAD_SIZE);
/* body memory lives right after the header area */
gPacket->m_body = (char *)gPacket + RTMP_HEAD_SIZE;
gPacket->m_nBodySize = size;
memcpy(gPacket->m_body, data, size);
gPacket->m_hasAbsTimestamp = 0;
gPacket->m_packetType = nPacketType; /* packet type: audio or video */
gPacket->m_nInfoField2 = m_pRtmp->m_stream_id;
gPacket->m_nChannel = 0x04;
gPacket->m_headerType = RTMP_PACKET_SIZE_LARGE;
// Audio data packets (not the 4-byte sequence header) use a medium header.
if (RTMP_PACKET_TYPE_AUDIO == nPacketType && size != 4)
{
gPacket->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
}
gPacket->m_nTimeStamp = nTimestamp;
/* send */
int nRet = 0;
if (RTMP_IsConnected(m_pRtmp))
{
nRet = RTMP_SendPacket(m_pRtmp, gPacket, FALSE); /* FALSE = send directly, TRUE = enqueue */
}
else {
// one-shot latch for the disconnected case (currently does nothing else)
if (once) {
once = false;
}
}
/* buffer intentionally kept for reuse */
//free(gPacket);
return nRet;
}
// Wrap `data` into a freshly allocated RTMPPacket and enqueue it for send.
// Returns RTMP_SendPacket's result, or 0 when not connected.
// Fixed: the function always returned 0, discarding the send result; a dead
// `m_nTimeStamp += 33` store (immediately overwritten below) was removed.
int RtmpPusher::SendVideoPacket(unsigned int nPacketType,
unsigned char * data, unsigned int size, unsigned int nTimestamp)
{
RTMPPacket* packet;
/* allocate and initialize the packet; size is the body length */
packet = (RTMPPacket *)malloc(RTMP_HEAD_SIZE + size);
memset(packet, 0, RTMP_HEAD_SIZE);
/* body memory lives right after the header area */
packet->m_body = (char *)packet + RTMP_HEAD_SIZE;
packet->m_nBodySize = size;
memcpy(packet->m_body, data, size);
packet->m_hasAbsTimestamp = 0;
packet->m_packetType = nPacketType; /* packet type: audio or video */
packet->m_nInfoField2 = m_pRtmp->m_stream_id;
packet->m_nChannel = 0x04;
packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
// Audio data packets (not the 4-byte sequence header) use a medium header.
if (RTMP_PACKET_TYPE_AUDIO == nPacketType && size != 4)
{
packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
}
packet->m_nTimeStamp = nTimestamp;
/* send (queued) */
int nRet = 0;
if (RTMP_IsConnected(m_pRtmp))
{
nRet = RTMP_SendPacket(m_pRtmp, packet, TRUE); /* TRUE = enqueue, FALSE = send directly */
}
/* free the packet */
free(packet);
return nRet;
}
// Start disconnected with no worker thread.
RtmpPusher::RtmpPusher()
:mThread(nullptr),
mIfConnected(false)
{
}
// Close any live RTMP session and tear down Winsock.
RtmpPusher::~RtmpPusher()
{
if (m_pRtmp)
{
RTMP_Close(m_pRtmp);
RTMP_Free(m_pRtmp);
m_pRtmp = NULL;
}
CleanupSockets();
}
// Audio-encoder callback: copy one encoded AAC frame into the send queue.
// NOTE(review): pts is narrowed to uint16_t by this signature, and the
// malloc'ed copy's ownership passes to the queue consumer -- confirm the
// consumer frees it.
void H264RtmpPuser::OnAudioEncode(const void * frameaddress, uint32_t framelen,uint16_t pts)
{
uint8_t *pack = (uint8_t*)malloc(framelen);
memcpy(pack, frameaddress, framelen);
mMux.lock();
Buffer buf;
buf.buf = (uint8_t *)pack;
buf.len = framelen;
buf.type = PAYLOAD_TYPE_AUDIO;
this->mPack.push(buf);
mMux.unlock();
this->mAudioPts = pts;
}
// Start with no cached SPS/PPS; the stream clock (mStartTime) is armed on
// the first NAL pushed through sortAndSendNal().
H264RtmpPuser::H264RtmpPuser()
{
this->metaData.Pps = nullptr;
this->metaData.Sps = nullptr;
this->metaData.nPpsLen = 0;
this->metaData.nSpsLen = 0;
this->mStartTime = 0;
mFirtACC = false;
}
// Scan an annex-b buffer that may hold several concatenated NAL units
// (SPS/PPS/SEI/I/P), split it at start codes, and push each unit through
// SendH264Packet() stamped relative to mStartTime.
// Returns 0; -1 for null input.
// NOTE(review): the NAL-type byte comparisons (0x41/0x65/0x67/0x68/0x06)
// assume nal_ref_idc = 3 for SPS/PPS/I and 2 for P -- typical encoder
// output, but not guaranteed by the spec; verify against the encoder used.
int H264RtmpPuser::sortAndSendNal(uint8_t * data, int len)
{
int i = 0;
uint8_t * nalhead = nullptr;
uint8_t * naltail = nullptr;
uint32_t size = 0;
// Arm the stream clock on the very first buffer.
if(0 == mStartTime){
mStartTime = RTMP_GetTime();
}
if (nullptr == data) {
return -1;
}
while (i < len)
{
// sps pps p frame
// Match a 4-byte (00 00 00 01) or 3-byte (00 00 01) start code.
if ((data[i] == 0x00) && (data[i + 1] == 0x00)
&& ((data[i + 2] == 0x00) && (data[i + 3] == 0x01) || (data[i + 2] == 0x01))) {
if ((nalhead == nullptr) && (i == 0) ) {
// P frame at buffer start: send the whole buffer as-is.
if ((data[i + 3] == 0x01) && (data[i + 4] == 0x41)) {
nalhead = data;
naltail = data + (len);
size = naltail - nalhead;
this->SendH264Packet(nalhead, size, 0, RTMP_GetTime() - mStartTime);
return 0;
}
// SPS at buffer start: begin splitting the parameter sets apart.
if ((data[i + 3] == 0x01) && (data[i + 4] == 0x67)) { // sps or pps or sei
nalhead = data;
i += 1;
}
//sei
if ((data[i + 2] == 0x01) && (data[i + 3] == 0x06)) {
i += 1;
}
}
else {
// i frame: flush the pending unit, then send the rest of the buffer.
if ((data[i + 2] == 0x01) && (data[i + 3] == 0x65)) {
naltail = data + i;
size = naltail - nalhead;
this->SendH264Packet(nalhead, size, 0, RTMP_GetTime() - mStartTime);
nalhead = data + i;
naltail = data + (len);
size = naltail - nalhead;
this->SendH264Packet(nalhead, size, 0, RTMP_GetTime() - mStartTime);
return 0;
}
//pps: flush the pending SPS, start a new unit here
if ((data[i + 3] == 0x01) && (data[i + 4] == 0x68)) { // sps or pps or sei
naltail = data + i;
size = naltail - nalhead;
this->SendH264Packet(nalhead, size, 0, RTMP_GetTime() - mStartTime);
nalhead = data + i;
i += 3;
}//sps
if ((data[i + 3] == 0x01) && (data[i + 4] == 0x67)) { // sps or pps or sei
nalhead = data + i;
i += 3;
}
//sei: flush the pending unit, start a new one here
if ((data[i + 3] == 0x01) && (data[i + 4] == 0x06)) { // sps or pps or sei
naltail = data + i;
size = naltail - nalhead;
this->SendH264Packet(nalhead, size, 0, RTMP_GetTime() - mStartTime);
nalhead = data + i;
i += 3;
}
// sps pps or sei
}
// skip over the "00 00 00 00 01" case
}
i++;
}
return 0;
}
// For the detailed layout of the AVC sequence header (video sync packet)
// see https://blog.csdn.net/liwf616/article/details/51596373
// Build and enqueue the AVCDecoderConfigurationRecord packet carrying the
// cached SPS/PPS; decoders need it before any video frame is decodable.
// Returns RTMP_SendPacket's result.
int H264RtmpPuser::SendVideoSpsPps(unsigned char * pps,
int pps_len, unsigned char * sps,
int sps_len,unsigned int nTimeStamp)
{
RTMPPacket * packet = NULL; // RTMP packet structure
unsigned char * body = NULL;
int i;
packet = (RTMPPacket *)malloc(RTMP_HEAD_SIZE + 1024);
//RTMPPacket_Reset(packet); // reset packet state
memset(packet, 0, RTMP_HEAD_SIZE + 1024);
packet->m_body = (char *)packet + RTMP_HEAD_SIZE;
body = (unsigned char *)packet->m_body;
i = 0;
// FrameType == 1CodecID == 7
body[i++] = 0x17;
//AVCPacketType (0 = sequence header)
body[i++] = 0x00;
//CompositionTime
body[i++] = 0x00;
body[i++] = 0x00;
body[i++] = 0x00;
/*AVCDecoderConfigurationRecord*/
body[i++] = 0x01;
// profile / compatibility / level copied from the SPS
body[i++] = sps[1];
body[i++] = sps[2];
body[i++] = sps[3];
body[i++] = 0xff;
/*sps*/
body[i++] = 0xe1;
body[i++] = (sps_len >> 8) & 0xff;
body[i++] = sps_len & 0xff;
memcpy(&body[i], sps, sps_len);
i += sps_len;
/*pps*/
body[i++] = 0x01;
body[i++] = (pps_len >> 8) & 0xff;
body[i++] = (pps_len) & 0xff;
memcpy(&body[i], pps, pps_len);
i += pps_len;
packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
packet->m_nBodySize = i;
packet->m_nChannel = 0x04;
packet->m_nTimeStamp = nTimeStamp;
packet->m_hasAbsTimestamp = 0;
packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
packet->m_nInfoField2 = m_pRtmp->m_stream_id;
/* send (queued) */
int nRet = RTMP_SendPacket(m_pRtmp, packet, TRUE);
free(packet); // release the packet
return nRet;
}
// Placeholder: audio payload sending is not implemented yet; always
// reports success.
int H264RtmpPuser::SendAudioData(unsigned char * dat,
unsigned int size, unsigned int nTimeStamp)
{
return 0;
}
// Dispatch one annex-b NAL unit (with 00 00 01 or 00 00 00 01 start code):
// SPS (0x67) and PPS (0x68) are cached into metaData, P frames (0x41) and
// I frames (0x65) are packaged and sent, SEI (0x06) is dropped.
// Returns the sender's result for frame types, 0 for cached/ignored types,
// and false (0) for null or malformed input.
// Fixed: control previously flowed off the end of this non-void function
// on every non-frame path (UB); frame sends now propagate their result.
int H264RtmpPuser::SendH264Packet(unsigned char * data,
unsigned int size, int bIsKeyFrame, unsigned int nTimeStamp)
{
if(data == NULL){
return false;
}
unsigned int nal_type = 0;
// Must begin with an annex-b start code.
if ((data[0] != 0x00) || (data[1] != 0x00)
|| ((data[2] != 0x00)&&data[2]!= 0x01)) {
return false;
}
if (data[2] == 0x01) {
nal_type = data[3]; // 3-byte start code
}
if (data[3] == 0x01) {
nal_type = data[4]; // 4-byte start code
}
switch (nal_type)
{
case 0x67: //just update sps
if (NULL == metaData.Sps)
metaData.Sps = (unsigned char *)malloc(size - 4);
h264_decode_sps(data + 4, size - 4, metaData.nWidth, metaData.nHeight, metaData.nFrameRate);
metaData.nSpsLen = size - 4;
memcpy(this->metaData.Sps, data + 4, size - 4);
break;
case 0x68: //just update pps
this->metaData.nPpsLen = size - 4;
if (NULL == metaData.Pps) metaData.Pps = (unsigned char *)malloc(size - 4);
memcpy(this->metaData.Pps, data + 4, size - 4);
break;
case 0x41: //p frame
return this->sendDataPackH264(data + 4, size - 4, 0, nTimeStamp);
case 0x65: //i frame
return this->sendDataPackH264(data + 3, size - 3, 1, nTimeStamp);
case 0x06: // SEI: intentionally ignored
break;
default:
break;
}
return 0;
}
// Process-lifetime scratch buffer for the FLV video tag body (same reuse
// pattern as gPacket above; deliberately never freed).
unsigned char *gBody = nullptr;
// Wrap one H.264 NALU (without start code) into an FLV AVC video tag body
// and push it out via SendPacket().
// bIsKeyFrame == 1: tag 0x17 (keyframe + AVC) and (re)send SPS/PPS first,
// since keyframes are only decodable after the parameter sets; otherwise
// tag 0x27 (interframe + AVC).
// Fixed: removed the dead `if (size < 0) gBody = gBody;` -- size is
// unsigned, so the condition was always false (and the body a no-op).
// NOTE(review): no bounds check -- a NALU larger than 640*720*3 bytes
// would overflow gBody; verify upstream framing guarantees this limit.
int H264RtmpPuser::sendDataPackH264(unsigned char * data,
unsigned int size, int bIsKeyFrame, unsigned int nTimeStamp)
{
if (gBody == nullptr) {
gBody = new unsigned char[640*720*3 + 9];
}
memset(gBody, 0, size + 9);
int i = 0;
if (1 == bIsKeyFrame) {
gBody[i++] = 0x17;// 1:Iframe 7:AVC
gBody[i++] = 0x01;// AVC NALU
gBody[i++] = 0x00;
gBody[i++] = 0x00;
gBody[i++] = 0x00;
// NALU size (big-endian 32-bit)
gBody[i++] = size >> 24 & 0xff;
gBody[i++] = size >> 16 & 0xff;
gBody[i++] = size >> 8 & 0xff;
gBody[i++] = size & 0xff;
// NALU data
memcpy(&gBody[i], data, size);
if(metaData.Sps != nullptr)
SendVideoSpsPps(metaData.Pps, metaData.nPpsLen, metaData.Sps,
metaData.nSpsLen, 0);
}
else {
gBody[i++] = 0x27;// 2:Pframe 7:AVC
gBody[i++] = 0x01;// AVC NALU
gBody[i++] = 0x00;
gBody[i++] = 0x00;
gBody[i++] = 0x00;
// NALU size (big-endian 32-bit)
gBody[i++] = size >> 24 & 0xff;
gBody[i++] = size >> 16 & 0xff;
gBody[i++] = size >> 8 & 0xff;
gBody[i++] = size & 0xff;
// NALU data
memcpy(&gBody[i], data, size);
}
int bRet = SendPacket(RTMP_PACKET_TYPE_VIDEO, gBody, i + size, nTimeStamp);
return bRet;
}
// Publish the AAC "sequence header" tag (AudioSpecificConfig) that players
// require before any raw AAC frames.
// NOTE(review): the audioType/sampleIndex/channel parameters are currently
// ignored -- the config below is hard-coded to AAC-LC, 44.1 kHz, stereo.
// Confirm against callers before wiring the parameters through.
// Returns the result of RTMP_SendPacket.
int H264RtmpPuser::SendAudioSync(int audioType,
    int sampleIndex, int channel, unsigned int nTimeStamp)
{
    RTMPPacket * packet = NULL; // rtmp packet structure
    unsigned char * body = NULL;
    int i; // (unused)
    // One allocation holds the packet header area plus up to 1024 body bytes.
    packet = (RTMPPacket *)malloc(RTMP_HEAD_SIZE + 1024);
    //RTMPPacket_Reset(packet);// reset packet state
    memset(packet, 0, RTMP_HEAD_SIZE + 1024);
    packet->m_body = (char *)packet + RTMP_HEAD_SIZE;
    body = (unsigned char *)packet->m_body;
    body[0] = 0xaf; // FLV audio tag header: AAC, 44 kHz, 16-bit, stereo
    body[1] = 0x00; // AACPacketType: 0 = sequence header
    // Build the 2-byte AudioSpecificConfig.
    uint16_t audioSpecConf = 0;
    audioSpecConf |= ((2 << 11) & 0xf800); //2: AACLC
    audioSpecConf |= ((4 << 7) & 0x0780); //4: 44khz
    audioSpecConf |= ((2 << 3) & 0x78); //4: 2:stero
    audioSpecConf |= 0 & 0x07; //4: 0 padding
    body[2] = (audioSpecConf >> 8) & 0xff;
    body[3] = audioSpecConf & 0xff;
    packet->m_packetType = RTMP_PACKET_TYPE_AUDIO;
    packet->m_nBodySize = 4;
    packet->m_nChannel = 0x04; // chunk stream id used for audio
    packet->m_nTimeStamp = nTimeStamp;
    packet->m_hasAbsTimestamp = 0;
    packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
    packet->m_nInfoField2 = m_pRtmp->m_stream_id;
    /* hand the packet to librtmp (queued send) */
    int nRet = RTMP_SendPacket(m_pRtmp, packet, TRUE);
    free(packet); // release packet memory
    return nRet;
}
// Wrap one ADTS-framed AAC frame in an FLV AudioData tag (ADTS header
// stripped) and send it; publishes the AAC sequence header once first.
// NOTE(review): nTimeStamp is ignored -- a local 20 ms-step clock is used
// instead, preserved from the original implementation.
// Returns the SendPacket result, or -1 on bad input / allocation failure.
int H264RtmpPuser::sendDataPackAAC(unsigned char * data,
    unsigned int size, unsigned int nTimeStamp)
{
    // The ADTS header is 7 bytes; shorter buffers carry no payload and the
    // unsigned arithmetic `size - 7` below would underflow.
    if (data == nullptr || size <= 7) {
        return -1;
    }
    static int timestamp = 0;
    timestamp += 20;
    if (!mFirtACC) {
        // First audio frame: publish the AAC sequence header.
        SendAudioSync(2, 4, 4, timestamp);
        mFirtACC = 1;
    }
    unsigned char *body = (unsigned char *)malloc(size + 2);
    if (body == nullptr) {
        return -1;
    }
    body[0] = 0xAF; // FLV audio tag header: AAC, 44 kHz, 16-bit, stereo
    body[1] = 0x01; // aac raw data
    memcpy(body + 2, data + 7, size - 7); // strip the ADTS header
    int bRet = SendPacket(RTMP_PACKET_TYPE_AUDIO, body,
        size - 7 + 2, timestamp);
    free(body);
    // Previously the computed result was discarded and 0 returned.
    return bRet;
}
// VideoCodeObserver callback: copy one encoded H.264 frame and queue it for
// the sender thread. The queue entry owns the heap copy; ProcessSend()
// frees it after transmission.
void H264RtmpPuser::OnGetCodeFrame(uint8_t * data, int len)
{
    if (data == nullptr || len <= 0) {
        return;
    }
    uint8_t *pack = (uint8_t*)malloc(len);
    if (pack == nullptr) { // was unchecked: memcpy(NULL, ...) on OOM
        return;
    }
    memcpy(pack, data, len);
    // (removed a function-local static timestamp accumulator whose value was
    // never read)
    Buffer buf;
    buf.buf = pack;
    buf.len = len;
    buf.type = PAYLOAD_TYPE_VIDEO;
    mMux.lock();
    this->mPack.push(buf);
    mMux.unlock();
}
// Worker loop: drain the packet queue until StopPush() clears mIfStart.
// Video frames go through the NAL sorter, audio frames through the AAC
// sender; the queue entry's buffer is freed here after use.
void H264RtmpPuser::ProcessSend()
{
    while (this->mIfStart) {
        Buffer buf;
        bool havePacket = false;
        // Check-and-pop under one lock (the old code tested empty() outside
        // the mutex); keep the critical section free of send work.
        mMux.lock();
        if (!mPack.empty()) {
            buf = mPack.front();
            mPack.pop();
            havePacket = true;
        }
        mMux.unlock();
        if (havePacket) {
            // video frame
            if (buf.type == PAYLOAD_TYPE_VIDEO) {
                this->sortAndSendNal(buf.buf, buf.len);
            }
            // audio frame
            if (buf.type == PAYLOAD_TYPE_AUDIO) {
                this->sendDataPackAAC(buf.buf, buf.len, this->mAudioPts);
            }
            free(buf.buf);
        }
        msleep(10);
    }
}
// Thread entry point: runs the pusher's send loop until StopPush().
// Returns -1 when handed a null pusher, 0 after the loop exits.
int ThreadEncode(H264RtmpPuser * p)
{
    Debuger::Debug(L"thread started\r\n");
    if (p == nullptr) {
        return -1;
    }
    p->ProcessSend();
    return 0;
}
int H264RtmpPuser::StartPush()
{
mIfStart = true;
this->mThread = new std::thread(ThreadEncode,this);
mThreadId = this->mThread->get_id();
return 0;
}
int H264RtmpPuser::StopPush()
{
mIfConnected = false;
mIfStart = false;
if(mThread != nullptr)
this->mThread->join();
this->RTMP264_Close();
return 0;
}

117
media/RtmpPusher.h Normal file
View File

@ -0,0 +1,117 @@
#pragma once
#ifdef __cplusplus
extern "C" {
#endif
#include "librtmp_send264.h"
#include "librtmp\rtmp.h"
#include "librtmp\rtmp_sys.h"
#ifdef __cplusplus
}
#endif
#include "librtmp\amf.h"
#include "AACAudioCoder.h"
#include "sps_decode.h"
#include "VideoCoder.h"
#include <mutex>
#include <thread>
#include <queue>
#include<iostream>
#include <string>
using namespace std;
#define RTMP_HEAD_SIZE (sizeof(RTMPPacket)+RTMP_MAX_HEADER_SIZE)
/**
 * Thin wrapper around librtmp: owns the RTMP session handle and provides
 * packet-level send helpers for publisher subclasses.
 */
class RtmpPusher
{
protected:
	RTMP *m_pRtmp = nullptr;        // librtmp session handle (was uninitialized)
	string mUrl;                    // publish URL
	int mTick = 10;                 // pacing interval in ms used by senders
	std::mutex mMux;                // guards shared queues in subclasses
	std::thread *mThread = nullptr; // worker thread (owned; was uninitialized)
	bool mIfConnected = false;
	std::thread::id mThreadId;
public:
	bool IfConnect();
	int RTMP264_Connect(const char* url);
	void RTMP264_Close();
	int SendPacket(unsigned int nPacketType, unsigned char *data, unsigned int size, unsigned int nTimestamp);
	int SendVideoPacket(unsigned int nPacketType, unsigned char *data, unsigned int size, unsigned int nTimestamp);
	// Returns 0. (Previously declared int but returned nothing -- UB.)
	int SetTick(int tick) { this->mTick = tick; return 0; }
	virtual int StartPush() { return 0; }
	RtmpPusher();
	virtual ~RtmpPusher();
};
/**
* _RTMPMetadata
*
*/
/**
 * _RTMPMetadata
 *
 * Cached stream metadata for the H.264 publisher: picture geometry plus the
 * most recent SPS/PPS parameter sets (heap buffers, malloc'ed by the user).
 */
typedef struct _RTMPMetadata
{
	// video, must be h264 type
	int nWidth;              // picture width in pixels (parsed from the SPS)
	int nHeight;             // picture height in pixels
	int nFrameRate;          // frame rate (parsed from the SPS)
	unsigned int nSpsLen;    // length of Sps in bytes
	unsigned char *Sps;      // sequence parameter set, start code stripped
	unsigned int nPpsLen;    // length of Pps in bytes
	unsigned char *Pps;      // picture parameter set, start code stripped
} RTMPMetadata, *LPRTMPMetadata;
// Tags queued frames so the sender loop knows which path to use.
enum Payload_Type {
	PAYLOAD_TYPE_VIDEO = 0,
	PAYLOAD_TYPE_AUDIO = 1
};
// One queued media frame: a heap buffer (owned by the queue entry and freed
// by the consumer after sending), its length, and its payload type.
typedef struct _T_Buffer {
	uint8_t *buf;
	int len;
	Payload_Type type;
}Buffer;
// H.264 + AAC RTMP publisher: receives encoded video (VideoCodeObserver)
// and encoded audio (EncodeAudioObserver), queues the frames, and pushes
// FLV-tagged packets to the server from a worker thread.
class H264RtmpPuser : public RtmpPusher ,
	public VideoCodeObserver,
	public AAC_CODER::AACAudioCoder::EncodeAudioObserver {
private:
	bool mFirtACC;          // false until the AAC sequence header has been sent
	uint16_t mAudioPts;     // audio timestamp passed to the AAC sender
	bool mIfStart = false;  // worker-loop run flag
	// video sync packet (AVC sequence header: SPS + PPS)
	int SendVideoSpsPps(unsigned char *pps, int pps_len,
		unsigned char * sps, int sps_len, unsigned int nTimeStamp);
	// audio sync packet (AAC sequence header)
	int SendAudioSync(int audioType, int sampleIndex, int channel, unsigned int nTimeStamp);
	// unimplemented stub (always returns 0)
	int SendAudioData(unsigned char*dat, unsigned int size, unsigned int nTimeStamp);
	// dispatch one Annex-B NALU (SPS/PPS cached, frames sent)
	int SendH264Packet(unsigned char *data,
		unsigned int size, int bIsKeyFrame, unsigned int nTimeStamp);
	// wrap one NALU in an FLV VideoData tag and send it
	int sendDataPackH264(unsigned char *data,
		unsigned int size, int bIsKeyFrame, unsigned int nTimeStamp);
	// wrap one ADTS AAC frame in an FLV AudioData tag and send it
	int sendDataPackAAC(unsigned char *data, unsigned int size, unsigned int nTimeStamp);
	uint32_t mStartTime;    // NOTE(review): not used in the visible code -- confirm purpose
public:
	queue<Buffer> mPack;    // pending encoded frames, guarded by mMux
	RTMPMetadata metaData;  // cached SPS/PPS and stream properties
	H264RtmpPuser();
	int sortAndSendNal(uint8_t *data, int len);
	int SetSpsPps(unsigned char *pps, int pps_len,
		unsigned char * sps, int sps_len);
	// EncodeAudioObserver: queue one encoded AAC frame
	void OnAudioEncode(const void *frameaddress, uint32_t framelen, uint16_t pts);
	// VideoCodeObserver: queue one encoded H.264 frame
	void OnGetCodeFrame(uint8_t *data, int len);
	// worker loop: drains mPack and sends packets
	void ProcessSend();
	int StartPush();
	int StopPush();
};
int ThreadEncode(H264RtmpPuser*p);

34
media/SdlPlayer.h Normal file
View File

@ -0,0 +1,34 @@
#pragma once
#include <Windows.h>
#include "H264Docoder.h"
#include "CameraCapture.h"
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/avutil.h"
#include "libswscale/swscale.h"
#include "libavutil/opt.h"
#include "libavutil/imgutils.h"
#include "sdl/SDL.h"
};
// SDL-based YUV renderer: receives decoded frames (H264DecodeObserver) or
// raw camera buffers (CameraObserver) and draws them into a Win32 window.
class SDLPlayser : public H264decoder::H264DecodeObserver , public Camera::CameraObserver{
public:
	SDLPlayser(HWND,int ,int, AVPixelFormat);
	~SDLPlayser();
	// draw one raw frame of the given pixel format
	int RenderYuv(void *pBuf,uint32_t size, AVPixelFormat pix);
	// H264DecodeObserver: render a decoded AVFrame
	int OnRecieveData(AVFrame *frame);
	// DirectShow sample callback
	int OnBuffer(double dblSampleTime, BYTE * pBuffer, long lBufferSize) ;
	// CameraObserver: render a raw camera buffer
	int OnCameraData(uint8_t *dat, uint32_t size) ;
private:
	HWND mWindowWnd;            // target window handle
	//SDL---------------------------
	int screen_w = 0, screen_h = 0;  // output surface size
	int mInWidth, mInHeight;         // incoming frame size
	SDL_Texture* mTexture;
	SDL_Rect sdlRect;
	AVPixelFormat mFormat;           // expected input pixel format
	SDL_Window *mScreen;
	SDL_Renderer *mRender;
};

280
media/VideoCoder.cpp Normal file
View File

@ -0,0 +1,280 @@
#include "VideoCoder.h"
#include "Debuger.h"
FILE *p = nullptr;
int VideoCoder::OnBuffer(double dblSampleTime, BYTE * pBuffer, long lBufferSize)
{
this->Encode(pBuffer, lBufferSize, AV_PIX_FMT_YUV420P);
return 0;
}
// Camera capture callback: hand the raw frame to the encoder, assuming
// YUV420P input.
int VideoCoder::OnCameraData(uint8_t * dat, uint32_t size)
{
    //std::cout<<"captrue data and into coder"<<std::endl;
    Encode(dat, size, AV_PIX_FMT_YUV420P);
    return 0;
}
// Set the encoder's destination (output) resolution.
// NOTE(review): the parameters are declared uint8_t, so any realistic
// resolution (e.g. 640x480) is silently truncated to 0-255 before being
// stored in the unsigned int members. Widening them requires changing the
// declaration in VideoCoder.h as well -- callers appear to rely on the
// constructor defaults (640x480) instead.
int VideoCoder::SetDestPix(uint8_t width, uint8_t height) {
	this->mDestHeight = height;
	this->mDestWidth = width;
	return 0;
}
// Construct an H.264 encoder (libx264 via FFmpeg) for `width` x `height`
// input in pixel format `formt`. Output is hard-coded to 640x480 YUV420P,
// low-latency settings, and every encoded packet is also dumped to the
// debug file "shit.h264" (global FILE *p).
// NOTE(review): errors are only printed, never propagated -- a failed
// avcodec_open2 leaves the object half-initialized.
VideoCoder::VideoCoder(int width, int height, AVPixelFormat formt):
	mObserver(nullptr),
	mFrame(nullptr),
	mPitureBuffer(nullptr),
	mFormatCtx(nullptr),
	mOutputFmt(nullptr),
	mVideoStream(nullptr),
	mCodecCtx(nullptr),
	mCodec(nullptr) {
	AVCodecID codec_id = AV_CODEC_ID_H264;
	mCodec = avcodec_find_encoder(codec_id);
	// NOTE(review): av_register_all() is called after the encoder lookup
	// above; with old FFmpeg the lookup may need registration first -- confirm.
	av_register_all();
	if (nullptr == p) {
		p = fopen("shit.h264", "wb"); // shared debug dump file
	}
	this->mWidth = width;
	this->mHeight = height;
	this->mInformat = formt;
	if (!mCodec) {
		printf("Codec not found\n");
	}
	this->mFormatCtx = avformat_alloc_context();
	// Source: https://blog.csdn.net/leixiaohua1020/article/details/25430425
	// (adapted from Lei Xiaohua's muxing example)
	this->mOutputFmt = av_guess_format(NULL, "shit.h264", NULL);
	this->mFormatCtx->oformat = mOutputFmt;
	mCodecCtx = avcodec_alloc_context3(mCodec);
	if (!mCodecCtx) {
		printf("Could not allocate video codec context\n");
	}
	// NOTE(review): 1000 bit/s is almost certainly too low to be meaningful;
	// x264 rate control is effectively driven by qmin/qmax below.
	mCodecCtx->bit_rate = 1000;
	this->mDestHeight = 480;
	this->mDestWidth = 640;
	mCodecCtx->width = this->mDestWidth;
	mCodecCtx->height = this->mDestHeight;
	mCodecCtx->time_base.num = 1;
	mCodecCtx->time_base.den = 10;  // 10 fps nominal
	mCodecCtx->max_b_frames = 0;    // no B frames (low latency)
	mCodecCtx->qmin = 10;
	mCodecCtx->qmax = 25;
	//mCodecCtx->flags |= AV_CODEC_FLAG_LOW_DELAY;
	mCodecCtx->gop_size = 10;       // one keyframe per second at 10 fps
	mCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
	// x264 private options: fastest reasonable preset, zero-latency tuning.
	av_opt_set(mCodecCtx->priv_data, "preset", "superfast", 0);
	av_opt_set(mCodecCtx->priv_data, "tune", "zerolatency", 0);
	if (avcodec_open2(mCodecCtx, mCodec, NULL) < 0) {
		printf("Could not open codec\n");
	}
	mFrame = av_frame_alloc();
	if (!mFrame) {
		printf("Could not allocate video frame\n");
	}
	mFrame->format = mCodecCtx->pix_fmt;
	// NOTE(review): frame dimensions are set to half the codec size here --
	// looks suspicious; confirm against the Encode() data-plane assignments.
	mFrame->width = mCodecCtx->width/2;
	mFrame->height = mCodecCtx->height/2;
	mFrame->pts = 0;
	int ret = av_image_alloc(mFrame->data, mFrame->linesize, mCodecCtx->width, mCodecCtx->height,
		mCodecCtx->pix_fmt, 8);
	if (ret < 0) {
		printf("Could not allocate raw picture buffer\n");
	}
	// Assume the resolution never changes so the AVPacket can be reused.
	avformat_write_header(mFormatCtx, NULL);
	int picture_size = avpicture_get_size(AV_PIX_FMT_YUV420P, mCodecCtx->width, mCodecCtx->height);
}
// Close the shared debug dump file. Guarded: fclose(NULL) is undefined
// behaviour when the fopen in the constructor failed, and `p` is a global
// shared by all instances, so it is nulled out to prevent a double fclose
// when several coders are destroyed.
VideoCoder::~VideoCoder()
{
    if (p != nullptr) {
        fclose(p);
        p = nullptr;
    }
}
// Encode one raw frame: convert it to YUV420P if necessary, run it through
// the H.264 encoder, pass the resulting access unit to the observer, and
// append it to the debug dump file.
void VideoCoder::Encode(uint8_t * src, int size, enum AVPixelFormat format) {
	uint8_t *pFrame[4];
	int lineSize[4];
	static int debugs = 1;
	// convert to YUV420P first when the input is not already YUV420P
	int iFramesize;
	av_init_packet(&mAVPack);
	mAVPack.data = NULL; // packet data will be allocated by the encoder
	int ret = av_image_alloc(pFrame, lineSize, mWidth, mHeight, AV_PIX_FMT_YUV420P, 1);
	if (ret< 0) {
		Debuger::Debug(L"Could not allocate destination image\n");
	}
	if (this->mInformat != AV_PIX_FMT_YUV420P || (this->mDestHeight != mHeight)) {
		int size = avpicture_get_size(this->mInformat,mWidth,mHeight);
		// forceYUV420P fills the planes behind pFrame via the cast alias.
		this->forceYUV420P(src, size, mInformat, (uint8_t ***)&pFrame,&iFramesize);
		// only YUV420P is supported as encoder input
		mFrame->data[0] = pFrame[0]; //Y
		mFrame->data[1] = pFrame[1]; //U
		mFrame->data[2] = pFrame[2]; //V
	}
	else {
		// Input already matches: point the frame planes into the caller's
		// buffer directly (no copy).
		mFrame->data[0] = src; //Y
		mFrame->data[1] = src + mWidth*mHeight; //U
		mFrame->data[2] = src + mWidth*mHeight + mWidth*mHeight/4; //V
	}
	//PTS: simple monotonically increasing counter
	mFrame->pts++;
	int got_picture = 0;
	//Encode
	avcodec_encode_video2(mCodecCtx, &mAVPack, mFrame, &got_picture);
	if (got_picture > 0) {
		if(nullptr != this->mObserver)
			this->mObserver->OnGetCodeFrame(mAVPack.data, mAVPack.size);
	}
	//Debuger::Debug(L"Succeed to encode frame: %5d\tsize:%5d\n", 1, mAVPack.size);
	// NOTE(review): written even when got_picture == 0 -- confirm mAVPack is
	// empty in that case.
	fwrite(mAVPack.data, 1, mAVPack.size, p);
	fflush(p);
	// release the packet so the encoder does not back up
	av_packet_unref(&mAVPack);
	av_freep(&pFrame[0]);
	free(pFrame[0]); // no-op: av_freep() already set pFrame[0] to NULL
	//av_freep(&mFrame->data[0]);
	//av_freep(&mFrame->data[0]);
}
void VideoCoder::SetOutPutPixel(unsigned int width, unsigned int height)
{
this->mHeight = height;
this->mWidth = width;
}
// Drain frames still buffered inside the encoder and mux them out.
// Mirrors flush_encoder() in audiocaptureff.cpp.
// Returns 0 on success (or when the codec buffers nothing), negative on error.
int VideoCoder::flushCoder(AVFormatContext *fmt_ctx, unsigned int stream_index) {
	int ret;
	int got_frame;
	AVPacket enc_pkt;
	// 0x0020 == AV_CODEC_CAP_DELAY: only codecs with delayed output need a
	// flush pass. (The original tested the whole capabilities word, which
	// only skipped codecs with no capability bits at all.)
	if (!(this->mFormatCtx->streams[stream_index]->codec->codec->capabilities & 0x0020))
		return 0;
	while (1) {
		enc_pkt.data = NULL;
		enc_pkt.size = 0;
		av_init_packet(&enc_pkt);
		// NULL frame asks the encoder for buffered output.
		ret = avcodec_encode_video2(fmt_ctx->streams[stream_index]->codec, &enc_pkt,
			NULL, &got_frame);
		// (removed a pointless av_frame_free(NULL) call)
		if (ret < 0)
			break;
		if (!got_frame) {
			ret = 0;
			break;
		}
		Debuger::Debug(L"Flush Encoder: Succeed to encode 1 frame!\tsize:%5d\n", enc_pkt.size);
		/* mux encoded frame */
		ret = av_write_frame(fmt_ctx, &enc_pkt);
		if (ret < 0)
			break;
	}
	return ret;
}
// Force-convert a raw frame of any supported pixel format into YUV420P at
// the destination resolution, using swscale.
// `dst` is written as three plane buffers (Y, U, V) and `len` receives the
// total YUV420P payload size. The caller owns the memory behind dst.
// NOTE(review): Encode() passes `(uint8_t ***)&pFrame`, so dst[i] aliases
// pFrame[i] -- the memcpys below land in the av_image_alloc'ed planes.
// Confirm before changing either side.
// Returns 0 on success, -1 on allocation failure, -2 on null arguments.
int VideoCoder::forceYUV420P(uint8_t * src, int size,
	AVPixelFormat format,uint8_t **dst[4],int *len)
{
	uint8_t *src_data[4];
	int src_linesize[4];
	uint8_t *dst_data[4];
	int dst_linesize[4];
	struct SwsContext *img_convert_ctx;
	int ret = 0;
	if (nullptr == dst || nullptr == len) {
		return -2;
	}
	int src_bpp = av_get_bits_per_pixel(av_pix_fmt_desc_get(format));
	AVPixelFormat dst_pixfmt = AV_PIX_FMT_YUV420P;
	int dst_bpp = av_get_bits_per_pixel(av_pix_fmt_desc_get(dst_pixfmt));
	// staging buffers: copy-in at source size, scale-out at dest size
	ret = av_image_alloc(src_data, src_linesize, mWidth, mHeight, format, 1);
	if (ret< 0) {
		Debuger::Debug(L"Could not allocate source image\n");
		return -1;
	}
	ret = av_image_alloc(dst_data, dst_linesize, mDestWidth, mDestHeight, AV_PIX_FMT_YUV420P, 1);
	if (ret< 0) {
		Debuger::Debug(L"Could not allocate destination image\n");
		return -1;
	}
	img_convert_ctx = sws_alloc_context();
	//Show AVOption
	//av_opt_show2(img_convert_ctx, stdout, AV_OPT_FLAG_VIDEO_PARAM, 0);
	//Set Value
	av_opt_set_int(img_convert_ctx, "sws_flags", SWS_BICUBIC | SWS_PRINT_INFO, 0);
	av_opt_set_int(img_convert_ctx, "srcw", mWidth, 0);
	av_opt_set_int(img_convert_ctx, "srch", mHeight, 0);
	av_opt_set_int(img_convert_ctx, "src_format", format, 0);
	av_opt_set_int(img_convert_ctx, "src_range", 1, 0);
	av_opt_set_int(img_convert_ctx, "dstw", mDestWidth, 0);
	av_opt_set_int(img_convert_ctx, "dsth", mDestHeight, 0);
	av_opt_set_int(img_convert_ctx, "dst_format", dst_pixfmt, 0);
	av_opt_set_int(img_convert_ctx, "dst_range", 1, 0);
	sws_init_context(img_convert_ctx, NULL, NULL);
	// copy the source planes into the staging buffer, layout per format
	switch (format) {
	case AV_PIX_FMT_GRAY8: {
		memcpy(src_data[0], src, mWidth*mHeight);
		break;
	}
	case AV_PIX_FMT_YUV420P: {
		memcpy(src_data[0], src, mWidth*mHeight); //Y
		memcpy(src_data[1], src + mWidth*mHeight, mWidth*mHeight / 4); //U
		memcpy(src_data[2], src + mWidth*mHeight * 5 / 4, mWidth*mHeight / 4); //V
		break;
	}
	case AV_PIX_FMT_YUV422P: {
		memcpy(src_data[0], src, mWidth*mHeight); //Y
		memcpy(src_data[1], src + mWidth*mHeight, mWidth*mHeight / 2); //U
		memcpy(src_data[2], src + mWidth*mHeight * 3 / 2, mWidth*mHeight / 2); //V
		break;
	}
	case AV_PIX_FMT_YUV444P: {
		memcpy(src_data[0], src, mWidth*mHeight); //Y
		memcpy(src_data[1], src + mWidth*mHeight, mWidth*mHeight); //U
		memcpy(src_data[2], src + mWidth*mHeight * 2, mWidth*mHeight); //V
		break;
	}
	case AV_PIX_FMT_YUYV422: {
		memcpy(src_data[0], src, mWidth*mHeight * 2); //Packed
		break;
	}
	case AV_PIX_FMT_RGB24: {
		memcpy(src_data[0], src, mWidth*mHeight * 3); //Packed
		break;
	}
	case AV_PIX_FMT_RGB32: {
		memcpy(src_data[0], src, mWidth*mHeight *4); //Packed
		break;
	}
	default: {
		Debuger::Debug(L"Not Support Input Pixel Format.\n");
		break;
	}
	}
	// scale/convert into the destination staging buffer
	ret = sws_scale(img_convert_ctx, src_data, src_linesize, 0, mHeight, dst_data, dst_linesize);
	if (ret < 0) {
		return ret;
	}
	// copy the three YUV420P planes out to the caller's buffers
	memcpy(dst[0], dst_data[0], mDestWidth*mDestHeight);
	memcpy(dst[1], dst_data[1], mDestWidth*mDestHeight /4);
	memcpy(dst[2], dst_data[2], mDestWidth*mDestHeight /4);
	*len = mDestWidth*mDestHeight + mDestWidth*mDestHeight / 2;
	// the source staging buffer is no longer needed; dst is freed by the caller
	av_freep(&src_data[0]);
	av_freep(&dst_data[0]);
	sws_freeContext(img_convert_ctx);
	return 0;
}

70
media/VideoCoder.h Normal file
View File

@ -0,0 +1,70 @@
#pragma once
#ifdef _WIN32
#include "Debuger.h"
#include "CameraCapture.h"
//Windows
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/avutil.h"
#include "libswscale/swscale.h"
#include "libavutil/opt.h"
#include "libavutil/imgutils.h"
};
#include <iostream>
#else
#ifdef __cplusplus
extern "C"
{
#endif
#include "libavutil/opt.h"
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#ifdef __cplusplus
};
#endif
#endif
// Callback interface: receives each encoded H.264 access unit produced by
// VideoCoder. The default implementation only logs the frame length.
class VideoCodeObserver {
public:
	virtual void OnGetCodeFrame(uint8_t *data, int len) {
		Debuger::Debug(L"get one code %d \r\n", len);
	}
};
// H.264 encoder wrapper around FFmpeg/libx264: accepts raw camera or
// DirectShow frames, converts them to YUV420P when needed, and delivers
// encoded access units to a VideoCodeObserver.
class VideoCoder : public Camera::CameraObserver{
private:
	int mWidth;                     // input frame width
	int mHeight;                    // input frame height
	unsigned int mDestWidth;        // encoder output width
	unsigned int mDestHeight;       // encoder output height
	int mBytePerPixel;
	enum AVPixelFormat mInformat;   // input pixel format
	AVFormatContext *mFormatCtx;
	AVOutputFormat *mOutputFmt;
	AVStream *mVideoStream;
	AVCodecContext *mCodecCtx;
	AVCodec *mCodec;
	AVPacket mAVPack;               // reused output packet
	uint8_t *mPitureBuffer;
	AVFrame *mFrame;                // reused input frame
	VideoCodeObserver *mObserver;   // receiver of encoded frames (not owned)
public:
	// DirectShow sample callback: encodes the buffer
	int OnBuffer(double dblSampleTime, BYTE * pBuffer, long lBufferSize);
	// CameraObserver: encodes the buffer
	int OnCameraData(uint8_t *dat, uint32_t size) ;
	// NOTE(review): uint8_t parameters truncate real resolutions -- see .cpp
	int SetDestPix(uint8_t width,uint8_t height);
	VideoCoder(int width,int height,AVPixelFormat formt);
	~VideoCoder();
	// encode one raw frame of the given format
	void Encode(uint8_t*src,int size, enum AVPixelFormat format);
	void SetOberver(VideoCodeObserver *p) {
		this->mObserver = p;
	}
	// set the input resolution used by Encode()
	void SetOutPutPixel(unsigned int width,unsigned int height);
private:
	// drain delayed frames out of the encoder
	int flushCoder(AVFormatContext *fmt_ctx, unsigned int stream_index);
	// convert any supported input format to YUV420P at the dest resolution
	int forceYUV420P(uint8_t *src, int size, enum AVPixelFormat format, uint8_t ***dst,int *s);
};

409
media/audiocaptureff.cpp Normal file
View File

@ -0,0 +1,409 @@
#include "audiocaptureff.h"
#ifdef __MINGW32__
// Convert a wide string to a narrow multi-byte string using the user's
// environment locale; the previous LC_ALL setting is restored afterwards.
std::string WString2String(const std::wstring& ws)
{
    std::string savedLocale = setlocale(LC_ALL, "");
    const wchar_t* wideSrc = ws.c_str();
    size_t bytesNeeded = wcstombs(NULL, wideSrc, 0) + 1;
    char* narrowBuf = new char[bytesNeeded];
    memset(narrowBuf, 0, bytesNeeded);
    wcstombs(narrowBuf, wideSrc, bytesNeeded);
    std::string converted(narrowBuf);
    delete[] narrowBuf;
    setlocale(LC_ALL, savedLocale.c_str());
    return converted;
}
#endif
// Enumerate DirectShow audio input devices via the system device enumerator
// and return their friendly names. Returns an empty vector when COM
// enumeration fails or the category is empty.
// NOTE(review): despite the name, CLSID_AudioInputDeviceCategory enumerates
// capture (microphone) devices, not speakers; MICInfo::index is never set.
vector<CaptureAudioFfmpeg::MICInfo> CaptureAudioFfmpeg::EnumSpeakers()
{
	vector<CaptureAudioFfmpeg::MICInfo> ret;
	std::vector<std::wstring> names;
	IEnumMoniker *pEnum = nullptr;
	// Create the System Device Enumerator.
	ICreateDevEnum *pDevEnum;
	HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, nullptr,
		CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pDevEnum));
	if (SUCCEEDED(hr))
	{
		// Create an enumerator for the category.
		hr = pDevEnum->CreateClassEnumerator(CLSID_AudioInputDeviceCategory, &pEnum, 0);
		if (hr == S_FALSE)
		{
			hr = VFW_E_NOT_FOUND; // The category is empty. Treat as an error.
		}
		pDevEnum->Release();
	}
	if (!SUCCEEDED(hr))
		return ret;
	IMoniker *pMoniker = nullptr;
	// Walk each device moniker and pull its human-readable name.
	while (pEnum->Next(1, &pMoniker, nullptr) == S_OK)
	{
		IPropertyBag *pPropBag;
		IBindCtx* bindCtx = nullptr;
		LPOLESTR str = nullptr;
		VARIANT var;
		VariantInit(&var);
		HRESULT hr = pMoniker->BindToStorage(0, 0, IID_PPV_ARGS(&pPropBag));
		if (FAILED(hr))
		{
			pMoniker->Release();
			continue;
		}
		// Get description or friendly name.
		hr = pPropBag->Read(L"Description", &var, 0);
		if (FAILED(hr))
		{
			hr = pPropBag->Read(L"FriendlyName", &var, 0);
		}
		if (SUCCEEDED(hr))
		{
			names.push_back(var.bstrVal);
			CaptureAudioFfmpeg::MICInfo ele;
			ele.name = var.bstrVal;
			ret.push_back(ele);
			VariantClear(&var);
		}
		pPropBag->Release();
		pMoniker->Release();
	}
	pEnum->Release();
	return ret;
}
// Remember the requested capture format; no device is opened until
// InitCapture() is called.
CaptureAudioFfmpeg::CaptureAudioFfmpeg(uint16_t rate, uint8_t channel)
{
    this->mSampleRate = rate;
    this->mChanel = channel;
}
// Duplicate a wide string into a freshly av_malloc()ed UTF-8 buffer.
// The caller owns the returned memory; returns NULL on allocation failure.
static char *dup_wchar_to_utf8(wchar_t *w)
{
    int bytes = WideCharToMultiByte(CP_UTF8, 0, w, -1, 0, 0, 0, 0);
    char *utf8 = (char *)av_malloc(bytes);
    if (utf8)
        WideCharToMultiByte(CP_UTF8, 0, w, -1, utf8, bytes, 0, 0);
    return utf8;
}
// Open a DirectShow audio capture device (by wide-character friendly name),
// decode its stream, resample through an avfilter graph, AAC-encode and
// write the result to "aac.aac".
// NOTE(review): the rate/channel parameters are ignored -- the encoder is
// hard-coded to 48 kHz stereo FLTP -- and the read loop below is `while(1)`
// with no exit, so the flush/trailer code after it is unreachable.
int CaptureAudioFfmpeg::InitCapture(wstring url, uint16_t rate, uint8_t channel)
{
	string fileAudioInput = dup_wchar_to_utf8((wchar_t *)url.c_str());
	AVInputFormat* imft = av_find_input_format("dshow");
	AVDictionary *format_opts = nullptr;
	av_dict_set_int(&format_opts, "audio_buffer_size", 20, 0); // small buffer for low latency
	if (0 > avformat_open_input(&mInfmt_ctx, fileAudioInput.c_str(), imft, &format_opts)) {
		printf("failed input file\n");
		return -1;
	}
	if (0 > avformat_find_stream_info(mInfmt_ctx, NULL)) {
		printf("failed find stream info\n");
		avformat_close_input(&mInfmt_ctx);
		return -1;
	}
	int audio_index = -1;
	audio_index = av_find_best_stream(mInfmt_ctx, AVMEDIA_TYPE_AUDIO, -1, -1, NULL, 0);
	if (-1 == audio_index) {
		printf("failed find best stream\n");
		avformat_close_input(&mInfmt_ctx);
		return -1;
	}
	//av_dump_format(infmt_ctx, 0, fileAudioInput.c_str(), 1);
	// END input file setup
	// open the decoder for the capture stream
	static AVCodec* decodec = avcodec_find_decoder(mInfmt_ctx->streams[0]->codec->codec_id);
	if (!decodec) {
		printf("failed find decoder\n");
		return -1;
	}
	if (0 > avcodec_open2(mInfmt_ctx->streams[0]->codec, decodec, NULL)) {
		printf("failed open decoder\n");
		return -1;
	}
	// END decoder
	// resampler (avfilter graph) initialization
	initAudioFilters();
	// END resampler initialization
	// encoder
	static AVCodec* codec = NULL;
	//codec = avcodec_find_encoder_by_name("libmp3lame");
	codec = avcodec_find_encoder(AV_CODEC_ID_AAC);
	static AVCodecContext* codec_ctx = NULL;
	codec_ctx = avcodec_alloc_context3(codec);
	// codec_ctx->bit_rate = 64000;
	// inputContext->streams[0]->codec
	codec_ctx->codec = codec;
	codec_ctx->sample_rate = 48000;
	codec_ctx->channel_layout = 3; // stereo (front left | front right)
	codec_ctx->channels = 2;
	//codec_ctx->frame_size = 1024;
	codec_ctx->sample_fmt = AV_SAMPLE_FMT_FLTP;
	codec_ctx->codec_tag = 0;
	codec_ctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
	if (0 > avcodec_open2(codec_ctx, codec, NULL)) {
		printf("failed open coder\n");
		avformat_close_input(&mInfmt_ctx);
		avcodec_free_context(&codec_ctx);
		return -1;
	}
	// END encoder
	// output file
	AVFormatContext* outfmt_ctx = NULL;
	if (0 > avformat_alloc_output_context2(&outfmt_ctx, NULL, NULL, "aac.aac")) {
		printf("failed alloc outputcontext\n");
		avformat_close_input(&mInfmt_ctx);
		avcodec_free_context(&codec_ctx);
		return -1;
	}
	AVStream* out_stream = avformat_new_stream(outfmt_ctx, codec_ctx->codec);
	if (!out_stream) {
		printf("failed new stream\n");
		avformat_close_input(&mInfmt_ctx);
		avcodec_free_context(&codec_ctx);
		avformat_close_input(&outfmt_ctx);
		return -1;
	}
	avcodec_copy_context(out_stream->codec, codec_ctx);
	// if (0 > avio_open(&outfmt_ctx->pb, "rtmp://localhost/testlive", AVIO_FLAG_WRITE)) {
	if (0 > avio_open(&outfmt_ctx->pb, "aac.aac", AVIO_FLAG_WRITE)) {
		printf("failed to open outfile\n");
		avformat_close_input(&mInfmt_ctx);
		avcodec_free_context(&codec_ctx);
		avformat_close_input(&outfmt_ctx);
		return -1;
	}
	avformat_write_header(outfmt_ctx, NULL);
	// END output file
#if 0
	AVFrame* Frame = av_frame_alloc();
	Frame->nb_samples = codec_ctx->frame_size;
	Frame->format = codec_ctx->sample_fmt;
	Frame->channel_layout = codec_ctx->channel_layout;
	int size = av_samples_get_buffer_size(NULL, codec_ctx->channels, codec_ctx->frame_size,
		codec_ctx->sample_fmt, 1);
	uint8_t* frame_buf = (uint8_t *)av_malloc(size);
	avcodec_fill_audio_frame(Frame, codec_ctx->channels, codec_ctx->sample_fmt, (const uint8_t*)frame_buf, size, 1);
	int64_t in_channel_layout = av_get_default_channel_layout(codec_ctx->channels);
	AVPacket pkt;
	av_new_packet(&pkt, size);
	pkt.data = NULL;
	int got_frame = -1;
	int delayedFrame = 0;
	static uint8_t audio_buf[(MAX_AUDIO_FRAME_SIZE * 3) / 2];
	int audioCount = 0;
	const uint8_t *indata[AV_NUM_DATA_POINTERS] = { 0 };
	AVFrame* Frame1 = av_frame_alloc();
#endif
	int loop = 1;
	int delayedFrame = 0;
	AVPacket packet;
	av_init_packet(&packet);
	packet.data = NULL;
	packet.size = 0;
	AVPacket pkt;
	av_init_packet(&pkt);
	pkt.data = NULL;
	pkt.size = 0;
	AVFrame* pSrcAudioFrame = av_frame_alloc();
	int got_frame = 0;
	// capture loop: read -> decode+filter -> encode -> mux
	while (1) {
		av_read_frame(mInfmt_ctx, &packet);
		loop++;
		if (packet.stream_index == audio_index) {
			auto filterFrame = DecodeAudio(&packet, pSrcAudioFrame);
			if (filterFrame) {
				avcodec_encode_audio2(codec_ctx, &pkt, filterFrame, &got_frame);
				if (got_frame) {
#if 1
					// derive pts from the 1024-sample AAC frame count
					auto streamTimeBase = outfmt_ctx->streams[pkt.stream_index]->time_base.den;
					auto codecTimeBase = outfmt_ctx->streams[pkt.stream_index]->codec->time_base.den;
					pkt.pts = pkt.dts = (1024 * streamTimeBase * mAudioCount) / codecTimeBase;
					mAudioCount++;
					auto inputStream = mInfmt_ctx->streams[pkt.stream_index];
					auto outputStream = outfmt_ctx->streams[pkt.stream_index];
					av_packet_rescale_ts(&pkt, inputStream->time_base, outputStream->time_base);
#endif
					// pkt.stream_index = out_stream->index;
					av_interleaved_write_frame(outfmt_ctx, &pkt);
					av_packet_unref(&pkt);
					printf("output frame %3d\n", loop - delayedFrame);
				}
				else {
					delayedFrame++;
					av_packet_unref(&pkt);
					printf("no output frame\n");
				}
			}
		}
		av_packet_unref(&packet);
	}
	// NOTE(review): unreachable -- the loop above never breaks.
	flush_encoder(outfmt_ctx, 0);
	av_write_trailer(outfmt_ctx);
	//av_free(Frame);
	av_free(pSrcAudioFrame);
	avio_close(outfmt_ctx->pb);
	avformat_close_input(&mInfmt_ctx);
	//avformat_close_input(&outfmt_ctx);
	return 0;
}
// Build the avfilter graph (abuffer -> anull -> abuffersink) that converts
// decoded capture audio to FLTP at the device's own sample rate and channel
// layout, emitting fixed 1024-sample frames for the AAC encoder.
// Returns 0 on success, a negative AVERROR on failure.
int CaptureAudioFfmpeg::initAudioFilters()
{
	char args[512];
	int ret;
	AVFilter *abuffersrc = (AVFilter *)avfilter_get_by_name("abuffer");
	AVFilter *abuffersink = (AVFilter *)avfilter_get_by_name("abuffersink");
	AVFilterInOut *outputs = avfilter_inout_alloc();
	AVFilterInOut *inputs = avfilter_inout_alloc();
	auto audioDecoderContext = mInfmt_ctx->streams[0]->codec;
	// some capture devices leave channel_layout unset; derive it from channels
	if (!audioDecoderContext->channel_layout)
		audioDecoderContext->channel_layout = av_get_default_channel_layout(audioDecoderContext->channels);
	// sink constraints: FLTP samples, source layout, source rate
	static const enum AVSampleFormat out_sample_fmts[] = { AV_SAMPLE_FMT_FLTP, AV_SAMPLE_FMT_NONE };
	static const uint64_t out_channel_layouts[] = { audioDecoderContext->channel_layout};
	static const int out_sample_rates[] = { audioDecoderContext->sample_rate , -1 };
	AVRational time_base = mInfmt_ctx->streams[0]->time_base;
	mFilterGraph = avfilter_graph_alloc();
	mFilterGraph->nb_threads = 1;
	// describe the source pad from the decoder's parameters
	sprintf_s(args, sizeof(args),
		"time_base=%d/%d:sample_rate=%d:sample_fmt=%s:channel_layout=0x%I64x",
		time_base.num, time_base.den, audioDecoderContext->sample_rate,
		av_get_sample_fmt_name(audioDecoderContext->sample_fmt),
		audioDecoderContext->channel_layout);
	ret = avfilter_graph_create_filter(&mBuffersrcCtx, abuffersrc, "in",
		args, NULL, mFilterGraph);
	if (ret < 0) {
		av_log(NULL, AV_LOG_ERROR, "Cannot create audio buffer source\n");
		return ret;
	}
	/* buffer audio sink: to terminate the filter chain. */
	ret = avfilter_graph_create_filter(&mBuffersinkCtx, abuffersink, "out",
		NULL, NULL, mFilterGraph);
	if (ret < 0) {
		av_log(NULL, AV_LOG_ERROR, "Cannot create audio buffer sink\n");
		return ret;
	}
	ret = av_opt_set_int_list(mBuffersinkCtx, "sample_fmts", out_sample_fmts, -1,
		AV_OPT_SEARCH_CHILDREN);
	if (ret < 0) {
		av_log(NULL, AV_LOG_ERROR, "Cannot set output sample format\n");
		return ret;
	}
	ret = av_opt_set_int_list(mBuffersinkCtx, "channel_layouts", out_channel_layouts, -1,
		AV_OPT_SEARCH_CHILDREN);
	if (ret < 0) {
		av_log(NULL, AV_LOG_ERROR, "Cannot set output channel layout\n");
		return ret;
	}
	ret = av_opt_set_int_list(mBuffersinkCtx, "sample_rates", out_sample_rates, -1,
		AV_OPT_SEARCH_CHILDREN);
	if (ret < 0) {
		av_log(NULL, AV_LOG_ERROR, "Cannot set output sample rate\n");
		return ret;
	}
	/* Endpoints for the filter graph. */
	outputs->name = av_strdup("in");
	outputs->filter_ctx = mBuffersrcCtx;;
	outputs->pad_idx = 0;
	outputs->next = NULL;
	inputs->name = av_strdup("out");
	inputs->filter_ctx = mBuffersinkCtx;
	inputs->pad_idx = 0;
	inputs->next = NULL;
	// "anull" pass-through: format conversion happens via the sink constraints
	if ((ret = avfilter_graph_parse_ptr(mFilterGraph, "anull",
		&inputs, &outputs, nullptr)) < 0)
		return ret;
	if ((ret = avfilter_graph_config(mFilterGraph, NULL)) < 0)
		return ret;
	// AAC wants 1024 samples per frame
	av_buffersink_set_frame_size(mBuffersinkCtx, 1024);
	return 0;
}
// Drain audio frames still buffered in the encoder and mux them out.
// Returns 0 on success (or when the codec buffers nothing), negative on error.
int CaptureAudioFfmpeg::flush_encoder(AVFormatContext *fmt_ctx, unsigned int stream_index)
{
	int ret;
	int got_frame;
	AVPacket enc_pkt;
	// 0x0020 == AV_CODEC_CAP_DELAY: only codecs with delayed output need a
	// flush pass.
	if (!(fmt_ctx->streams[stream_index]->codec->codec->capabilities &
		0x0020))
		return 0;
	while (1) {
		enc_pkt.data = NULL;
		enc_pkt.size = 0;
		av_init_packet(&enc_pkt);
		// NULL frame asks the encoder for its buffered output
		ret = avcodec_encode_audio2(fmt_ctx->streams[stream_index]->codec, &enc_pkt,
			NULL, &got_frame);
		av_frame_free(NULL); // NOTE(review): no-op; freeing NULL does nothing
		if (ret < 0)
			break;
		if (!got_frame) {
			ret = 0;
			break;
		}
		printf("Flush Encoder: Succeed to encode 1 frame!\tsize:%5d\n", enc_pkt.size);
		/* mux encoded frame */
		ret = av_write_frame(fmt_ctx, &enc_pkt);
		if (ret < 0)
			break;
	}
	return ret;
}
// Decode one captured packet and push the decoded frame through the filter
// graph. Returns a newly allocated filtered frame (caller frees it), or
// nullptr when decoding fails, no frame was produced, or the sink has no
// complete 1024-sample frame yet.
AVFrame *CaptureAudioFfmpeg::DecodeAudio(AVPacket *packet, AVFrame *pSrcAudioFrame)
{
	AVStream * stream = mInfmt_ctx->streams[0];
	AVCodecContext* codecContext = stream->codec;
	int gotFrame;
	AVFrame *filtFrame = nullptr;
	auto length = avcodec_decode_audio4(codecContext, pSrcAudioFrame, &gotFrame, packet);
	if (length >= 0 && gotFrame != 0)
	{
		// feed the decoded frame into the filter graph's source
		if (av_buffersrc_add_frame_flags(mBuffersrcCtx, pSrcAudioFrame, AV_BUFFERSRC_FLAG_PUSH) < 0) {
			av_log(NULL, AV_LOG_ERROR, "buffe src add frame error!\n");
			return nullptr;
		}
		filtFrame = av_frame_alloc();
		// non-blocking pull: may legitimately fail until enough samples queue up
		int ret = av_buffersink_get_frame_flags(mBuffersinkCtx, filtFrame, AV_BUFFERSINK_FLAG_NO_REQUEST);
		if (ret < 0)
		{
			av_frame_free(&filtFrame);
			goto error;
		}
		return filtFrame;
	}
error:
	return nullptr;
}

85
media/audiocaptureff.h Normal file
View File

@ -0,0 +1,85 @@
#ifndef AUDIOCAPTUREFF_H
#define AUDIOCAPTUREFF_H
#include "stdint.h"
#include "../third/portaudio/portaudio.h"
#include <vector>
#include <string>
//Windows
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/avutil.h"
#include "libswscale/swscale.h"
#include "libavutil/opt.h"
#include "libavutil/imgutils.h"
#include "libavdevice/avdevice.h"
#include "libavfilter/avfilter.h"
#include "libavfilter/buffersrc.h"
#include "libavfilter/buffersink.h"
};
#include <functional>
#include <dshow.h>
#include <windows.h>
#include "qedit.h"
#include <mutex>
#include <vector>
#include <thread>
#include "guiddef.h"
using namespace std;
// DirectShow + FFmpeg microphone capture: enumerates audio input devices,
// opens one via the dshow demuxer, and decodes/resamples/encodes its stream
// (see InitCapture in the .cpp).
class CaptureAudioFfmpeg {
public:
	// Observer interface for delivering captured audio to a consumer.
	class CaptureAudioObserver {
	public:
		virtual void OnAudioData(const void *frameaddress, uint32_t framelen) {};
	};
	// One enumerated input device: display name and device index.
	typedef struct _T_MicInfo
	{
		wstring name;
		int index;
	}MICInfo;
	// capture session lifecycle states
	enum CAP_STATUS {
		RUNNING = 1,
		STOP = 2,
		PAUSE = 3,
		FAIL = 4,
	};
	// list DirectShow audio input devices (see .cpp note: inputs, not speakers)
	vector<CaptureAudioFfmpeg::MICInfo> EnumSpeakers();
	CaptureAudioFfmpeg(uint16_t rate, uint8_t channel);
	// open the named device and run the capture/encode pipeline
	int InitCapture(wstring url,uint16_t rate,uint8_t channel);
	/*
	~CaptureAudio();
	int StartCapture();
	void StopCapture();
	int SetObserver(CaptureAudioObserver*);
	int OnCallBack(const void* input, void* output, unsigned long frameCount);
	void AddCnt(unsigned int x) {this->mSize += x;};
	*/
private:
	std::thread mThread;
	uint16_t mSampleRate; // sample rate requested by the caller
	uint16_t mChanel;     // channel count requested by the caller
	uint16_t mSamplefmt;  // sample format
	unsigned long mSize;
	CAP_STATUS mStatus;
	CaptureAudioObserver *observer;
	// build the abuffer -> anull -> abuffersink resampling graph
	int initAudioFilters();
	AVFormatContext *mInfmt_ctx = nullptr;   // dshow input context
	AVFormatContext * mOutfmt_ctx = nullptr; // output muxer context
	int64_t mLastReadPacktTime;
	AVFilterContext *mBuffersinkCtx = nullptr;
	AVFilterContext *mBuffersrcCtx = nullptr;
	AVFilterGraph *mFilterGraph = nullptr;
	AVCodecContext* mOutPutAudioEncContext = nullptr;
	int64_t mAudioCount = 0;                 // encoded AAC frame counter (pts base)
	// drain delayed frames out of the encoder
	int flush_encoder(AVFormatContext *fmt_ctx, unsigned int stream_index);
	// decode one packet and run it through the filter graph
	AVFrame *DecodeAudio(AVPacket* packet, AVFrame*pSrcAudioFrame);
};
#endif // AUDIOCAPTUREFF_H

177
media/imgutil.cpp Normal file
View File

@ -0,0 +1,177 @@
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <sys/stat.h>
#include <fcntl.h>
#define MAX_LEN (1*1024*1024)
#define POSITIVE_HEIGHT (1)
/*12Bytes*/
/*12Bytes*/
typedef struct /**** BMP file header structure ****/
{
	// NOTE: the 2-byte 'BM' magic (bfType) is deliberately omitted from this
	// struct and written separately by simplest_rgb24_to_bmp(), so the
	// remaining fields stay naturally aligned without packing pragmas.
	unsigned int bfSize; /* Size of file */
	unsigned short bfReserved1; /* Reserved */
	unsigned short bfReserved2; /* ... */
	unsigned int bfOffBits; /* Offset to bitmap data */
}BITMAPFILEHEADER;
/*40Bytes*/
typedef struct /**** BMP file info structure ****/
{
	// Standard 40-byte BITMAPINFOHEADER layout; written verbatim after the
	// file header by simplest_rgb24_to_bmp().
	unsigned int biSize; /* Size of info header */
	int biWidth; /* Width of image */
	int biHeight; /* Height of image (negative = top-down rows) */
	unsigned short biPlanes; /* Number of color planes */
	unsigned short biBitCount; /* Number of bits per pixel */
	unsigned int biCompression; /* Type of compression to use */
	unsigned int biSizeImage; /* Size of image data */
	int biXPelsPerMeter; /* X pixels per meter */
	int biYPelsPerMeter; /* Y pixels per meter */
	unsigned int biClrUsed; /* Number of colors used */
	unsigned int biClrImportant; /* Number of important colors */
}BITMAPINFOHEADER;
/**
 * Convert a raw packed-RGB24 image file into a 24-bit BMP file.
 *
 * @param rgb24Path path of the input file holding exactly w*h*3 RGB bytes
 * @param w         image width in pixels (must be > 0)
 * @param h         image height in pixels (must be > 0)
 * @param bmpPath   path of the BMP file to create (truncated if it exists)
 * @return 0 on success, -1 on any failure
 *
 * Fixes vs. the previous version:
 *  - open() mode was decimal 777 (garbage permission bits); now octal 0777;
 *  - printf("%d", sizeof(...)) is undefined behavior; now %zu;
 *  - frames larger than MAX_LEN are rejected instead of silently truncated;
 *  - buffers are heap-allocated instead of two 1 MiB stack arrays;
 *  - every write() is checked for short writes, not just for < 0.
 *
 * NOTE(review): BMP rows must be padded to 4-byte multiples; like the
 * original, this code assumes (w * 3) % 4 == 0 — confirm callers' widths.
 */
int simplest_rgb24_to_bmp(const char* rgb24Path, int w, int h, const char* bmpPath)
{
    int s32Ret = 0;
    int fd_ori = -1;
    int fd_bmp = -1;
    int headerSize = 0;
    int i = 0;  /* loop counters */
    int j = 0;
    unsigned char temp = 0;
    int imageSize = 0;
    unsigned char *readBuff = NULL;
#ifdef POSITIVE_HEIGHT
    unsigned char *readBuff4Ph = NULL;  /* row-flipped copy for bottom-up DIBs */
#endif
    char bfType[2] = {'B', 'M'};
    BITMAPFILEHEADER myHead;
    BITMAPINFOHEADER myHeadInfo;
    memset(&myHead, 0, sizeof(myHead));
    memset(&myHeadInfo, 0, sizeof(myHeadInfo));
    printf("sizeof(myHead) = %zu\n", sizeof(myHead));
    printf("sizeof(myHeadInfo) = %zu\n", sizeof(myHeadInfo));
    if ((w <= 0) || (h <= 0))
    {
        printf("invalid dimensions %dx%d!\n", w, h);
        return -1;
    }
    imageSize = w * h * 3;
    if (imageSize > MAX_LEN)
    {
        printf("image %dx%d exceeds buffer size!\n", w, h);
        return -1;
    }
    /* file header: bfType is written separately so the structs stay unpadded */
    headerSize = sizeof(bfType) + sizeof(myHead) + sizeof(myHeadInfo);
    myHead.bfSize = headerSize + imageSize;
    myHead.bfOffBits = headerSize;
    /* info header */
    myHeadInfo.biSize = sizeof(myHeadInfo);
    myHeadInfo.biWidth = w;
#ifndef POSITIVE_HEIGHT
    myHeadInfo.biHeight = -1 * h;  /* negative height = top-down pixel rows */
#else
    myHeadInfo.biHeight = h;       /* positive height = bottom-up pixel rows */
#endif
    myHeadInfo.biPlanes = 1;
    myHeadInfo.biBitCount = 24;
    myHeadInfo.biSizeImage = imageSize;
    /* heap buffers: the old 1 MiB stack arrays risked stack overflow */
    readBuff = (unsigned char *)malloc(MAX_LEN);
    if (readBuff == NULL)
    {
        printf("malloc readBuff failed!\n");
        return -1;
    }
    memset(readBuff, 0, MAX_LEN);
#ifdef POSITIVE_HEIGHT
    readBuff4Ph = (unsigned char *)malloc(MAX_LEN);
    if (readBuff4Ph == NULL)
    {
        printf("malloc readBuff4Ph failed!\n");
        free(readBuff);
        return -1;
    }
    memset(readBuff4Ph, 0, MAX_LEN);
#endif
    /* open files */
    fd_ori = open(rgb24Path, O_RDONLY);
    if (fd_ori < 0)
    {
        printf("open rgb24 failed!\n");
        goto fail;
    }
    printf("open rgb24 success!\n");
    /* 0777 is octal: the old decimal 777 produced nonsense permission bits */
    fd_bmp = open(bmpPath, O_WRONLY|O_CREAT|O_TRUNC|O_APPEND, 0777);
    if (fd_bmp < 0)
    {
        printf("open bmp failed!\n");
        goto fail;
    }
    printf("open bmp success!\n");
    /* read the whole frame; a short read means the file does not match w*h */
    s32Ret = read(fd_ori, readBuff, imageSize);
    if (s32Ret != imageSize)
    {
        printf("read RGB file failed!\n");
        goto fail;
    }
    printf("read RGB file success!\n");
    /* swap R and B: BMP stores pixels in B-G-R channel order */
    for (i = 0; i < (w*h); i++)
    {
        temp = readBuff[i*3];
        readBuff[i*3] = readBuff[i*3 + 2];
        readBuff[i*3 + 2] = temp;
    }
#ifdef POSITIVE_HEIGHT
    /* bottom-up DIB: flip the row order (last input row becomes first) */
    for (i = (h - 1), j = 0; i >= 0; i--, j++)
    {
        memcpy(readBuff4Ph + j*w*3, readBuff + i*w*3, w*3);
    }
#endif
    /* write the 4 parts: magic, file header, info header, pixels */
    if (write(fd_bmp, bfType, sizeof(bfType)) != (int)sizeof(bfType))
    {
        printf("write bfType failed!\n");
        goto fail;
    }
    if (write(fd_bmp, &myHead, sizeof(myHead)) != (int)sizeof(myHead))
    {
        printf("write myHead failed!\n");
        goto fail;
    }
    if (write(fd_bmp, &myHeadInfo, sizeof(myHeadInfo)) != (int)sizeof(myHeadInfo))
    {
        printf("write myHeadInfo failed!\n");
        goto fail;
    }
#ifdef POSITIVE_HEIGHT
    if (write(fd_bmp, readBuff4Ph, imageSize) != imageSize)
    {
        printf("write pixel data failed!\n");
        goto fail;
    }
#else
    if (write(fd_bmp, readBuff, imageSize) != imageSize)
    {
        printf("write pixel data failed!\n");
        goto fail;
    }
#endif
    printf("write pixel data success!\n");
    close(fd_bmp);
    close(fd_ori);
    free(readBuff);
#ifdef POSITIVE_HEIGHT
    free(readBuff4Ph);
#endif
    return 0;
fail:
    if (fd_bmp >= 0) close(fd_bmp);
    if (fd_ori >= 0) close(fd_ori);
    free(readBuff);
#ifdef POSITIVE_HEIGHT
    free(readBuff4Ph);
#endif
    return -1;
}

41
media/librtmp_send264.h Normal file
View File

@ -0,0 +1,41 @@
/**
 * Simplest Librtmp Send 264
 *
 * Originally by:
 *   leixiaohua1020@126.com
 *   zhanghuicuc@gmail.com
 *   Communication University of China / Digital TV Technology
 *   http://blog.csdn.net/leixiaohua1020
 *
 * Streams an H.264 elementary stream to a server over RTMP.
 */
/**
 * Connect to the RTMP server and prepare the publishing stream.
 *
 * @param url RTMP publish URL, e.g. rtmp://host[:port]/app/stream
 *
 * @return 1 on success, 0 on failure
 */
int RTMP264_Connect(const char* url);
/**
 * Pump H.264 data to the established RTMP connection.
 *
 * @param read_buffer caller-supplied callback invoked whenever more input is
 *        needed; it must fill `buf` with up to `buf_size` bytes of H.264
 *        bitstream and return the number of bytes produced
 *
 * @return 1 on success, 0 on failure
 */
int RTMP264_Send(int (*read_buffer)(unsigned char *buf, int buf_size));
/**
 * Close the RTMP connection and release associated resources.
 */
void RTMP264_Close();

178
media/screen_capture.cpp Normal file
View File

@ -0,0 +1,178 @@
#include "screen_capture.h"
#include <conio.h>
#include <stdio.h>
#include <QDebug>
#include <QString>
#if _MSC_VER >= 1600
#pragma execution_character_set("utf-8")
#endif
#define WIDEN2(x) L ## x
#define WIDEN(x) WIDEN2(x)
#define __WFILE__ WIDEN(__FILE__)
#define HRCHECK(__expr) {hr=(__expr);if(FAILED(hr)){wprintf(L"FAILURE 0x%08X (%i)\n\tline: %u file: '%s'\n\texpr: '" WIDEN(#__expr) L"'\n",hr, hr, __LINE__,__WFILE__);goto cleanup;}}
#define RELEASE(__p) {if(__p!=nullptr){__p->Release();__p=nullptr;}}
// Reference: enumerating display modes with Direct3D 9
// https://www.gamedev.net/forums/topic/132636-enum-displaymode--dx9--false/
// Save a raw 32bpp premultiplied-BGRA pixel buffer to disk through WIC,
// using the container GUID in `format` (e.g. GUID_ContainerFormatPng).
// Returns S_OK on success or the first failing HRESULT (via HRCHECK).
HRESULT SavePixelsToFile32bppPBGRA(UINT width, UINT height, UINT stride,
    LPBYTE pixels, LPWSTR filePath, const GUID &format)
{
    if (!filePath || !pixels)
        return E_INVALIDARG;
    HRESULT hr = S_OK;
    IWICImagingFactory *factory = nullptr;
    IWICBitmapEncoder *encoder = nullptr;
    IWICBitmapFrameEncode *frame = nullptr;
    IWICStream *stream = nullptr;
    GUID pf = GUID_WICPixelFormat32bppPBGRA;
    // CoInitialize returns an HRESULT (S_OK == 0), not a BOOL. The previous
    // `BOOL coInit` test was inverted: it skipped CoUninitialize exactly when
    // COM had been freshly initialized (S_OK), unbalancing the COM refcount.
    HRESULT coInit = CoInitialize(nullptr);
    HRCHECK(CoCreateInstance(CLSID_WICImagingFactory, nullptr, CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&factory)));
    HRCHECK(factory->CreateStream(&stream));
    HRCHECK(stream->InitializeFromFilename(filePath, GENERIC_WRITE));
    HRCHECK(factory->CreateEncoder(format, nullptr, &encoder));
    HRCHECK(encoder->Initialize(stream, WICBitmapEncoderNoCache));
    HRCHECK(encoder->CreateNewFrame(&frame, nullptr)); // we don't use options here
    HRCHECK(frame->Initialize(nullptr)); // we don't use any options here
    HRCHECK(frame->SetSize(width, height));
    HRCHECK(frame->SetPixelFormat(&pf));
    HRCHECK(frame->WritePixels(height, stride, stride * height, pixels));
    HRCHECK(frame->Commit());
    HRCHECK(encoder->Commit());
cleanup:
    RELEASE(stream);
    RELEASE(frame);
    RELEASE(encoder);
    RELEASE(factory);
    if (SUCCEEDED(coInit)) CoUninitialize(); // balances both S_OK and S_FALSE
    return hr;
}
// Capture <count> consecutive front-buffer screenshots from the given
// Direct3D9 adapter and save each one as capN.png via WIC. Prints the wall
// clock before and after the capture loop so throughput can be eyeballed.
// Returns S_OK on success or the first failing HRESULT.
HRESULT Direct3D9TakeScreenshots(UINT adapter, UINT count)
{
    HRESULT hr = S_OK;
    IDirect3D9 *d3d = nullptr;
    IDirect3DDevice9 *device = nullptr;
    IDirect3DSurface9 *surface = nullptr;
    D3DPRESENT_PARAMETERS parameters = { 0 };
    D3DDISPLAYMODE mode;
    D3DLOCKED_RECT rc;
    UINT pitch;
    SYSTEMTIME st;
    LPBYTE *shots = nullptr;
    // init D3D and get screen size
    d3d = Direct3DCreate9(D3D_SDK_VERSION);
    if (d3d == nullptr)
        return E_FAIL; // Direct3DCreate9 can fail; old code dereferenced it unchecked
    HRCHECK(d3d->GetAdapterDisplayMode(adapter, &mode));
    parameters.Windowed = TRUE;
    parameters.BackBufferCount = 1;
    parameters.BackBufferHeight = mode.Height;
    parameters.BackBufferWidth = mode.Width;
    parameters.SwapEffect = D3DSWAPEFFECT_DISCARD;
    parameters.hDeviceWindow = NULL;
    // create device & capture surface (A8R8G8B8 matches the 32bpp PBGRA saver)
    HRCHECK(d3d->CreateDevice(adapter, D3DDEVTYPE_HAL, NULL, D3DCREATE_SOFTWARE_VERTEXPROCESSING, &parameters, &device));
    HRCHECK(device->CreateOffscreenPlainSurface(mode.Width, mode.Height, D3DFMT_A8R8G8B8, D3DPOOL_SYSTEMMEM, &surface, nullptr));
    // compute the required buffer size by locking the surface once
    HRCHECK(surface->LockRect(&rc, NULL, 0));
    pitch = rc.Pitch;
    HRCHECK(surface->UnlockRect());
    // allocate screenshot buffers
    shots = new LPBYTE[count];
    for (UINT i = 0; i < count; i++)
    {
        shots[i] = new BYTE[pitch * mode.Height];
    }
    GetSystemTime(&st); // measure the time we spend doing <count> captures
    wprintf(L"%i:%i:%i.%i\n", st.wHour, st.wMinute, st.wSecond, st.wMilliseconds);
    for (UINT i = 0; i < count; i++)
    {
        // get the data
        HRCHECK(device->GetFrontBufferData(0, surface));
        // copy it into our buffers
        HRCHECK(surface->LockRect(&rc, NULL, 0));
        CopyMemory(shots[i], rc.pBits, rc.Pitch * mode.Height);
        HRCHECK(surface->UnlockRect());
    }
    GetSystemTime(&st);
    wprintf(L"%i:%i:%i.%i\n", st.wHour, st.wMinute, st.wSecond, st.wMilliseconds);
    // save all screenshots
    for (UINT i = 0; i < count; i++)
    {
        WCHAR file[100];
        wsprintf(file, L"cap%i.png", i);
        HRCHECK(SavePixelsToFile32bppPBGRA(mode.Width, mode.Height, pitch, shots[i], file, GUID_ContainerFormatPng));
    }
cleanup:
    if (shots != nullptr)
    {
        for (UINT i = 0; i < count; i++)
        {
            delete[] shots[i]; // new[] requires delete[]; plain delete was UB
        }
        delete[] shots;
    }
    RELEASE(surface);
    RELEASE(device);
    RELEASE(d3d);
    return hr;
}
// Acquire the Direct3D9 interface used for adapter enumeration.
// NOTE(review): Direct3DCreate9 may return null; EnumScreen() checks for
// that, and the interface is never Released — confirm intended lifetime.
ScreenCapture::ScreenCapture()
{
m_d3d9_dev = ::Direct3DCreate9(D3D_SDK_VERSION);
}
// EnumDisplayMonitors callback: logs each monitor's device name, monitor
// rectangle and work-area rectangle. Always returns TRUE so enumeration
// continues through every attached monitor.
BOOL CALLBACK MonitorEnumProc(HMONITOR hMonitor,HDC hdcMonitor,
    LPRECT lprcMonitor,LPARAM dwData)
{
    MONITORINFOEX mi;
    mi.cbSize=sizeof(MONITORINFOEX);
    if(!GetMonitorInfo(hMonitor,&mi))
        return TRUE; // could not query this monitor; keep enumerating
    // NOTE(review): %s assumes an ANSI build (szDevice is char[]); under a
    // UNICODE build szDevice is WCHAR[] and this format would be wrong.
    qDebug()<<QString::asprintf("Device name:%s\t",mi.szDevice);
    // dwFlags is a bit mask — test the bit instead of comparing for equality
    if(mi.dwFlags & MONITORINFOF_PRIMARY) printf("Primary monitor!\n");
    else printf("\n");
    qDebug()<<QString::asprintf("Monitor rectangle:(%d,%d,%d,%d)\n",mi.rcMonitor.left,mi.rcMonitor.top,
        mi.rcMonitor.right,mi.rcMonitor.bottom);
    qDebug()<<QString::asprintf("Work rectangle:(%d,%d,%d,%d)\n",mi.rcWork.left,mi.rcWork.top,
        mi.rcWork.right,mi.rcWork.bottom);
    return TRUE;
}
// Log the device name of every display adapter known to Direct3D9.
// Silently does nothing if the Direct3D9 interface failed to initialize.
void ScreenCapture::EnumScreen()
{
    // EnumDisplayMonitors(NULL,NULL,MonitorEnumProc,NULL);
    if(m_d3d9_dev == NULL)
    {
        return; // Direct3DCreate9 failed in the constructor
    }
    D3DADAPTER_IDENTIFIER9 adapterID; // receives per-adapter device info
    DWORD dwDisplayCount = m_d3d9_dev->GetAdapterCount();
    for(DWORD i = 0; i < dwDisplayCount; i++)
    {
        if( m_d3d9_dev->GetAdapterIdentifier( i/*D3DADAPTER_DEFAULT*/, 0,&adapterID ) != D3D_OK )
        {
            return; // stop on first failure (matches previous behavior)
        }
        qDebug()<<adapterID.DeviceName;
    }
    // (removed: 20 KB `strBuffer` stack array that was never used)
}

24
media/screen_capture.h Normal file
View File

@ -0,0 +1,24 @@
#ifndef SCREEN_CAPTURE
#define SCREEN_CAPTURE
#include <Windows.h>
#include <Wincodec.h> // we use WIC for saving images
#include <d3d9.h> // DirectX 9 header
#include <d3d9helper.h>
// Capture `count` front-buffer screenshots from `adapter` and save them as
// PNG files (cap0.png, cap1.png, ...). Returns S_OK or a failing HRESULT.
HRESULT Direct3D9TakeScreenshots(UINT adapter, UINT count);
// Thin wrapper over the Direct3D9 adapter APIs used for display enumeration.
class ScreenCapture
{
public:
ScreenCapture();
// Log the device name of every Direct3D9 display adapter.
void EnumScreen();
// NOTE(review): declared but no definition is visible in this source —
// confirm it exists elsewhere or remove the declaration.
void PrintDisplayModeInfo(IDirect3D9 *pD3D, D3DFORMAT fmt);
private:
IDirect3D9* m_d3d9_dev = nullptr; // acquired in ctor; null if creation failed
};
#endif // SCREEN_CAPTURE

247
media/sps_decode.cpp Normal file
View File

@ -0,0 +1,247 @@
/**
 * Simplest Librtmp Send 264
 *
 * Originally by:
 *   leixiaohua1020@126.com
 *   zhanghuicuc@gmail.com
 *   Communication University of China / Digital TV Technology
 *   http://blog.csdn.net/leixiaohua1020
 *
 * H.264 SPS parsing helpers used by the RTMP streaming code.
 */
#include "sps_decode.h"
typedef unsigned int UINT;
typedef unsigned char BYTE;
typedef unsigned long DWORD;
/**
 * Decode one unsigned Exp-Golomb ("ue(v)") code from a big-endian bit stream.
 *
 * @param pBuff     byte buffer holding the bit stream
 * @param nLen      buffer length in bytes (bounds the leading-zero scan)
 * @param nStartBit in/out bit offset; advanced past the decoded code
 * @return the decoded code number: (1 << leadingZeros) - 1 + suffix
 */
UINT Ue(BYTE *pBuff, UINT nLen, UINT &nStartBit)
{
    // Count leading zero bits up to (and not including) the terminating '1'.
    UINT leadingZeros = 0;
    const UINT totalBits = nLen * 8;
    while (nStartBit < totalBits &&
           (pBuff[nStartBit >> 3] & (0x80 >> (nStartBit & 7))) == 0)
    {
        ++leadingZeros;
        ++nStartBit;
    }
    ++nStartBit; // skip the '1' marker bit
    // Read `leadingZeros` suffix bits, MSB first.
    DWORD suffix = 0;
    for (UINT bit = 0; bit < leadingZeros; ++bit, ++nStartBit)
    {
        suffix = (suffix << 1) |
                 ((pBuff[nStartBit >> 3] >> (7 - (nStartBit & 7))) & 1);
    }
    return (1u << leadingZeros) - 1 + suffix;
}
/**
 * Decode one signed Exp-Golomb ("se(v)") code.
 * Per H.264 9.1.1: codeNum k maps to ceil(k/2), with the sign flipped for
 * even code numbers — 0->0, 1->1, 2->-1, 3->2, 4->-2, ...
 */
int Se(BYTE *pBuff, UINT nLen, UINT &nStartBit)
{
    const int codeNum = Ue(pBuff, nLen, nStartBit);
    const int magnitude = (int)ceil(codeNum / 2.0);
    return (codeNum % 2 == 0) ? -magnitude : magnitude;
}
/**
 * Read `BitCount` bits MSB-first from `buf` starting at bit offset
 * `nStartBit`, advancing the offset past the bits read.
 * @return the bits packed into an unsigned value (last bit read is the LSB)
 */
DWORD u(UINT BitCount,BYTE * buf,UINT &nStartBit)
{
    DWORD value = 0;
    while (BitCount-- > 0)
    {
        const UINT byteIndex = nStartBit >> 3;
        const UINT bitOffset = 7 - (nStartBit & 7);
        value = (value << 1) | ((buf[byteIndex] >> bitOffset) & 1);
        ++nStartBit;
    }
    return value;
}
/**
 * Strip H.264 emulation-prevention bytes in place: every 00 00 03 sequence
 * becomes 00 00 (the 0x03 byte is removed and the buffer shrinks).
 *
 * @param buf      NAL unit payload, modified in place
 * @param buf_size in: payload length in bytes; out: length after stripping
 *
 * Fix: the old loop condition `i < (tmp_buf_size - 2)` underflowed for
 * unsigned sizes < 2, scanning (and shifting) far past the buffer — an
 * out-of-bounds read/write. Buffers shorter than 3 bytes are now a no-op,
 * which is also semantically correct: they cannot contain 00 00 03.
 */
void de_emulation_prevention(unsigned char* buf,unsigned int* buf_size)
{
    if (buf == nullptr || buf_size == nullptr || *buf_size < 3)
        return;
    unsigned int size = *buf_size;
    for (unsigned int i = 0; i + 2 < size; i++)
    {
        // check for 0x000003
        if (buf[i] == 0x00 && buf[i+1] == 0x00 && buf[i+2] == 0x03)
        {
            // kick out the 0x03 byte by shifting the tail left one position
            for (unsigned int j = i + 2; j + 1 < size; j++)
                buf[j] = buf[j+1];
            --size;
        }
    }
    *buf_size = size;
}
/**
 * Parse an H.264 sequence parameter set (SPS) NAL unit and extract the coded
 * picture width/height and, when VUI timing info is present, the frame rate.
 *
 * @param buf    SPS NAL unit bytes, starting at the NAL header (no start code)
 * @param nLen   length of buf in bytes
 * @param width  [out] luma width in pixels (macroblock aligned)
 * @param height [out] luma height in pixels (macroblock aligned)
 * @param fps    [out] frames per second, or 0 if the SPS carries no timing info
 * @return true if buf was an SPS NAL unit (nal_unit_type == 7), else false
 *
 * NOTE(review): known limitations of this classic parser — confirm before use:
 *  - the high-profile branch only checks profile_idc 100/110/122/144; newer
 *    profiles (44, 83, 86, 118, 128, 244, ...) skip the chroma/bit-depth
 *    fields and desynchronize the bit reader;
 *  - when a seq_scaling_list_present_flag is set, the scaling-list payload is
 *    NOT skipped, which also desynchronizes parsing;
 *  - frame cropping offsets are read but never applied to width/height, and
 *    interlaced streams (frame_mbs_only_flag == 0) would need height doubled.
 */
int h264_decode_sps(BYTE * buf,unsigned int nLen,int &width,int &height,int &fps)
{
UINT StartBit=0;
fps=0;
// strip 00 00 03 emulation-prevention bytes before bit-level parsing
de_emulation_prevention(buf,&nLen);
// NAL unit header
int forbidden_zero_bit = u(1,buf,StartBit);
int nal_ref_idc = u(2,buf,StartBit);
int nal_unit_type = u(5,buf,StartBit);
if(nal_unit_type==7)
{
int profile_idc=u(8,buf,StartBit);
int constraint_set0_flag=u(1,buf,StartBit);//(buf[1] & 0x80)>>7;
int constraint_set1_flag=u(1,buf,StartBit);//(buf[1] & 0x40)>>6;
int constraint_set2_flag=u(1,buf,StartBit);//(buf[1] & 0x20)>>5;
int constraint_set3_flag=u(1,buf,StartBit);//(buf[1] & 0x10)>>4;
int reserved_zero_4bits=u(4,buf,StartBit);
int level_idc=u(8,buf,StartBit);
int seq_parameter_set_id=Ue(buf,nLen,StartBit);
// high-profile-only fields (chroma format, bit depths, scaling matrices)
if( profile_idc == 100 || profile_idc == 110 ||
profile_idc == 122 || profile_idc == 144 )
{
int chroma_format_idc=Ue(buf,nLen,StartBit);
if( chroma_format_idc == 3 )
int residual_colour_transform_flag=u(1,buf,StartBit);
int bit_depth_luma_minus8=Ue(buf,nLen,StartBit);
int bit_depth_chroma_minus8=Ue(buf,nLen,StartBit);
int qpprime_y_zero_transform_bypass_flag=u(1,buf,StartBit);
int seq_scaling_matrix_present_flag=u(1,buf,StartBit);
int seq_scaling_list_present_flag[8];
if( seq_scaling_matrix_present_flag )
{
for( int i = 0; i < 8; i++ ) {
seq_scaling_list_present_flag[i]=u(1,buf,StartBit);
}
}
}
int log2_max_frame_num_minus4=Ue(buf,nLen,StartBit);
int pic_order_cnt_type=Ue(buf,nLen,StartBit);
if( pic_order_cnt_type == 0 )
int log2_max_pic_order_cnt_lsb_minus4=Ue(buf,nLen,StartBit);
else if( pic_order_cnt_type == 1 )
{
int delta_pic_order_always_zero_flag=u(1,buf,StartBit);
int offset_for_non_ref_pic=Se(buf,nLen,StartBit);
int offset_for_top_to_bottom_field=Se(buf,nLen,StartBit);
int num_ref_frames_in_pic_order_cnt_cycle=Ue(buf,nLen,StartBit);
int *offset_for_ref_frame=new int[num_ref_frames_in_pic_order_cnt_cycle];
for( int i = 0; i < num_ref_frames_in_pic_order_cnt_cycle; i++ )
offset_for_ref_frame[i]=Se(buf,nLen,StartBit);
delete [] offset_for_ref_frame;
}
int num_ref_frames=Ue(buf,nLen,StartBit);
int gaps_in_frame_num_value_allowed_flag=u(1,buf,StartBit);
// picture dimensions, coded in 16x16 macroblock units
int pic_width_in_mbs_minus1=Ue(buf,nLen,StartBit);
int pic_height_in_map_units_minus1=Ue(buf,nLen,StartBit);
width=(pic_width_in_mbs_minus1+1)*16;
height=(pic_height_in_map_units_minus1+1)*16;
int frame_mbs_only_flag=u(1,buf,StartBit);
if(!frame_mbs_only_flag)
int mb_adaptive_frame_field_flag=u(1,buf,StartBit);
int direct_8x8_inference_flag=u(1,buf,StartBit);
int frame_cropping_flag=u(1,buf,StartBit);
if(frame_cropping_flag)
{
int frame_crop_left_offset=Ue(buf,nLen,StartBit);
int frame_crop_right_offset=Ue(buf,nLen,StartBit);
int frame_crop_top_offset=Ue(buf,nLen,StartBit);
int frame_crop_bottom_offset=Ue(buf,nLen,StartBit);
}
// VUI parameters: parsed only to reach the timing (frame-rate) fields
int vui_parameter_present_flag=u(1,buf,StartBit);
if(vui_parameter_present_flag)
{
int aspect_ratio_info_present_flag=u(1,buf,StartBit);
if(aspect_ratio_info_present_flag)
{
int aspect_ratio_idc=u(8,buf,StartBit);
if(aspect_ratio_idc==255)
{
int sar_width=u(16,buf,StartBit);
int sar_height=u(16,buf,StartBit);
}
}
int overscan_info_present_flag=u(1,buf,StartBit);
if(overscan_info_present_flag)
int overscan_appropriate_flagu=u(1,buf,StartBit);
int video_signal_type_present_flag=u(1,buf,StartBit);
if(video_signal_type_present_flag)
{
int video_format=u(3,buf,StartBit);
int video_full_range_flag=u(1,buf,StartBit);
int colour_description_present_flag=u(1,buf,StartBit);
if(colour_description_present_flag)
{
int colour_primaries=u(8,buf,StartBit);
int transfer_characteristics=u(8,buf,StartBit);
int matrix_coefficients=u(8,buf,StartBit);
}
}
int chroma_loc_info_present_flag=u(1,buf,StartBit);
if(chroma_loc_info_present_flag)
{
int chroma_sample_loc_type_top_field=Ue(buf,nLen,StartBit);
int chroma_sample_loc_type_bottom_field=Ue(buf,nLen,StartBit);
}
int timing_info_present_flag=u(1,buf,StartBit);
if(timing_info_present_flag)
{
int num_units_in_tick=u(32,buf,StartBit);
int time_scale=u(32,buf,StartBit);
// a tick is half a frame for frame-coded content, hence the factor of 2
fps=time_scale/(2*num_units_in_tick);
}
}
return true;
}
else
return false;
}

34
media/sps_decode.h Normal file
View File

@ -0,0 +1,34 @@
#ifndef __SPS_DECODE__
#define __SPS_DECODE__
#include <stdio.h>
#include <stdint.h>
#include <string.h>
#include <math.h>
#include <windows.h>
// Exp-Golomb bit-reader helpers (each advances nStartBit past what it reads):
// Ue = unsigned ue(v) code, Se = signed se(v) code, u = fixed-width u(n) field.
UINT Ue(BYTE *pBuff, UINT nLen, UINT &nStartBit);
int Se(BYTE *pBuff, UINT nLen, UINT &nStartBit);
DWORD u(UINT BitCount, BYTE * buf, UINT &nStartBit);
/**
 * Remove H.264 emulation-prevention bytes (00 00 03 -> 00 00) in place.
 *
 * @param buf      NAL unit payload, modified in place
 * @param buf_size in: payload length in bytes; out: length after stripping
 */
void de_emulation_prevention(BYTE* buf, unsigned int* buf_size);
/**
 * Parse an SPS NAL unit and extract picture dimensions and frame rate.
 *
 * @param buf    SPS NAL unit bytes
 * @param nLen   length of buf in bytes
 * @param width  [out] luma width in pixels
 * @param height [out] luma height in pixels
 * @param fps    [out] frames per second (0 if no timing info present)
 * @return true (1) if buf was an SPS, false (0) otherwise
 */
int h264_decode_sps(BYTE * buf, unsigned int nLen, int &width, int &height, int &fps);
#endif

66
qedit.h Normal file
View File

@ -0,0 +1,66 @@
#include <Unknwn.h>
#include <strmif.h>
#pragma comment(lib, "strmiids.lib")
#ifndef __qedit_h__
#define __qedit_h__
///////////////////////////////////////////////////////////////////////////////////
#pragma once
///////////////////////////////////////////////////////////////////////////////////
// DirectShow SampleGrabber callback interface, redeclared here because the
// original qedit.h was removed from modern Windows SDKs. The binary layout
// (vtable order, UUID) must match the COM component exactly — do not reorder.
struct __declspec(uuid("0579154a-2b53-4994-b0d0-e773148eff85"))
ISampleGrabberCB : IUnknown
{
//
// Raw methods provided by interface
//
// Invoked with the original IMediaSample (callback method 0).
virtual HRESULT __stdcall SampleCB(
double SampleTime,
struct IMediaSample * pSample) = 0;
// Invoked with a copy of the sample bytes (callback method 1).
virtual HRESULT __stdcall BufferCB(
double SampleTime,
unsigned char * pBuffer,
long BufferLen) = 0;
};
// DirectShow SampleGrabber filter control interface (from the retired
// qedit.h). Vtable order and UUID must match the COM binary — do not reorder.
struct __declspec(uuid("6b652fff-11fe-4fce-92ad-0266b5d7c78f"))
ISampleGrabber : IUnknown
{
//
// Raw methods provided by interface
//
// Stop the graph after one sample has been grabbed.
virtual HRESULT __stdcall SetOneShot(
long OneShot) = 0;
// Restrict the media type the grabber will connect with.
virtual HRESULT __stdcall SetMediaType(
struct _AMMediaType * pType) = 0;
virtual HRESULT __stdcall GetConnectedMediaType(
struct _AMMediaType * pType) = 0;
// Buffer samples internally so GetCurrentBuffer can return them.
virtual HRESULT __stdcall SetBufferSamples(
long BufferThem) = 0;
virtual HRESULT __stdcall GetCurrentBuffer(
/*[in,out]*/ long * pBufferSize,
/*[out]*/ long * pBuffer) = 0;
virtual HRESULT __stdcall GetCurrentSample(
/*[out,retval]*/ struct IMediaSample * * ppSample) = 0;
// Register an ISampleGrabberCB; WhichMethodToCallback: 0=SampleCB, 1=BufferCB.
virtual HRESULT __stdcall SetCallback(
struct ISampleGrabberCB * pCallback,
long WhichMethodToCallback) = 0;
};
static const IID IID_ISampleGrabber = { 0x6B652FFF, 0x11FE, 0x4fce,{ 0x92, 0xAD, 0x02, 0x66, 0xB5, 0xD7, 0xC7, 0x8F } };
static const IID IID_ISampleGrabberCB = { 0x0579154A, 0x2B53, 0x4994,{ 0xB0, 0xD0, 0xE7, 0x73, 0x14, 0x8E, 0xFF, 0x85 } };
static const CLSID CLSID_SampleGrabber = { 0xC1F400A0, 0x3F08, 0x11d3,{ 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
static const CLSID CLSID_NullRenderer = { 0xC1F400A4, 0x3F08, 0x11d3,{ 0x9F, 0x0B, 0x00, 0x60, 0x08, 0x03, 0x9E, 0x37 } };
#endif

21
readme.md Normal file
View File

@ -0,0 +1,21 @@
### 基于qt本地PC摄像头采集并264 编码rtmp推流
#### 支持平台
1. windows
#### 依赖库:
1. 使用librtmp提供的接口实现rtmp推流。
2. ffmpeg作为视频264编码。
3. opengl用于本地摄像头的渲染。
4. directshow的相关接口用于获取本地的摄像头数据。
5. 界面风格基于 qssWraper 库 https://gitee.com/290198252/qsswraper
### 界面:
![image.png](https://www.testingcloud.club/sapi/api/image_download/80463174-3633-11eb-a135-525400dc6cec.png)
#### 支持的摄像头
#### 发行版
- win32

161
ui_mainwindow.h Normal file
View File

@ -0,0 +1,161 @@
/********************************************************************************
** Form generated from reading UI file 'mainwindow.ui'
**
** Created by: Qt User Interface Compiler version 5.14.0
**
** WARNING! All changes made in this file will be lost when recompiling UI file!
********************************************************************************/
#ifndef UI_MAINWINDOW_H
#define UI_MAINWINDOW_H
#include <QtCore/QVariant>
#include <QtWidgets/QApplication>
#include <QtWidgets/QComboBox>
#include <QtWidgets/QGridLayout>
#include <QtWidgets/QHBoxLayout>
#include <QtWidgets/QLabel>
#include <QtWidgets/QLineEdit>
#include <QtWidgets/QMainWindow>
#include <QtWidgets/QPushButton>
#include <QtWidgets/QSpacerItem>
#include <QtWidgets/QVBoxLayout>
#include <QtWidgets/QWidget>
QT_BEGIN_NAMESPACE
// Auto-generated (uic) scaffold for the main window: camera/microphone
// pickers, an RTMP URL field and a push button, laid out in a top toolbar
// row over a stretchable preview area. Regenerate from mainwindow.ui
// instead of editing by hand.
class Ui_MainWindow
{
public:
QWidget *centralWidget;
QGridLayout *gridLayout;
QVBoxLayout *verticalLayout;
QHBoxLayout *horizontalLayout;
QPushButton *pushButton;
QComboBox *comboBox;
QPushButton *pushButton_3;
QComboBox *comboBox_2;
QLabel *label;
QLineEdit *lineEdit;
QPushButton *pushButton_2;
QSpacerItem *horizontalSpacer_2;
QSpacerItem *verticalSpacer;
// Build the widget tree and wire auto-connected slots.
void setupUi(QMainWindow *MainWindow)
{
if (MainWindow->objectName().isEmpty())
MainWindow->setObjectName(QString::fromUtf8("MainWindow"));
MainWindow->resize(1383, 1116);
QSizePolicy sizePolicy(QSizePolicy::Expanding, QSizePolicy::Preferred);
sizePolicy.setHorizontalStretch(0);
sizePolicy.setVerticalStretch(0);
sizePolicy.setHeightForWidth(MainWindow->sizePolicy().hasHeightForWidth());
MainWindow->setSizePolicy(sizePolicy);
MainWindow->setMinimumSize(QSize(600, 800));
centralWidget = new QWidget(MainWindow);
centralWidget->setObjectName(QString::fromUtf8("centralWidget"));
QSizePolicy sizePolicy1(QSizePolicy::Expanding, QSizePolicy::Expanding);
sizePolicy1.setHorizontalStretch(0);
sizePolicy1.setVerticalStretch(0);
sizePolicy1.setHeightForWidth(centralWidget->sizePolicy().hasHeightForWidth());
centralWidget->setSizePolicy(sizePolicy1);
gridLayout = new QGridLayout(centralWidget);
gridLayout->setSpacing(6);
gridLayout->setContentsMargins(11, 11, 11, 11);
gridLayout->setObjectName(QString::fromUtf8("gridLayout"));
verticalLayout = new QVBoxLayout();
verticalLayout->setSpacing(6);
verticalLayout->setObjectName(QString::fromUtf8("verticalLayout"));
horizontalLayout = new QHBoxLayout();
horizontalLayout->setSpacing(6);
horizontalLayout->setObjectName(QString::fromUtf8("horizontalLayout"));
horizontalLayout->setContentsMargins(2, 2, 2, 1);
pushButton = new QPushButton(centralWidget);
pushButton->setObjectName(QString::fromUtf8("pushButton"));
pushButton->setMinimumSize(QSize(100, 50));
pushButton->setSizeIncrement(QSize(0, 6));
pushButton->setBaseSize(QSize(0, 50));
horizontalLayout->addWidget(pushButton);
comboBox = new QComboBox(centralWidget);
comboBox->setObjectName(QString::fromUtf8("comboBox"));
comboBox->setMinimumSize(QSize(200, 35));
comboBox->setBaseSize(QSize(0, 50));
horizontalLayout->addWidget(comboBox);
pushButton_3 = new QPushButton(centralWidget);
pushButton_3->setObjectName(QString::fromUtf8("pushButton_3"));
pushButton_3->setMinimumSize(QSize(100, 50));
horizontalLayout->addWidget(pushButton_3);
comboBox_2 = new QComboBox(centralWidget);
comboBox_2->setObjectName(QString::fromUtf8("comboBox_2"));
comboBox_2->setMinimumSize(QSize(200, 35));
horizontalLayout->addWidget(comboBox_2);
label = new QLabel(centralWidget);
label->setObjectName(QString::fromUtf8("label"));
horizontalLayout->addWidget(label);
lineEdit = new QLineEdit(centralWidget);
lineEdit->setObjectName(QString::fromUtf8("lineEdit"));
lineEdit->setMinimumSize(QSize(300, 30));
horizontalLayout->addWidget(lineEdit);
pushButton_2 = new QPushButton(centralWidget);
pushButton_2->setObjectName(QString::fromUtf8("pushButton_2"));
pushButton_2->setMinimumSize(QSize(60, 50));
horizontalLayout->addWidget(pushButton_2);
horizontalSpacer_2 = new QSpacerItem(40, 20, QSizePolicy::Expanding, QSizePolicy::Minimum);
horizontalLayout->addItem(horizontalSpacer_2);
horizontalLayout->setStretch(0, 1);
horizontalLayout->setStretch(1, 2);
horizontalLayout->setStretch(7, 13);
verticalLayout->addLayout(horizontalLayout);
verticalSpacer = new QSpacerItem(20, 40, QSizePolicy::Minimum, QSizePolicy::Expanding);
verticalLayout->addItem(verticalSpacer);
verticalLayout->setStretch(0, 1);
verticalLayout->setStretch(1, 9);
gridLayout->addLayout(verticalLayout, 0, 0, 1, 1);
MainWindow->setCentralWidget(centralWidget);
retranslateUi(MainWindow);
QMetaObject::connectSlotsByName(MainWindow);
} // setupUi
// Apply translated (UTF-8 octal-escaped Chinese) texts to all widgets.
void retranslateUi(QMainWindow *MainWindow)
{
MainWindow->setWindowTitle(QCoreApplication::translate("MainWindow", "MainWindow", nullptr));
pushButton->setText(QCoreApplication::translate("MainWindow", "\346\211\223\345\274\200\346\221\204\345\203\217\345\244\264", nullptr));
pushButton_3->setText(QCoreApplication::translate("MainWindow", "\346\211\223\345\274\200\351\272\246\345\205\213\351\243\216", nullptr));
label->setText(QCoreApplication::translate("MainWindow", "rtmp\346\216\250\346\265\201\345\234\260\345\235\200", nullptr));
lineEdit->setText(QCoreApplication::translate("MainWindow", "rtmp://127.0.0.1:1935/live/1", nullptr));
pushButton_2->setText(QCoreApplication::translate("MainWindow", "\346\216\250\346\265\201", nullptr));
} // retranslateUi
};
namespace Ui {
class MainWindow: public Ui_MainWindow {};
} // namespace Ui
QT_END_NAMESPACE
#endif // UI_MAINWINDOW_H

63
ui_process.h Normal file
View File

@ -0,0 +1,63 @@
/********************************************************************************
** Form generated from reading UI file 'process.ui'
**
** Created by: Qt User Interface Compiler version 5.14.0
**
** WARNING! All changes made in this file will be lost when recompiling UI file!
********************************************************************************/
#ifndef UI_PROCESS_H
#define UI_PROCESS_H
#include <QtCore/QVariant>
#include <QtWidgets/QApplication>
#include <QtWidgets/QDialog>
#include <QtWidgets/QLabel>
#include <QtWidgets/QProgressBar>
QT_BEGIN_NAMESPACE
// Auto-generated (uic) scaffold for the "Process" progress dialog: a
// progress bar plus a "loading data" label. Regenerate from process.ui
// instead of editing by hand.
class Ui_Process
{
public:
QProgressBar *progressBar;
QLabel *label;
// Build the dialog's widget tree and wire auto-connected slots.
void setupUi(QDialog *Process)
{
if (Process->objectName().isEmpty())
Process->setObjectName(QString::fromUtf8("Process"));
Process->resize(324, 88);
QSizePolicy sizePolicy(QSizePolicy::Fixed, QSizePolicy::Fixed);
sizePolicy.setHorizontalStretch(0);
sizePolicy.setVerticalStretch(0);
sizePolicy.setHeightForWidth(Process->sizePolicy().hasHeightForWidth());
Process->setSizePolicy(sizePolicy);
progressBar = new QProgressBar(Process);
progressBar->setObjectName(QString::fromUtf8("progressBar"));
progressBar->setGeometry(QRect(30, 50, 281, 31));
progressBar->setValue(24);
label = new QLabel(Process);
label->setObjectName(QString::fromUtf8("label"));
label->setGeometry(QRect(120, 30, 121, 16));
retranslateUi(Process);
QMetaObject::connectSlotsByName(Process);
} // setupUi
// Apply translated texts ("loading data" label, window title).
void retranslateUi(QDialog *Process)
{
Process->setWindowTitle(QCoreApplication::translate("Process", "Dialog", nullptr));
label->setText(QCoreApplication::translate("Process", "\346\255\243\345\234\250\345\212\240\350\275\275\346\225\260\346\215\256", nullptr));
} // retranslateUi
};
namespace Ui {
class Process: public Ui_Process {};
} // namespace Ui
QT_END_NAMESPACE
#endif // UI_PROCESS_H

55
ui_qsstoast.h Normal file
View File

@ -0,0 +1,55 @@
/********************************************************************************
** Form generated from reading UI file 'qsstoast.ui'
**
** Created by: Qt User Interface Compiler version 5.14.0
**
** WARNING! All changes made in this file will be lost when recompiling UI file!
********************************************************************************/
#ifndef UI_QSSTOAST_H
#define UI_QSSTOAST_H
#include <QtCore/QVariant>
#include <QtWidgets/QApplication>
#include <QtWidgets/QLabel>
#include <QtWidgets/QWidget>
QT_BEGIN_NAMESPACE
// Auto-generated (uic) scaffold for the toast popup (qsstoast.ui): a single
// text label inside a frameless widget. Regenerate instead of hand-editing.
class Ui_Toast
{
public:
QLabel *label;
// Build the widget tree and wire auto-connected slots.
void setupUi(QWidget *Toast)
{
if (Toast->objectName().isEmpty())
Toast->setObjectName(QString::fromUtf8("Toast"));
Toast->resize(932, 59);
QFont font;
font.setFamily(QString::fromUtf8("Arial"));
Toast->setFont(font);
label = new QLabel(Toast);
label->setObjectName(QString::fromUtf8("label"));
label->setGeometry(QRect(170, 10, 231, 31));
retranslateUi(Toast);
QMetaObject::connectSlotsByName(Toast);
} // setupUi
// Apply translated texts (placeholder label, window title).
void retranslateUi(QWidget *Toast)
{
Toast->setWindowTitle(QCoreApplication::translate("Toast", "Form", nullptr));
label->setText(QCoreApplication::translate("Toast", "TextLabel", nullptr));
} // retranslateUi
};
namespace Ui {
class Toast: public Ui_Toast {};
} // namespace Ui
QT_END_NAMESPACE
#endif // UI_QSSTOAST_H

52
ui_toast.h Normal file
View File

@ -0,0 +1,52 @@
/********************************************************************************
** Form generated from reading UI file 'toast.ui'
**
** Created by: Qt User Interface Compiler version 5.14.0
**
** WARNING! All changes made in this file will be lost when recompiling UI file!
********************************************************************************/
#ifndef UI_TOAST_H
#define UI_TOAST_H
#include <QtCore/QVariant>
#include <QtWidgets/QApplication>
#include <QtWidgets/QLabel>
#include <QtWidgets/QWidget>
QT_BEGIN_NAMESPACE
// Auto-generated (uic) scaffold for toast.ui — same single-label layout the
// ToastWidget in components/toast.cpp uses. Regenerate instead of editing.
class Ui_Form
{
public:
QLabel *label;
// Build the widget tree and wire auto-connected slots.
void setupUi(QWidget *Form)
{
if (Form->objectName().isEmpty())
Form->setObjectName(QString::fromUtf8("Form"));
Form->resize(932, 59);
label = new QLabel(Form);
label->setObjectName(QString::fromUtf8("label"));
label->setGeometry(QRect(170, 10, 231, 31));
retranslateUi(Form);
QMetaObject::connectSlotsByName(Form);
} // setupUi
// Apply translated texts (placeholder label, window title).
void retranslateUi(QWidget *Form)
{
Form->setWindowTitle(QCoreApplication::translate("Form", "Form", nullptr));
label->setText(QCoreApplication::translate("Form", "TextLabel", nullptr));
} // retranslateUi
};
namespace Ui {
class Form: public Ui_Form {};
} // namespace Ui
QT_END_NAMESPACE
#endif // UI_TOAST_H

119
utils/Base64.cpp Normal file
View File

@ -0,0 +1,119 @@
#include "Base64.h"
#include "string.h"
/**
 * Decode a NUL-terminated base64 string into pOutput.
 *
 * @param pInput  NUL-terminated base64 text; non-alphabet bytes are skipped
 * @param pOutput receives the decoded bytes plus a trailing NUL
 * @return number of decoded bytes written (not counting the trailing NUL)
 *
 * Fixes vs. the previous version:
 *  - `char a` could never equal 255 where char is signed, so BVal's
 *    invalid-character sentinel was silently ignored; now unsigned char;
 *  - `pInput[i] > 127` never fired for signed char (bytes >= 0x80 are
 *    negative); the byte is now widened before the comparison;
 *  - hitting '=' padding returned p-pOutput+1 (one byte too many) and
 *    skipped the NUL termination; both exit paths now agree.
 */
int DecodeBase64(char * pInput, char * pOutput) {
	int i = 0;
	int iCnt = 0;                       // position within the current 4-char group
	int iSrcLen = (int)strlen(pInput);
	char * p = pOutput;
	for (i=0; i<iSrcLen; i++)
	{
		if ((unsigned char)pInput[i] > 127) continue;   // skip non-ASCII bytes
		if (pInput[i] == '=') break;                    // padding: no more data
		unsigned char a = (unsigned char)BVal(pInput[i]);
		if (a == 255) continue;                         // not a base64 alphabet char
		switch (iCnt)
		{
		case 0:
			*p = a << 2;            // top 6 bits of output byte 0
			iCnt++;
			break;
		case 1:
			*p++ |= a >> 4;         // finish byte 0, start byte 1
			*p = a << 4;
			iCnt++;
			break;
		case 2:
			*p++ |= a >> 2;         // finish byte 1, start byte 2
			*p = a << 6;
			iCnt++;
			break;
		case 3:
			*p++ |= a;              // finish byte 2
			iCnt = 0;
			break;
		}
	}
	*p = 0x00;
	return p-pOutput;
}
/**
 * Encode iInputLen bytes from pInput as base64 into pOutput (NUL-terminated).
 *
 * @param pInput    arbitrary binary input (need not be NUL-terminated)
 * @param iInputLen number of input bytes to encode
 * @param pOutput   receives the base64 text plus a trailing NUL
 * @return length of the encoded string (not counting the trailing NUL)
 *
 * Fix: the shifts previously operated on plain char, so on signed-char
 * targets any input byte >= 0x80 sign-extended and produced alphabet indices
 * outside 0..63; all bytes are now widened to unsigned char first.
 */
int EncodeBase64(char * pInput,int iInputLen,char * pOutput)
{
	int i = 0;
	int loop = 0;
	int remain = 0;
	int iDstLen = 0;
	int iSrcLen = iInputLen;
	const unsigned char *src = (const unsigned char *)pInput;
	loop = iSrcLen/3;
	remain = iSrcLen%3;
	// encode three input bytes into four alphabet characters at a time
	for (i=0; i<loop; i++)
	{
		unsigned char a1 = (unsigned char)(src[i*3] >> 2);
		unsigned char a2 = (unsigned char)(((src[i*3] & 0x03) << 4) | (src[i*3+1] >> 4));
		unsigned char a3 = (unsigned char)(((src[i*3+1] & 0x0F) << 2) | ((src[i*3+2] & 0xC0) >> 6));
		unsigned char a4 = (unsigned char)(src[i*3+2] & 0x3F);
		pOutput[i*4] = AVal(a1);
		pOutput[i*4+1] = AVal(a2);
		pOutput[i*4+2] = AVal(a3);
		pOutput[i*4+3] = AVal(a4);
	}
	iDstLen = i*4;
	if (remain == 1)
	{
		// one leftover byte -> two characters plus "==" padding
		i = iSrcLen-1;
		unsigned char a1 = (unsigned char)(src[i] >> 2);
		unsigned char a2 = (unsigned char)((src[i] & 0x03) << 4);
		pOutput[iDstLen++] = AVal(a1);
		pOutput[iDstLen++] = AVal(a2);
		pOutput[iDstLen++] = '=';
		pOutput[iDstLen++] = '=';
		pOutput[iDstLen] = 0x00;
	}
	else if (remain == 2)
	{
		// two leftover bytes -> three characters plus "=" padding
		i = iSrcLen-2;
		unsigned char a1 = (unsigned char)(src[i] >> 2);
		unsigned char a2 = (unsigned char)(((src[i] & 0x03) << 4) | (src[i+1] >> 4));
		unsigned char a3 = (unsigned char)((src[i+1] & 0x0F) << 2);
		pOutput[iDstLen++] = AVal(a1);
		pOutput[iDstLen++] = AVal(a2);
		pOutput[iDstLen++] = AVal(a3);
		pOutput[iDstLen++] = '=';
		pOutput[iDstLen] = 0x00;
	}
	else
	{
		// input length divisible by 3: no padding needed
		pOutput[iDstLen] = 0x00;
	}
	return iDstLen;
}

21
utils/Debuger.cpp Normal file
View File

@ -0,0 +1,21 @@
#include "Debuger.h"
// NOTE(review): every member below is a stub — the Debug() overloads discard
// their arguments and return 0. Presumably logging is meant to be wired up
// later; confirm before relying on these for diagnostics.
Debuger::Debuger() {
}
Debuger::~Debuger() {
}
// Stub: ignores the wide-string message, always returns 0.
int Debuger::Debug(wstring log) {
return 0;
}
// Stub: ignores the narrow-string message, always returns 0.
int Debuger::Debug(string log) {
return 0;
}
// Stub: printf-style overload; format and varargs are ignored.
int Debuger::Debug(const wchar_t *format, ...) {
return 0;
}

24
utils/utils.cpp Normal file
View File

@ -0,0 +1,24 @@
#include "utils.h"
#include <cstdlib>
#include <string>
// Convert a NUL-terminated multibyte string to a wide string using the
// current C locale (std::mbstowcs). Returns an empty string for null input
// or an invalid multibyte sequence.
// (The previous implementation was entirely commented out and always
// returned L"", silently discarding its input.)
std::wstring char2wchar(const char* cchar)
{
    if (cchar == nullptr)
        return std::wstring();
    size_t needed = std::mbstowcs(nullptr, cchar, 0); // measure first
    if (needed == (size_t)-1)
        return std::wstring(); // invalid sequence in the current locale
    std::wstring result(needed, L'\0');
    if (needed > 0)
        std::mbstowcs(&result[0], cchar, needed);
    return result;
}
// Map a DirectShow media subtype GUID to the matching FFmpeg pixel format.
// Only RGB32 is recognized; every other subtype yields AV_PIX_FMT_NONE.
AVPixelFormat GUIDToAvFormat(GUID mediatype){
    const bool isRgb32 = IsEqualIID(MEDIASUBTYPE_RGB32, mediatype) != 0;
    return isRgb32 ? AV_PIX_FMT_BGRA : AV_PIX_FMT_NONE;
}

119
yuvgl.pro Normal file
View File

@ -0,0 +1,119 @@
#-------------------------------------------------
#
# Project created by QtCreator 2019-09-23T11:02:49
#
#-------------------------------------------------
QT += core gui
greaterThan(QT_MAJOR_VERSION, 4): QT += widgets
QT += network
QT += multimedia
TARGET = yuvgl
INCLUDEPATH += $$[QT_INSTALL_HEADERS]/QtZlib
# TODO(review): hard-coded absolute path — this breaks the build on any other
# machine. Prefer a path relative to $$PWD (e.g. a git submodule) or a
# configurable qmake variable.
include(G:\\project\\c++qt\\qsswraper\\qsswraper.pri)
# The following define makes your compiler emit warnings if you use
# any feature of Qt which has been marked as deprecated (the exact warnings
# depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS
# You can also make your code fail to compile if you use deprecated APIs.
# In order to do so, uncomment the following line.
# You can also select to disable deprecated APIs only up to a certain version of Qt.
#DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0
# qmake CONFIG values are case-sensitive: the previous "CONFIG += C++11" was
# silently ignored, so the C++11 standard flag was never passed to the
# compiler. The correct, lowercase option is used below.
CONFIG += c++11
DEFINES += WIN32_LEAN_AND_MEAN
SOURCES += \
    components/toast.cpp \
    librtmp/amf.c \
    librtmp/hashswf.c \
    librtmp/log.c \
    librtmp/parseurl.c \
    librtmp/rtmp.c \
    main.cpp \
    mainwindow.cpp \
    cplaywidget.cpp \
    media/AACAudioCoder.cpp \
    media/AudioCapture.cpp \
    media/CameraCapture.cpp \
    media/DXGICapture.cpp \
    media/RtmpPusher.cpp \
    media/VideoCoder.cpp \
    media/audiocaptureff.cpp \
    media/screen_capture.cpp \
    media/sps_decode.cpp \
    utils/Base64.cpp \
    utils/Debuger.cpp \
    utils/utils.cpp
HEADERS += \
    components/toast.h \
    librtmp/strncasecmp.h \
    mainwindow.h \
    cplaywidget.h \
    media/screen_capture.h
FORMS += \
    components/toast.ui \
    mainwindow.ui
INCLUDEPATH += media/ \
    C:\\Program Files\\OpenSSL-Win64\\include
# NOTE(review): contains(DEFINES, __MINGW32__) only matches if __MINGW32__ was
# explicitly appended to the qmake DEFINES list; it does NOT detect the
# compiler-predefined macro. Consider mingw { ... } / msvc { ... } scopes
# instead — confirm before changing, as it alters which branch is taken.
contains(DEFINES, __MINGW32__){
    message("mingw")
    INCLUDEPATH += media/ inc/
    contains(QT_ARCH, i386) {
        message("32-bit")
        LIBS += -L$$PWD/third/ffmpeg/mingw/32/lib
        LIBS += -lm -lavformat -lavdevice -lavfilter -lavcodec -lavutil -lswresample -lswscale -lpthread -lm -lfdk-aac -lx264 -liconv -lucrtbase -lstrmiids
        LIBS += -lole32 -loleAut32 -lquartz -ldxguid -ldxapi -lwinmm -lbcrypt -lssl -lcrypto -lGdi32 -lws2_32 -lbz2 -lz -lportaudio -lshlwapi -lvfw32 -lpostproc -luuid
    } else {
        message("64-bit")
    }
}else{
    message("msvc")
    DEFINES += _CRT_SECURE_NO_DEPRECATE \
        _CRT_NONSTDC_NO_DEPRECATE
    contains(QT_ARCH, i386) {
        INCLUDEPATH += inc $$PWD/third/msvc32/fdk-aac/include \
            $$PWD/third/msvc32/libx264/include \
            $$PWD/third/msvc32/ffmpeg/include \
            $$PWD/third/msvc32/openssl/include
        LIBS += -L$$PWD/third/msvc32/libx264/lib
        LIBS += -L$$PWD/third/msvc32/fdk-aac/lib
        LIBS += -L$$PWD/third/msvc32/ffmpeg/lib
        LIBS += -L$$PWD/third/msvc32/openssl/lib
        LIBS += libavfilter.a libavdevice.a libavcodec.a libpostproc.a \
            libavformat.a libavutil.a \
            libswresample.a libswscale.a fdk-aac.lib ws2_32.lib libeay32.lib ssleay32.lib \
            shell32.lib gdi32.lib crypt32.lib User32.lib GDI32.lib Advapi32.lib zlibstaticd.lib Secur32.lib \
            Bcrypt.lib Kernel32.lib portaudio_x86.lib ole32.lib oleaut32.lib strmiids.lib libx264.lib d3d9.lib
    }
    else{
        message("64-bit")
        QMAKE_CXXFLAGS_RELEASE += -Zi
        QMAKE_LFLAGS_RELEASE += /DEBUG /OPT:REF
    }
}
# Default deployment rules (generated by Qt Creator).
qnx: target.path = /tmp/$${TARGET}/bin
else: unix:!android: target.path = /opt/$${TARGET}/bin
!isEmpty(target.path): INSTALLS += target

263
yuvgl.pro.user Normal file
View File

@ -0,0 +1,263 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE QtCreatorProject>
<!-- Written by QtCreator 11.0.3, 2023-11-12T20:47:56. -->
<qtcreator>
<data>
<variable>EnvironmentId</variable>
<value type="QByteArray">{2f8149c7-6065-4889-8af2-fdde6a16f5dc}</value>
</data>
<data>
<variable>ProjectExplorer.Project.ActiveTarget</variable>
<value type="qlonglong">0</value>
</data>
<data>
<variable>ProjectExplorer.Project.EditorSettings</variable>
<valuemap type="QVariantMap">
<value type="bool" key="EditorConfiguration.AutoIndent">true</value>
<value type="bool" key="EditorConfiguration.AutoSpacesForTabs">false</value>
<value type="bool" key="EditorConfiguration.CamelCaseNavigation">true</value>
<valuemap type="QVariantMap" key="EditorConfiguration.CodeStyle.0">
<value type="QString" key="language">Cpp</value>
<valuemap type="QVariantMap" key="value">
<value type="QByteArray" key="CurrentPreferences">CppGlobal</value>
</valuemap>
</valuemap>
<valuemap type="QVariantMap" key="EditorConfiguration.CodeStyle.1">
<value type="QString" key="language">QmlJS</value>
<valuemap type="QVariantMap" key="value">
<value type="QByteArray" key="CurrentPreferences">QmlJSGlobal</value>
</valuemap>
</valuemap>
<value type="qlonglong" key="EditorConfiguration.CodeStyle.Count">2</value>
<value type="QByteArray" key="EditorConfiguration.Codec">GBK</value>
<value type="bool" key="EditorConfiguration.ConstrainTooltips">false</value>
<value type="int" key="EditorConfiguration.IndentSize">4</value>
<value type="bool" key="EditorConfiguration.KeyboardTooltips">false</value>
<value type="int" key="EditorConfiguration.MarginColumn">80</value>
<value type="bool" key="EditorConfiguration.MouseHiding">true</value>
<value type="bool" key="EditorConfiguration.MouseNavigation">true</value>
<value type="int" key="EditorConfiguration.PaddingMode">1</value>
<value type="bool" key="EditorConfiguration.PreferSingleLineComments">false</value>
<value type="bool" key="EditorConfiguration.ScrollWheelZooming">true</value>
<value type="bool" key="EditorConfiguration.ShowMargin">false</value>
<value type="int" key="EditorConfiguration.SmartBackspaceBehavior">0</value>
<value type="bool" key="EditorConfiguration.SmartSelectionChanging">true</value>
<value type="bool" key="EditorConfiguration.SpacesForTabs">true</value>
<value type="int" key="EditorConfiguration.TabKeyBehavior">2</value>
<value type="int" key="EditorConfiguration.TabSize">8</value>
<value type="bool" key="EditorConfiguration.UseGlobal">true</value>
<value type="bool" key="EditorConfiguration.UseIndenter">false</value>
<value type="int" key="EditorConfiguration.Utf8BomBehavior">1</value>
<value type="bool" key="EditorConfiguration.addFinalNewLine">true</value>
<value type="bool" key="EditorConfiguration.cleanIndentation">true</value>
<value type="bool" key="EditorConfiguration.cleanWhitespace">true</value>
<value type="QString" key="EditorConfiguration.ignoreFileTypes">*.md, *.MD, Makefile</value>
<value type="bool" key="EditorConfiguration.inEntireDocument">false</value>
<value type="bool" key="EditorConfiguration.skipTrailingWhitespace">true</value>
<value type="bool" key="EditorConfiguration.tintMarginArea">true</value>
</valuemap>
</data>
<data>
<variable>ProjectExplorer.Project.PluginSettings</variable>
<valuemap type="QVariantMap">
<valuemap type="QVariantMap" key="AutoTest.ActiveFrameworks">
<value type="bool" key="AutoTest.Framework.Boost">true</value>
<value type="bool" key="AutoTest.Framework.CTest">false</value>
<value type="bool" key="AutoTest.Framework.Catch">true</value>
<value type="bool" key="AutoTest.Framework.GTest">true</value>
<value type="bool" key="AutoTest.Framework.QtQuickTest">true</value>
<value type="bool" key="AutoTest.Framework.QtTest">true</value>
</valuemap>
<valuemap type="QVariantMap" key="AutoTest.CheckStates"/>
<value type="int" key="AutoTest.RunAfterBuild">0</value>
<value type="bool" key="AutoTest.UseGlobal">true</value>
<valuemap type="QVariantMap" key="ClangTools">
<value type="bool" key="ClangTools.AnalyzeOpenFiles">true</value>
<value type="bool" key="ClangTools.BuildBeforeAnalysis">true</value>
<value type="QString" key="ClangTools.DiagnosticConfig">Builtin.DefaultTidyAndClazy</value>
<value type="int" key="ClangTools.ParallelJobs">6</value>
<value type="bool" key="ClangTools.PreferConfigFile">true</value>
<valuelist type="QVariantList" key="ClangTools.SelectedDirs"/>
<valuelist type="QVariantList" key="ClangTools.SelectedFiles"/>
<valuelist type="QVariantList" key="ClangTools.SuppressedDiagnostics"/>
<value type="bool" key="ClangTools.UseGlobalSettings">true</value>
</valuemap>
</valuemap>
</data>
<data>
<variable>ProjectExplorer.Project.Target.0</variable>
<valuemap type="QVariantMap">
<value type="QString" key="DeviceType">Desktop</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.DefaultDisplayName">Desktop Qt 5.15.2 MSVC2019 64bit</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.DisplayName">Desktop Qt 5.15.2 MSVC2019 64bit</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">qt.qt5.5152.win64_msvc2019_64_kit</value>
<value type="qlonglong" key="ProjectExplorer.Target.ActiveBuildConfiguration">0</value>
<value type="qlonglong" key="ProjectExplorer.Target.ActiveDeployConfiguration">0</value>
<value type="qlonglong" key="ProjectExplorer.Target.ActiveRunConfiguration">0</value>
<valuemap type="QVariantMap" key="ProjectExplorer.Target.BuildConfiguration.0">
<value type="int" key="EnableQmlDebugging">0</value>
<value type="QString" key="ProjectExplorer.BuildConfiguration.BuildDirectory">G:\project\multimedia\client\rtmp_demo\build-yuvgl-Desktop_Qt_5_15_2_MSVC2019_64bit-Debug</value>
<value type="QString" key="ProjectExplorer.BuildConfiguration.BuildDirectory.shadowDir">G:/project/multimedia/client/rtmp_demo/build-yuvgl-Desktop_Qt_5_15_2_MSVC2019_64bit-Debug</value>
<valuemap type="QVariantMap" key="ProjectExplorer.BuildConfiguration.BuildStepList.0">
<valuemap type="QVariantMap" key="ProjectExplorer.BuildStepList.Step.0">
<value type="bool" key="ProjectExplorer.BuildStep.Enabled">true</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">QtProjectManager.QMakeBuildStep</value>
<value type="bool" key="QtProjectManager.QMakeBuildStep.QMakeForced">false</value>
<valuelist type="QVariantList" key="QtProjectManager.QMakeBuildStep.SelectedAbis"/>
</valuemap>
<valuemap type="QVariantMap" key="ProjectExplorer.BuildStepList.Step.1">
<value type="bool" key="ProjectExplorer.BuildStep.Enabled">true</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">Qt4ProjectManager.MakeStep</value>
</valuemap>
<value type="qlonglong" key="ProjectExplorer.BuildStepList.StepsCount">2</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.DefaultDisplayName">构建</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.DisplayName">构建</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">ProjectExplorer.BuildSteps.Build</value>
</valuemap>
<valuemap type="QVariantMap" key="ProjectExplorer.BuildConfiguration.BuildStepList.1">
<valuemap type="QVariantMap" key="ProjectExplorer.BuildStepList.Step.0">
<value type="bool" key="ProjectExplorer.BuildStep.Enabled">true</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">Qt4ProjectManager.MakeStep</value>
<value type="QString" key="Qt4ProjectManager.MakeStep.MakeArguments">clean</value>
</valuemap>
<value type="qlonglong" key="ProjectExplorer.BuildStepList.StepsCount">1</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.DefaultDisplayName">清除</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.DisplayName">清除</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">ProjectExplorer.BuildSteps.Clean</value>
</valuemap>
<value type="int" key="ProjectExplorer.BuildConfiguration.BuildStepListCount">2</value>
<value type="bool" key="ProjectExplorer.BuildConfiguration.ClearSystemEnvironment">false</value>
<valuelist type="QVariantList" key="ProjectExplorer.BuildConfiguration.CustomParsers"/>
<value type="bool" key="ProjectExplorer.BuildConfiguration.ParseStandardOutput">false</value>
<valuelist type="QVariantList" key="ProjectExplorer.BuildConfiguration.UserEnvironmentChanges"/>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.DisplayName">Debug</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">Qt4ProjectManager.Qt4BuildConfiguration</value>
<value type="int" key="Qt4ProjectManager.Qt4BuildConfiguration.BuildConfiguration">2</value>
</valuemap>
<valuemap type="QVariantMap" key="ProjectExplorer.Target.BuildConfiguration.1">
<value type="QString" key="ProjectExplorer.BuildConfiguration.BuildDirectory">G:\project\multimedia\client\rtmp_demo\build-yuvgl-Desktop_Qt_5_15_2_MSVC2019_64bit-Release</value>
<value type="QString" key="ProjectExplorer.BuildConfiguration.BuildDirectory.shadowDir">G:/project/multimedia/client/rtmp_demo/build-yuvgl-Desktop_Qt_5_15_2_MSVC2019_64bit-Release</value>
<valuemap type="QVariantMap" key="ProjectExplorer.BuildConfiguration.BuildStepList.0">
<valuemap type="QVariantMap" key="ProjectExplorer.BuildStepList.Step.0">
<value type="bool" key="ProjectExplorer.BuildStep.Enabled">true</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">QtProjectManager.QMakeBuildStep</value>
<value type="bool" key="QtProjectManager.QMakeBuildStep.QMakeForced">false</value>
<valuelist type="QVariantList" key="QtProjectManager.QMakeBuildStep.SelectedAbis"/>
</valuemap>
<valuemap type="QVariantMap" key="ProjectExplorer.BuildStepList.Step.1">
<value type="bool" key="ProjectExplorer.BuildStep.Enabled">true</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">Qt4ProjectManager.MakeStep</value>
</valuemap>
<value type="qlonglong" key="ProjectExplorer.BuildStepList.StepsCount">2</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.DefaultDisplayName">构建</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.DisplayName">构建</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">ProjectExplorer.BuildSteps.Build</value>
</valuemap>
<valuemap type="QVariantMap" key="ProjectExplorer.BuildConfiguration.BuildStepList.1">
<valuemap type="QVariantMap" key="ProjectExplorer.BuildStepList.Step.0">
<value type="bool" key="ProjectExplorer.BuildStep.Enabled">true</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">Qt4ProjectManager.MakeStep</value>
<value type="QString" key="Qt4ProjectManager.MakeStep.MakeArguments">clean</value>
</valuemap>
<value type="qlonglong" key="ProjectExplorer.BuildStepList.StepsCount">1</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.DefaultDisplayName">清除</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.DisplayName">清除</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">ProjectExplorer.BuildSteps.Clean</value>
</valuemap>
<value type="int" key="ProjectExplorer.BuildConfiguration.BuildStepListCount">2</value>
<value type="bool" key="ProjectExplorer.BuildConfiguration.ClearSystemEnvironment">false</value>
<valuelist type="QVariantList" key="ProjectExplorer.BuildConfiguration.CustomParsers"/>
<value type="bool" key="ProjectExplorer.BuildConfiguration.ParseStandardOutput">false</value>
<valuelist type="QVariantList" key="ProjectExplorer.BuildConfiguration.UserEnvironmentChanges"/>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.DisplayName">Release</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">Qt4ProjectManager.Qt4BuildConfiguration</value>
<value type="int" key="Qt4ProjectManager.Qt4BuildConfiguration.BuildConfiguration">0</value>
<value type="int" key="QtQuickCompiler">0</value>
</valuemap>
<valuemap type="QVariantMap" key="ProjectExplorer.Target.BuildConfiguration.2">
<value type="int" key="EnableQmlDebugging">0</value>
<value type="QString" key="ProjectExplorer.BuildConfiguration.BuildDirectory">G:\project\multimedia\client\rtmp_demo\build-yuvgl-Desktop_Qt_5_15_2_MSVC2019_64bit-Profile</value>
<value type="QString" key="ProjectExplorer.BuildConfiguration.BuildDirectory.shadowDir">G:/project/multimedia/client/rtmp_demo/build-yuvgl-Desktop_Qt_5_15_2_MSVC2019_64bit-Profile</value>
<valuemap type="QVariantMap" key="ProjectExplorer.BuildConfiguration.BuildStepList.0">
<valuemap type="QVariantMap" key="ProjectExplorer.BuildStepList.Step.0">
<value type="bool" key="ProjectExplorer.BuildStep.Enabled">true</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">QtProjectManager.QMakeBuildStep</value>
<value type="bool" key="QtProjectManager.QMakeBuildStep.QMakeForced">false</value>
<valuelist type="QVariantList" key="QtProjectManager.QMakeBuildStep.SelectedAbis"/>
</valuemap>
<valuemap type="QVariantMap" key="ProjectExplorer.BuildStepList.Step.1">
<value type="bool" key="ProjectExplorer.BuildStep.Enabled">true</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">Qt4ProjectManager.MakeStep</value>
</valuemap>
<value type="qlonglong" key="ProjectExplorer.BuildStepList.StepsCount">2</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.DefaultDisplayName">构建</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.DisplayName">构建</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">ProjectExplorer.BuildSteps.Build</value>
</valuemap>
<valuemap type="QVariantMap" key="ProjectExplorer.BuildConfiguration.BuildStepList.1">
<valuemap type="QVariantMap" key="ProjectExplorer.BuildStepList.Step.0">
<value type="bool" key="ProjectExplorer.BuildStep.Enabled">true</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">Qt4ProjectManager.MakeStep</value>
<value type="QString" key="Qt4ProjectManager.MakeStep.MakeArguments">clean</value>
</valuemap>
<value type="qlonglong" key="ProjectExplorer.BuildStepList.StepsCount">1</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.DefaultDisplayName">清除</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.DisplayName">清除</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">ProjectExplorer.BuildSteps.Clean</value>
</valuemap>
<value type="int" key="ProjectExplorer.BuildConfiguration.BuildStepListCount">2</value>
<value type="bool" key="ProjectExplorer.BuildConfiguration.ClearSystemEnvironment">false</value>
<valuelist type="QVariantList" key="ProjectExplorer.BuildConfiguration.CustomParsers"/>
<value type="bool" key="ProjectExplorer.BuildConfiguration.ParseStandardOutput">false</value>
<valuelist type="QVariantList" key="ProjectExplorer.BuildConfiguration.UserEnvironmentChanges"/>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.DisplayName">Profile</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">Qt4ProjectManager.Qt4BuildConfiguration</value>
<value type="int" key="Qt4ProjectManager.Qt4BuildConfiguration.BuildConfiguration">0</value>
<value type="int" key="QtQuickCompiler">0</value>
<value type="int" key="SeparateDebugInfo">0</value>
</valuemap>
<value type="qlonglong" key="ProjectExplorer.Target.BuildConfigurationCount">3</value>
<valuemap type="QVariantMap" key="ProjectExplorer.Target.DeployConfiguration.0">
<valuemap type="QVariantMap" key="ProjectExplorer.BuildConfiguration.BuildStepList.0">
<value type="qlonglong" key="ProjectExplorer.BuildStepList.StepsCount">0</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.DefaultDisplayName">部署</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.DisplayName">部署</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">ProjectExplorer.BuildSteps.Deploy</value>
</valuemap>
<value type="int" key="ProjectExplorer.BuildConfiguration.BuildStepListCount">1</value>
<valuemap type="QVariantMap" key="ProjectExplorer.DeployConfiguration.CustomData"/>
<value type="bool" key="ProjectExplorer.DeployConfiguration.CustomDataEnabled">false</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">ProjectExplorer.DefaultDeployConfiguration</value>
</valuemap>
<value type="qlonglong" key="ProjectExplorer.Target.DeployConfigurationCount">1</value>
<valuemap type="QVariantMap" key="ProjectExplorer.Target.RunConfiguration.0">
<value type="bool" key="Analyzer.Perf.Settings.UseGlobalSettings">true</value>
<value type="bool" key="Analyzer.QmlProfiler.Settings.UseGlobalSettings">true</value>
<value type="bool" key="Analyzer.Valgrind.Settings.UseGlobalSettings">true</value>
<valuelist type="QVariantList" key="CustomOutputParsers"/>
<value type="int" key="PE.EnvironmentAspect.Base">2</value>
<valuelist type="QVariantList" key="PE.EnvironmentAspect.Changes"/>
<value type="bool" key="PE.EnvironmentAspect.PrintOnRun">false</value>
<value type="QString" key="ProjectExplorer.ProjectConfiguration.Id">Qt4ProjectManager.Qt4RunConfiguration:G:/project/multimedia/client/rtmp_demo/yuvgl/yuvgl.pro</value>
<value type="QString" key="ProjectExplorer.RunConfiguration.BuildKey">G:/project/multimedia/client/rtmp_demo/yuvgl/yuvgl.pro</value>
<value type="bool" key="RunConfiguration.UseCppDebuggerAuto">true</value>
<value type="bool" key="RunConfiguration.UseLibrarySearchPath">true</value>
<value type="bool" key="RunConfiguration.UseQmlDebuggerAuto">true</value>
<value type="QString" key="RunConfiguration.WorkingDirectory.default">G:/project/multimedia/client/rtmp_demo/build-yuvgl-Desktop_Qt_5_15_2_MSVC2019_64bit-Debug</value>
</valuemap>
<value type="qlonglong" key="ProjectExplorer.Target.RunConfigurationCount">1</value>
</valuemap>
</data>
<data>
<variable>ProjectExplorer.Project.TargetCount</variable>
<value type="qlonglong">1</value>
</data>
<data>
<variable>ProjectExplorer.Project.Updater.FileVersion</variable>
<value type="int">22</value>
</data>
<data>
<variable>Version</variable>
<value type="int">22</value>
</data>
</qtcreator>

1048
yuvgl.pro.user.76c403c Normal file

File diff suppressed because it is too large Load Diff

1916
zlib.h Normal file

File diff suppressed because it is too large Load Diff