A runnable version; the 32-bit libraries are now matched up

master
a7458969 2020-05-16 03:30:11 +08:00
parent a5e4ba8acb
commit 587a642bbb
2 changed files with 3 additions and 364 deletions


@@ -1,328 +0,0 @@
#include "SdlPlayer.h"
#include "Debuger.h"
uint32_t FfmpegPixFormatToSdl(AVPixelFormat av) {
switch (av) {
case AV_PIX_FMT_YUYV422:
return SDL_PIXELFORMAT_YUY2;
default:
// fall back to an explicit value so every path returns something
return SDL_PIXELFORMAT_UNKNOWN;
}
}
SDLPlayser::SDLPlayser(HWND hwnd,int width,int height, AVPixelFormat fmt):
mScreen(nullptr),
mRender(nullptr)
{
if (nullptr == hwnd) {
// no fallback window is created for a null hwnd; SDL_CreateWindowFrom below will simply fail
}
// SDL part
if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
printf("Could not initialize SDL - %s\n", SDL_GetError());
}
this->mInWidth = width;
this->mInHeight = height;
screen_w = 640;
screen_h = 480;
//SDL 2.0 Support for multiple windows
/*
screen = SDL_CreateWindow("Simplest ffmpeg player's Window", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
screen_w, screen_h,
SDL_WINDOW_OPENGL);*/
if (nullptr == mScreen) {
mScreen = SDL_CreateWindowFrom((void *)hwnd);
if (nullptr == mScreen) {
printf("SDL: could not create window - exiting:%s\n", SDL_GetError());
}
}
mRender = SDL_CreateRenderer(mScreen, -1, 0);
SDL_GetWindowSize(mScreen, &screen_w, &screen_h);
Debuger::Debug(L"pix width %d height %d\r\n",screen_w,screen_h);
//IYUV: Y + U + V (3 planes)
//YV12: Y + V + U (3 planes)
mTexture = SDL_CreateTexture(mRender, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING,
screen_w, screen_h);
this->mFormat = fmt;
}
SDLPlayser::~SDLPlayser()
{
// tear down in reverse creation order and release the streaming texture as well
SDL_DestroyTexture(mTexture);
SDL_DestroyRenderer(mRender);
SDL_DestroyWindow(mScreen);
}
// Force-convert buffers arriving in other pixel formats into YUV420P data that libav/SDL can consume
int forceYUV420PBuffer(uint8_t * src, int size,int inWidth,int inHeight,
AVPixelFormat format, uint8_t **dst[4], int *len,int mWidth,int mHeight)
{
uint8_t *src_data[4];
int src_linesize[4];
uint8_t *dst_data[4];
int dst_linesize[4];
struct SwsContext *img_convert_ctx;
int ret = 0;
if (nullptr == dst || nullptr == len) {
return -2;
}
int src_bpp = av_get_bits_per_pixel(av_pix_fmt_desc_get(format));
AVPixelFormat dst_pixfmt = AV_PIX_FMT_YUV420P;
int dst_bpp = av_get_bits_per_pixel(av_pix_fmt_desc_get(dst_pixfmt));
ret = av_image_alloc(src_data, src_linesize, inWidth, inHeight, format, 1);
if (ret< 0) {
Debuger::Debug(L"Could not allocate source image\n");
return -1;
}
ret = av_image_alloc(dst_data, dst_linesize, mWidth, mHeight, AV_PIX_FMT_YUV420P, 1);
if (ret < 0) {
Debuger::Debug(L"Could not allocate destination image\n");
av_freep(&src_data[0]);
return -1;
}
img_convert_ctx = sws_alloc_context();
//Show AVOption
//av_opt_show2(img_convert_ctx, stdout, AV_OPT_FLAG_VIDEO_PARAM, 0);
//Set Value
av_opt_set_int(img_convert_ctx, "sws_flags", SWS_BICUBIC | SWS_PRINT_INFO, 0);
av_opt_set_int(img_convert_ctx, "srcw", inWidth, 0);
av_opt_set_int(img_convert_ctx, "srch", inHeight, 0);
av_opt_set_int(img_convert_ctx, "src_format", format, 0);
//'0' for MPEG (Y:0-235);'1' for JPEG (Y:0-255)
av_opt_set_int(img_convert_ctx, "src_range", 1, 0);
av_opt_set_int(img_convert_ctx, "dstw", mWidth, 0);
av_opt_set_int(img_convert_ctx, "dsth", mHeight, 0);
av_opt_set_int(img_convert_ctx, "dst_format", dst_pixfmt, 0);
av_opt_set_int(img_convert_ctx, "dst_range", 1, 0);
sws_init_context(img_convert_ctx, NULL, NULL);
// fill the source planes according to the input pixel format
switch (format) {
case AV_PIX_FMT_GRAY8: {
memcpy(src_data[0], src, inWidth*inHeight);
break;
}
case AV_PIX_FMT_YUV420P: {
memcpy(src_data[0], src, inWidth*inHeight); //Y
memcpy(src_data[1], src + inWidth*inHeight, inWidth*inHeight / 4); //U
memcpy(src_data[2], src + inWidth*inHeight * 5 / 4, inWidth*inHeight / 4); //V
break;
}
case AV_PIX_FMT_YUV422P: {
memcpy(src_data[0], src, inWidth*inHeight); //Y
memcpy(src_data[1], src + inWidth*inHeight, inWidth*inHeight / 2); //U
memcpy(src_data[2], src + inWidth*inHeight * 3 / 2, inWidth*inHeight / 2); //V
break;
}
case AV_PIX_FMT_YUV444P: {
memcpy(src_data[0], src, inWidth*inHeight); //Y
memcpy(src_data[1], src + inWidth*inHeight, inWidth*inHeight); //U
memcpy(src_data[2], src + inWidth*inHeight * 2, inWidth*inHeight); //V
break;
}
case AV_PIX_FMT_YUYV422: {
memcpy(src_data[0], src, inWidth*inHeight * 2); //Packed
break;
}
case AV_PIX_FMT_RGB24: {
memcpy(src_data[0], src, inWidth*inHeight * 3); //Packed
break;
}
case AV_PIX_FMT_RGB32: {
memcpy(src_data[0], src, inWidth*inHeight * 4); //Packed
break;
}
default: {
Debuger::Debug(L"Unsupported input pixel format.\n");
av_freep(&src_data[0]);
av_freep(&dst_data[0]);
sws_freeContext(img_convert_ctx);
return -3;
}
}
// Convert to YUV420P. (A vertical-flip branch used to sit here behind
// if (FALSE); it could never run, so only the straight conversion remains.)
ret = sws_scale(img_convert_ctx, src_data, src_linesize,
0, inHeight, dst_data, dst_linesize);
if (ret < 0) {
av_freep(&src_data[0]);
av_freep(&dst_data[0]);
sws_freeContext(img_convert_ctx);
return ret;
}
memcpy(dst[0], dst_data[0], mWidth*mHeight);
memcpy(dst[1], dst_data[1], mWidth*mHeight / 4);
memcpy(dst[2], dst_data[2], mWidth*mHeight / 4);
*len = mWidth*mHeight + mWidth*mHeight / 2;
// the source buffers are no longer needed at this point, but dst has to be freed by the caller
av_freep(&src_data[0]);
av_freep(&dst_data[0]);
sws_freeContext(img_convert_ctx);
return 0;
}
int SDLPlayser::RenderYuv(void * pBuf, uint32_t size, AVPixelFormat pix)
{
uint8_t *pFrame[4];
int iFramesize;
int lineSize[4];
static FILE *pFile = nullptr;
if (nullptr == pFile) {
pFile = fopen("test.yuv", "wb");
}
int ret = av_image_alloc(pFrame, lineSize, 640, 480, AV_PIX_FMT_YUV420P, 1);
if (ret < 0) {
Debuger::Debug(L"Could not allocate destination image\n");
return -1;
}
forceYUV420PBuffer((uint8_t *)pBuf, size,this->mInWidth,this->mInHeight, this->mFormat
,(uint8_t ***)&pFrame, &iFramesize, 640, 480);
sdlRect.x = 0;
sdlRect.y = 0;
sdlRect.w = screen_w;
sdlRect.h = screen_h;
// the target rect has to be filled in before it is handed to SDL_UpdateYUVTexture
SDL_UpdateYUVTexture(mTexture, &sdlRect,
pFrame[0], lineSize[0],
pFrame[1], lineSize[1],
pFrame[2], lineSize[2]);
//SDL_UpdateTexture(sdlTexture, NULL, picture->data[0], 1280);
/*
SDL_UpdateYUVTexture(sdlTexture, &sdlRect,
picture->data[0], picture->linesize[0],
picture->data[1], picture->linesize[1],
picture->data[2], picture->linesize[2]);
*/
SDL_RenderClear(mRender);
//fwrite(pFrame[0], lineSize[0]*480,1 ,pFile);
//fwrite(pFrame[1], lineSize[1] * 480 ,1, pFile);
//fwrite(pFrame[2], lineSize[2] * 480 ,1, pFile);
fflush(pFile);
SDL_RenderCopy(mRender, mTexture, nullptr, &sdlRect);
SDL_RenderPresent(mRender);
av_freep(&pFrame[0]);
//Debuger::Debug(L"local screen size is %d %d\r\n", screen_w, screen_h);
return 0;
}
int forceYUV420P(AVFrame * picture,uint8_t **dst[4], int *len,int width,int height)
{
uint8_t *dst_data[4];
int dst_linesize[4];
struct SwsContext *img_convert_ctx;
int ret = 0;
if (nullptr == dst || nullptr == len) {
return -2;
}
AVPixelFormat dst_pixfmt = AV_PIX_FMT_YUV420P;
int dst_bpp = av_get_bits_per_pixel(av_pix_fmt_desc_get(dst_pixfmt));
ret = av_image_alloc(dst_data, dst_linesize, width, height, AV_PIX_FMT_YUV420P, 1);
if (ret< 0) {
Debuger::Debug(L"Could not allocate destination image\n");
return -1;
}
img_convert_ctx = sws_alloc_context();
//Show AVOption
//av_opt_show2(img_convert_ctx, stdout, AV_OPT_FLAG_VIDEO_PARAM, 0);
//Set Value
av_opt_set_int(img_convert_ctx, "sws_flags", SWS_BICUBIC | SWS_PRINT_INFO, 0);
av_opt_set_int(img_convert_ctx, "srcw", picture->width, 0);
av_opt_set_int(img_convert_ctx, "srch", picture->height, 0);
av_opt_set_int(img_convert_ctx, "src_format", dst_pixfmt, 0); // input is assumed to already be YUV420P; only the resolution changes
//'0' for MPEG (Y:0-235);'1' for JPEG (Y:0-255)
av_opt_set_int(img_convert_ctx, "src_range", 1, 0);
av_opt_set_int(img_convert_ctx, "dstw", width, 0);
av_opt_set_int(img_convert_ctx, "dsth", height, 0);
av_opt_set_int(img_convert_ctx, "dst_format", dst_pixfmt, 0);
av_opt_set_int(img_convert_ctx, "dst_range", 1, 0);
sws_init_context(img_convert_ctx, NULL, NULL);
// convert the frame data into the destination YUV420P buffers
ret = sws_scale(img_convert_ctx, picture->data, picture->linesize, 0, picture->height,
dst_data, dst_linesize);
if (ret < 0) {
av_freep(&dst_data[0]);
sws_freeContext(img_convert_ctx);
return ret;
}
memcpy(dst[0], dst_data[0], width*height);
memcpy(dst[1], dst_data[1], width*height / 4);
memcpy(dst[2], dst_data[2], width*height / 4);
// the temporary buffers are no longer needed here, but dst is owned and freed by the caller
av_freep(&dst_data[0]);
sws_freeContext(img_convert_ctx);
return 0;
}
// Handles the case where the input frame is already YUV420P; it is only rescaled to the output size here
int SDLPlayser::OnRecieveData(AVFrame * picture)
{
uint8_t *pFrame[4];
int iFramesize;
int lineSize[4];
int ret = av_image_alloc(pFrame, lineSize, 640, 480, AV_PIX_FMT_YUV420P, 1);
if (ret < 0) {
Debuger::Debug(L"Could not allocate destination image\n");
return -1;
}
forceYUV420P(picture, (uint8_t ***)&pFrame, &iFramesize, 640, 480);
uint32_t pitchY = picture->linesize[0];
uint32_t pitchU = picture->linesize[1];
uint32_t pitchV = picture->linesize[2];
uint8_t *avY = picture->data[0];
uint8_t *avU = picture->data[1];
uint8_t *avV = picture->data[2];
sdlRect.x = 0;
sdlRect.y = 0;
sdlRect.w = screen_w;
sdlRect.h = screen_h;
// the target rect has to be filled in before it is handed to SDL_UpdateYUVTexture
SDL_UpdateYUVTexture(mTexture, &sdlRect,
pFrame[0], lineSize[0],
pFrame[1], lineSize[1],
pFrame[2], lineSize[2]);
//SDL_UpdateTexture(sdlTexture, NULL, picture->data[0], 1280);
/*
SDL_UpdateYUVTexture(sdlTexture, &sdlRect,
picture->data[0], picture->linesize[0],
picture->data[1], picture->linesize[1],
picture->data[2], picture->linesize[2]);
*/
SDL_RenderClear(mRender);
SDL_RenderCopy(mRender, mTexture, nullptr, &sdlRect);
SDL_RenderPresent(mRender);
av_freep(&pFrame[0]);
//Debuger::Debug(L"remote screen size is %d %d\r\n", screen_w, screen_h);
return 0;
}
int SDLPlayser::OnBuffer(double dblSampleTime, BYTE * pBuffer, long lBufferSize)
{
this->RenderYuv(pBuffer, lBufferSize, this->mFormat);
return 0;
}
int SDLPlayser::OnCameraData(uint8_t * dat, uint32_t size)
{
this->RenderYuv(dat, size, AV_PIX_FMT_YUYV422);
return 0;
}
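
// For reference, a minimal, hypothetical usage sketch of the SDLPlayser class removed by
// this commit. run_preview, previewHwnd and the frame buffer are assumptions made for
// illustration only (e.g. a Qt widget handle and one packed 640x480 YUYV422 buffer of
// 640*480*2 bytes); they are not code from this repository.
#include "SdlPlayer.h"
int run_preview(HWND previewHwnd, uint8_t *frame, uint32_t frameSize)
{
SDLPlayser player(previewHwnd, 640, 480, AV_PIX_FMT_YUYV422); // same 640x480 defaults hard-coded in RenderYuv above
return player.OnCameraData(frame, frameSize); // converts to YUV420P via forceYUV420PBuffer and renders with SDL
}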


@@ -7,6 +7,7 @@
 QT += core gui
 greaterThan(QT_MAJOR_VERSION, 4): QT += widgets
+QT += network
 TARGET = yuvgl
 TEMPLATE = app
@@ -29,15 +30,6 @@ SOURCES += \
 mainwindow.cpp \
 cplaywidget.cpp \
 media/AACAudioCoder.cpp \
-media/AACDecoder.cpp \
-media/AudioCapture.cpp \
-media/AudioPlayer.cpp \
-media/CameraCapture.cpp \
-media/H264Docoder.cpp \
-media/ImageUtil.cpp \
-media/SdlPlayer.cpp \
-media/VideoCoder.cpp \
-media/sps_decode.cpp \
 utils/Base64.cpp \
 utils/Debuger.cpp \
 utils/utils.cpp
@@ -54,35 +46,10 @@ FORMS += \
 INCLUDEPATH += media/ third/ffmpeg/include/ inc/ third/
 message($$PWD/third/libs/)
 LIBS += -L$$PWD/third/libs/
+LIBS += -lws2_32
 LIBS += -lm
-LIBS += -lgcc
+LIBS += -lavformat -lavdevice -lavcodec -lavutil -lswresample -lpthread -lbz2 -lm -lfdk-aac -lx264 -lOle32 -lbcrypt -liconv -lucrtbase
-win32: LIBS += -llibavcodec
-win32: LIBS += -llibrtmp
-win32: LIBS += -llibswscale
-win32: LIBS += -llibswresample
-win32: LIBS += -llibavresample
-win32: LIBS += -llibavformat
-win32: LIBS += -llibavdevice
-win32: LIBS += -llibavfilter
-win32: LIBS += -llibavutil
-win32: LIBS += -llibpostproc
-win32: LIBS += -lSecur32
-win32: LIBS += -lSDL2
-win32: LIBS += -lSDL2main
-win32: LIBS += -lportaudio_x86
-win32: LIBS += -llibx264
-win32: LIBS += -lOle32
-win32: LIBS += -lstrmiids
-win32: LIBS += -lquartz
-win32: LIBS += -lSDL2
-win32: LIBS += -lSDL2main
-win32: LIBS += -lOleAut32
-win32: LIBS += $$PWD/third/libs/chkstk.obj
-win32: LIBS += -lbufferoverflowU
-win32: LIBS += -lbufferoverflow
-win32: LIBS += -lmsvcrt
-win32: LIBS += -llibfdk-aac
 # Default rules for deployment.
 qnx: target.path = /tmp/$${TARGET}/bin