multimedia/client/qt_gl_/yuvgl/media/VideoCoder.cpp

#include "VideoCoder.h"
#include "Debuger.h"
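// Raw H.264 debug dump: opened in the constructor, written in Encode(),
// and closed in the destructor.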
static FILE *p = nullptr;
int VideoCoder::OnBuffer(double dblSampleTime, BYTE *pBuffer, long lBufferSize)
{
    this->Encode(pBuffer, lBufferSize, AV_PIX_FMT_YUV420P);
    return 0;
}
int VideoCoder::OnCameraData(uint8_t *dat, uint32_t size)
{
    //std::cout << "capture data and into coder" << std::endl;
    this->Encode(dat, size, AV_PIX_FMT_YUV420P);
    return 0;
}
int VideoCoder::SetDestPix(uint32_t width, uint32_t height)
{
    // uint32_t: real resolutions (e.g. 640x480) do not fit in a uint8_t;
    // the declaration in VideoCoder.h must match.
    this->mDestHeight = height;
    this->mDestWidth = width;
    return 0;
}
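// Constructor: sets up a libx264 H.264 encoder at a fixed 640x480 / 10 fps,
// tuned for low-latency streaming. Frames pushed in via OnBuffer()/OnCameraData()
// are encoded and handed to the registered observer.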
VideoCoder::VideoCoder(int width, int height, AVPixelFormat formt) :
    mObserver(nullptr),
    mFrame(nullptr),
    mPitureBuffer(nullptr),
    mFormatCtx(nullptr),
    mOutputFmt(nullptr),
    mVideoStream(nullptr),
    mCodecCtx(nullptr),
    mCodec(nullptr)
{
    av_register_all();  // must run before any codec/format lookup
    AVCodecID codec_id = AV_CODEC_ID_H264;
    mCodec = avcodec_find_encoder(codec_id);
    if (nullptr == p) {
        p = fopen("shit.h264", "wb");
    }
    this->mWidth = width;
    this->mHeight = height;
    this->mInformat = formt;
    if (!mCodec) {
        printf("Codec not found\n");
    }
    this->mFormatCtx = avformat_alloc_context();
    // Based on Lei Xiaohua's article: https://blog.csdn.net/leixiaohua1020/article/details/25430425
    this->mOutputFmt = av_guess_format(NULL, "shit.h264", NULL);
    this->mFormatCtx->oformat = mOutputFmt;
    mCodecCtx = avcodec_alloc_context3(mCodec);
    if (!mCodecCtx) {
        printf("Could not allocate video codec context\n");
    }
    mCodecCtx->bit_rate = 1000;
    this->mDestHeight = 480;
    this->mDestWidth = 640;
    mCodecCtx->width = this->mDestWidth;
    mCodecCtx->height = this->mDestHeight;
    mCodecCtx->time_base.num = 1;       // 10 fps
    mCodecCtx->time_base.den = 10;
    mCodecCtx->max_b_frames = 0;        // no B-frames keeps encode latency down
    mCodecCtx->qmin = 10;
    mCodecCtx->qmax = 25;
    //mCodecCtx->flags |= AV_CODEC_FLAG_LOW_DELAY;
    mCodecCtx->gop_size = 10;
    mCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
    // x264 private options: near-fastest preset, zero-latency tuning
    av_opt_set(mCodecCtx->priv_data, "preset", "superfast", 0);
    av_opt_set(mCodecCtx->priv_data, "tune", "zerolatency", 0);
    if (avcodec_open2(mCodecCtx, mCodec, NULL) < 0) {
        printf("Could not open codec\n");
    }
    mFrame = av_frame_alloc();
    if (!mFrame) {
        printf("Could not allocate video frame\n");
    }
    mFrame->format = mCodecCtx->pix_fmt;
    mFrame->width = mCodecCtx->width;    // frame dimensions must match the codec context
    mFrame->height = mCodecCtx->height;
    mFrame->pts = 0;
    int ret = av_image_alloc(mFrame->data, mFrame->linesize, mCodecCtx->width, mCodecCtx->height,
                             mCodecCtx->pix_fmt, 8);
    if (ret < 0) {
        printf("Could not allocate raw picture buffer\n");
    }
    // Assume the resolution never changes, so the AVPacket can be reused.
    avformat_write_header(mFormatCtx, NULL);
}
VideoCoder::~VideoCoder()
{
    if (nullptr != p) {
        fclose(p);
        p = nullptr;
    }
}
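// Encode() pipeline: pack the raw input into AVFrame planes, converting to
// YUV420P at the destination size via forceYUV420P() when the input format or
// size differs, then run avcodec_encode_video2() and hand any finished packet
// to the observer and the debug dump file.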
void VideoCoder::Encode(uint8_t *src, int size, enum AVPixelFormat format)
{
    uint8_t *pFrame[4];
    int lineSize[4];
    static int debugs = 1;
    int iFramesize;
    av_init_packet(&mAVPack);
    mAVPack.data = NULL;    // packet data will be allocated by the encoder
    int ret = av_image_alloc(pFrame, lineSize, mWidth, mHeight, AV_PIX_FMT_YUV420P, 1);
    if (ret < 0) {
        Debuger::Debug(L"Could not allocate destination image\n");
    }
    // Convert to YUV420P (and rescale) unless the input already matches.
    if (this->mInformat != AV_PIX_FMT_YUV420P || (this->mDestHeight != mHeight)) {
        int srcSize = avpicture_get_size(this->mInformat, mWidth, mHeight);
        this->forceYUV420P(src, srcSize, mInformat, pFrame, &iFramesize);
        // Only YUV420P is supported downstream.
        mFrame->data[0] = pFrame[0];    // Y
        mFrame->data[1] = pFrame[1];    // U
        mFrame->data[2] = pFrame[2];    // V
    }
    else {
        mFrame->data[0] = src;                                            // Y
        mFrame->data[1] = src + mWidth * mHeight;                         // U
        mFrame->data[2] = src + mWidth * mHeight + mWidth * mHeight / 4;  // V
    }
    // PTS
    mFrame->pts++;
    int got_picture = 0;
    // Encode
    avcodec_encode_video2(mCodecCtx, &mAVPack, mFrame, &got_picture);
    if (got_picture > 0) {
        if (nullptr != this->mObserver)
            this->mObserver->OnGetCodeFrame(mAVPack.data, mAVPack.size);
    }
    //Debuger::Debug(L"Succeed to encode frame: %5d\tsize:%5d\n", 1, mAVPack.size);
    fwrite(mAVPack.data, 1, mAVPack.size, p);
    fflush(p);
    // Release the packet so data does not pile up in the coder.
    av_packet_unref(&mAVPack);
    av_freep(&pFrame[0]);   // av_freep also nulls the pointer; a second free would be a bug
}
void VideoCoder::SetOutPutPixel(unsigned int width, unsigned int height)
{
    this->mHeight = height;
    this->mWidth = width;
}
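// flushCoder() drains frames still buffered inside the encoder at end of
// stream, following the flush loop from Lei Xiaohua's demo: keep calling
// avcodec_encode_video2() with a NULL frame until got_frame comes back 0.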
int VideoCoder::flushCoder(AVFormatContext *fmt_ctx, unsigned int stream_index)
{
    int ret;
    int got_frame;
    AVPacket enc_pkt;
    // Only encoders with AV_CODEC_CAP_DELAY buffer frames and need flushing.
    if (!(this->mFormatCtx->streams[stream_index]->codec->codec->capabilities & AV_CODEC_CAP_DELAY))
        return 0;
    while (1) {
        enc_pkt.data = NULL;
        enc_pkt.size = 0;
        av_init_packet(&enc_pkt);
        ret = avcodec_encode_video2(fmt_ctx->streams[stream_index]->codec, &enc_pkt,
                                    NULL, &got_frame);
        if (ret < 0)
            break;
        if (!got_frame) {
            ret = 0;
            break;
        }
        Debuger::Debug(L"Flush Encoder: Succeed to encode 1 frame!\tsize:%5d\n", enc_pkt.size);
        /* mux encoded frame */
        ret = av_write_frame(fmt_ctx, &enc_pkt);
        if (ret < 0)
            break;
    }
    return ret;
}
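// YUV420P layout note: a WxH frame stores a full-resolution Y plane followed
// by quarter-size U and V planes, W*H*3/2 bytes in total; the plane offsets
// in the switch below follow from that layout (422 and 444 are analogous).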
// Force-convert other input pixel formats into YUV420P data that libav can consume.
int VideoCoder::forceYUV420P(uint8_t *src, int size,
                             AVPixelFormat format, uint8_t *dst[4], int *len)
{
    uint8_t *src_data[4];
    int src_linesize[4];
    uint8_t *dst_data[4];
    int dst_linesize[4];
    struct SwsContext *img_convert_ctx;
    int ret = 0;
    if (nullptr == dst || nullptr == len) {
        return -2;
    }
    int src_bpp = av_get_bits_per_pixel(av_pix_fmt_desc_get(format));
    AVPixelFormat dst_pixfmt = AV_PIX_FMT_YUV420P;
    int dst_bpp = av_get_bits_per_pixel(av_pix_fmt_desc_get(dst_pixfmt));
    ret = av_image_alloc(src_data, src_linesize, mWidth, mHeight, format, 1);
    if (ret < 0) {
        Debuger::Debug(L"Could not allocate source image\n");
        return -1;
    }
    ret = av_image_alloc(dst_data, dst_linesize, mDestWidth, mDestHeight, AV_PIX_FMT_YUV420P, 1);
    if (ret < 0) {
        Debuger::Debug(L"Could not allocate destination image\n");
        av_freep(&src_data[0]);
        return -1;
    }
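    // sws_alloc_context() + av_opt_set_int() + sws_init_context() is the
    // explicit form of sws_getContext(); it additionally exposes the
    // src_range/dst_range options used below for full-range YUV.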
    img_convert_ctx = sws_alloc_context();
    //Show AVOption
    //av_opt_show2(img_convert_ctx, stdout, AV_OPT_FLAG_VIDEO_PARAM, 0);
    //Set Value
    av_opt_set_int(img_convert_ctx, "sws_flags", SWS_BICUBIC | SWS_PRINT_INFO, 0);
    av_opt_set_int(img_convert_ctx, "srcw", mWidth, 0);
    av_opt_set_int(img_convert_ctx, "srch", mHeight, 0);
    av_opt_set_int(img_convert_ctx, "src_format", format, 0);
    av_opt_set_int(img_convert_ctx, "src_range", 1, 0);
    av_opt_set_int(img_convert_ctx, "dstw", mDestWidth, 0);
    av_opt_set_int(img_convert_ctx, "dsth", mDestHeight, 0);
    av_opt_set_int(img_convert_ctx, "dst_format", dst_pixfmt, 0);
    av_opt_set_int(img_convert_ctx, "dst_range", 1, 0);
    sws_init_context(img_convert_ctx, NULL, NULL);
    // Copy the source planes into the sws input buffers.
    switch (format) {
    case AV_PIX_FMT_GRAY8: {
        memcpy(src_data[0], src, mWidth * mHeight);
        break;
    }
    case AV_PIX_FMT_YUV420P: {
        memcpy(src_data[0], src, mWidth * mHeight);                                 // Y
        memcpy(src_data[1], src + mWidth * mHeight, mWidth * mHeight / 4);          // U
        memcpy(src_data[2], src + mWidth * mHeight * 5 / 4, mWidth * mHeight / 4);  // V
        break;
    }
    case AV_PIX_FMT_YUV422P: {
        memcpy(src_data[0], src, mWidth * mHeight);                                 // Y
        memcpy(src_data[1], src + mWidth * mHeight, mWidth * mHeight / 2);          // U
        memcpy(src_data[2], src + mWidth * mHeight * 3 / 2, mWidth * mHeight / 2);  // V
        break;
    }
    case AV_PIX_FMT_YUV444P: {
        memcpy(src_data[0], src, mWidth * mHeight);                         // Y
        memcpy(src_data[1], src + mWidth * mHeight, mWidth * mHeight);      // U
        memcpy(src_data[2], src + mWidth * mHeight * 2, mWidth * mHeight);  // V
        break;
    }
    case AV_PIX_FMT_YUYV422: {
        memcpy(src_data[0], src, mWidth * mHeight * 2);  // packed
        break;
    }
    case AV_PIX_FMT_RGB24: {
        memcpy(src_data[0], src, mWidth * mHeight * 3);  // packed
        break;
    }
    case AV_PIX_FMT_RGB32: {
        memcpy(src_data[0], src, mWidth * mHeight * 4);  // packed
        break;
    }
    default: {
        Debuger::Debug(L"Not Support Input Pixel Format.\n");
        break;
    }
    }
    // Run the conversion.
    ret = sws_scale(img_convert_ctx, src_data, src_linesize, 0, mHeight, dst_data, dst_linesize);
    if (ret < 0) {
        av_freep(&src_data[0]);
        av_freep(&dst_data[0]);
        sws_freeContext(img_convert_ctx);
        return ret;
    }
    memcpy(dst[0], dst_data[0], mDestWidth * mDestHeight);      // Y
    memcpy(dst[1], dst_data[1], mDestWidth * mDestHeight / 4);  // U
    memcpy(dst[2], dst_data[2], mDestWidth * mDestHeight / 4);  // V
    *len = mDestWidth * mDestHeight + mDestWidth * mDestHeight / 2;
    // The source copy is no longer needed here; dst is owned and freed by the caller.
    av_freep(&src_data[0]);
    av_freep(&dst_data[0]);
    sws_freeContext(img_convert_ctx);
    return 0;
}
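/* Usage sketch, assuming the observer interface whose OnGetCodeFrame() is
 * invoked from Encode() above; the SetObserver() name is hypothetical and
 * stands in for however VideoCoder.h actually wires the observer up:
 *
 *   VideoCoder coder(640, 480, AV_PIX_FMT_YUV420P);
 *   coder.SetObserver(&myObserver);           // myObserver receives H.264 NALs
 *   coder.OnCameraData(yuvBuffer, yuvSize);   // push one raw YUV420P frame
 */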