// qt_rtmp_demo/media/screen_capture.cpp
#include "screen_capture.h"
#include <conio.h>
#include <stdio.h>
#include <QDebug>
#include <QString>
#include <iostream>
#include <cassert> // crop_frame() below uses assert()
#if _MSC_VER >= 1600
#pragma execution_character_set("utf-8")
#endif
#define WIDEN2(x) L ## x
#define WIDEN(x) WIDEN2(x)
#define __WFILE__ WIDEN(__FILE__)
#define HRCHECK(__expr) {hr=(__expr);if(FAILED(hr)){wprintf(L"FAILURE 0x%08X (%i)\n\tline: %u file: '%s'\n\texpr: '" WIDEN(#__expr) L"'\n",hr, hr, __LINE__,__WFILE__);goto cleanup;}}
#define RELEASE(__p) {if(__p!=nullptr){__p->Release();__p=nullptr;}}
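// HRCHECK runs a COM call, logs the failing expression with file/line on
// error, and jumps to the local `cleanup` label; RELEASE is a null-safe
// COM Release() helper.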
// Reference for enumerating D3D9 display modes:
// https://www.gamedev.net/forums/topic/132636-enum-displaymode--dx9--false/
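// Write a 32bpp premultiplied-BGRA pixel buffer to an image file using the
// Windows Imaging Component (WIC) encoder selected by `format` (e.g. PNG).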
HRESULT SavePixelsToFile32bppPBGRA(UINT width, UINT height, UINT stride,
LPBYTE pixels, LPWSTR filePath, const GUID &format)
{
if (!filePath || !pixels)
return E_INVALIDARG;
HRESULT hr = S_OK;
IWICImagingFactory *factory = nullptr;
IWICBitmapEncoder *encoder = nullptr;
IWICBitmapFrameEncode *frame = nullptr;
IWICStream *stream = nullptr;
GUID pf = GUID_WICPixelFormat32bppPBGRA;
HRESULT coInit = CoInitialize(nullptr); // S_FALSE means COM was already initialized
HRCHECK(CoCreateInstance(CLSID_WICImagingFactory, nullptr, CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&factory)));
HRCHECK(factory->CreateStream(&stream));
HRCHECK(stream->InitializeFromFilename(filePath, GENERIC_WRITE));
HRCHECK(factory->CreateEncoder(format, nullptr, &encoder));
HRCHECK(encoder->Initialize(stream, WICBitmapEncoderNoCache));
HRCHECK(encoder->CreateNewFrame(&frame, nullptr)); // we don't use options here
HRCHECK(frame->Initialize(nullptr)); // we don't use any options here
HRCHECK(frame->SetSize(width, height));
HRCHECK(frame->SetPixelFormat(&pf));
HRCHECK(frame->WritePixels(height, stride, stride * height, pixels));
HRCHECK(frame->Commit());
HRCHECK(encoder->Commit());
cleanup:
RELEASE(stream);
RELEASE(frame);
RELEASE(encoder);
RELEASE(factory);
if (SUCCEEDED(coInit)) CoUninitialize();
return hr;
}
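// Capture `count` consecutive front-buffer frames from the given adapter into
// system-memory surfaces, then save each one as capN.png through WIC.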
HRESULT Direct3D9TakeScreenshots(UINT adapter, UINT count) {
HRESULT hr = S_OK;
IDirect3D9 *d3d = nullptr;
IDirect3DDevice9 *device = nullptr;
IDirect3DSurface9 *surface = nullptr;
D3DPRESENT_PARAMETERS parameters = { 0 };
D3DDISPLAYMODE mode;
D3DLOCKED_RECT rc;
UINT pitch;
SYSTEMTIME st;
LPBYTE *shots = nullptr;
// init D3D and get screen size
d3d = Direct3DCreate9(D3D_SDK_VERSION);
HRCHECK(d3d->GetAdapterDisplayMode(adapter, &mode));
parameters.Windowed = TRUE;
parameters.BackBufferCount = 1;
parameters.BackBufferHeight = mode.Height;
parameters.BackBufferWidth = mode.Width;
parameters.SwapEffect = D3DSWAPEFFECT_DISCARD;
parameters.hDeviceWindow = NULL;
// create device & capture surface
HRCHECK(d3d->CreateDevice(adapter, D3DDEVTYPE_HAL, NULL, D3DCREATE_SOFTWARE_VERTEXPROCESSING, &parameters, &device));
HRCHECK(device->CreateOffscreenPlainSurface(mode.Width, mode.Height, D3DFMT_A8R8G8B8, D3DPOOL_SYSTEMMEM, &surface, nullptr));
// compute the required buffer size
HRCHECK(surface->LockRect(&rc, NULL, 0));
pitch = rc.Pitch;
HRCHECK(surface->UnlockRect());
// allocate screenshots buffers
shots = new LPBYTE[count];
for (UINT i = 0; i < count; i++)
{
shots[i] = new BYTE[pitch * mode.Height];
}
GetSystemTime(&st); // measure the time we spend doing <count> captures
wprintf(L"%i:%i:%i.%i\n", st.wHour, st.wMinute, st.wSecond, st.wMilliseconds);
for (UINT i = 0; i < count; i++)
{
// get the data
HRCHECK(device->GetFrontBufferData(0, surface));
// copy it into our buffers
HRCHECK(surface->LockRect(&rc, NULL, 0));
CopyMemory(shots[i], rc.pBits, rc.Pitch * mode.Height);
HRCHECK(surface->UnlockRect());
}
GetSystemTime(&st);
wprintf(L"%i:%i:%i.%i\n", st.wHour, st.wMinute, st.wSecond, st.wMilliseconds);
// save all screenshots
for (UINT i = 0; i < count; i++)
{
WCHAR file[100];
wsprintf(file, L"cap%i.png", i);
HRCHECK(SavePixelsToFile32bppPBGRA(mode.Width, mode.Height, pitch, shots[i], file, GUID_ContainerFormatPng));
}
cleanup:
if (shots != nullptr)
{
for (UINT i = 0; i < count; i++)
{
delete[] shots[i]; // allocated with new[], so delete[] is required
}
delete[] shots;
}
RELEASE(surface);
RELEASE(device);
RELEASE(d3d);
return hr;
}
ScreenCapture::ScreenCapture()
:mObserver(nullptr)
{
m_d3d9_dev = ::Direct3DCreate9(D3D_SDK_VERSION);
}
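// EnumDisplayMonitors callback: logs each monitor's device name, full
// rectangle and work-area rectangle, and marks the primary monitor.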
BOOL CALLBACK MonitorEnumProc(HMONITOR hMonitor,HDC hdcMonitor,
LPRECT lprcMonitor,LPARAM dwData)
{
MONITORINFOEX mi;
mi.cbSize=sizeof(MONITORINFOEX);
GetMonitorInfo(hMonitor,&mi);
qDebug()<<QString::asprintf("Device name:%s\t",mi.szDevice);
if(mi.dwFlags & MONITORINFOF_PRIMARY) printf("Primary monitor!\n");
else printf("\n");
qDebug()<<QString::asprintf("Monitor rectangle:(%d,%d,%d,%d)\n",mi.rcMonitor.left,mi.rcMonitor.top,
mi.rcMonitor.right,mi.rcMonitor.bottom);
qDebug()<<QString::asprintf("Work rectangle:(%d,%d,%d,%d)\n",mi.rcWork.left,mi.rcWork.top,
mi.rcWork.right,mi.rcWork.bottom);
return true;
}
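// Enumerate the display adapters known to D3D9 and log their device names;
// the commented-out EnumDisplayMonitors call is the pure-GDI alternative.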
void ScreenCapture::EnumScreen()
{
// EnumDisplayMonitors(NULL,NULL,MonitorEnumProc,NULL);
if(m_d3d9_dev == NULL)
{
return;
}
D3DADAPTER_IDENTIFIER9 adapterID; // Used to store device info
DWORD dwDisplayCount = m_d3d9_dev->GetAdapterCount();
for(DWORD i = 0; i < dwDisplayCount; i++) {
if( m_d3d9_dev->GetAdapterIdentifier( i/*D3DADAPTER_DEFAULT*/, 0,&adapterID ) != D3D_OK) {
return;
}
qDebug()<<adapterID.DeviceName;
}
}
void ScreenCapture::SetObserver(CaptureVideoObserver *ob)
{
this->mObserver = ob;
}
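// Open the Windows desktop as an FFmpeg "gdigrab" input, open a decoder for
// it, prepare an H.264 encoder, a sws YUV420P conversion context and an
// MPEG-TS muxer, and allocate the frames/packets used by Process().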
int ScreenCapture::InitCap() {
avdevice_register_all();
avcodec_register_all();
const char* deviceName = "desktop";
const char* inputformat = "gdigrab";
int FPS = 23; //15
m_fmt_ctx = avformat_alloc_context();
m_input_fmt = av_find_input_format(inputformat);
AVDictionary* deoptions = NULL;
AVDictionary* dic = NULL;
av_dict_set_int(&deoptions, "framerate", FPS, AV_DICT_MATCH_CASE);
av_dict_set_int(&deoptions, "rtbufsize", 3041280 * 100 * 5, 0);
// rtbufsize: real-time buffer size in bytes; if it is too small, frames from
// a live source are dropped or corrupted.
//av_dict_set(&deoptions, "buffer_size", "10485760", 0);
//av_dict_set(&deoptions, "reuse", "1", 0);
int ret = avformat_open_input(&m_fmt_ctx, deviceName, m_input_fmt, &deoptions);
if (ret != 0) {
return ret;
}
av_dict_free(&deoptions);
ret = avformat_find_stream_info(m_fmt_ctx, NULL);
if (ret < 0) {
return ret;
}
av_dump_format(m_fmt_ctx, 0, deviceName, 0);
video_stream = av_find_best_stream(m_fmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
if (video_stream < 0) {
return -1;
}
_codec_ctx = m_fmt_ctx->streams[video_stream]->codec;
_codec = avcodec_find_decoder(_codec_ctx->codec_id);
if (_codec == NULL) {
return -1;
}
ret = avcodec_open2(_codec_ctx, _codec, NULL);
if (ret != 0) {
return -1;
}
width = _codec_ctx->width;
height = _codec_ctx->height;
int fps = _codec_ctx->framerate.num > 0 ? _codec_ctx->framerate.num : 25;
AVPixelFormat videoType = _codec_ctx->pix_fmt;
std::cout << "avstream timebase : " << m_fmt_ctx->streams[video_stream]->time_base.num << " / " << m_fmt_ctx->streams[video_stream]->time_base.den << std::endl;
AVDictionary* enoptions = 0;
//av_dict_set(&enoptions, "preset", "superfast", 0);
//av_dict_set(&enoptions, "tune", "zerolatency", 0);
av_dict_set(&enoptions, "preset", "ultrafast", 0);
av_dict_set(&enoptions, "tune", "zerolatency", 0);
//TODO
//av_dict_set(&enoptions, "pkt_size", "1316", 0); //Maximum UDP packet size
av_dict_set(&dic, "fifo_size", "18800", 0);
av_dict_set(&enoptions, "buffer_size", "0", 1);
av_dict_set(&dic, "bitrate", "11000000", 0);
av_dict_set(&dic, "buffer_size", "1000000", 0); // note: `dic` is built here but never passed to a muxer/protocol
//av_dict_set(&enoptions, "reuse", "1", 0);
AVCodec* codec = avcodec_find_encoder(AV_CODEC_ID_H264);
if (!codec)
{
std::cout << "avcodec_find_encoder failed!" << std::endl;
return -1;
}
vc = avcodec_alloc_context3(codec);
if (!vc)
{
std::cout << "avcodec_alloc_context3 failed!" << std::endl;
return -1;
}
std::cout << "avcodec_alloc_context3 success!" << std::endl;
vc->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
vc->codec_id = AV_CODEC_ID_H264;
vc->codec_type = AVMEDIA_TYPE_VIDEO;
vc->pix_fmt = AV_PIX_FMT_YUV420P;
vc->width = width;
vc->height = height;
vc->time_base.num = 1;
vc->time_base.den = FPS;
vc->framerate = { FPS,1 };
vc->bit_rate = 10241000;
vc->gop_size = 120;
vc->qmin = 10;
vc->qmax = 51;
vc->max_b_frames = 0;
vc->profile = FF_PROFILE_H264_MAIN;
ret = avcodec_open2(vc, codec, &enoptions);
if (ret != 0)
{
return ret;
}
std::cout << "avcodec_open2 success!" << std::endl;
av_dict_free(&enoptions);
vsc = nullptr;
vsc = sws_getCachedContext(vsc,
width, height, (AVPixelFormat)videoType, // source width/height/pixel format
width, height, AV_PIX_FMT_YUV420P, // destination width/height/pixel format
SWS_BICUBIC, // scaling algorithm
0, 0, 0
);
if (!vsc)
{
std::cout << "sws_getCachedContext failed!";
return -1;
}
yuv = av_frame_alloc();
yuv->format = AV_PIX_FMT_YUV420P;
yuv->width = width;
yuv->height = height;
yuv->pts = 0;
ret = av_frame_get_buffer(yuv, 32);
if (ret != 0)
{
return ret;
}
ic = NULL;
//ret = avformat_alloc_output_context2(&ic, 0, "flv", rtmpurl);
ret = avformat_alloc_output_context2(&ic, NULL, "mpegts", "output.mp4"); // MPEG-TS muxer; the file itself is opened below
if (ret < 0)
{
return ret;
}
st = avformat_new_stream(ic, NULL);
if (!st)
{
return -1;
}
st->codecpar->codec_tag = 0;
avcodec_parameters_from_context(st->codecpar, vc);
ret = avio_open(&ic->pb, "output1.mp4", AVIO_FLAG_WRITE); // note: differs from the name passed to avformat_alloc_output_context2
if (ret != 0)
{
return ret;
}
ret = avformat_write_header(ic, NULL);
if (ret != 0)
{
return ret;
}
packet = av_packet_alloc();
Encodepacket = av_packet_alloc();
rgb = av_frame_alloc();
m_cut_frame = av_frame_alloc();
AVBitStreamFilterContext* h264bsfc = av_bitstream_filter_init("h264_mp4toannexb"); // deprecated API; the handle is not used below
startpts = m_fmt_ctx->start_time;
lastpts = 0;
duration = av_rescale_q(1, { 1,FPS }, { 1,AV_TIME_BASE });
return 0;
}
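// Crop `in` to the rectangle given by left/top/right/bottom with a lavfi
// graph (buffer -> crop -> buffersink); returns a new frame, or NULL on error.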
static AVFrame *crop_frame(const AVFrame *in, int left, int top, int right, int bottom)
{
AVFilterContext *buffersink_ctx;
AVFilterContext *buffersrc_ctx;
AVFilterGraph *filter_graph = avfilter_graph_alloc();
AVFrame *f = av_frame_alloc();
AVFilterInOut *inputs = NULL, *outputs = NULL;
char args[512];
int ret;
snprintf(args, sizeof(args),
"buffer=video_size=%dx%d:pix_fmt=%d:time_base=1/1:pixel_aspect=0/1[in];"
"[in]crop=%d:%d:%d:%d[out];"
"[out]buffersink",
in->width, in->height, in->format,
right - left, bottom - top, left, top); // crop=w:h:x:y
ret = avfilter_graph_parse2(filter_graph, args, &inputs, &outputs);
if (ret < 0) return NULL;
assert(inputs == NULL && outputs == NULL);
ret = avfilter_graph_config(filter_graph, NULL);
if (ret < 0) return NULL;
buffersrc_ctx = avfilter_graph_get_filter(filter_graph, "Parsed_buffer_0");
buffersink_ctx = avfilter_graph_get_filter(filter_graph, "Parsed_buffersink_2");
assert(buffersrc_ctx != NULL);
assert(buffersink_ctx != NULL);
av_frame_ref(f, in);
ret = av_buffersrc_add_frame(buffersrc_ctx, f);
if (ret < 0) return NULL;
ret = av_buffersink_get_frame(buffersink_ctx, f);
if (ret < 0) return NULL;
avfilter_graph_free(&filter_graph);
return f;
}
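// Copy a dst_width x dst_height window starting at (start_x, start_y) out of
// a 32bpp (4 bytes per pixel) source image into a tightly packed destination.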
void crop_rgb32(unsigned char* src, int src_width, int src_height,
unsigned char* dst, int dst_width, int dst_height,
int start_x, int start_y) {
int src_stride = src_width * 4;
int dst_stride = dst_width * 4;
unsigned char* src_row = src + start_y * src_stride;
unsigned char* dst_row = dst;
for (int y = start_y; y < start_y + dst_height && y < src_height; ++y) {
unsigned char* src_pixel = src_row + (start_x * 4);
unsigned char* dst_pixel = dst_row;
for (int x = start_x; x < start_x + dst_width && x < src_width; ++x) {
// copy one 4-byte (32bpp) pixel
memcpy(dst_pixel, src_pixel, 4);
dst_pixel += 4;
src_pixel += 4;
}
src_row += src_stride;
dst_row += dst_stride;
}
}
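// Read one packet from the gdigrab input, decode it, crop a 300x300 window
// out of the raw frame and hand the cropped pixels to the observer.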
int ScreenCapture::Process(void *p)
{
int got_picture = 0;
unsigned char *dat = new unsigned char[300 * 300 * 4];
int ret = av_read_frame(m_fmt_ctx, packet);
if (ret < 0) {
delete[] dat; // avoid leaking the crop buffer on the early exit
return -1;
}
if (packet->stream_index == video_stream) {
ret = avcodec_decode_video2(_codec_ctx, rgb, &got_picture, packet);
if (ret < 0) {
printf("Decode Error.\n");
return ret;
}
if (got_picture) {
// crop a 300x300 window at offset (300,300) from the captured frame
crop_rgb32(rgb->data[0], width, height, dat, 300, 300, 300, 300);
av_frame_unref(rgb);
av_frame_unref(m_cut_frame);
if(this->mObserver != nullptr){
this->mObserver->OnScreenData(dat, 300 * 4 * 300);
}
// qDebug()<<rgb->linesize[0]<<height;
// int h = sws_scale(vsc, rgb->data, rgb->linesize, 0, height, //Դ<><D4B4><EFBFBD><EFBFBD>
// yuv->data, yuv->linesize);
// int guesspts = frameIndex * duration;
// yuv->pts = guesspts;
// frameIndex++;
// ret = avcodec_encode_video2(vc, Encodepacket, yuv, &got_picture);
// if (ret < 0) {
// printf("Failed to encode!\n");
// }
// if (got_picture == 1) {
// Encodepacket->pts = av_rescale_q(EncodeIndex, vc->time_base, st->time_base);
// Encodepacket->dts = Encodepacket->pts;
// qDebug() << "frameindex : " << EncodeIndex << " pts : " << Encodepacket->pts << " dts: " << Encodepacket->dts << " encodeSize:" << Encodepacket->size << " curtime - lasttime " << Encodepacket->pts - lastpts << endl;
// lastpts = Encodepacket->pts;
// ret = av_interleaved_write_frame(ic, Encodepacket);
// EncodeIndex++;
// av_packet_unref(Encodepacket);
// }
}
}
av_packet_unref(packet);
delete[] dat; // release the crop buffer on every path
return 0;
}
int ScreenCapture::Height()
{
return this->height;
}
int ScreenCapture::Width()
{
return this->width;
}
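// Build a lavfi graph "buffer (in) -> <filters_descr> -> buffersink (out)"
// from the decoder's frame parameters; frames pushed into m_buffersrc_ctx can
// then be pulled, filtered, from m_buffersink_ctx.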
int ScreenCapture::InitFilter(const char *filters_descr)
{
char args[512];
int ret;
AVFilter *buffersrc = (AVFilter *)avfilter_get_by_name("buffer");
AVFilter *buffersink = (AVFilter *)avfilter_get_by_name("buffersink");
AVFilterInOut *outputs = avfilter_inout_alloc();
AVFilterInOut *inputs = avfilter_inout_alloc();
enum AVPixelFormat pix_fmts[] = { AV_PIX_FMT_BGRA /* the literal 28 in this FFmpeg version */, AV_PIX_FMT_NONE };
AVBufferSinkParams *buffersink_params;
m_filter_graph = avfilter_graph_alloc();
/* buffer video source: the decoded frames from the decoder will be inserted here. */
snprintf(args, sizeof(args),
"video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d",
_codec_ctx->width, _codec_ctx->height, _codec_ctx->pix_fmt,
_codec_ctx->time_base.num, _codec_ctx->time_base.den,
_codec_ctx->sample_aspect_ratio.num, _codec_ctx->sample_aspect_ratio.den);
qDebug()<<args;
ret = avfilter_graph_create_filter(&m_buffersrc_ctx, buffersrc, "in",
args, NULL, m_filter_graph);
if (ret < 0) {
printf("Cannot create buffer source\n");
return ret;
}
/* buffer video sink: to terminate the filter chain. */
buffersink_params = av_buffersink_params_alloc();
buffersink_params->pixel_fmts = pix_fmts;
ret = avfilter_graph_create_filter(&m_buffersink_ctx, buffersink, "out",
NULL, buffersink_params, m_filter_graph);
av_free(buffersink_params);
if (ret < 0) {
qDebug()<<"Cannot create buffer sink\n";
return ret;
}
/* Endpoints for the filter graph. */
outputs->name = av_strdup("in");
outputs->filter_ctx = m_buffersrc_ctx;
outputs->pad_idx = 0;
outputs->next = NULL;
inputs->name = av_strdup("out");
inputs->filter_ctx = m_buffersink_ctx;
inputs->pad_idx = 0;
inputs->next = NULL;
if ((ret = avfilter_graph_parse_ptr(m_filter_graph, filters_descr,
&inputs, &outputs, NULL)) < 0)
return ret;
if ((ret = avfilter_graph_config(m_filter_graph, NULL)) < 0)
return ret;
return 0;
}
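// A minimal usage sketch (hypothetical caller, not part of this file):
//
//   ScreenCapture cap;
//   cap.SetObserver(myObserver);              // myObserver implements CaptureVideoObserver
//   if (cap.InitCap() == 0) {
//       cap.InitFilter("crop=300:300:0:0");   // optional lavfi chain, e.g. a crop
//       while (running) cap.Process(nullptr); // one decoded/cropped frame per call
//   }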