From 822b35dd51798e3f62498efeaed1a43a71b95c6c Mon Sep 17 00:00:00 2001 From: zcy <290198252@qq.com> Date: Mon, 17 Jun 2024 00:14:06 +0800 Subject: [PATCH] =?UTF-8?q?=E6=B7=BB=E5=8A=A0=E8=87=AA=E5=AE=9A=E4=B9=89?= =?UTF-8?q?=E5=BD=95=E5=B1=8F=E5=8A=9F=E8=83=BD?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- cplaywidget.cpp | 71 ++++--- cplaywidget.h | 10 +- main.cpp | 6 +- mainwindow.cpp | 76 ++++++-- mainwindow.h | 18 +- mainwindow.ui | 36 +++- media/VideoCoder.h | 1 + media/audioplayerff.cpp | 10 + media/audioplayerff.h | 17 ++ media/screen_capture.cpp | 382 ++++++++++++++++++++++++++++++++++++- media/screen_capture.h | 65 ++++++- media/streamcontrol.cpp | 66 +++++++ media/streamcontrol.h | 28 +++ ui_mainwindow.h | 29 ++- ui_process.h | 2 +- ui_qsstoast.h | 2 +- ui_toast.h | 2 +- utils/screenshotwidget.cpp | 299 +++++++++++++++++++++++++++++ utils/screenshotwidget.h | 66 +++++++ yuvgl.pro | 4 + yuvgl.pro.user | 6 +- 21 files changed, 1119 insertions(+), 77 deletions(-) create mode 100644 media/audioplayerff.cpp create mode 100644 media/audioplayerff.h create mode 100644 media/streamcontrol.cpp create mode 100644 media/streamcontrol.h create mode 100644 utils/screenshotwidget.cpp create mode 100644 utils/screenshotwidget.h diff --git a/cplaywidget.cpp b/cplaywidget.cpp index f4e8d87..5ccce8c 100644 --- a/cplaywidget.cpp +++ b/cplaywidget.cpp @@ -14,7 +14,7 @@ varying vec2 textureOut; \ void main(void) \ { \ gl_Position = vertexIn; \ - textureOut = textureIn; \ + textureOut = vec2(textureIn.x,1.0-textureIn.y);; \ }"; @@ -85,42 +85,44 @@ CPlayWidget::CPlayWidget(QWidget *parent):QOpenGLWidget(parent) { m_nVideoW = 0; mType = TYPE_YUV420P; connect(&this->tm,SIGNAL(timeout()),this,SLOT(OnUpdateFrame())); - //tm.start(1000); + tm.start(1000); + + } CPlayWidget::~CPlayWidget() { } void CPlayWidget::PlayOneFrame() {//函数功能读取一张yuv图像数据进行显示,每单击一次,就显示一张图片 - if(NULL == m_pYuvFile) - { - //打开yuv视频文件 注意修改文件路径 - // m_pYuvFile = fopen("F://OpenglYuvDemo//1920_1080.yuv", "rb"); - m_pYuvFile = fopen("F://md_sample_sp420_1080p.yuv", "rb"); - //根据yuv视频数据的分辨率设置宽高,demo当中是1080p,这个地方要注意跟实际数据分辨率对应上 -// m_nVideoW = 1920; -// m_nVideoH = 1080; - } - //申请内存存一帧yuv图像数据,其大小为分辨率的1.5倍 +// if(NULL == m_pYuvFile) +// { +// //打开yuv视频文件 注意修改文件路径 +// // m_pYuvFile = fopen("F://OpenglYuvDemo//1920_1080.yuv", "rb"); +// m_pYuvFile = fopen("F://md_sample_sp420_1080p.yuv", "rb"); +// //根据yuv视频数据的分辨率设置宽高,demo当中是1080p,这个地方要注意跟实际数据分辨率对应上 +//// m_nVideoW = 1920; +//// m_nVideoH = 1080; +// } +// //申请内存存一帧yuv图像数据,其大小为分辨率的1.5倍 - int nLen = m_nVideoW*m_nVideoH*3/2; - if(nullptr == m_pBufYuv420p) - { - m_pBufYuv420p = new unsigned char[nLen]; - qDebug("CPlayWidget::PlayOneFrame new data memory. Len=%d width=%d height=%d\n", - nLen, m_nVideoW, m_nVideoW); - } - //将一帧yuv图像读到内存中 +// int nLen = m_nVideoW*m_nVideoH*3/2; +// if(nullptr == m_pBufYuv420p) +// { +// m_pBufYuv420p = new unsigned char[nLen]; +// qDebug("CPlayWidget::PlayOneFrame new data memory. 
Len=%d width=%d height=%d\n", +// nLen, m_nVideoW, m_nVideoW); +// } +// //将一帧yuv图像读到内存中 - if(NULL == m_pYuvFile) - { - qFatal("read yuv file err.may be path is wrong!\n"); - return; - } - fread(m_pBufYuv420p, 1, nLen, m_pYuvFile); +// if(NULL == m_pYuvFile) +// { +// qFatal("read yuv file err.may be path is wrong!\n"); +// return; +// } +// fread(m_pBufYuv420p, 1, nLen, m_pYuvFile); //刷新界面,触发paintGL接口 - update(); +// update(); return; } @@ -134,7 +136,6 @@ int CPlayWidget::OnCameraData(uint8_t *dat, uint32_t size) memcpy(this->m_pBufRgb32,dat,size); update(); return 0; - } int CPlayWidget::SetImgSize(uint32_t width, uint32_t height) @@ -150,6 +151,12 @@ int CPlayWidget::SetImgSize(uint32_t width, uint32_t height) return 0; } +void CPlayWidget::OnScreenData(const void *frameaddress, uint32_t framelen) +{ + memcpy(this->m_pBufRgb32,frameaddress,framelen); + update(); +} + /* @@ -244,6 +251,14 @@ void CPlayWidget::resizeGL(int w, int h) void CPlayWidget::paintGL() { + glClearColor(0.2f, 0.3f, 0.3f, 1.0f); + glClear(GL_COLOR_BUFFER_BIT); + QMatrix4x4 matrix; + matrix.setToIdentity(); + matrix.translate(0.4f,0.0,0.0); + matrix.rotate(45,0,0,1); + matrix.scale(0.5); + if(mType == TYPE_YUV420P) loadYuvTexture(); if(mType == TYPE_RGB32){ diff --git a/cplaywidget.h b/cplaywidget.h index ec88d6e..1f31a17 100644 --- a/cplaywidget.h +++ b/cplaywidget.h @@ -6,6 +6,7 @@ #include #include #include "media/CameraCapture.h" +#include "media/screen_capture.h" #include @@ -14,7 +15,11 @@ -class CPlayWidget:public QOpenGLWidget,protected QOpenGLFunctions,public Camera::CameraObserver +class CPlayWidget:public QOpenGLWidget, + protected QOpenGLFunctions, + public Camera::CameraObserver, + public ScreenCapture::CaptureVideoObserver + { Q_OBJECT public slots: @@ -31,6 +36,7 @@ public: int SetDataType(IMG_TYPE); int OnCameraData(uint8_t *dat, uint32_t size) override; int SetImgSize(uint32_t width,uint32_t ); + void OnScreenData(const void *frameaddress, uint32_t framelen) override; protected: QTimer tm; void initializeGL() override; @@ -63,7 +69,7 @@ private: int m_nVideoH; //视频分辨率高 unsigned char *m_pBufYuv420p; unsigned char* m_pBufRgb32; - + QTimer mTimer; FILE* m_pYuvFile; void initShaderYuv(); diff --git a/main.cpp b/main.cpp index f429904..0a61ebb 100644 --- a/main.cpp +++ b/main.cpp @@ -14,6 +14,8 @@ #include #include #include +#include +#include #if _MSC_VER >= 1600 #pragma execution_character_set("utf-8") @@ -30,13 +32,9 @@ int RegiesterOwnType(){ int main(int argc, char *argv[]) { - setbuf(stdout, NULL);//让printf立即输出 ScreenCapture p; p.EnumScreen(); - SYSTEMTIME st; - GetSystemTime(&st); - Direct3D9TakeScreenshots(0,4); QssEventFilter filter; QApplication app(argc, argv); diff --git a/mainwindow.cpp b/mainwindow.cpp index 62fb307..711a13c 100644 --- a/mainwindow.cpp +++ b/mainwindow.cpp @@ -3,6 +3,7 @@ #include #include + #if _MSC_VER >= 1600 #pragma execution_character_set("utf-8") #endif @@ -20,7 +21,10 @@ MainWindow::MainWindow(QWidget *parent) : mTimer(nullptr), mChart(nullptr), mChartView(nullptr), - mSeries(nullptr) + mSeries(nullptr), + mScreenCap(nullptr), + mCapScreen(false), + mScreenshot(nullptr) { ui->setupUi(this); this->move(50,50); @@ -65,7 +69,7 @@ MainWindow::MainWindow(QWidget *parent) : ui->verticalLayout->addWidget(mChartView); mChartView->setChart(mChart); - ui->verticalLayout->setStretch(0,1); + ui->verticalLayout->setStretch(0,2); ui->verticalLayout->setStretch(1,8); ui->verticalLayout->setStretch(2,4); @@ -85,18 +89,6 @@ void MainWindow::OnAudioData(const void *frameaddress, 
uint32_t framelen) mMux.lock(); mBufferAudio.push_front((void *)frameaddress); mMux.unlock(); -// if(mSeries->points().size() > 100){ -// for(int i = 0;i < framelen/1000 - 10;i++){ -// int16_t *v = ((int16_t*)(frameaddress) + i*1000); -// mSeries->append(QPointF(i,*v)); -// } -// }else{ -// for(int i = 0;i < framelen/1000 - 10;i++){ -// int16_t *v = ((int16_t*)(frameaddress) + i*1000); -// mSeries->append(QPointF(i,*v)); -// } -// } - } void MainWindow::on_pushButton_clicked(){ @@ -191,7 +183,6 @@ void MainWindow::on_pushButton_3_clicked() void MainWindow::DetectDpi() { - qDebug()<<"detect dpi"; int horizontalDPI = logicalDpiX(); int verticalDPI = logicalDpiY(); mMux.lock(); @@ -213,3 +204,58 @@ void MainWindow::DetectDpi() mMux.unlock(); } + +void MainWindow::on_pushButton_4_clicked() +{ + if(!mCapScreen){ + if(nullptr == mScreenCap){ + mScreenCap = new ScreenCapture(); + } + mScreenCap->InitCap(); + mScreenCap->InitFilter("crop=w=400:h=500:x=100:y=500"); + + mScreenCap->Start(this); + + if(nullptr == mPlayerWidget){ + mPlayerWidget = new CPlayWidget(nullptr); + } + mPlayerWidget->SetDataType(CPlayWidget::IMG_TYPE::TYPE_RGB32); + mPlayerWidget->SetImgSize(300,300); + + mScreenCap->SetObserver(this->mPlayerWidget); + mPlayerWidget->show(); + ui->verticalLayout->insertWidget(2,mPlayerWidget,9); + qDebug()<verticalLayout->layout(); + ui->verticalLayout->setStretch(0,1); + ui->verticalLayout->setStretch(1,0); + ui->verticalLayout->setStretch(2,9); + ui->verticalLayout->setStretch(3,5); + mCapScreen = true; + } + else{ + mScreenCap->Stop(); + } +} + + +void MainWindow::on_pushButton_5_clicked() +{ + if(nullptr == mScreenshot){ + mScreenshot = new ScreenShotWidget(nullptr); + connect(mScreenshot,&ScreenShotWidget::rect_selected, + this,[=](QRect rect){ + qDebug()<<"screen rect selected"<showFullScreen(); +} + +void MainWindow::on_rect_selected(QRect rect) +{ + qDebug()<<"screen rect selected"< #include #include +#include "media/screen_capture.h" +#include "utils/screenshotwidget.h" + using namespace QtCharts; using namespace std; @@ -25,6 +28,7 @@ using namespace std; namespace Ui { class MainWindow; } +#include class MainWindow : public QssMainWindow ,CaptureAudioFfmpeg::CaptureAudioObserver { @@ -34,11 +38,19 @@ public: explicit MainWindow(QWidget *parent = nullptr); ~MainWindow(); void OnAudioData(const void *frameaddress, uint32_t framelen) ; +public slots: + void on_rect_selected(QRect rect); + private slots: void on_pushButton_clicked(); void on_pushButton_2_clicked(); void on_pushButton_3_clicked(); void DetectDpi(); + void on_pushButton_4_clicked(); + + void on_pushButton_5_clicked(); + + private: QLineSeries* mSeries; QVector mPoint1,mPoint2; @@ -56,8 +68,10 @@ private: QChart *mChart; QChartView *mChartView; QListmBufferAudio; - QMutex mMux; - + QMutex mMux; // lock for audio + ScreenShotWidget *mScreenshot; + ScreenCapture *mScreenCap; + bool mCapScreen; }; #endif // MAINWINDOW_H diff --git a/mainwindow.ui b/mainwindow.ui index 45d9f32..f8776ae 100644 --- a/mainwindow.ui +++ b/mainwindow.ui @@ -36,7 +36,7 @@ - + 2 @@ -70,7 +70,7 @@ - ??????? + camera cap @@ -99,7 +99,7 @@ - ?????? + audio record @@ -116,7 +116,7 @@ - rtmp??????? + rtmp addr @@ -142,7 +142,33 @@ - ???? 
+ 鎺ㄦ祦 + + + + + + + + 60 + 50 + + + + 褰曞睆 + + + + + + + + 60 + 50 + + + + 鑷畾涔夊綍灞 diff --git a/media/VideoCoder.h b/media/VideoCoder.h index 4bbf112..e61868b 100644 --- a/media/VideoCoder.h +++ b/media/VideoCoder.h @@ -15,6 +15,7 @@ extern "C" #include #include #include +#include } #include diff --git a/media/audioplayerff.cpp b/media/audioplayerff.cpp new file mode 100644 index 0000000..a472f4e --- /dev/null +++ b/media/audioplayerff.cpp @@ -0,0 +1,10 @@ +#include "audioplayerff.h" + +AudioPlayerFF::AudioPlayerFF() +{ +// if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) { +// printf("Could not initialize SDL - %s\n", SDL_GetError()); +// return -1; +// } + +} diff --git a/media/audioplayerff.h b/media/audioplayerff.h new file mode 100644 index 0000000..795d6b9 --- /dev/null +++ b/media/audioplayerff.h @@ -0,0 +1,17 @@ +#ifndef AUDIOPLAYERFF_H +#define AUDIOPLAYERFF_H + + +//#include +//#include +#include + + + +class AudioPlayerFF +{ +public: + AudioPlayerFF(); +}; + +#endif // AUDIOPLAYERFF_H diff --git a/media/screen_capture.cpp b/media/screen_capture.cpp index 3cbcaaa..e733388 100644 --- a/media/screen_capture.cpp +++ b/media/screen_capture.cpp @@ -4,6 +4,9 @@ #include #include #include +#include + + #if _MSC_VER >= 1600 #pragma execution_character_set("utf-8") @@ -55,8 +58,7 @@ HRESULT SavePixelsToFile32bppPBGRA(UINT width, UINT height, UINT stride, return hr; } -HRESULT Direct3D9TakeScreenshots(UINT adapter, UINT count) -{ +HRESULT Direct3D9TakeScreenshots(UINT adapter, UINT count) { HRESULT hr = S_OK; IDirect3D9 *d3d = nullptr; IDirect3DDevice9 *device = nullptr; @@ -134,6 +136,7 @@ cleanup: } ScreenCapture::ScreenCapture() + :mObserver(nullptr) { m_d3d9_dev = ::Direct3DCreate9(D3D_SDK_VERSION); @@ -165,14 +168,381 @@ void ScreenCapture::EnumScreen() D3DADAPTER_IDENTIFIER9 adapterID; // Used to store device info char strBuffer[20480]; DWORD dwDisplayCount = m_d3d9_dev->GetAdapterCount(); - for(DWORD i = 0; i < dwDisplayCount; i++) - { - if( m_d3d9_dev->GetAdapterIdentifier( i/*D3DADAPTER_DEFAULT*/, 0,&adapterID ) != D3D_OK ) - { + for(DWORD i = 0; i < dwDisplayCount; i++) { + if( m_d3d9_dev->GetAdapterIdentifier( i/*D3DADAPTER_DEFAULT*/, 0,&adapterID ) != D3D_OK) { return; } qDebug()<mObserver = ob; +} + + + +int ScreenCapture::InitCap() { + avdevice_register_all(); + avcodec_register_all(); + const char* deviceName = "desktop"; + const char* inputformat = "gdigrab"; + int FPS = 23; //15 + m_fmt_ctx = avformat_alloc_context(); + m_input_fmt = av_find_input_format(inputformat); + AVDictionary* deoptions = NULL; + AVDictionary* dic = NULL; + + av_dict_set_int(&deoptions, "framerate", FPS, AV_DICT_MATCH_CASE); + av_dict_set_int(&deoptions, "rtbufsize", 3041280 * 100 * 5, 0); + + //如果不设置的话,在输入源是直播流的时候,会花屏。单位bytes + //av_dict_set(&deoptions, "buffer_size", "10485760", 0); + //av_dict_set(&deoptions, "reuse", "1", 0); + + int ret = avformat_open_input(&m_fmt_ctx, deviceName, m_input_fmt, &deoptions); + if (ret != 0) { + + return ret; + } + av_dict_free(&deoptions); + ret = avformat_find_stream_info(m_fmt_ctx, NULL); + if (ret < 0) { + + return ret; + } + av_dump_format(m_fmt_ctx, 0, deviceName, 0); + video_stream = av_find_best_stream(m_fmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0); + if (video_stream < 0) { + + return -1; + } + + _codec_ctx = m_fmt_ctx->streams[video_stream]->codec; + _codec = avcodec_find_decoder(_codec_ctx->codec_id); + if (_codec == NULL) { + + return -1; + } + ret = avcodec_open2(_codec_ctx, _codec, NULL); + if (ret != 0) { + return -1; + } + width = 
m_fmt_ctx->streams[video_stream]->codec->width; + height = m_fmt_ctx->streams[video_stream]->codec->height; + int fps = m_fmt_ctx->streams[video_stream]->codec->framerate.num > 0 ? m_fmt_ctx->streams[video_stream]->codec->framerate.num : 25; + AVPixelFormat videoType = m_fmt_ctx->streams[video_stream]->codec->pix_fmt; + std::cout << "avstream timebase : " << m_fmt_ctx->streams[video_stream]->time_base.num << " / " << m_fmt_ctx->streams[video_stream]->time_base.den << endl; + AVDictionary* enoptions = 0; + //av_dict_set(&enoptions, "preset", "superfast", 0); + //av_dict_set(&enoptions, "tune", "zerolatency", 0); + av_dict_set(&enoptions, "preset", "ultrafast", 0); + av_dict_set(&enoptions, "tune", "zerolatency", 0); + + //TODO + //av_dict_set(&enoptions, "pkt_size", "1316", 0); //Maximum UDP packet size + av_dict_set(&dic, "fifo_size", "18800", 0); + av_dict_set(&enoptions, "buffer_size", "0", 1); + av_dict_set(&dic, "bitrate", "11000000", 0); + av_dict_set(&dic, "buffer_size", "1000000", 0);//1316 + //av_dict_set(&enoptions, "reuse", "1", 0); + + AVCodec* codec = avcodec_find_encoder(AV_CODEC_ID_H264); + if (!codec) + { + std::cout << "avcodec_find_encoder failed!" << endl; + return NULL; + } + vc = avcodec_alloc_context3(codec); + if (!vc) + { + std::cout << "avcodec_alloc_context3 failed!" << endl; + return NULL; + } + std::cout << "avcodec_alloc_context3 success!" << endl; + vc->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; + vc->codec_id = AV_CODEC_ID_H264; + vc->codec_type = AVMEDIA_TYPE_VIDEO; + vc->pix_fmt = AV_PIX_FMT_YUV420P; + vc->width = width; + vc->height = height; + vc->time_base.num = 1; + vc->time_base.den = FPS; + vc->framerate = { FPS,1 }; + vc->bit_rate = 10241000; + vc->gop_size = 120; + vc->qmin = 10; + vc->qmax = 51; + vc->max_b_frames = 0; + vc->profile = FF_PROFILE_H264_MAIN; + ret = avcodec_open2(vc, codec, &enoptions); + if (ret != 0) + { + return ret; + } + std::cout << "avcodec_open2 success!" 
<< endl; + av_dict_free(&enoptions); + vsc = nullptr; + vsc = sws_getCachedContext(vsc, + width, height, (AVPixelFormat)videoType, //源宽、高、像素格式 + width, height, AV_PIX_FMT_YUV420P,//目标宽、高、像素格式 + SWS_BICUBIC, // 尺寸变化使用算法 + 0, 0, 0 + ); + if (!vsc) + { + std::cout << "sws_getCachedContext failed!"; + return false; + } + yuv = av_frame_alloc(); + yuv->format = AV_PIX_FMT_YUV420P; + yuv->width = width; + yuv->height = height; + yuv->pts = 0; + ret = av_frame_get_buffer(yuv, 32); + if (ret != 0) + { + + return ret; + } + ic = NULL; + //ret = avformat_alloc_output_context2(&ic, 0, "flv", rtmpurl); + ret = avformat_alloc_output_context2(&ic, NULL, "mpegts", "output.mp4");//UDP + if (ret < 0) + { + + return ret; + } + st = avformat_new_stream(ic, NULL); + if (!st) + { + + return -1; + } + st->codecpar->codec_tag = 0; + avcodec_parameters_from_context(st->codecpar, vc); + ret = avio_open(&ic->pb, "output1.mp4", AVIO_FLAG_WRITE); + if (ret != 0) + { + + return ret; + } + ret = avformat_write_header(ic, NULL); + if (ret != 0) + { + + return ret; + } + packet = av_packet_alloc(); + Encodepacket = av_packet_alloc(); + rgb = av_frame_alloc(); + m_cut_frame = av_frame_alloc(); + + AVBitStreamFilterContext* h264bsfc = av_bitstream_filter_init("h264_mp4toannexb"); + startpts = m_fmt_ctx->start_time; + lastpts = 0; + duration = av_rescale_q(1, { 1,FPS }, { 1,AV_TIME_BASE }); +} + + +static AVFrame *crop_frame(const AVFrame *in, int left, int top, int right, int bottom) +{ + AVFilterContext *buffersink_ctx; + AVFilterContext *buffersrc_ctx; + AVFilterGraph *filter_graph = avfilter_graph_alloc(); + AVFrame *f = av_frame_alloc(); + AVFilterInOut *inputs = NULL, *outputs = NULL; + char args[512]; + int ret; + snprintf(args, sizeof(args), + "buffer=video_size=%dx%d:pix_fmt=%d:time_base=1/1:pixel_aspect=0/1[in];" + "[in]crop=300:300:0:0[out];" + "[out]buffersink", + in->width, in->height, in->format, + left, top, right, bottom); + + ret = avfilter_graph_parse2(filter_graph, args, &inputs, &outputs); + if (ret < 0) return NULL; + assert(inputs == NULL && outputs == NULL); + ret = avfilter_graph_config(filter_graph, NULL); + if (ret < 0) return NULL; + + buffersrc_ctx = avfilter_graph_get_filter(filter_graph, "Parsed_buffer_0"); + buffersink_ctx = avfilter_graph_get_filter(filter_graph, "Parsed_buffersink_2"); + assert(buffersrc_ctx != NULL); + assert(buffersink_ctx != NULL); + + av_frame_ref(f, in); + ret = av_buffersrc_add_frame(buffersrc_ctx, f); + if (ret < 0) return NULL; + ret = av_buffersink_get_frame(buffersink_ctx, f); + if (ret < 0) return NULL; + + avfilter_graph_free(&filter_graph); + + return f; +} + +void crop_rgb32(unsigned char* src, int src_width, int src_height, + unsigned char* dst, int dst_width, int dst_height, + int start_x, int start_y) { + int src_stride = src_width * 4; + int dst_stride = dst_width * 4; + unsigned char* src_row = src + start_y * src_stride; + unsigned char* dst_row = dst; + + for (int y = start_y; y < start_y + dst_height && y < src_height; ++y) { + unsigned char* src_pixel = src_row + (start_x * 4); + unsigned char* dst_pixel = dst_row; + + for (int x = start_x; x < start_x + dst_width && x < src_width; ++x) { + // 复制像素数据 + memcpy(dst_pixel, src_pixel, 4); + dst_pixel += 4; + src_pixel += 4; + } + + src_row += src_stride; + dst_row += dst_stride; + } +} + +int ScreenCapture::Process(void *p) +{ + int got_picture = 0; + unsigned char *dat = new unsigned char[300*300*4]; + + int ret = av_read_frame(m_fmt_ctx, packet); + if (ret < 0) { + return -1; + } + if 
(packet->stream_index == video_stream) { + + + ret = avcodec_decode_video2(_codec_ctx, rgb, &got_picture, packet); + if (ret < 0) { + + printf("Decode Error.\n"); + return ret; + } + + if (got_picture) { + if(ret < 0){ + qDebug()<<"fail filter"; + } + + crop_rgb32(rgb->data[0],1920,1200,dat,300,300,300,300); + av_frame_unref(rgb); + av_frame_unref(m_cut_frame); + if(this->mObserver != nullptr){ + this->mObserver->OnScreenData(dat , + 300*4*300); + } + delete dat; +// qDebug()<linesize[0]<data, rgb->linesize, 0, height, //源数据 +// yuv->data, yuv->linesize); +// int guesspts = frameIndex * duration; +// yuv->pts = guesspts; +// frameIndex++; +// ret = avcodec_encode_video2(vc, Encodepacket, yuv, &got_picture); +// if (ret < 0) { + +// printf("Failed to encode!\n"); +// } +// if (got_picture == 1) { +// Encodepacket->pts = av_rescale_q(EncodeIndex, vc->time_base, st->time_base); +// Encodepacket->dts = Encodepacket->pts; +// qDebug() << "frameindex : " << EncodeIndex << " pts : " << Encodepacket->pts << " dts: " << Encodepacket->dts << " encodeSize:" << Encodepacket->size << " curtime - lasttime " << Encodepacket->pts - lastpts << endl; +// lastpts = Encodepacket->pts; +// ret = av_interleaved_write_frame(ic, Encodepacket); +// EncodeIndex++; +// av_packet_unref(Encodepacket); +// } + } + } + av_packet_unref(packet); + + return 0; +} + +int ScreenCapture::Height() +{ + return this->height; +} + +int ScreenCapture::Width() +{ + return this->width; +} + + + + +int ScreenCapture::InitFilter(const char *filters_descr) +{ + char args[512]; + int ret; + AVFilter *buffersrc = (AVFilter *)avfilter_get_by_name("buffer"); + AVFilter *buffersink = (AVFilter *)avfilter_get_by_name("buffersink"); + AVFilterInOut *outputs = avfilter_inout_alloc(); + AVFilterInOut *inputs = avfilter_inout_alloc(); + enum AVPixelFormat pix_fmts[] = { (AVPixelFormat)28, AV_PIX_FMT_NONE }; + AVBufferSinkParams *buffersink_params; + + m_filter_graph = avfilter_graph_alloc(); + + /* buffer video source: the decoded frames from the decoder will be inserted here. */ +// snprintf(args, sizeof(args), +// "video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d", +// _codec_ctx->width, _codec_ctx->height, _codec_ctx->pix_fmt, +// _codec_ctx->time_base.num, _codec_ctx->time_base.den, +// _codec_ctx->sample_aspect_ratio.num, _codec_ctx->sample_aspect_ratio.den); + + snprintf(args, sizeof(args), + "video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d", + _codec_ctx->width, _codec_ctx->height, _codec_ctx->pix_fmt, + _codec_ctx->time_base.num, _codec_ctx->time_base.den, + _codec_ctx->sample_aspect_ratio.num, _codec_ctx->sample_aspect_ratio.den); + + qDebug()<pixel_fmts = pix_fmts; + ret = avfilter_graph_create_filter(&m_buffersink_ctx, buffersink, "out", + NULL, buffersink_params, m_filter_graph); + av_free(buffersink_params); + if (ret < 0) { + qDebug()<<"Cannot create buffer sink\n"; + return ret; + } + + /* Endpoints for the filter graph. 
*/ + outputs->name = av_strdup("in"); + outputs->filter_ctx = m_buffersrc_ctx; + outputs->pad_idx = 0; + outputs->next = NULL; + + inputs->name = av_strdup("out"); + inputs->filter_ctx = m_buffersink_ctx; + inputs->pad_idx = 0; + inputs->next = NULL; + + if ((ret = avfilter_graph_parse_ptr(m_filter_graph, filters_descr, + &inputs, &outputs, NULL)) < 0) + return ret; + + if ((ret = avfilter_graph_config(m_filter_graph, NULL)) < 0) + return ret; + return 0; +} + diff --git a/media/screen_capture.h b/media/screen_capture.h index 9fa236c..0c9d595 100644 --- a/media/screen_capture.h +++ b/media/screen_capture.h @@ -5,19 +5,80 @@ #include // we use WIC for saving images #include // DirectX 9 header #include +#include +#include "streamcontrol.h" +#include +extern "C" +{ +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" +#include "libavutil/avutil.h" +#include "libswscale/swscale.h" +#include "libavutil/opt.h" +#include "libavutil/imgutils.h" +#include +#include +#include +#include +#include +#include "libavdevice/avdevice.h" - +} HRESULT Direct3D9TakeScreenshots(UINT adapter, UINT count); -class ScreenCapture +class ScreenCapture :public StreamControl { public: + class CaptureVideoObserver { + public: + virtual void OnScreenData(const void *frameaddress, uint32_t framelen) {}; + }; + ScreenCapture(); void EnumScreen(); void PrintDisplayModeInfo(IDirect3D9 *pD3D, D3DFORMAT fmt); + void SetObserver(CaptureVideoObserver * ); + int InitCap(); + int Process(void*) override; + int Height(); + int Width(); + int InitFilter(const char *filters_descr); + int InitFilter2(const char *filters_descr); + private: + IDirect3D9* m_d3d9_dev = nullptr; + CaptureVideoObserver* mObserver; + std::thread *mThread; + AVFormatContext* m_fmt_ctx = NULL; + AVPacket* packet = NULL; + AVInputFormat* m_input_fmt = NULL; + int video_stream = -1; + AVCodecContext * _codec_ctx= NULL; + AVFrame* rgb= NULL; + AVFrame* m_cut_frame = NULL; + + SwsContext *vsc = NULL; + int height = 0; + int width = 0; + AVFrame* yuv = NULL; + int frameIndex = 0; + AVCodecContext* vc = NULL; + long long duration = 0; + AVPacket* Encodepacket = NULL; + int EncodeIndex = 0; + AVStream* st = NULL; + long long startpts = 0; + long long lastpts = 0; + AVFormatContext * ic = NULL; + AVCodec* _codec = NULL; + AVFilterGraph *m_filter_graph; + + AVFilterContext *m_buffersrc_ctx; + AVFilterContext *m_buffersink_ctx; + + }; #endif // SCREEN_CAPTURE_H diff --git a/media/streamcontrol.cpp b/media/streamcontrol.cpp new file mode 100644 index 0000000..e33e363 --- /dev/null +++ b/media/streamcontrol.cpp @@ -0,0 +1,66 @@ +#include "streamcontrol.h" + +StreamControl::StreamControl() +{ + this->mStatus = STOP; + this->mThread = nullptr; +} + +int StreamControl::Process(void*data) { + + return 0; +} + +int StreamControl::Start(void *dat) { + + switch (this->mStatus){ + case RUNNING: + break; + case STOP: + if(nullptr != mThread){ + + }else{ + this->mThread = new std::thread( + [&](){ + while(true){ + switch (this->mStatus) { + case STOP: + return; + case PAUSHED: + break; + case RUNNING: + this->Process(dat); + break; + default: + break; + } + } + }); + } + break; + case PAUSHED: + break; + } + + this->mStatus = RUNNING; + + return 0; +} + +int StreamControl::Stop() +{ + this->mStatus = STOP; + return 0; +} + +int StreamControl::Pause() +{ + this->mStatus = PAUSHED; + return 0; +} + +StreamControl::Status StreamControl::CurrentStatus() +{ + return mStatus; +} + diff --git a/media/streamcontrol.h b/media/streamcontrol.h new file mode 100644 index 
0000000..76ce09f --- /dev/null +++ b/media/streamcontrol.h @@ -0,0 +1,28 @@ +#ifndef STREAMCONTROL_H +#define STREAMCONTROL_H + +#include + + +class StreamControl +{ +public: + typedef enum { + STOP, + RUNNING, + PAUSHED + }Status; + StreamControl(); + + virtual int Process(void*); + + int Start(void *); + int Stop(); + int Pause(); + StreamControl::Status CurrentStatus(); +private: + StreamControl::Status mStatus; + std::thread *mThread; +}; + +#endif // STREAMCONTROL_H diff --git a/ui_mainwindow.h b/ui_mainwindow.h index edfe545..34a6bf5 100644 --- a/ui_mainwindow.h +++ b/ui_mainwindow.h @@ -1,7 +1,7 @@ /******************************************************************************** ** Form generated from reading UI file 'mainwindow.ui' ** -** Created by: Qt User Interface Compiler version 5.14.0 +** Created by: Qt User Interface Compiler version 5.15.2 ** ** WARNING! All changes made in this file will be lost when recompiling UI file! ********************************************************************************/ @@ -38,6 +38,8 @@ public: QLabel *label; QLineEdit *lineEdit; QPushButton *pushButton_2; + QPushButton *pushButton_4; + QPushButton *pushButton_5; QSpacerItem *horizontalSpacer_2; QSpacerItem *verticalSpacer; @@ -114,13 +116,24 @@ public: horizontalLayout->addWidget(pushButton_2); + pushButton_4 = new QPushButton(centralWidget); + pushButton_4->setObjectName(QString::fromUtf8("pushButton_4")); + pushButton_4->setMinimumSize(QSize(60, 50)); + + horizontalLayout->addWidget(pushButton_4); + + pushButton_5 = new QPushButton(centralWidget); + pushButton_5->setObjectName(QString::fromUtf8("pushButton_5")); + pushButton_5->setMinimumSize(QSize(60, 50)); + + horizontalLayout->addWidget(pushButton_5); + horizontalSpacer_2 = new QSpacerItem(40, 20, QSizePolicy::Expanding, QSizePolicy::Minimum); horizontalLayout->addItem(horizontalSpacer_2); - horizontalLayout->setStretch(0, 1); - horizontalLayout->setStretch(1, 2); - horizontalLayout->setStretch(7, 13); + horizontalLayout->setStretch(7, 1); + horizontalLayout->setStretch(9, 2); verticalLayout->addLayout(horizontalLayout); @@ -143,11 +156,13 @@ public: void retranslateUi(QMainWindow *MainWindow) { MainWindow->setWindowTitle(QCoreApplication::translate("MainWindow", "MainWindow", nullptr)); - pushButton->setText(QCoreApplication::translate("MainWindow", "\346\211\223\345\274\200\346\221\204\345\203\217\345\244\264", nullptr)); - pushButton_3->setText(QCoreApplication::translate("MainWindow", "\346\211\223\345\274\200\351\272\246\345\205\213\351\243\216", nullptr)); - label->setText(QCoreApplication::translate("MainWindow", "rtmp\346\216\250\346\265\201\345\234\260\345\235\200", nullptr)); + pushButton->setText(QCoreApplication::translate("MainWindow", "camera cap", nullptr)); + pushButton_3->setText(QCoreApplication::translate("MainWindow", "audio record", nullptr)); + label->setText(QCoreApplication::translate("MainWindow", "rtmp addr", nullptr)); lineEdit->setText(QCoreApplication::translate("MainWindow", "rtmp://127.0.0.1:1935/live/1", nullptr)); pushButton_2->setText(QCoreApplication::translate("MainWindow", "\346\216\250\346\265\201", nullptr)); + pushButton_4->setText(QCoreApplication::translate("MainWindow", "\345\275\225\345\261\217", nullptr)); + pushButton_5->setText(QCoreApplication::translate("MainWindow", "\350\207\252\345\256\232\344\271\211\345\275\225\345\261\217", nullptr)); } // retranslateUi }; diff --git a/ui_process.h b/ui_process.h index 06347c1..8e27d7e 100644 --- a/ui_process.h +++ b/ui_process.h @@ -1,7 +1,7 @@ 
/******************************************************************************** ** Form generated from reading UI file 'process.ui' ** -** Created by: Qt User Interface Compiler version 5.14.0 +** Created by: Qt User Interface Compiler version 5.15.2 ** ** WARNING! All changes made in this file will be lost when recompiling UI file! ********************************************************************************/ diff --git a/ui_qsstoast.h b/ui_qsstoast.h index 79a6455..2da935f 100644 --- a/ui_qsstoast.h +++ b/ui_qsstoast.h @@ -1,7 +1,7 @@ /******************************************************************************** ** Form generated from reading UI file 'qsstoast.ui' ** -** Created by: Qt User Interface Compiler version 5.14.0 +** Created by: Qt User Interface Compiler version 5.15.2 ** ** WARNING! All changes made in this file will be lost when recompiling UI file! ********************************************************************************/ diff --git a/ui_toast.h b/ui_toast.h index 4b59238..ce75d46 100644 --- a/ui_toast.h +++ b/ui_toast.h @@ -1,7 +1,7 @@ /******************************************************************************** ** Form generated from reading UI file 'toast.ui' ** -** Created by: Qt User Interface Compiler version 5.14.0 +** Created by: Qt User Interface Compiler version 5.15.2 ** ** WARNING! All changes made in this file will be lost when recompiling UI file! ********************************************************************************/ diff --git a/utils/screenshotwidget.cpp b/utils/screenshotwidget.cpp new file mode 100644 index 0000000..945307a --- /dev/null +++ b/utils/screenshotwidget.cpp @@ -0,0 +1,299 @@ +#include "screenshotwidget.h" + +ScreenShotWidget::ScreenShotWidget(QWidget *parent) : + QWidget(parent) +{ + //初始化截图激活标志 + m_screenshot_active=false; + //设置窗口,无边框,顶部显示 + setWindowFlags(Qt::FramelessWindowHint | Qt::WindowStaysOnTopHint); + setWindowModality(Qt::ApplicationModal); + //窗口背景为透明 + setAttribute(Qt::WA_TranslucentBackground); + //设置窗口跟踪鼠标移动事件 + setMouseTracking(true); + //设置保存、取消按钮以及长宽label并暂时隐藏 + buttonSave=new QPushButton(this); + buttonSave->setParent(this); + buttonSave->setFixedSize(35,30); + buttonSave->setText("ok"); + buttonSave->setStyleSheet("QPushButton {font-size:16pt; color:green; font-weight:800;}"); + buttonSave->setVisible(false); + buttonCancel=new QPushButton(this); + buttonCancel->setParent(this); + buttonCancel->setFixedSize(35,30); + buttonCancel->setText("x"); + buttonCancel->setStyleSheet("QPushButton {font-size:20pt; color:red;font-weight:100;}"); + buttonCancel->setVisible(false); + lblHeightWidth=new QLabel(this); + lblHeightWidth->setParent(this); + lblHeightWidth->setFixedSize(100,10); + lblHeightWidth->setStyleSheet("QLabel {color:white;}"); + lblHeightWidth->setVisible(false); + //设置放大镜 + lblMagnifyGlass=new QLabel(this); + lblMagnifyGlass->setParent(this); + lblMagnifyGlass->setFixedSize(100,100); + lblMagnifyGlass->setVisible(true); + lblMagnifyGlass->setText("wo zai zhe"); + //设置鼠标光标模式为十字 + setCursor(Qt::CursorShape::CrossCursor); + //连接按钮的信号槽 + connect(this->buttonSave,SIGNAL(clicked()),this,SLOT(on_buttonSave_Clicked())); + connect(this->buttonCancel,SIGNAL(clicked()),this,SLOT(on_buttonCancel_Clicked())); +} + +void ScreenShotWidget::mousePressEvent(QMouseEvent *event) +{ + if(event->button()==Qt::LeftButton) + { + QRect rect(m_startPos,m_endPos); + if((rect.contains(event->pos())||m_cursorMode!=0)&&m_startPos!=m_endPos){ + m_oldPos=event->pos(); + } + else if(m_startPos==m_endPos){ + 
//记录鼠标点击的起始位置 + m_startPos = event->pos(); + //截图激活标志 + m_screenshot_active=true; + } + } + else if(event->button()==Qt::RightButton)//重新截图 + { + if(m_endPos.isNull()) + on_buttonCancel_Clicked(); + m_startPos=QPoint(0,0); + m_endPos=QPoint(0,0); + buttonSave->setVisible(false); + buttonCancel->setVisible(false); + lblHeightWidth->setVisible(false); + setCursor(Qt::CursorShape::CrossCursor); + update(); + } +} + +void ScreenShotWidget::mouseMoveEvent(QMouseEvent *event) +{ + QRect rect(m_startPos,m_endPos); + QPoint point=getPointInWhichSqure(event->pos()); + //截图后不点击鼠标时的光标更新 + if(!m_screenshot_active && m_startPos!=m_endPos && !event->buttons()){ + if(point==m_topLeft) {setCursor(Qt::CursorShape::SizeFDiagCursor);m_cursorMode=1;} + else if(point==m_topRight) {setCursor(Qt::CursorShape::SizeBDiagCursor);m_cursorMode=2;} + else if(point==m_bottomLeft) {setCursor(Qt::CursorShape::SizeBDiagCursor);m_cursorMode=3;} + else if(point==m_bottomRight) {setCursor(Qt::CursorShape::SizeFDiagCursor);m_cursorMode=4;} + else if(point==m_topCenter) {setCursor(Qt::CursorShape::SizeVerCursor);m_cursorMode=5;} + else if(point==m_bottomCenter) {setCursor(Qt::CursorShape::SizeVerCursor);m_cursorMode=6;} + else if(point==m_leftCenter) {setCursor(Qt::CursorShape::SizeHorCursor);m_cursorMode=7;} + else if(point==m_rightCenter) {setCursor(Qt::CursorShape::SizeHorCursor);m_cursorMode=8;} + else if(rect.contains(event->pos())) {setCursor(Qt::CursorShape::ClosedHandCursor);m_cursorMode=9;} + else {setCursor(Qt::CursorShape::ArrowCursor);m_cursorMode=0;} + } + //鼠标左键按下 + if(event->buttons() & Qt::LeftButton) + { + //截图中位置更新 + if(m_screenshot_active){ + m_endPos = event->pos(); + } + //拖动已截图区域 + if(rect.contains(event->pos()) && !m_screenshot_active &&cursor().shape()==Qt::ClosedHandCursor){ + m_distancePos=event->pos()-m_oldPos; + m_oldPos=event->pos(); + m_startPos=m_startPos+m_distancePos; + m_endPos=m_endPos+m_distancePos; + //边界条件限制 + if(m_startPos.x()<=0){m_startPos.rx()=0;m_endPos.rx()=m_endPos.x()-m_distancePos.x();} + if(m_startPos.y()<=0){m_startPos.ry()=0;m_endPos.ry()=m_endPos.y()-m_distancePos.y();} + if(m_endPos.x()>=this->width()){m_endPos.rx()=this->width();m_startPos.rx()=m_startPos.x()-m_distancePos.x();} + if(m_endPos.y()>=this->height()){m_endPos.ry()=this->height();m_startPos.ry()=m_startPos.y()-m_distancePos.y();} + } + else if(cursor().shape()==Qt::SizeFDiagCursor) + { + if(m_cursorMode==1){ + m_distancePos=event->pos()-m_oldPos; + m_oldPos=event->pos(); + m_startPos=m_startPos+m_distancePos; + } + if(m_cursorMode==4){ + m_distancePos=event->pos()-m_oldPos; + m_oldPos=event->pos(); + m_endPos=m_endPos+m_distancePos; + } + } + else if(cursor().shape()==Qt::SizeBDiagCursor) + { + if(m_cursorMode==2){ + m_distancePos=event->pos()-m_oldPos; + m_oldPos=event->pos(); + m_startPos.ry()=m_startPos.y()+m_distancePos.y(); + m_endPos.rx()=m_endPos.x()+m_distancePos.x(); + } + if(m_cursorMode==3){ + m_distancePos=event->pos()-m_oldPos; + m_oldPos=event->pos(); + m_startPos.rx()=m_startPos.x()+m_distancePos.x(); + m_endPos.ry()=m_endPos.y()+m_distancePos.y(); + } + } + else if(cursor().shape()==Qt::SizeHorCursor) + { + if(m_cursorMode==7){ + m_distancePos=event->pos()-m_oldPos; + m_oldPos=event->pos(); + m_startPos.rx()=m_startPos.x()+m_distancePos.x(); + } + if(m_cursorMode==8){ + m_distancePos=event->pos()-m_oldPos; + m_oldPos=event->pos(); + m_endPos.rx()=m_endPos.x()+m_distancePos.x(); + } + } + else if(cursor().shape()==Qt::SizeVerCursor) + { + if(m_cursorMode==5){ + m_distancePos=event->pos()-m_oldPos; 
+ m_oldPos=event->pos(); + m_startPos.ry()=m_startPos.y()+m_distancePos.y(); + } + if(m_cursorMode==6){ + m_distancePos=event->pos()-m_oldPos; + m_oldPos=event->pos(); + m_endPos.ry()=m_endPos.y()+m_distancePos.y(); + } + } + update(); + } + +} + +void ScreenShotWidget::mouseReleaseEvent(QMouseEvent *event) +{ + Q_UNUSED(event); + if (m_screenshot_active) { + m_screenshot_active=false; + if (m_endPos != m_startPos) { + //弹出保存/取消按钮 + buttonSave->setVisible(true); + buttonCancel->setVisible(true); + } + } +} + +void ScreenShotWidget::paintEvent(QPaintEvent *event) +{ + Q_UNUSED(event); + QPainter painter(this); + painter.fillRect(rect(),QColor(128,128,128,128)); + QColor color(11,218,81); + QPen pen(color,1,Qt::SolidLine); + painter.setPen(pen); + //边界条件限制 + if(m_startPos.x()>=m_endPos.x())m_endPos.rx()=m_startPos.x(); + if(m_startPos.y()>=m_endPos.y())m_endPos.ry()=m_startPos.y(); + //画矩形,并将内部置为透明 + QRect rect(m_startPos, m_endPos); + painter.setCompositionMode(QPainter::CompositionMode_DestinationIn); + painter.fillRect(rect,QColor(0,0,0,0)); + painter.setCompositionMode(QPainter::CompositionMode_SourceOver); + painter.drawRect(rect.normalized()); + // 获取矩形的四个顶点和四条边的中点 + m_topLeft=QPoint(rect.left(),rect.top()); + m_topRight=QPoint(rect.right()+1,rect.top()); + m_bottomLeft=QPoint(rect.left(),rect.bottom()+1); + m_bottomRight=QPoint(rect.right()+1,rect.bottom()+1); + m_topCenter=QPoint(rect.center().x(), rect.top()); + m_bottomCenter=QPoint(rect.center().x(), rect.bottom()+1); + m_leftCenter=QPoint(rect.left(), rect.center().y()); + m_rightCenter=QPoint(rect.right()+1, rect.center().y()+1); + + // 定义一个小矩形的大小,用来画实心点 + int pointSize = 5; + QRect pointRect(0, 0, pointSize, pointSize); + + // 在每个点的位置画一个小矩形,作为实心点 + pointRect.moveCenter(m_topLeft); + painter.fillRect(pointRect, color); + pointRect.moveCenter(m_topRight); + painter.fillRect(pointRect, color); + pointRect.moveCenter(m_bottomLeft); + painter.fillRect(pointRect, color); + pointRect.moveCenter(m_bottomRight); + painter.fillRect(pointRect, color); + pointRect.moveCenter(m_topCenter); + painter.fillRect(pointRect, color); + pointRect.moveCenter(m_bottomCenter); + painter.fillRect(pointRect, color); + pointRect.moveCenter(m_leftCenter); + painter.fillRect(pointRect, color); + pointRect.moveCenter(m_rightCenter); + painter.fillRect(pointRect, color); + //显示放大镜 + if(cursor().shape()!=Qt::ClosedHandCursor) + { + QRect cursorRect(QCursor::pos().x()-25,QCursor::pos().y()-25,51,51); + QScreen *screen=QGuiApplication::primaryScreen(); + QPixmap cursorPixmap=screen->grabWindow(QApplication::desktop()->winId(),cursorRect.x(),cursorRect.y(),cursorRect.width(),cursorRect.height()); + cursorPixmap=cursorPixmap.scaled(cursorPixmap.width()*2,cursorPixmap.height()*2,Qt::KeepAspectRatio); + QPoint startPoint(QCursor::pos().x()+26,QCursor::pos().y()+26); + painter.drawPixmap(startPoint.x(),startPoint.y(),cursorPixmap.width(),cursorPixmap.height(),cursorPixmap); + painter.drawRect(startPoint.x(),startPoint.y(),cursorPixmap.width(),cursorPixmap.height()); + QPen pen2(color,2,Qt::SolidLine); + painter.setPen(pen2); + painter.drawLine(startPoint.x(),startPoint.y()+cursorPixmap.height()/2+1,startPoint.x()+cursorPixmap.width(),startPoint.y()+cursorPixmap.height()/2+1); + painter.drawLine(startPoint.x()+cursorPixmap.width()/2+1,startPoint.y(),startPoint.x()+cursorPixmap.width()/2+1,startPoint.y()+cursorPixmap.height()); + } + update(); + //显示按钮与标签 + if(this->height()move(rect.x()+rect.width()-buttonSave->width()-10,rect.y()+rect.height()-30); + 
buttonCancel->move(rect.x()+rect.width()-buttonSave->width()-buttonCancel->width()-8,rect.y()+rect.height()-30); + } + else{ + buttonSave->move(rect.x()+rect.width()-buttonSave->width()-10,rect.y()+rect.height()); + buttonCancel->move(rect.x()+rect.width()-buttonSave->width()-buttonCancel->width()-8,rect.y()+rect.height()); + } + lblHeightWidth->setVisible(true); + lblHeightWidth->move(rect.x(),rect.y()-12); + lblHeightWidth->setText(QString::number(rect.width())+"x"+QString::number(rect.height())); +} + +void ScreenShotWidget::on_buttonSave_Clicked() +{ + //清除笔刷,创建一个矩形区域 + close(); + QThread::msleep(30); + QRect rect = QRect(m_startPos, m_endPos); + emit(rect_selected(rect)); + + //获取存储路径 + QDateTime currentDate=QDateTime::currentDateTime(); + QString mCURDIr = QCoreApplication::applicationDirPath(); + QString filePath=mCURDIr + "/screenshot/"+currentDate.toString("MMdd_hhmmss")+".bmp"; + QDir dir; + if(!dir.exists(mCURDIr + "/screenshot")) dir.mkdir(mCURDIr + "/screenshot"); + //获取矩形区域截图(减去笔刷),存储在pixmap中 + QScreen *screen=QGuiApplication::primaryScreen(); + QPixmap pixmap=screen->grabWindow(QApplication::desktop()->winId(),rect.x(),rect.y(),rect.width(),rect.height()); + if (!pixmap.save(filePath)) QMessageBox::warning(this, tr("提示"), tr("保存图片失败")); +} + +void ScreenShotWidget::on_buttonCancel_Clicked() +{ + //关闭透明页面 + close(); +} + +QPoint ScreenShotWidget::getPointInWhichSqure(QPoint point) +{ + int distance=8; + if(abs(point.x()-m_topLeft.x())+abs(point.y()-m_topLeft.y()) +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +class ScreenShotWidget : public QWidget +{ + Q_OBJECT + +Q_SIGNALS: + void rect_selected(QRect rect); +public: + ScreenShotWidget(QWidget *parent = nullptr); +protected: + //鼠标按下事件 + void mousePressEvent(QMouseEvent *event) override; + //鼠标移动事件 + void mouseMoveEvent(QMouseEvent *event) override; + //鼠标松开事件 + void mouseReleaseEvent(QMouseEvent *event) override; + //绘制边框事件 + void paintEvent(QPaintEvent *event) override; +private slots: + void on_buttonSave_Clicked(); + void on_buttonCancel_Clicked(); +private: + QPoint getPointInWhichSqure(QPoint point); + +private: + QPushButton *buttonSave,*buttonCancel; + QLabel *lblHeightWidth; + QLabel *lblMagnifyGlass; + QPainter m_painter; + QPoint m_startPos,m_endPos; //用于记录生成的截图区域 + QPoint m_oldPos,m_distancePos; //用于拖拽截图区域 + bool m_screenshot_active; + + QPoint m_topLeft; + QPoint m_topRight; + QPoint m_bottomLeft; + QPoint m_bottomRight; + QPoint m_topCenter; + QPoint m_bottomCenter; + QPoint m_leftCenter; + QPoint m_rightCenter; + int m_cursorMode; +}; + +#endif // SCREENSHOTWIDGET_H diff --git a/yuvgl.pro b/yuvgl.pro index f681605..a2abcca 100644 --- a/yuvgl.pro +++ b/yuvgl.pro @@ -50,7 +50,9 @@ SOURCES += \ media/audioplayerff.cpp \ media/screen_capture.cpp \ media/sps_decode.cpp \ + media/streamcontrol.cpp \ utils/Debuger.cpp \ + utils/screenshotwidget.cpp \ utils/utils.cpp HEADERS += \ components/toast.h \ @@ -59,6 +61,8 @@ HEADERS += \ cplaywidget.h \ media/audioplayerff.h \ media/screen_capture.h \ + media/streamcontrol.h \ + utils/screenshotwidget.h \ utils/utils.h diff --git a/yuvgl.pro.user b/yuvgl.pro.user index 0c6b918..40409e2 100644 --- a/yuvgl.pro.user +++ b/yuvgl.pro.user @@ -1,6 +1,6 @@ - + EnvironmentId @@ -99,8 +99,8 @@ 0 0 - G:\project\multimedia\client\rtmp_demo\build-yuvgl-Desktop_Qt_5_15_2_MSVC2019_64bit-Debug - 
G:/project/multimedia/client/rtmp_demo/build-yuvgl-Desktop_Qt_5_15_2_MSVC2019_64bit-Debug + G:\project\c++qt\qt_rtmp_demo + G:/project/c++qt/qt_rtmp_demo true
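Note (not part of the patch): a minimal usage sketch of how the pieces added above are meant to fit together, mirroring MainWindow::on_pushButton_4_clicked() and on_pushButton_5_clicked(). Class names and signatures (ScreenCapture, CPlayWidget, ScreenShotWidget, StreamControl) are taken from this diff; the standalone main() wiring, the 300x300 preview size and the crop string are assumptions lifted from the hard-coded values in the patch, not a definitive integration.

// usage_sketch.cpp — illustrative only, builds against the headers added in this patch
#include <QApplication>
#include <QDebug>
#include "cplaywidget.h"
#include "media/screen_capture.h"
#include "utils/screenshotwidget.h"

int main(int argc, char *argv[])
{
    QApplication app(argc, argv);

    // Region picker: ScreenShotWidget emits rect_selected(QRect) once the user
    // drags out an area and presses the "ok" button.
    ScreenShotWidget picker;
    QObject::connect(&picker, &ScreenShotWidget::rect_selected,
                     [](QRect rect) { qDebug() << "selected region:" << rect; });
    picker.showFullScreen();

    // Capture: gdigrab "desktop" input decoded with FFmpeg; InitFilter() sets up
    // the libavfilter crop graph (crop string copied from the patch).
    ScreenCapture cap;
    cap.InitCap();
    cap.InitFilter("crop=w=400:h=500:x=100:y=500");

    // Preview: CPlayWidget implements ScreenCapture::CaptureVideoObserver and
    // receives RGB32 frames through OnScreenData(); the image size must match
    // the 300x300 buffer that ScreenCapture::Process() hands to the observer.
    CPlayWidget preview(nullptr);
    preview.SetDataType(CPlayWidget::IMG_TYPE::TYPE_RGB32);
    preview.SetImgSize(300, 300);
    cap.SetObserver(&preview);
    preview.show();

    // StreamControl::Start() spins the Process() capture loop in a std::thread.
    cap.Start(nullptr);

    int rc = app.exec();
    cap.Stop();
    return rc;
}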