Add custom screen-recording feature

master 1.0
zcy 2024-06-17 00:14:06 +08:00
parent b412013744
commit 822b35dd51
21 changed files with 1119 additions and 77 deletions

View File

@ -14,7 +14,7 @@ varying vec2 textureOut; \
void main(void) \
{ \
gl_Position = vertexIn; \
textureOut = textureIn; \
textureOut = vec2(textureIn.x,1.0-textureIn.y); \
}";
@ -85,42 +85,44 @@ CPlayWidget::CPlayWidget(QWidget *parent):QOpenGLWidget(parent) {
m_nVideoW = 0;
mType = TYPE_YUV420P;
connect(&this->tm,SIGNAL(timeout()),this,SLOT(OnUpdateFrame()));
//tm.start(1000);
tm.start(1000);
}
CPlayWidget::~CPlayWidget() {
}
void CPlayWidget::PlayOneFrame() {//reads one frame of YUV data from the file and displays it; each call shows one picture
if(NULL == m_pYuvFile)
{
//open the YUV video file; note: adjust the file path
// m_pYuvFile = fopen("F://OpenglYuvDemo//1920_1080.yuv", "rb");
m_pYuvFile = fopen("F://md_sample_sp420_1080p.yuv", "rb");
//set width/height to match the resolution of the YUV data; the demo uses 1080p, keep this consistent with the actual data
// m_nVideoW = 1920;
// m_nVideoH = 1080;
}
//allocate memory for one YUV frame (1.5x width*height bytes)
// if(NULL == m_pYuvFile)
// {
// //open the YUV video file; note: adjust the file path
// // m_pYuvFile = fopen("F://OpenglYuvDemo//1920_1080.yuv", "rb");
// m_pYuvFile = fopen("F://md_sample_sp420_1080p.yuv", "rb");
// //set width/height to match the resolution of the YUV data; the demo uses 1080p, keep this consistent with the actual data
//// m_nVideoW = 1920;
//// m_nVideoH = 1080;
// }
// //allocate memory for one YUV frame (1.5x width*height bytes)
int nLen = m_nVideoW*m_nVideoH*3/2;
if(nullptr == m_pBufYuv420p)
{
m_pBufYuv420p = new unsigned char[nLen];
qDebug("CPlayWidget::PlayOneFrame new data memory. Len=%d width=%d height=%d\n",
nLen, m_nVideoW, m_nVideoH);
}
//read one YUV frame into memory
// int nLen = m_nVideoW*m_nVideoH*3/2;
// if(nullptr == m_pBufYuv420p)
// {
// m_pBufYuv420p = new unsigned char[nLen];
// qDebug("CPlayWidget::PlayOneFrame new data memory. Len=%d width=%d height=%d\n",
// nLen, m_nVideoW, m_nVideoH);
// }
// //read one YUV frame into memory
if(NULL == m_pYuvFile)
{
qFatal("read yuv file err.may be path is wrong!\n");
return;
}
fread(m_pBufYuv420p, 1, nLen, m_pYuvFile);
// if(NULL == m_pYuvFile)
// {
// qFatal("read yuv file err.may be path is wrong!\n");
// return;
// }
// fread(m_pBufYuv420p, 1, nLen, m_pYuvFile);
//refresh the widget, which triggers paintGL()
update();
// update();
return;
}
@ -134,7 +136,6 @@ int CPlayWidget::OnCameraData(uint8_t *dat, uint32_t size)
memcpy(this->m_pBufRgb32,dat,size);
update();
return 0;
}
int CPlayWidget::SetImgSize(uint32_t width, uint32_t height)
@ -150,6 +151,12 @@ int CPlayWidget::SetImgSize(uint32_t width, uint32_t height)
return 0;
}
void CPlayWidget::OnScreenData(const void *frameaddress, uint32_t framelen)
{
memcpy(this->m_pBufRgb32,frameaddress,framelen);
update();
}
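Note: OnScreenData copies whatever the capture thread hands over straight into m_pBufRgb32. A minimal defensive variant — a sketch only, assuming SetImgSize allocates m_pBufRgb32 as width*height*4 bytes — would clamp the copy so an oversized frame cannot overrun the buffer:
void CPlayWidget::OnScreenData(const void *frameaddress, uint32_t framelen)
{
// sketch: never copy more than the RGB32 buffer can hold
uint32_t cap = (uint32_t)(m_nVideoW * m_nVideoH * 4);
if (nullptr == m_pBufRgb32 || nullptr == frameaddress)
return;
memcpy(this->m_pBufRgb32, frameaddress, framelen < cap ? framelen : cap);
update();
}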
/*
@ -244,6 +251,14 @@ void CPlayWidget::resizeGL(int w, int h)
void CPlayWidget::paintGL()
{
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
QMatrix4x4 matrix;
matrix.setToIdentity();
matrix.translate(0.4f,0.0,0.0);
matrix.rotate(45,0,0,1);
matrix.scale(0.5);
if(mType == TYPE_YUV420P)
loadYuvTexture();
if(mType == TYPE_RGB32){

View File

@ -6,6 +6,7 @@
#include <QOpenGLTexture>
#include <QFile>
#include "media/CameraCapture.h"
#include "media/screen_capture.h"
#include <QTimer>
@ -14,7 +15,11 @@
class CPlayWidget:public QOpenGLWidget,protected QOpenGLFunctions,public Camera::CameraObserver
class CPlayWidget:public QOpenGLWidget,
protected QOpenGLFunctions,
public Camera::CameraObserver,
public ScreenCapture::CaptureVideoObserver
{
Q_OBJECT
public slots:
@ -31,6 +36,7 @@ public:
int SetDataType(IMG_TYPE);
int OnCameraData(uint8_t *dat, uint32_t size) override;
int SetImgSize(uint32_t width,uint32_t );
void OnScreenData(const void *frameaddress, uint32_t framelen) override;
protected:
QTimer tm;
void initializeGL() override;
@ -63,7 +69,7 @@ private:
int m_nVideoH; //video height in pixels
unsigned char *m_pBufYuv420p;
unsigned char* m_pBufRgb32;
QTimer mTimer;
FILE* m_pYuvFile;
void initShaderYuv();

View File

@ -14,6 +14,8 @@
#include <tchar.h>
#include <shlobj.h>
#include <D3D9.h>
#include <winml.h>
#include <minwinbase.h>
#if _MSC_VER >= 1600
#pragma execution_character_set("utf-8")
@ -30,13 +32,9 @@ int RegiesterOwnType(){
int main(int argc, char *argv[])
{
setbuf(stdout, NULL);//make printf unbuffered so output shows up immediately
ScreenCapture p;
p.EnumScreen();
SYSTEMTIME st;
GetSystemTime(&st);
Direct3D9TakeScreenshots(0,4);
QssEventFilter filter;
QApplication app(argc, argv);

View File

@ -3,6 +3,7 @@
#include <QDesktopWidget>
#include <QPaintDevice>
#if _MSC_VER >= 1600
#pragma execution_character_set("utf-8")
#endif
@ -20,7 +21,10 @@ MainWindow::MainWindow(QWidget *parent) :
mTimer(nullptr),
mChart(nullptr),
mChartView(nullptr),
mSeries(nullptr)
mSeries(nullptr),
mScreenCap(nullptr),
mCapScreen(false),
mScreenshot(nullptr)
{
ui->setupUi(this);
this->move(50,50);
@ -65,7 +69,7 @@ MainWindow::MainWindow(QWidget *parent) :
ui->verticalLayout->addWidget(mChartView);
mChartView->setChart(mChart);
ui->verticalLayout->setStretch(0,1);
ui->verticalLayout->setStretch(0,2);
ui->verticalLayout->setStretch(1,8);
ui->verticalLayout->setStretch(2,4);
@ -85,18 +89,6 @@ void MainWindow::OnAudioData(const void *frameaddress, uint32_t framelen)
mMux.lock();
mBufferAudio.push_front((void *)frameaddress);
mMux.unlock();
// if(mSeries->points().size() > 100){
// for(int i = 0;i < framelen/1000 - 10;i++){
// int16_t *v = ((int16_t*)(frameaddress) + i*1000);
// mSeries->append(QPointF(i,*v));
// }
// }else{
// for(int i = 0;i < framelen/1000 - 10;i++){
// int16_t *v = ((int16_t*)(frameaddress) + i*1000);
// mSeries->append(QPointF(i,*v));
// }
// }
}
void MainWindow::on_pushButton_clicked(){
@ -191,7 +183,6 @@ void MainWindow::on_pushButton_3_clicked()
void MainWindow::DetectDpi()
{
qDebug()<<"detect dpi";
int horizontalDPI = logicalDpiX();
int verticalDPI = logicalDpiY();
mMux.lock();
@ -213,3 +204,58 @@ void MainWindow::DetectDpi()
mMux.unlock();
}
void MainWindow::on_pushButton_4_clicked()
{
if(!mCapScreen){
if(nullptr == mScreenCap){
mScreenCap = new ScreenCapture();
}
mScreenCap->InitCap();
mScreenCap->InitFilter("crop=w=400:h=500:x=100:y=500");
mScreenCap->Start(this);
if(nullptr == mPlayerWidget){
mPlayerWidget = new CPlayWidget(nullptr);
}
mPlayerWidget->SetDataType(CPlayWidget::IMG_TYPE::TYPE_RGB32);
mPlayerWidget->SetImgSize(300,300);
mScreenCap->SetObserver(this->mPlayerWidget);
mPlayerWidget->show();
ui->verticalLayout->insertWidget(2,mPlayerWidget,9);
qDebug()<<ui->verticalLayout->layout();
ui->verticalLayout->setStretch(0,1);
ui->verticalLayout->setStretch(1,0);
ui->verticalLayout->setStretch(2,9);
ui->verticalLayout->setStretch(3,5);
mCapScreen = true;
}
else{
mScreenCap->Stop();
mCapScreen = false;
}
}
void MainWindow::on_pushButton_5_clicked()
{
if(nullptr == mScreenshot){
mScreenshot = new ScreenShotWidget(nullptr);
connect(mScreenshot,&ScreenShotWidget::rect_selected,
this,[=](QRect rect){
qDebug()<<"screen rect selected"<<rect;
});
}
mScreenshot->showFullScreen();
}
void MainWindow::on_rect_selected(QRect rect)
{
qDebug()<<"screen rect selected"<<rect;
}

View File

@ -18,6 +18,9 @@
#include <QVector>
#include <QList>
#include <QMutex>
#include "media/screen_capture.h"
#include "utils/screenshotwidget.h"
using namespace QtCharts;
using namespace std;
@ -25,6 +28,7 @@ using namespace std;
namespace Ui {
class MainWindow;
}
#include <QMutex>
class MainWindow : public QssMainWindow ,CaptureAudioFfmpeg::CaptureAudioObserver
{
@ -34,11 +38,19 @@ public:
explicit MainWindow(QWidget *parent = nullptr);
~MainWindow();
void OnAudioData(const void *frameaddress, uint32_t framelen) ;
public slots:
void on_rect_selected(QRect rect);
private slots:
void on_pushButton_clicked();
void on_pushButton_2_clicked();
void on_pushButton_3_clicked();
void DetectDpi();
void on_pushButton_4_clicked();
void on_pushButton_5_clicked();
private:
QLineSeries* mSeries;
QVector<QPoint> mPoint1,mPoint2;
@ -56,8 +68,10 @@ private:
QChart *mChart;
QChartView *mChartView;
QList<void *>mBufferAudio;
QMutex mMux;
QMutex mMux; // lock for audio
ScreenShotWidget *mScreenshot;
ScreenCapture *mScreenCap;
bool mCapScreen;
};
#endif // MAINWINDOW_H

View File

@ -36,7 +36,7 @@
<item row="0" column="0">
<layout class="QVBoxLayout" name="verticalLayout" stretch="1,9">
<item>
<layout class="QHBoxLayout" name="horizontalLayout" stretch="1,2,0,0,0,0,0,13">
<layout class="QHBoxLayout" name="horizontalLayout" stretch="0,0,0,0,0,0,0,1,0,2">
<property name="leftMargin">
<number>2</number>
</property>
@ -70,7 +70,7 @@
</size>
</property>
<property name="text">
<string>???????</string>
<string>camera cap</string>
</property>
</widget>
</item>
@ -99,7 +99,7 @@
</size>
</property>
<property name="text">
<string>??????</string>
<string>audio record</string>
</property>
</widget>
</item>
@ -116,7 +116,7 @@
<item>
<widget class="QLabel" name="label">
<property name="text">
<string>rtmp???????</string>
<string>rtmp addr</string>
</property>
</widget>
</item>
@ -142,7 +142,33 @@
</size>
</property>
<property name="text">
<string>????</string>
<string>推流</string>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="pushButton_4">
<property name="minimumSize">
<size>
<width>60</width>
<height>50</height>
</size>
</property>
<property name="text">
<string>录屏</string>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="pushButton_5">
<property name="minimumSize">
<size>
<width>60</width>
<height>50</height>
</size>
</property>
<property name="text">
<string>自定义录屏</string>
</property>
</widget>
</item>

View File

@ -15,6 +15,7 @@ extern "C"
#include <libavfilter/buffersink.h>
#include <libavfilter/buffersrc.h>
#include <libavutil/opt.h>
#include <libavutil/dict.h>
}
#include <iostream>

10
media/audioplayerff.cpp Normal file
View File

@ -0,0 +1,10 @@
#include "audioplayerff.h"
AudioPlayerFF::AudioPlayerFF()
{
// if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
// printf("Could not initialize SDL - %s\n", SDL_GetError());
// return -1;
// }
}

17
media/audioplayerff.h Normal file
View File

@ -0,0 +1,17 @@
#ifndef AUDIOPLAYERFF_H
#define AUDIOPLAYERFF_H
//#include<SDL/SDL.h>
//#include<SDL\SDL_mixer.h>
#include<iostream>
class AudioPlayerFF
{
public:
AudioPlayerFF();
};
#endif // AUDIOPLAYERFF_H

View File

@ -4,6 +4,9 @@
#include <stdio.h>
#include <QDebug>
#include <QString>
#include <iostream>
#if _MSC_VER >= 1600
#pragma execution_character_set("utf-8")
@ -55,8 +58,7 @@ HRESULT SavePixelsToFile32bppPBGRA(UINT width, UINT height, UINT stride,
return hr;
}
HRESULT Direct3D9TakeScreenshots(UINT adapter, UINT count)
{
HRESULT Direct3D9TakeScreenshots(UINT adapter, UINT count) {
HRESULT hr = S_OK;
IDirect3D9 *d3d = nullptr;
IDirect3DDevice9 *device = nullptr;
@ -134,6 +136,7 @@ cleanup:
}
ScreenCapture::ScreenCapture()
:mObserver(nullptr)
{
m_d3d9_dev = ::Direct3DCreate9(D3D_SDK_VERSION);
@ -165,14 +168,381 @@ void ScreenCapture::EnumScreen()
D3DADAPTER_IDENTIFIER9 adapterID; // Used to store device info
char strBuffer[20480];
DWORD dwDisplayCount = m_d3d9_dev->GetAdapterCount();
for(DWORD i = 0; i < dwDisplayCount; i++)
{
if( m_d3d9_dev->GetAdapterIdentifier( i/*D3DADAPTER_DEFAULT*/, 0,&adapterID ) != D3D_OK )
{
for(DWORD i = 0; i < dwDisplayCount; i++) {
if( m_d3d9_dev->GetAdapterIdentifier( i/*D3DADAPTER_DEFAULT*/, 0,&adapterID ) != D3D_OK) {
return;
}
qDebug()<<adapterID.DeviceName;
}
}
void ScreenCapture::SetObserver(CaptureVideoObserver *ob)
{
this->mObserver = ob;
}
int ScreenCapture::InitCap() {
avdevice_register_all();
avcodec_register_all();
const char* deviceName = "desktop";
const char* inputformat = "gdigrab";
int FPS = 23; //15
m_fmt_ctx = avformat_alloc_context();
m_input_fmt = av_find_input_format(inputformat);
AVDictionary* deoptions = NULL;
AVDictionary* dic = NULL;
av_dict_set_int(&deoptions, "framerate", FPS, AV_DICT_MATCH_CASE);
av_dict_set_int(&deoptions, "rtbufsize", 3041280 * 100 * 5, 0);
//without this the picture can break up when the input source is a live stream; unit: bytes
//av_dict_set(&deoptions, "buffer_size", "10485760", 0);
//av_dict_set(&deoptions, "reuse", "1", 0);
int ret = avformat_open_input(&m_fmt_ctx, deviceName, m_input_fmt, &deoptions);
if (ret != 0) {
return ret;
}
av_dict_free(&deoptions);
ret = avformat_find_stream_info(m_fmt_ctx, NULL);
if (ret < 0) {
return ret;
}
av_dump_format(m_fmt_ctx, 0, deviceName, 0);
video_stream = av_find_best_stream(m_fmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
if (video_stream < 0) {
return -1;
}
_codec_ctx = m_fmt_ctx->streams[video_stream]->codec;
_codec = avcodec_find_decoder(_codec_ctx->codec_id);
if (_codec == NULL) {
return -1;
}
ret = avcodec_open2(_codec_ctx, _codec, NULL);
if (ret != 0) {
return -1;
}
width = m_fmt_ctx->streams[video_stream]->codec->width;
height = m_fmt_ctx->streams[video_stream]->codec->height;
int fps = m_fmt_ctx->streams[video_stream]->codec->framerate.num > 0 ? m_fmt_ctx->streams[video_stream]->codec->framerate.num : 25;
AVPixelFormat videoType = m_fmt_ctx->streams[video_stream]->codec->pix_fmt;
std::cout << "avstream timebase : " << m_fmt_ctx->streams[video_stream]->time_base.num << " / " << m_fmt_ctx->streams[video_stream]->time_base.den << endl;
AVDictionary* enoptions = 0;
//av_dict_set(&enoptions, "preset", "superfast", 0);
//av_dict_set(&enoptions, "tune", "zerolatency", 0);
av_dict_set(&enoptions, "preset", "ultrafast", 0);
av_dict_set(&enoptions, "tune", "zerolatency", 0);
//TODO
//av_dict_set(&enoptions, "pkt_size", "1316", 0); //Maximum UDP packet size
av_dict_set(&dic, "fifo_size", "18800", 0);
av_dict_set(&enoptions, "buffer_size", "0", 1);
av_dict_set(&dic, "bitrate", "11000000", 0);
av_dict_set(&dic, "buffer_size", "1000000", 0);//1316
//av_dict_set(&enoptions, "reuse", "1", 0);
AVCodec* codec = avcodec_find_encoder(AV_CODEC_ID_H264);
if (!codec)
{
std::cout << "avcodec_find_encoder failed!" << endl;
return -1;
}
vc = avcodec_alloc_context3(codec);
if (!vc)
{
std::cout << "avcodec_alloc_context3 failed!" << endl;
return -1;
}
std::cout << "avcodec_alloc_context3 success!" << endl;
vc->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
vc->codec_id = AV_CODEC_ID_H264;
vc->codec_type = AVMEDIA_TYPE_VIDEO;
vc->pix_fmt = AV_PIX_FMT_YUV420P;
vc->width = width;
vc->height = height;
vc->time_base.num = 1;
vc->time_base.den = FPS;
vc->framerate = { FPS,1 };
vc->bit_rate = 10241000;
vc->gop_size = 120;
vc->qmin = 10;
vc->qmax = 51;
vc->max_b_frames = 0;
vc->profile = FF_PROFILE_H264_MAIN;
ret = avcodec_open2(vc, codec, &enoptions);
if (ret != 0)
{
return ret;
}
std::cout << "avcodec_open2 success!" << endl;
av_dict_free(&enoptions);
vsc = nullptr;
vsc = sws_getCachedContext(vsc,
width, height, (AVPixelFormat)videoType, //source width, height, pixel format
width, height, AV_PIX_FMT_YUV420P,//destination width, height, pixel format
SWS_BICUBIC, // scaling algorithm
0, 0, 0
);
if (!vsc)
{
std::cout << "sws_getCachedContext failed!";
return -1;
}
yuv = av_frame_alloc();
yuv->format = AV_PIX_FMT_YUV420P;
yuv->width = width;
yuv->height = height;
yuv->pts = 0;
ret = av_frame_get_buffer(yuv, 32);
if (ret != 0)
{
return ret;
}
ic = NULL;
//ret = avformat_alloc_output_context2(&ic, 0, "flv", rtmpurl);
ret = avformat_alloc_output_context2(&ic, NULL, "mpegts", "output.mp4"); // MPEG-TS muxer; note the avio_open below actually writes output1.mp4
if (ret < 0)
{
return ret;
}
st = avformat_new_stream(ic, NULL);
if (!st)
{
return -1;
}
st->codecpar->codec_tag = 0;
avcodec_parameters_from_context(st->codecpar, vc);
ret = avio_open(&ic->pb, "output1.mp4", AVIO_FLAG_WRITE);
if (ret != 0)
{
return ret;
}
ret = avformat_write_header(ic, NULL);
if (ret != 0)
{
return ret;
}
packet = av_packet_alloc();
Encodepacket = av_packet_alloc();
rgb = av_frame_alloc();
m_cut_frame = av_frame_alloc();
AVBitStreamFilterContext* h264bsfc = av_bitstream_filter_init("h264_mp4toannexb");
startpts = m_fmt_ctx->start_time;
lastpts = 0;
duration = av_rescale_q(1, { 1,FPS }, { 1,AV_TIME_BASE });
return 0;
}
static AVFrame *crop_frame(const AVFrame *in, int left, int top, int right, int bottom)
{
AVFilterContext *buffersink_ctx;
AVFilterContext *buffersrc_ctx;
AVFilterGraph *filter_graph = avfilter_graph_alloc();
AVFrame *f = av_frame_alloc();
AVFilterInOut *inputs = NULL, *outputs = NULL;
char args[512];
int ret;
snprintf(args, sizeof(args),
"buffer=video_size=%dx%d:pix_fmt=%d:time_base=1/1:pixel_aspect=0/1[in];"
"[in]crop=w=%d:h=%d:x=%d:y=%d[out];"
"[out]buffersink",
in->width, in->height, in->format,
right - left, bottom - top, left, top);
ret = avfilter_graph_parse2(filter_graph, args, &inputs, &outputs);
if (ret < 0) return NULL;
assert(inputs == NULL && outputs == NULL);
ret = avfilter_graph_config(filter_graph, NULL);
if (ret < 0) return NULL;
buffersrc_ctx = avfilter_graph_get_filter(filter_graph, "Parsed_buffer_0");
buffersink_ctx = avfilter_graph_get_filter(filter_graph, "Parsed_buffersink_2");
assert(buffersrc_ctx != NULL);
assert(buffersink_ctx != NULL);
av_frame_ref(f, in);
ret = av_buffersrc_add_frame(buffersrc_ctx, f);
if (ret < 0) return NULL;
ret = av_buffersink_get_frame(buffersink_ctx, f);
if (ret < 0) return NULL;
avfilter_graph_free(&filter_graph);
return f;
}
void crop_rgb32(unsigned char* src, int src_width, int src_height,
unsigned char* dst, int dst_width, int dst_height,
int start_x, int start_y) {
int src_stride = src_width * 4;
int dst_stride = dst_width * 4;
unsigned char* src_row = src + start_y * src_stride;
unsigned char* dst_row = dst;
for (int y = start_y; y < start_y + dst_height && y < src_height; ++y) {
unsigned char* src_pixel = src_row + (start_x * 4);
unsigned char* dst_pixel = dst_row;
for (int x = start_x; x < start_x + dst_width && x < src_width; ++x) {
// copy one RGB32 pixel (4 bytes)
memcpy(dst_pixel, src_pixel, 4);
dst_pixel += 4;
src_pixel += 4;
}
src_row += src_stride;
dst_row += dst_stride;
}
}
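Note: crop_rgb32 assumes a tightly packed RGB32 source whose stride is exactly src_width*4. Decoded frames can carry row padding, so when calling it from Process below it is slightly safer to derive the source width from linesize[0] — a sketch under that assumption (std::vector avoids the manual new/delete):
// #include <vector>
std::vector<unsigned char> out(300 * 300 * 4);
int src_stride_px = rgb->linesize[0] / 4; // effective row width in pixels, padding included
crop_rgb32(rgb->data[0], src_stride_px, height, out.data(), 300, 300, 300, 300);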
int ScreenCapture::Process(void *p)
{
int got_picture = 0;
int ret = av_read_frame(m_fmt_ctx, packet);
if (ret < 0) {
return -1;
}
if (packet->stream_index == video_stream) {
ret = avcodec_decode_video2(_codec_ctx, rgb, &got_picture, packet);
if (ret < 0) {
printf("Decode Error.\n");
av_packet_unref(packet);
return ret;
}
if (got_picture) {
// crop a 300x300 window out of the captured RGB32 frame;
// crop_rgb32 expects a packed buffer whose stride is width*4
unsigned char *dat = new unsigned char[300*300*4];
crop_rgb32(rgb->data[0],width,height,dat,300,300,300,300);
av_frame_unref(rgb);
av_frame_unref(m_cut_frame);
if(this->mObserver != nullptr){
this->mObserver->OnScreenData(dat,
300*4*300);
}
delete [] dat;
// qDebug()<<rgb->linesize[0]<<height;
// int h = sws_scale(vsc, rgb->data, rgb->linesize, 0, height, //source data
// yuv->data, yuv->linesize);
// int guesspts = frameIndex * duration;
// yuv->pts = guesspts;
// frameIndex++;
// ret = avcodec_encode_video2(vc, Encodepacket, yuv, &got_picture);
// if (ret < 0) {
// printf("Failed to encode!\n");
// }
// if (got_picture == 1) {
// Encodepacket->pts = av_rescale_q(EncodeIndex, vc->time_base, st->time_base);
// Encodepacket->dts = Encodepacket->pts;
// qDebug() << "frameindex : " << EncodeIndex << " pts : " << Encodepacket->pts << " dts: " << Encodepacket->dts << " encodeSize:" << Encodepacket->size << " curtime - lasttime " << Encodepacket->pts - lastpts << endl;
// lastpts = Encodepacket->pts;
// ret = av_interleaved_write_frame(ic, Encodepacket);
// EncodeIndex++;
// av_packet_unref(Encodepacket);
// }
}
}
av_packet_unref(packet);
return 0;
}
int ScreenCapture::Height()
{
return this->height;
}
int ScreenCapture::Width()
{
return this->width;
}
int ScreenCapture::InitFilter(const char *filters_descr)
{
char args[512];
int ret;
AVFilter *buffersrc = (AVFilter *)avfilter_get_by_name("buffer");
AVFilter *buffersink = (AVFilter *)avfilter_get_by_name("buffersink");
AVFilterInOut *outputs = avfilter_inout_alloc();
AVFilterInOut *inputs = avfilter_inout_alloc();
enum AVPixelFormat pix_fmts[] = { AV_PIX_FMT_BGRA /* was the magic number 28 */, AV_PIX_FMT_NONE };
AVBufferSinkParams *buffersink_params;
m_filter_graph = avfilter_graph_alloc();
/* buffer video source: the decoded frames from the decoder will be inserted here. */
// snprintf(args, sizeof(args),
// "video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d",
// _codec_ctx->width, _codec_ctx->height, _codec_ctx->pix_fmt,
// _codec_ctx->time_base.num, _codec_ctx->time_base.den,
// _codec_ctx->sample_aspect_ratio.num, _codec_ctx->sample_aspect_ratio.den);
snprintf(args, sizeof(args),
"video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d",
_codec_ctx->width, _codec_ctx->height, _codec_ctx->pix_fmt,
_codec_ctx->time_base.num, _codec_ctx->time_base.den,
_codec_ctx->sample_aspect_ratio.num, _codec_ctx->sample_aspect_ratio.den);
qDebug()<<args;
ret = avfilter_graph_create_filter(&m_buffersrc_ctx, buffersrc, "in",
args, NULL, m_filter_graph);
if (ret < 0) {
printf("Cannot create buffer source\n");
return ret;
}
/* buffer video sink: to terminate the filter chain. */
buffersink_params = av_buffersink_params_alloc();
buffersink_params->pixel_fmts = pix_fmts;
ret = avfilter_graph_create_filter(&m_buffersink_ctx, buffersink, "out",
NULL, buffersink_params, m_filter_graph);
av_free(buffersink_params);
if (ret < 0) {
qDebug()<<"Cannot create buffer sink\n";
return ret;
}
/* Endpoints for the filter graph. */
outputs->name = av_strdup("in");
outputs->filter_ctx = m_buffersrc_ctx;
outputs->pad_idx = 0;
outputs->next = NULL;
inputs->name = av_strdup("out");
inputs->filter_ctx = m_buffersink_ctx;
inputs->pad_idx = 0;
inputs->next = NULL;
if ((ret = avfilter_graph_parse_ptr(m_filter_graph, filters_descr,
&inputs, &outputs, NULL)) < 0)
return ret;
if ((ret = avfilter_graph_config(m_filter_graph, NULL)) < 0)
return ret;
return 0;
}
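Note: InitFilter builds the crop graph ("crop=w=400:h=500:x=100:y=500" as configured from MainWindow), but Process above crops with crop_rgb32 instead and never feeds the graph. If the graph were used, the per-frame path would look roughly like this — a sketch under that assumption, reusing m_cut_frame as the output frame:
// sketch: push one decoded BGRA frame through the configured crop graph
int ret = av_buffersrc_add_frame_flags(m_buffersrc_ctx, rgb, AV_BUFFERSRC_FLAG_KEEP_REF);
if (ret >= 0)
ret = av_buffersink_get_frame(m_buffersink_ctx, m_cut_frame);
if (ret >= 0 && mObserver != nullptr) {
mObserver->OnScreenData(m_cut_frame->data[0],
m_cut_frame->linesize[0] * m_cut_frame->height);
av_frame_unref(m_cut_frame);
}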

View File

@ -5,19 +5,80 @@
#include <Wincodec.h> // we use WIC for saving images
#include <d3d9.h> // DirectX 9 header
#include <d3d9helper.h>
#include <stdint.h>
#include "streamcontrol.h"
#include <thread>
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavutil/avutil.h"
#include "libswscale/swscale.h"
#include "libavutil/opt.h"
#include "libavutil/imgutils.h"
#include <libavfilter/avfilter.h>
#include <libavfilter/buffersink.h>
#include <libavfilter/buffersrc.h>
#include <libavutil/opt.h>
#include <libavutil/dict.h>
#include "libavdevice/avdevice.h"
}
HRESULT Direct3D9TakeScreenshots(UINT adapter, UINT count);
class ScreenCapture
class ScreenCapture :public StreamControl
{
public:
class CaptureVideoObserver {
public:
virtual void OnScreenData(const void *frameaddress, uint32_t framelen) {};
};
ScreenCapture();
void EnumScreen();
void PrintDisplayModeInfo(IDirect3D9 *pD3D, D3DFORMAT fmt);
void SetObserver(CaptureVideoObserver * );
int InitCap();
int Process(void*) override;
int Height();
int Width();
int InitFilter(const char *filters_descr);
int InitFilter2(const char *filters_descr);
private:
IDirect3D9* m_d3d9_dev = nullptr;
CaptureVideoObserver* mObserver;
std::thread *mThread;
AVFormatContext* m_fmt_ctx = NULL;
AVPacket* packet = NULL;
AVInputFormat* m_input_fmt = NULL;
int video_stream = -1;
AVCodecContext * _codec_ctx= NULL;
AVFrame* rgb= NULL;
AVFrame* m_cut_frame = NULL;
SwsContext *vsc = NULL;
int height = 0;
int width = 0;
AVFrame* yuv = NULL;
int frameIndex = 0;
AVCodecContext* vc = NULL;
long long duration = 0;
AVPacket* Encodepacket = NULL;
int EncodeIndex = 0;
AVStream* st = NULL;
long long startpts = 0;
long long lastpts = 0;
AVFormatContext * ic = NULL;
AVCodec* _codec = NULL;
AVFilterGraph *m_filter_graph;
AVFilterContext *m_buffersrc_ctx;
AVFilterContext *m_buffersink_ctx;
};
#endif // SCREEN_CAPTURE_H
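For reference, the intended call order mirrors MainWindow::on_pushButton_4_clicked — a minimal usage sketch (playWidget stands in for a CPlayWidget set to TYPE_RGB32; MainWindow passes this to Start, but Process ignores the pointer):
ScreenCapture cap;
cap.InitCap();                                   // open gdigrab, set up decoder/encoder/muxer
cap.InitFilter("crop=w=400:h=500:x=100:y=500");  // optional: build the crop graph
cap.SetObserver(playWidget);                     // playWidget implements CaptureVideoObserver
cap.Start(nullptr);                              // StreamControl spawns the worker loop
// ... later
cap.Stop();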

66
media/streamcontrol.cpp Normal file
View File

@ -0,0 +1,66 @@
#include "streamcontrol.h"
StreamControl::StreamControl()
{
this->mStatus = STOP;
this->mThread = nullptr;
}
int StreamControl::Process(void*data) {
return 0;
}
int StreamControl::Start(void *dat) {
switch (this->mStatus){
case RUNNING:
break;
case STOP:
if(nullptr == mThread){
// mark RUNNING before the worker starts, otherwise it could observe STOP
// and exit immediately; capture dat by value, the lambda outlives Start()
this->mStatus = RUNNING;
this->mThread = new std::thread(
[this, dat](){
while(true){
switch (this->mStatus) {
case STOP:
return;
case PAUSHED:
break;
case RUNNING:
this->Process(dat);
break;
default:
break;
}
}
});
}
break;
case PAUSHED:
break;
}
this->mStatus = RUNNING;
return 0;
}
int StreamControl::Stop()
{
this->mStatus = STOP;
// let the worker observe STOP, then release the thread so Start() can run again
if (nullptr != mThread && mThread->joinable()) mThread->join();
delete mThread;
mThread = nullptr;
return 0;
}
int StreamControl::Pause()
{
this->mStatus = PAUSHED;
return 0;
}
StreamControl::Status StreamControl::CurrentStatus()
{
return mStatus;
}

28
media/streamcontrol.h Normal file
View File

@ -0,0 +1,28 @@
#ifndef STREAMCONTROL_H
#define STREAMCONTROL_H
#include <thread>
class StreamControl
{
public:
typedef enum {
STOP,
RUNNING,
PAUSHED
}Status;
StreamControl();
virtual int Process(void*);
int Start(void *);
int Stop();
int Pause();
StreamControl::Status CurrentStatus();
private:
StreamControl::Status mStatus;
std::thread *mThread;
};
#endif // STREAMCONTROL_H

View File

@ -1,7 +1,7 @@
/********************************************************************************
** Form generated from reading UI file 'mainwindow.ui'
**
** Created by: Qt User Interface Compiler version 5.14.0
** Created by: Qt User Interface Compiler version 5.15.2
**
** WARNING! All changes made in this file will be lost when recompiling UI file!
********************************************************************************/
@ -38,6 +38,8 @@ public:
QLabel *label;
QLineEdit *lineEdit;
QPushButton *pushButton_2;
QPushButton *pushButton_4;
QPushButton *pushButton_5;
QSpacerItem *horizontalSpacer_2;
QSpacerItem *verticalSpacer;
@ -114,13 +116,24 @@ public:
horizontalLayout->addWidget(pushButton_2);
pushButton_4 = new QPushButton(centralWidget);
pushButton_4->setObjectName(QString::fromUtf8("pushButton_4"));
pushButton_4->setMinimumSize(QSize(60, 50));
horizontalLayout->addWidget(pushButton_4);
pushButton_5 = new QPushButton(centralWidget);
pushButton_5->setObjectName(QString::fromUtf8("pushButton_5"));
pushButton_5->setMinimumSize(QSize(60, 50));
horizontalLayout->addWidget(pushButton_5);
horizontalSpacer_2 = new QSpacerItem(40, 20, QSizePolicy::Expanding, QSizePolicy::Minimum);
horizontalLayout->addItem(horizontalSpacer_2);
horizontalLayout->setStretch(0, 1);
horizontalLayout->setStretch(1, 2);
horizontalLayout->setStretch(7, 13);
horizontalLayout->setStretch(7, 1);
horizontalLayout->setStretch(9, 2);
verticalLayout->addLayout(horizontalLayout);
@ -143,11 +156,13 @@ public:
void retranslateUi(QMainWindow *MainWindow)
{
MainWindow->setWindowTitle(QCoreApplication::translate("MainWindow", "MainWindow", nullptr));
pushButton->setText(QCoreApplication::translate("MainWindow", "\346\211\223\345\274\200\346\221\204\345\203\217\345\244\264", nullptr));
pushButton_3->setText(QCoreApplication::translate("MainWindow", "\346\211\223\345\274\200\351\272\246\345\205\213\351\243\216", nullptr));
label->setText(QCoreApplication::translate("MainWindow", "rtmp\346\216\250\346\265\201\345\234\260\345\235\200", nullptr));
pushButton->setText(QCoreApplication::translate("MainWindow", "camera cap", nullptr));
pushButton_3->setText(QCoreApplication::translate("MainWindow", "audio record", nullptr));
label->setText(QCoreApplication::translate("MainWindow", "rtmp addr", nullptr));
lineEdit->setText(QCoreApplication::translate("MainWindow", "rtmp://127.0.0.1:1935/live/1", nullptr));
pushButton_2->setText(QCoreApplication::translate("MainWindow", "\346\216\250\346\265\201", nullptr));
pushButton_4->setText(QCoreApplication::translate("MainWindow", "\345\275\225\345\261\217", nullptr));
pushButton_5->setText(QCoreApplication::translate("MainWindow", "\350\207\252\345\256\232\344\271\211\345\275\225\345\261\217", nullptr));
} // retranslateUi
};

View File

@ -1,7 +1,7 @@
/********************************************************************************
** Form generated from reading UI file 'process.ui'
**
** Created by: Qt User Interface Compiler version 5.14.0
** Created by: Qt User Interface Compiler version 5.15.2
**
** WARNING! All changes made in this file will be lost when recompiling UI file!
********************************************************************************/

View File

@ -1,7 +1,7 @@
/********************************************************************************
** Form generated from reading UI file 'qsstoast.ui'
**
** Created by: Qt User Interface Compiler version 5.14.0
** Created by: Qt User Interface Compiler version 5.15.2
**
** WARNING! All changes made in this file will be lost when recompiling UI file!
********************************************************************************/

View File

@ -1,7 +1,7 @@
/********************************************************************************
** Form generated from reading UI file 'toast.ui'
**
** Created by: Qt User Interface Compiler version 5.14.0
** Created by: Qt User Interface Compiler version 5.15.2
**
** WARNING! All changes made in this file will be lost when recompiling UI file!
********************************************************************************/

299
utils/screenshotwidget.cpp Normal file
View File

@ -0,0 +1,299 @@
#include "screenshotwidget.h"
ScreenShotWidget::ScreenShotWidget(QWidget *parent) :
QWidget(parent)
{
//initialize the screenshot-active flag
m_screenshot_active=false;
//frameless window, kept on top of everything
setWindowFlags(Qt::FramelessWindowHint | Qt::WindowStaysOnTopHint);
setWindowModality(Qt::ApplicationModal);
//transparent window background
setAttribute(Qt::WA_TranslucentBackground);
//track mouse move events even without a pressed button
setMouseTracking(true);
//create the save/cancel buttons and the width/height label, hidden for now
buttonSave=new QPushButton(this);
buttonSave->setParent(this);
buttonSave->setFixedSize(35,30);
buttonSave->setText("ok");
buttonSave->setStyleSheet("QPushButton {font-size:16pt; color:green; font-weight:800;}");
buttonSave->setVisible(false);
buttonCancel=new QPushButton(this);
buttonCancel->setParent(this);
buttonCancel->setFixedSize(35,30);
buttonCancel->setText("x");
buttonCancel->setStyleSheet("QPushButton {font-size:20pt; color:red;font-weight:100;}");
buttonCancel->setVisible(false);
lblHeightWidth=new QLabel(this);
lblHeightWidth->setParent(this);
lblHeightWidth->setFixedSize(100,10);
lblHeightWidth->setStyleSheet("QLabel {color:white;}");
lblHeightWidth->setVisible(false);
//magnifier label
lblMagnifyGlass=new QLabel(this);
lblMagnifyGlass->setParent(this);
lblMagnifyGlass->setFixedSize(100,100);
lblMagnifyGlass->setVisible(true);
lblMagnifyGlass->setText("wo zai zhe");
//use a crosshair cursor
setCursor(Qt::CursorShape::CrossCursor);
//connect the button signals to their slots
connect(this->buttonSave,SIGNAL(clicked()),this,SLOT(on_buttonSave_Clicked()));
connect(this->buttonCancel,SIGNAL(clicked()),this,SLOT(on_buttonCancel_Clicked()));
}
void ScreenShotWidget::mousePressEvent(QMouseEvent *event)
{
if(event->button()==Qt::LeftButton)
{
QRect rect(m_startPos,m_endPos);
if((rect.contains(event->pos())||m_cursorMode!=0)&&m_startPos!=m_endPos){
m_oldPos=event->pos();
}
else if(m_startPos==m_endPos){
//record the press position as the selection start
m_startPos = event->pos();
//a selection is now in progress
m_screenshot_active=true;
}
}
else if(event->button()==Qt::RightButton)//right button: restart the selection
{
if(m_endPos.isNull())
on_buttonCancel_Clicked();
m_startPos=QPoint(0,0);
m_endPos=QPoint(0,0);
buttonSave->setVisible(false);
buttonCancel->setVisible(false);
lblHeightWidth->setVisible(false);
setCursor(Qt::CursorShape::CrossCursor);
update();
}
}
void ScreenShotWidget::mouseMoveEvent(QMouseEvent *event)
{
QRect rect(m_startPos,m_endPos);
QPoint point=getPointInWhichSqure(event->pos());
//update the cursor shape when hovering over a finished selection without pressing a button
if(!m_screenshot_active && m_startPos!=m_endPos && !event->buttons()){
if(point==m_topLeft) {setCursor(Qt::CursorShape::SizeFDiagCursor);m_cursorMode=1;}
else if(point==m_topRight) {setCursor(Qt::CursorShape::SizeBDiagCursor);m_cursorMode=2;}
else if(point==m_bottomLeft) {setCursor(Qt::CursorShape::SizeBDiagCursor);m_cursorMode=3;}
else if(point==m_bottomRight) {setCursor(Qt::CursorShape::SizeFDiagCursor);m_cursorMode=4;}
else if(point==m_topCenter) {setCursor(Qt::CursorShape::SizeVerCursor);m_cursorMode=5;}
else if(point==m_bottomCenter) {setCursor(Qt::CursorShape::SizeVerCursor);m_cursorMode=6;}
else if(point==m_leftCenter) {setCursor(Qt::CursorShape::SizeHorCursor);m_cursorMode=7;}
else if(point==m_rightCenter) {setCursor(Qt::CursorShape::SizeHorCursor);m_cursorMode=8;}
else if(rect.contains(event->pos())) {setCursor(Qt::CursorShape::ClosedHandCursor);m_cursorMode=9;}
else {setCursor(Qt::CursorShape::ArrowCursor);m_cursorMode=0;}
}
//left mouse button held down
if(event->buttons() & Qt::LeftButton)
{
//still selecting: update the end position
if(m_screenshot_active){
m_endPos = event->pos();
}
//drag the finished selection
if(rect.contains(event->pos()) && !m_screenshot_active &&cursor().shape()==Qt::ClosedHandCursor){
m_distancePos=event->pos()-m_oldPos;
m_oldPos=event->pos();
m_startPos=m_startPos+m_distancePos;
m_endPos=m_endPos+m_distancePos;
//clamp the selection to the widget bounds
if(m_startPos.x()<=0){m_startPos.rx()=0;m_endPos.rx()=m_endPos.x()-m_distancePos.x();}
if(m_startPos.y()<=0){m_startPos.ry()=0;m_endPos.ry()=m_endPos.y()-m_distancePos.y();}
if(m_endPos.x()>=this->width()){m_endPos.rx()=this->width();m_startPos.rx()=m_startPos.x()-m_distancePos.x();}
if(m_endPos.y()>=this->height()){m_endPos.ry()=this->height();m_startPos.ry()=m_startPos.y()-m_distancePos.y();}
}
else if(cursor().shape()==Qt::SizeFDiagCursor)
{
if(m_cursorMode==1){
m_distancePos=event->pos()-m_oldPos;
m_oldPos=event->pos();
m_startPos=m_startPos+m_distancePos;
}
if(m_cursorMode==4){
m_distancePos=event->pos()-m_oldPos;
m_oldPos=event->pos();
m_endPos=m_endPos+m_distancePos;
}
}
else if(cursor().shape()==Qt::SizeBDiagCursor)
{
if(m_cursorMode==2){
m_distancePos=event->pos()-m_oldPos;
m_oldPos=event->pos();
m_startPos.ry()=m_startPos.y()+m_distancePos.y();
m_endPos.rx()=m_endPos.x()+m_distancePos.x();
}
if(m_cursorMode==3){
m_distancePos=event->pos()-m_oldPos;
m_oldPos=event->pos();
m_startPos.rx()=m_startPos.x()+m_distancePos.x();
m_endPos.ry()=m_endPos.y()+m_distancePos.y();
}
}
else if(cursor().shape()==Qt::SizeHorCursor)
{
if(m_cursorMode==7){
m_distancePos=event->pos()-m_oldPos;
m_oldPos=event->pos();
m_startPos.rx()=m_startPos.x()+m_distancePos.x();
}
if(m_cursorMode==8){
m_distancePos=event->pos()-m_oldPos;
m_oldPos=event->pos();
m_endPos.rx()=m_endPos.x()+m_distancePos.x();
}
}
else if(cursor().shape()==Qt::SizeVerCursor)
{
if(m_cursorMode==5){
m_distancePos=event->pos()-m_oldPos;
m_oldPos=event->pos();
m_startPos.ry()=m_startPos.y()+m_distancePos.y();
}
if(m_cursorMode==6){
m_distancePos=event->pos()-m_oldPos;
m_oldPos=event->pos();
m_endPos.ry()=m_endPos.y()+m_distancePos.y();
}
}
update();
}
}
void ScreenShotWidget::mouseReleaseEvent(QMouseEvent *event)
{
Q_UNUSED(event);
if (m_screenshot_active) {
m_screenshot_active=false;
if (m_endPos != m_startPos) {
//show the save/cancel buttons
buttonSave->setVisible(true);
buttonCancel->setVisible(true);
}
}
}
void ScreenShotWidget::paintEvent(QPaintEvent *event)
{
Q_UNUSED(event);
QPainter painter(this);
painter.fillRect(rect(),QColor(128,128,128,128));
QColor color(11,218,81);
QPen pen(color,1,Qt::SolidLine);
painter.setPen(pen);
//keep the rect from inverting
if(m_startPos.x()>=m_endPos.x())m_endPos.rx()=m_startPos.x();
if(m_startPos.y()>=m_endPos.y())m_endPos.ry()=m_startPos.y();
//draw the rect and punch a transparent hole inside it
QRect rect(m_startPos, m_endPos);
painter.setCompositionMode(QPainter::CompositionMode_DestinationIn);
painter.fillRect(rect,QColor(0,0,0,0));
painter.setCompositionMode(QPainter::CompositionMode_SourceOver);
painter.drawRect(rect.normalized());
// corners and edge midpoints of the selection rect
m_topLeft=QPoint(rect.left(),rect.top());
m_topRight=QPoint(rect.right()+1,rect.top());
m_bottomLeft=QPoint(rect.left(),rect.bottom()+1);
m_bottomRight=QPoint(rect.right()+1,rect.bottom()+1);
m_topCenter=QPoint(rect.center().x(), rect.top());
m_bottomCenter=QPoint(rect.center().x(), rect.bottom()+1);
m_leftCenter=QPoint(rect.left(), rect.center().y());
m_rightCenter=QPoint(rect.right()+1, rect.center().y());
// size of the small squares used as grab handles
int pointSize = 5;
QRect pointRect(0, 0, pointSize, pointSize);
// draw a filled handle at each anchor point
pointRect.moveCenter(m_topLeft);
painter.fillRect(pointRect, color);
pointRect.moveCenter(m_topRight);
painter.fillRect(pointRect, color);
pointRect.moveCenter(m_bottomLeft);
painter.fillRect(pointRect, color);
pointRect.moveCenter(m_bottomRight);
painter.fillRect(pointRect, color);
pointRect.moveCenter(m_topCenter);
painter.fillRect(pointRect, color);
pointRect.moveCenter(m_bottomCenter);
painter.fillRect(pointRect, color);
pointRect.moveCenter(m_leftCenter);
painter.fillRect(pointRect, color);
pointRect.moveCenter(m_rightCenter);
painter.fillRect(pointRect, color);
//draw the magnifier
if(cursor().shape()!=Qt::ClosedHandCursor)
{
QRect cursorRect(QCursor::pos().x()-25,QCursor::pos().y()-25,51,51);
QScreen *screen=QGuiApplication::primaryScreen();
QPixmap cursorPixmap=screen->grabWindow(QApplication::desktop()->winId(),cursorRect.x(),cursorRect.y(),cursorRect.width(),cursorRect.height());
cursorPixmap=cursorPixmap.scaled(cursorPixmap.width()*2,cursorPixmap.height()*2,Qt::KeepAspectRatio);
QPoint startPoint(QCursor::pos().x()+26,QCursor::pos().y()+26);
painter.drawPixmap(startPoint.x(),startPoint.y(),cursorPixmap.width(),cursorPixmap.height(),cursorPixmap);
painter.drawRect(startPoint.x(),startPoint.y(),cursorPixmap.width(),cursorPixmap.height());
QPen pen2(color,2,Qt::SolidLine);
painter.setPen(pen2);
painter.drawLine(startPoint.x(),startPoint.y()+cursorPixmap.height()/2+1,startPoint.x()+cursorPixmap.width(),startPoint.y()+cursorPixmap.height()/2+1);
painter.drawLine(startPoint.x()+cursorPixmap.width()/2+1,startPoint.y(),startPoint.x()+cursorPixmap.width()/2+1,startPoint.y()+cursorPixmap.height());
}
update();
//position the buttons and the size label
if(this->height()<m_endPos.y()+30){
buttonSave->move(rect.x()+rect.width()-buttonSave->width()-10,rect.y()+rect.height()-30);
buttonCancel->move(rect.x()+rect.width()-buttonSave->width()-buttonCancel->width()-8,rect.y()+rect.height()-30);
}
else{
buttonSave->move(rect.x()+rect.width()-buttonSave->width()-10,rect.y()+rect.height());
buttonCancel->move(rect.x()+rect.width()-buttonSave->width()-buttonCancel->width()-8,rect.y()+rect.height());
}
lblHeightWidth->setVisible(true);
lblHeightWidth->move(rect.x(),rect.y()-12);
lblHeightWidth->setText(QString::number(rect.width())+"x"+QString::number(rect.height()));
}
void ScreenShotWidget::on_buttonSave_Clicked()
{
//hide the overlay first, then build the selected rect
close();
QThread::msleep(30);
QRect rect = QRect(m_startPos, m_endPos);
emit(rect_selected(rect));
//build the save path
QDateTime currentDate=QDateTime::currentDateTime();
QString mCURDIr = QCoreApplication::applicationDirPath();
QString filePath=mCURDIr + "/screenshot/"+currentDate.toString("MMdd_hhmmss")+".bmp";
QDir dir;
if(!dir.exists(mCURDIr + "/screenshot")) dir.mkdir(mCURDIr + "/screenshot");
//grab the selected region (without the overlay) into a pixmap
QScreen *screen=QGuiApplication::primaryScreen();
QPixmap pixmap=screen->grabWindow(QApplication::desktop()->winId(),rect.x(),rect.y(),rect.width(),rect.height());
if (!pixmap.save(filePath)) QMessageBox::warning(this, tr("提示"), tr("保存图片失败"));
}
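A caveat this commit does not address (my assumption, stated as such): on a high-DPI screen the widget works in logical pixels while a full-desktop grab may come back in physical pixels, so the saved crop can end up offset or scaled. A hedged guard would scale the selection by the screen's devicePixelRatio first:
// sketch: convert the logical selection to physical pixels before cropping
qreal dpr = screen->devicePixelRatio();
QRect physicalRect(rect.topLeft() * dpr, rect.size() * dpr);
QPixmap scaledGrab = screen->grabWindow(0).copy(physicalRect);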
void ScreenShotWidget::on_buttonCancel_Clicked()
{
//just close the transparent overlay
close();
}
QPoint ScreenShotWidget::getPointInWhichSqure(QPoint point)
{
int distance=8;
if(abs(point.x()-m_topLeft.x())+abs(point.y()-m_topLeft.y())<distance) return m_topLeft;
else if(abs(point.x()-m_topRight.x())+abs(point.y()-m_topRight.y())<distance) return m_topRight;
else if(abs(point.x()-m_bottomLeft.x())+abs(point.y()-m_bottomLeft.y())<distance) return m_bottomLeft;
else if(abs(point.x()-m_bottomRight.x())+abs(point.y()-m_bottomRight.y())<distance) return m_bottomRight;
else if(abs(point.x()-m_topCenter.x())+abs(point.y()-m_topCenter.y())<distance) return m_topCenter;
else if(abs(point.x()-m_bottomCenter.x())+abs(point.y()-m_bottomCenter.y())<distance) return m_bottomCenter;
else if(abs(point.x()-m_leftCenter.x())+abs(point.y()-m_leftCenter.y())<distance) return m_leftCenter;
else if(abs(point.x()-m_rightCenter.x())+abs(point.y()-m_rightCenter.y())<distance) return m_rightCenter;
else return QPoint(-1,-1);
}

66
utils/screenshotwidget.h Normal file
View File

@ -0,0 +1,66 @@
#ifndef SCREENSHOTWIDGET_H
#define SCREENSHOTWIDGET_H
#include <QApplication>
#include <QCoreApplication>
#include <QWidget>
#include <QMouseEvent>
#include <QPainter>
#include <QFileDialog>
#include <QMessageBox>
#include <QTimer>
#include <QScreen>
#include <QDesktopWidget>
#include <QPen>
#include <QPushButton>
#include <QDateTime>
#include <QTime>
#include <QPixmap>
#include <QThread>
#include <QDebug>
#include <QLabel>
class ScreenShotWidget : public QWidget
{
Q_OBJECT
Q_SIGNALS:
void rect_selected(QRect rect);
public:
ScreenShotWidget(QWidget *parent = nullptr);
protected:
//mouse press
void mousePressEvent(QMouseEvent *event) override;
//mouse move
void mouseMoveEvent(QMouseEvent *event) override;
//mouse release
void mouseReleaseEvent(QMouseEvent *event) override;
//paint the selection overlay
void paintEvent(QPaintEvent *event) override;
private slots:
void on_buttonSave_Clicked();
void on_buttonCancel_Clicked();
private:
QPoint getPointInWhichSqure(QPoint point);
private:
QPushButton *buttonSave,*buttonCancel;
QLabel *lblHeightWidth;
QLabel *lblMagnifyGlass;
QPainter m_painter;
QPoint m_startPos,m_endPos; //the selected screenshot region
QPoint m_oldPos,m_distancePos; //used while dragging/resizing the selection
bool m_screenshot_active;
QPoint m_topLeft;
QPoint m_topRight;
QPoint m_bottomLeft;
QPoint m_bottomRight;
QPoint m_topCenter;
QPoint m_bottomCenter;
QPoint m_leftCenter;
QPoint m_rightCenter;
int m_cursorMode;
};
#endif // SCREENSHOTWIDGET_H

View File

@ -50,7 +50,9 @@ SOURCES += \
media/audioplayerff.cpp \
media/screen_capture.cpp \
media/sps_decode.cpp \
media/streamcontrol.cpp \
utils/Debuger.cpp \
utils/screenshotwidget.cpp \
utils/utils.cpp
HEADERS += \
components/toast.h \
@ -59,6 +61,8 @@ HEADERS += \
cplaywidget.h \
media/audioplayerff.h \
media/screen_capture.h \
media/streamcontrol.h \
utils/screenshotwidget.h \
utils/utils.h

View File

@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE QtCreatorProject>
<!-- Written by QtCreator 11.0.3, 2023-12-18T00:33:20. -->
<!-- Written by QtCreator 11.0.3, 2024-06-16T16:41:32. -->
<qtcreator>
<data>
<variable>EnvironmentId</variable>
@ -99,8 +99,8 @@
<value type="qlonglong" key="ProjectExplorer.Target.ActiveRunConfiguration">0</value>
<valuemap type="QVariantMap" key="ProjectExplorer.Target.BuildConfiguration.0">
<value type="int" key="EnableQmlDebugging">0</value>
<value type="QString" key="ProjectExplorer.BuildConfiguration.BuildDirectory">G:\project\multimedia\client\rtmp_demo\build-yuvgl-Desktop_Qt_5_15_2_MSVC2019_64bit-Debug</value>
<value type="QString" key="ProjectExplorer.BuildConfiguration.BuildDirectory.shadowDir">G:/project/multimedia/client/rtmp_demo/build-yuvgl-Desktop_Qt_5_15_2_MSVC2019_64bit-Debug</value>
<value type="QString" key="ProjectExplorer.BuildConfiguration.BuildDirectory">G:\project\c++qt\qt_rtmp_demo</value>
<value type="QString" key="ProjectExplorer.BuildConfiguration.BuildDirectory.shadowDir">G:/project/c++qt/qt_rtmp_demo</value>
<valuemap type="QVariantMap" key="ProjectExplorer.BuildConfiguration.BuildStepList.0">
<valuemap type="QVariantMap" key="ProjectExplorer.BuildStepList.Step.0">
<value type="bool" key="ProjectExplorer.BuildStep.Enabled">true</value>