no message

master
zcy 2021-10-30 23:20:58 +08:00
parent 387543ed11
commit 5fb054e574
9 changed files with 282 additions and 56 deletions

View File

@@ -1,6 +1,8 @@
#include "MyCapturer.h"
#include "rtc_base/thread.h"
#include <modules/desktop_capture/desktop_capture_options.h>
#include <QDebug>
#include <iostream>
MyCapturer::MyCapturer() {
@@ -30,6 +32,85 @@ absl::optional<bool> MyCapturer::needs_denoising() const {
return false;
}
// Debug helper: dump a raw (top-down) image buffer to a BMP file.
static void RawToBmp(unsigned char *pRawImage, int ImageHeight, int ImageWidth, int bitcount, std::string filePath)
{
    unsigned char *imgData;
    // Each BMP row is padded to a multiple of 4 bytes.
    LONGLONG dataSizePerLine = ((ImageWidth * bitcount + 31) >> 5) << 2;
    // Palette length: 2^bitcount entries for <= 8 bpp, none otherwise.
    int plaLen = bitcount <= 8 ? 2 << (bitcount - 1) : 0;
    std::cout << "plaLen = " << plaLen << std::endl;
    int headerLength = 14 + 40 + plaLen * 4;
    LONGLONG dataLength = dataSizePerLine * (ImageHeight - 1) + ImageWidth * bitcount / 8;
    std::cout << "headerLength = " << headerLength << std::endl;
    std::cout << "dataLength = " << dataLength << std::endl;
    short int biPlanes = 1;
    char bm[2] = { 'B', 'M' };
    long bfReserved1 = 0;
    LONGLONG bfSize = headerLength + dataLength;
    long biHeight = ImageHeight * (-1);   // negative height = top-down bitmap
    long bfOffBits = headerLength;
    long bisize = 40;
    long biCompression = 0;
    LONGLONG biSizeImage = dataLength;
    long biXPelsPerMeter = 0, biYPelsPerMeter = 0;
    long biClrUsed = plaLen;
    int biClrImportant = 0;
    imgData = new unsigned char[headerLength + dataLength];
    memset(imgData, 0, headerLength + dataLength);
    // BITMAPFILEHEADER + BITMAPINFOHEADER, written field by field (little-endian).
    memcpy(imgData, &bm, 2);
    memcpy(imgData + 2, &bfSize, 4);
    memcpy(imgData + 6, &bfReserved1, 4);
    memcpy(imgData + 10, &bfOffBits, 4);
    memcpy(imgData + 14, &bisize, 4);
    memcpy(imgData + 18, &ImageWidth, 4);
    memcpy(imgData + 22, &biHeight, 4);
    memcpy(imgData + 26, &biPlanes, 2);
    memcpy(imgData + 28, &bitcount, 2);
    memcpy(imgData + 30, &biCompression, 4);
    memcpy(imgData + 34, &biSizeImage, 4);
    memcpy(imgData + 38, &biXPelsPerMeter, 4);
    memcpy(imgData + 42, &biYPelsPerMeter, 4);
    memcpy(imgData + 46, &biClrUsed, 4);
    memcpy(imgData + 50, &biClrImportant, 4);   // offset 50 is biClrImportant, not biCompression
    // Grayscale palette for 1/4/8 bpp images.
    if (headerLength > 54) {
        if (bitcount == 1) {
            for (char i = 0; i < 2; i++) {
                char gray[4] = { char(i * 255), char(i * 255), char(i * 255), char(255) };
                memcpy(imgData + 54 + 4 * i, gray, 4);
            }
        }
        else if (bitcount == 4) {
            for (char i = 0; i < 16; i++) {
                char gray[4] = { char(i * 17), char(i * 17), char(i * 17), char(255) };
                memcpy(imgData + 54 + 4 * i, gray, 4);
            }
        }
        else if (bitcount == 8) {
            for (int i = 0; i < 256; i++) {   // 256 palette entries, not 255
                char gray[4] = { char(i), char(i), char(i), char(255) };
                memcpy(imgData + 54 + 4 * i, gray, 4);
            }
        }
    }
    // Copy the pixel rows, honoring the padded BMP line size.
    int dw = ImageWidth * bitcount / 8;
    for (int i = 0; i < ImageHeight; i++)
    {
        memcpy(imgData + headerLength + dataSizePerLine * i, pRawImage + dw * i, dw);
    }
    FILE *fp_bmp = fopen(filePath.c_str()/*"temp.bmp"*/, "wb");
    if (fp_bmp) {
        fwrite((void*)imgData, bfSize, 1, fp_bmp);
        fclose(fp_bmp);
    }
    delete[] imgData;   // avoid leaking the header + pixel buffer
}
void MyCapturer::OnCaptureResult(webrtc::DesktopCapturer::Result result,
std::unique_ptr<webrtc::DesktopFrame> frame) {
if (result != webrtc::DesktopCapturer::Result::SUCCESS)
@@ -43,11 +124,12 @@ void MyCapturer::OnCaptureResult(webrtc::DesktopCapturer::Result result,
i420_buffer_ = webrtc::I420Buffer::Create(width, height);
}
-// libyuv::ConvertToI420(frame->data(), 0, i420_buffer_->MutableDataY(),
-// i420_buffer_->StrideY(), i420_buffer_->MutableDataU(),
-// i420_buffer_->StrideU(), i420_buffer_->MutableDataV(),
-// i420_buffer_->StrideV(), 0, 0, width, height, width,
-// height, libyuv::kRotate0, libyuv::FOURCC_ARGB);
+libyuv::ConvertToI420(frame->data(), 0, i420_buffer_->MutableDataY(),
+i420_buffer_->StrideY(), i420_buffer_->MutableDataU(),
+i420_buffer_->StrideU(), i420_buffer_->MutableDataV(),
+i420_buffer_->StrideV(), 0, 0, width, height, width,
+height, libyuv::kRotate0, libyuv::FOURCC_ARGB);
// RawToBmp(frame->data(),height,width,32,"d://sss.bmp");
OnFrame(webrtc::VideoFrame(i420_buffer_, 0, 0, webrtc::kVideoRotation_0));
}
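The conversion re-enabled above treats the captured DesktopFrame as tightly packed ARGB (row stride equal to width * 4). webrtc::DesktopFrame exposes its actual stride, so a stride-aware variant is possible; a minimal sketch (an assumption on my part, not code from this commit):

    // Sketch: use the frame's real row stride instead of assuming width * 4.
    libyuv::ARGBToI420(frame->data(), frame->stride(),
                       i420_buffer_->MutableDataY(), i420_buffer_->StrideY(),
                       i420_buffer_->MutableDataU(), i420_buffer_->StrideU(),
                       i420_buffer_->MutableDataV(), i420_buffer_->StrideV(),
                       width, height);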

View File

@@ -10,7 +10,7 @@
#include "rtc_base/thread.h"
#include "media/base/adapted_video_track_source.h"
#include "rtc_base/message_handler.h"
#include "libyuv.h"
class MyCapturer : public rtc::AdaptedVideoTrackSource,
public rtc::MessageHandler,

View File

@@ -90,6 +90,8 @@ void CPlayWidget::OnPaintData(const rtc::scoped_refptr<webrtc::I420BufferInterfa
update();
}
CPlayWidget::CPlayWidget(QWidget *parent):QOpenGLWidget(parent) {
textureUniformY = 0;
textureUniformU = 0;
@@ -119,15 +121,14 @@ int CPlayWidget::SetDataType(CPlayWidget::IMG_TYPE type){
return 0;
}
-int CPlayWidget::OnCameraData( rtc::scoped_refptr<webrtc::I420BufferInterface> &buffer)
+void CPlayWidget::CameraData( rtc::scoped_refptr<webrtc::I420BufferInterface> &buffer)
{
m_buffer = buffer;
-memcpy(this->m_pBufYuv420p,buffer->GetI420()->DataY(),640*480);
-memcpy(this->m_pBufYuv420p + 640*480 ,buffer->GetI420()->DataU(),640*480/4);
-memcpy(this->m_pBufYuv420p+ 640*480 + 640*480/4,buffer->GetI420()->DataV(),640*480/4);
+memcpy(this->m_pBufYuv420p,buffer->GetI420()->DataY(),2560*1600);
+memcpy(this->m_pBufYuv420p + 2560*1600 ,buffer->GetI420()->DataU(),2560*1600/4);
+memcpy(this->m_pBufYuv420p+ 2560*1600 + 2560*1600/4,buffer->GetI420()->DataV(),2560*1600/4);
update();
-return 0;
}
int CPlayWidget::OnCameraData(uint8_t *p)
@@ -137,12 +138,19 @@ int CPlayWidget::OnCameraData(uint8_t *p)
return 0;
}
int CPlayWidget::OnCameraDataRgb(uint8_t *p)
{
memcpy(m_pBufRgb32,p,this->m_nVideoH*this->m_nVideoW*4);
update();
return 0;
}
int CPlayWidget::SetImgSize(uint32_t width, uint32_t height)
{
m_nVideoH = height;
m_nVideoW = width;
if(mType == TYPE_RGB32){
-m_pBufRgb32 = new uint8_t[width * height *4];
+m_pBufRgb32 = new uint8_t[width * height *8];
}
if(mType == TYPE_YUV420P){
m_pBufYuv420p = new uint8_t[width * height *3/2];
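The new CameraData slot copies the three I420 planes with hard-coded 2560x1600 sizes and implicitly assumes each plane's stride equals its width. A stride-aware copy driven by the buffer's own dimensions (a sketch under those assumptions, not code from this commit) could look like:

    void CPlayWidget::CameraData(rtc::scoped_refptr<webrtc::I420BufferInterface> &buffer)
    {
        // Sketch: derive sizes from the buffer instead of hard-coding 2560x1600.
        const int w = buffer->width();
        const int h = buffer->height();
        uint8_t *dst = m_pBufYuv420p;
        // Copy Y, U and V row by row so padded strides are handled correctly.
        for (int y = 0; y < h; ++y, dst += w)
            memcpy(dst, buffer->DataY() + y * buffer->StrideY(), w);
        for (int y = 0; y < h / 2; ++y, dst += w / 2)
            memcpy(dst, buffer->DataU() + y * buffer->StrideU(), w / 2);
        for (int y = 0; y < h / 2; ++y, dst += w / 2)
            memcpy(dst, buffer->DataV() + y * buffer->StrideV(), w / 2);
        update();
    }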

View File

@@ -25,8 +25,10 @@
class CPlayWidget:public QOpenGLWidget,protected QOpenGLFunctions
{
Q_OBJECT
signals:
public slots:
void OnPaintData(const rtc::scoped_refptr<webrtc::I420BufferInterface> &buffer);
void CameraData(rtc::scoped_refptr<webrtc::I420BufferInterface> &buffer);
public:
typedef enum{
@@ -37,8 +39,9 @@ public:
CPlayWidget(QWidget* parent);
~CPlayWidget();
int SetDataType(IMG_TYPE);
-int OnCameraData(rtc::scoped_refptr<webrtc::I420BufferInterface> &buffer);
int OnCameraData(uint8_t *);
int OnCameraDataRgb(uint8_t *);
int SetImgSize(uint32_t width,uint32_t );
protected:
QTimer tm;

View File

@@ -96,6 +96,9 @@ void InitCustomMetaType(){
qRegisterMetaType<rtc::scoped_refptr<webrtc::I420BufferInterface>>("rtc::scoped_refptr<webrtc::I420BufferInterface>&");
qRegisterMetaType<webrtc::VideoTrackInterface*>("webrtc::VideoTrackInterface*");
qRegisterMetaType<webrtc::MediaStreamTrackInterface*>("webrtc::MediaStreamTrackInterface*");
qRegisterMetaType<uint8_t*>("uint8_t*");
qRegisterMetaType<uint64_t>("uint64_t");
}
int main(int argc, char *argv[])
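The two new registrations let uint8_t* and uint64_t travel through queued (cross-thread) signal/slot connections, which Qt requires for any argument type it has to copy into an event. A minimal sketch of the pattern they enable, using the rgbadata signal and rgba_data slot added elsewhere in this commit (the renderer and mainWindow pointers are illustrative):

    // Sketch: with uint8_t* and uint64_t registered, a renderer running on a
    // WebRTC worker thread can hand frame data to the GUI thread safely.
    QObject::connect(renderer, SIGNAL(rgbadata(uint8_t*,uint64_t)),
                     mainWindow, SLOT(rgba_data(uint8_t*,uint64_t)),
                     Qt::QueuedConnection);   // arguments are copied and delivered in the GUI event loop
    // Inside the renderer (worker thread):
    //   emit rgbadata(image_, width * height * 4);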

View File

@@ -7,6 +7,9 @@
#include <windows.h>
#include <Lmcons.h>
#include <QAction>
#include "MyCapturer.h"
#include <iostream>
const char kCandidateSdpMidName[] = "sdpMid";
const char kCandidateSdpMlineIndexName[] = "sdpMLineIndex";
@@ -20,16 +23,19 @@ MainWindow::MainWindow(QWidget *parent)
,mModel(nullptr)
,mCalling(false)
{
mHandler->SetParent(this);
ui->setupUi(this);
-ui->openGLWidget->SetImgSize(640,480);
+ui->openGLWidget->SetDataType(CPlayWidget::TYPE_I420);
ui->openGLWidget->SetImgSize(2560,1600);
ui->openGLWidget->show();
mHandler->InitWebrtc();
TCHAR username[UNLEN + 1];
DWORD size = UNLEN + 1;
GetUserName((TCHAR*)username, &size);
mModel = new QStandardItemModel(this);
ui->treeView->setModel(mModel);
mHandler->InitWebrtc();
connect(ui->treeView,SIGNAL(doubleClicked(QModelIndex)),
this,SLOT(itemClicked(QModelIndex)));
connect((WebrtcHanlder*)(mHandler.get()),SIGNAL(OnOfferSdp(QString)),
@@ -38,7 +44,7 @@ MainWindow::MainWindow(QWidget *parent)
this,SLOT(on_track_add(webrtc::MediaStreamTrackInterface*)));
connect((WebrtcHanlder*)(mHandler.get()),SIGNAL(OnLocalTrack(webrtc::VideoTrackInterface* )),
this,SLOT(on_local_track_add(webrtc::VideoTrackInterface* )));
mHandler.get()->setParent(this);
}
MainWindow::~MainWindow()
@@ -46,15 +52,19 @@ MainWindow::~MainWindow()
delete ui;
}
CPlayWidget *MainWindow::OpenglWidget()
{
return this->ui->openGLWidget;
}
void MainWindow::OnUpdateFrame( rtc::scoped_refptr<webrtc::I420BufferInterface>& buffer)
{
qDebug()<<"1234";
-ui->openGLWidget->OnCameraData(buffer);
+// ui->openGLWidget->OnCameraData(buffer);
}
void MainWindow::OnUpdateFrame1(uint8_t *dat)
{
qDebug()<<"4321";
ui->openGLWidget->OnCameraData(dat);
}
@@ -101,7 +111,10 @@ int WebrtcHanlder::InitWebrtc()
x->show();
exit(0);
}
if (!m_peer_connection_->GetSenders().empty()) {
return -1; // Already added tracks.
}
// add audio tracks
rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
m_peer_connection_factory_->CreateAudioTrack(
kAudioLabel, m_peer_connection_factory_->CreateAudioSource(
@@ -109,7 +122,7 @@
auto result_or_error = m_peer_connection_->AddTrack(audio_track, {kStreamId});
if (!result_or_error.ok()) {
-qDebug() << "Failed to add audio track to PeerConnection: "
+RTC_LOG(LS_ERROR) << "Failed to add audio track to PeerConnection: "
<< result_or_error.error().message();
}
@@ -118,17 +131,16 @@ int WebrtcHanlder::InitWebrtc()
video_device->startCapturer();
rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track_(
m_peer_connection_factory_->CreateVideoTrack(kVideoLabel, video_device));
-// main_wnd_->StartLocalRenderer(video_track_);
+auto p = video_device.get();
// mParent->on_local_track_add(video_track_);
result_or_error = m_peer_connection_->AddTrack(video_track_, { kStreamId });
if (!result_or_error.ok()) {
-qDebug() << "Failed to add video track to PeerConnection: "
+RTC_LOG(LS_ERROR) << "Failed to add video track to PeerConnection: "
<< result_or_error.error().message();
}
} else {
-qDebug()<< "OpenVideoCaptureDevice failed";
+RTC_LOG(LS_ERROR) << "OpenVideoCaptureDevice failed";
}
}
@@ -237,7 +249,7 @@ void WebrtcHanlder::OnSignalingChange(webrtc::PeerConnectionInterface::Signaling
}
// The receiving side has successfully created the remote track
void WebrtcHanlder::OnAddTrack(rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver,
const std::vector<rtc::scoped_refptr<webrtc::MediaStreamInterface> > &streams)
{
@@ -310,7 +322,7 @@ void WebrtcHanlder::OnFailure(webrtc::RTCError error)
void MainWindow::on_pushButton_clicked()
{
-mSignalClient = new SignalClient(QUrl("ws://127.0.0.1:9555/ws"),true,this);
+mSignalClient = new SignalClient(QUrl(ui->lineEdit_2->text()),true,this);
connect(this->mSignalClient,SIGNAL(connected()),this,SLOT(signal_conneted()));
connect(this->mSignalClient,SIGNAL(response(int ,QJsonObject)),
this,SLOT( signal_response(int,QJsonObject)));
@@ -386,6 +398,7 @@ void MainWindow::itemClicked(QModelIndex index)
{
if(!mSignalClient->Connected()){
qDebug()<<"请先连接信令服务"; qDebug()<<"请先连接信令服务";
return;
}
mHandler->CreateOffer();
qDebug()<<mModel->item(index.row())->text();
@@ -407,7 +420,13 @@ void MainWindow::on_track_add(webrtc::MediaStreamTrackInterface *data)
auto* track = reinterpret_cast<webrtc::MediaStreamTrackInterface*>(data);
if (track->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) {
auto* video_track = static_cast<webrtc::VideoTrackInterface*>(track);
qDebug()<<"remote trackid is "<<video_track->id().c_str();
remote_renderer_.reset(new VideoRenderer( 1, 1, video_track));
auto p = remote_renderer_.get();
connect((VideoRenderer*)(p),SIGNAL(UpdateFrame(rtc::scoped_refptr<webrtc::I420BufferInterface>&)),
ui->openGLWidget,SLOT(CameraData(rtc::scoped_refptr<webrtc::I420BufferInterface> &)),
Qt::ConnectionType::QueuedConnection);
}
track->Release();
}
@@ -418,6 +437,11 @@ void MainWindow::on_local_track_add(webrtc::VideoTrackInterface* data)
local_renderer_.reset(new VideoRenderer( 1, 1, data));
}
void MainWindow::rgba_data(uint8_t *buffer, uint64_t)
{
ui->openGLWidget->OnCameraDataRgb(buffer);
}
VideoRenderer::VideoRenderer(int width, int height, webrtc::VideoTrackInterface *track_to_render)
: rendered_track_(track_to_render)
@@ -441,9 +465,86 @@ VideoRenderer::~VideoRenderer()
::DeleteCriticalSection(&buffer_lock_);
}
// Debug helper: dump a raw (top-down) image buffer to a BMP file.
static void RawToBmp(unsigned char *pRawImage, int ImageHeight, int ImageWidth, int bitcount, std::string filePath)
{
    unsigned char *imgData;
    // Each BMP row is padded to a multiple of 4 bytes.
    LONGLONG dataSizePerLine = ((ImageWidth * bitcount + 31) >> 5) << 2;
    // Palette length: 2^bitcount entries for <= 8 bpp, none otherwise.
    int plaLen = bitcount <= 8 ? 2 << (bitcount - 1) : 0;
    std::cout << "plaLen = " << plaLen << std::endl;
    int headerLength = 14 + 40 + plaLen * 4;
    LONGLONG dataLength = dataSizePerLine * (ImageHeight - 1) + ImageWidth * bitcount / 8;
    std::cout << "headerLength = " << headerLength << std::endl;
    std::cout << "dataLength = " << dataLength << std::endl;
    short int biPlanes = 1;
    char bm[2] = { 'B', 'M' };
    long bfReserved1 = 0;
    LONGLONG bfSize = headerLength + dataLength;
    long biHeight = ImageHeight * (-1);   // negative height = top-down bitmap
    long bfOffBits = headerLength;
    long bisize = 40;
    long biCompression = 0;
    LONGLONG biSizeImage = dataLength;
    long biXPelsPerMeter = 0, biYPelsPerMeter = 0;
    long biClrUsed = plaLen;
    int biClrImportant = 0;
    imgData = new unsigned char[headerLength + dataLength];
    memset(imgData, 0, headerLength + dataLength);
    // BITMAPFILEHEADER + BITMAPINFOHEADER, written field by field (little-endian).
    memcpy(imgData, &bm, 2);
    memcpy(imgData + 2, &bfSize, 4);
    memcpy(imgData + 6, &bfReserved1, 4);
    memcpy(imgData + 10, &bfOffBits, 4);
    memcpy(imgData + 14, &bisize, 4);
    memcpy(imgData + 18, &ImageWidth, 4);
    memcpy(imgData + 22, &biHeight, 4);
    memcpy(imgData + 26, &biPlanes, 2);
    memcpy(imgData + 28, &bitcount, 2);
    memcpy(imgData + 30, &biCompression, 4);
    memcpy(imgData + 34, &biSizeImage, 4);
    memcpy(imgData + 38, &biXPelsPerMeter, 4);
    memcpy(imgData + 42, &biYPelsPerMeter, 4);
    memcpy(imgData + 46, &biClrUsed, 4);
    memcpy(imgData + 50, &biClrImportant, 4);   // offset 50 is biClrImportant, not biCompression
    // Grayscale palette for 1/4/8 bpp images.
    if (headerLength > 54) {
        if (bitcount == 1) {
            for (char i = 0; i < 2; i++) {
                char gray[4] = { char(i * 255), char(i * 255), char(i * 255), char(255) };
                memcpy(imgData + 54 + 4 * i, gray, 4);
            }
        }
        else if (bitcount == 4) {
            for (char i = 0; i < 16; i++) {
                char gray[4] = { char(i * 17), char(i * 17), char(i * 17), char(255) };
                memcpy(imgData + 54 + 4 * i, gray, 4);
            }
        }
        else if (bitcount == 8) {
            for (int i = 0; i < 256; i++) {   // 256 palette entries, not 255
                char gray[4] = { char(i), char(i), char(i), char(255) };
                memcpy(imgData + 54 + 4 * i, gray, 4);
            }
        }
    }
    // Copy the pixel rows, honoring the padded BMP line size.
    int dw = ImageWidth * bitcount / 8;
    for (int i = 0; i < ImageHeight; i++)
    {
        memcpy(imgData + headerLength + dataSizePerLine * i, pRawImage + dw * i, dw);
    }
    FILE *fp_bmp = fopen(filePath.c_str()/*"temp.bmp"*/, "wb");
    if (fp_bmp) {
        fwrite((void*)imgData, bfSize, 1, fp_bmp);
        fclose(fp_bmp);
    }
    delete[] imgData;   // avoid leaking the header + pixel buffer
}
void VideoRenderer::OnFrame(const webrtc::VideoFrame &video_frame)
{
qDebug()<<"onframe";
{
AutoLock<VideoRenderer> lock(this);
rtc::scoped_refptr<webrtc::I420BufferInterface> buffer(
@@ -452,13 +553,25 @@ void VideoRenderer::OnFrame(const webrtc::VideoFrame &video_frame)
buffer = webrtc::I420Buffer::Rotate(*buffer, video_frame.rotation());
}
SetSize(buffer->width(), buffer->height());
-RTC_DCHECK(image_.get() != NULL);
+RTC_DCHECK(image_ != NULL);
qDebug()<<buffer->width() << buffer->height();
// libyuv::I420ToARGB(buffer->DataY(), buffer->StrideY(), buffer->DataU(),
// buffer->StrideU(), buffer->DataV(), buffer->StrideV(),
-// image_.get(),
+// image_,
// bmi_.bmiHeader.biWidth * bmi_.bmiHeader.biBitCount / 8,
// buffer->width(), buffer->height());
// rgbadata(image_,
// (uint64_t)(buffer->width()*buffer->height()*4));
//UpdateFrame(buffer);
auto i420_buffer_ = video_frame.video_frame_buffer()->ToI420();
auto type = video_frame.video_frame_buffer()->type();
libyuv::I420ToARGB(i420_buffer_->DataY(), i420_buffer_->StrideY(), i420_buffer_->DataU(),
i420_buffer_->StrideU(), i420_buffer_->DataV(), i420_buffer_->StrideV(),
image_,
i420_buffer_->width()*4,
i420_buffer_->width(), i420_buffer_->height());
RawToBmp(image_,i420_buffer_->height(),i420_buffer_->width(),32,"d://sss2.bmp");
qDebug()<<"local size: "<<i420_buffer_->width() << i420_buffer_->height();
}
}
@@ -472,5 +585,5 @@ void VideoRenderer::SetSize(int width, int height)
bmi_.bmiHeader.biHeight = -height;
bmi_.bmiHeader.biSizeImage =
width * height * (bmi_.bmiHeader.biBitCount >> 3);
-image_.reset(new uint8_t[bmi_.bmiHeader.biSizeImage]);
+image_ = new uint8_t[bmi_.bmiHeader.biSizeImage];
}
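With image_ changed from std::unique_ptr<uint8_t[]> to a raw pointer, every SetSize call now leaks the previously allocated buffer and nothing releases the final one. Keeping the smart pointer and only adapting the call sites is one way to avoid that; a minimal sketch (not part of this commit):

    // Sketch: retain the pre-commit member type so resizing cannot leak.
    std::unique_ptr<uint8_t[]> image_;   // member declaration in VideoRenderer

    void VideoRenderer::SetSize(int width, int height)
    {
        // ... bmi_ setup exactly as in the diff above ...
        image_.reset(new uint8_t[bmi_.bmiHeader.biSizeImage]);   // previous buffer freed automatically
    }

    // Call sites then pass the raw pointer explicitly, e.g.
    //   libyuv::I420ToARGB(..., image_.get(), i420_buffer_->width() * 4, ...);
    //   RawToBmp(image_.get(), i420_buffer_->height(), i420_buffer_->width(), 32, "d://sss2.bmp");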

View File

@@ -29,44 +29,53 @@
#include "api/video/i420_buffer.h"
#include "signal_client.h"
#include <QStandardItemModel>
#include "cplaywidget.h"
QT_BEGIN_NAMESPACE
namespace Ui { class MainWindow; }
QT_END_NAMESPACE
-class VideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
-public:
-VideoRenderer(
+class VideoRenderer : public QObject,
+public rtc::VideoSinkInterface<webrtc::VideoFrame>
+{
+Q_OBJECT
+public:
+VideoRenderer(
int width,
int height,
webrtc::VideoTrackInterface* track_to_render);
virtual ~VideoRenderer();
void Lock() { ::EnterCriticalSection(&buffer_lock_); }
void Unlock() { ::LeaveCriticalSection(&buffer_lock_); }
// VideoSinkInterface implementation
void OnFrame(const webrtc::VideoFrame& frame) override;
const BITMAPINFO& bmi() const { return bmi_; }
-const uint8_t* image() const { return image_.get(); }
-protected:
-void SetSize(int width, int height);
-enum {
-SET_SIZE,
-RENDER_FRAME,
-};
-HWND wnd_;
-BITMAPINFO bmi_;
-std::unique_ptr<uint8_t[]> image_;
-CRITICAL_SECTION buffer_lock_;
-rtc::scoped_refptr<webrtc::VideoTrackInterface> rendered_track_;
+const uint8_t* image() const { return image_; }
+signals:
+void rgbadata(uint8_t *data,uint64_t len);
+void UpdateFrame(rtc::scoped_refptr<webrtc::I420BufferInterface>& buffer);
+protected:
+void SetSize(int width, int height);
+enum {
+SET_SIZE,
+RENDER_FRAME,
+};
+HWND wnd_;
+BITMAPINFO bmi_;
+uint8_t* image_;
+CRITICAL_SECTION buffer_lock_;
+rtc::scoped_refptr<webrtc::VideoTrackInterface> rendered_track_;
};
// A little helper class to make sure we always do proper locking and
@@ -86,6 +95,9 @@ class WebrtcHanlder :public QObject,
public webrtc::PeerConnectionObserver,
public webrtc::CreateSessionDescriptionObserver{
Q_OBJECT
public:
signals:
void OnOfferSdp(QString);
void OnAnswerSdp(QString);
@@ -101,6 +113,7 @@
void SetRemoteCandidate(QString);
void SetParent(MainWindow *p);
void CreateAnwer();
protected:
~WebrtcHanlder();
//
@@ -140,8 +153,12 @@ class MainWindow :public QMainWindow
{
Q_OBJECT
public:
Ui::MainWindow *ui;
MainWindow(QWidget *parent = nullptr);
~MainWindow();
CPlayWidget* OpenglWidget();
protected:
public slots:
@@ -157,9 +174,9 @@ public slots:
void on_local_sdp(QString);
void on_track_add(webrtc::MediaStreamTrackInterface*);
void on_local_track_add(webrtc::VideoTrackInterface* );
void rgba_data(uint8_t *,uint64_t );
private:
Ui::MainWindow *ui;
rtc::scoped_refptr<WebrtcHanlder> mHandler;
SignalClient *mSignalClient;
QStandardItemModel *mModel;

View File

@@ -18,7 +18,7 @@ DEFINES += QT_DEPRECATED_WARNINGS NOMINMAX WEBRTC_WIN NOMINMAX WIN32_LEAN_AND_ME
INCLUDEPATH += third/include/
-LIBS += -L$$PWD/third/lib libwebrtc.lib ole32.lib oleaut32.lib strmiids.lib
+LIBS += -L$$PWD/third/lib libwebrtc.lib ole32.lib oleaut32.lib strmiids.lib yuv.lib
SOURCES += \
src/MyCapturer.cpp \

View File

@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE QtCreatorProject>
-<!-- Written by QtCreator 4.11.0, 2021-10-21T01:30:59. -->
+<!-- Written by QtCreator 4.11.0, 2021-10-30T03:12:14. -->
<qtcreator>
<data>
<variable>EnvironmentId</variable>