merge callee and caller

master
zcy 2021-11-02 00:39:01 +08:00
parent 95cd1c24da
commit 96726fec7e
5 changed files with 117 additions and 189 deletions

View File

@ -90,8 +90,6 @@ void CPlayWidget::OnPaintData(const rtc::scoped_refptr<webrtc::I420BufferInterfa
update();
}
CPlayWidget::CPlayWidget(QWidget *parent):QOpenGLWidget(parent) {
textureUniformY = 0;
textureUniformU = 0;
@ -121,12 +119,13 @@ int CPlayWidget::SetDataType(CPlayWidget::IMG_TYPE type){
return 0;
}
void CPlayWidget::CameraData( rtc::scoped_refptr<webrtc::I420BufferInterface> &buffer)
void CPlayWidget::OnCameraData( rtc::scoped_refptr<webrtc::I420BufferInterface> &buffer)
{
m_buffer = buffer;
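// Pack the three I420 planes back-to-back into m_pBufYuv420p: Y takes W*H bytes,
// U and V take W*H/4 each; the memcpy assumes each plane's stride equals its width.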
memcpy(this->m_pBufYuv420p,buffer->GetI420()->DataY(),2560*1600);
memcpy(this->m_pBufYuv420p + 2560*1600 ,buffer->GetI420()->DataU(),2560*1600/4);
memcpy(this->m_pBufYuv420p+ 2560*1600+ 2560*1600/4,buffer->GetI420()->DataV(),2560*1600/4);
memcpy(this->m_pBufYuv420p,buffer->GetI420()->DataY(),m_nVideoW*m_nVideoH);
memcpy(this->m_pBufYuv420p + m_nVideoW*m_nVideoH ,buffer->GetI420()->DataU(),m_nVideoW*m_nVideoH/4);
memcpy(this->m_pBufYuv420p+ m_nVideoW*m_nVideoH + m_nVideoW*m_nVideoH/4,
buffer->GetI420()->DataV(),m_nVideoW*m_nVideoH/4);
update();
}
@ -138,13 +137,6 @@ int CPlayWidget::OnCameraData(uint8_t *p)
return 0;
}
int CPlayWidget::OnCameraDataRgb(uint8_t *p)
{
memcpy(m_pBufRgb32,p,this->m_nVideoH*this->m_nVideoW*4);
update();
return 0;
}
int CPlayWidget::SetImgSize(uint32_t width, uint32_t height)
{
m_nVideoH = height;
@ -157,9 +149,6 @@ int CPlayWidget::SetImgSize(uint32_t width, uint32_t height)
}
if(mType == TYPE_I420){
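// An I420 frame needs width*height*3/2 bytes: a full-resolution Y plane plus quarter-resolution U and V planes.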
m_pBufYuv420p = new uint8_t[width * height *3/2];
memset(m_pBufYuv420p,0x00,width * height );
memset(m_pBufYuv420p + width * height ,0xff,width * height/2 );
}
return 0;
}

View File

@ -25,10 +25,9 @@
class CPlayWidget:public QOpenGLWidget,protected QOpenGLFunctions
{
Q_OBJECT
signals:
public slots:
void OnPaintData(const rtc::scoped_refptr<webrtc::I420BufferInterface> &buffer);
void CameraData(rtc::scoped_refptr<webrtc::I420BufferInterface> &buffer);
void OnCameraData(rtc::scoped_refptr<webrtc::I420BufferInterface> &);
public:
typedef enum{
@ -39,9 +38,8 @@ public:
CPlayWidget(QWidget* parent);
~CPlayWidget();
int SetDataType(IMG_TYPE);
int OnCameraData(uint8_t *);
int OnCameraDataRgb(uint8_t *);
int OnCameraData(uint8_t *);
int SetImgSize(uint32_t width,uint32_t );
protected:
QTimer tm;

View File

@ -98,6 +98,7 @@ void InitCustomMetaType(){
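// Custom types passed through queued (cross-thread) signal/slot connections must be registered with Qt's meta-type system.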
qRegisterMetaType<webrtc::MediaStreamTrackInterface*>("webrtc::MediaStreamTrackInterface*");
qRegisterMetaType<uint8_t*>("uint8_t*");
qRegisterMetaType<uint64_t>("uint64_t");
qRegisterMetaType<uint32_t>("uint32_t");
}

View File

@ -7,7 +7,8 @@
#include <windows.h>
#include <Lmcons.h>
#include <QAction>
#include "MyCapturer.h"
#include "video_capture.h"
#include "video_capturer_test.h"
#include <iostream>
@ -15,6 +16,7 @@ const char kCandidateSdpMidName[] = "sdpMid";
const char kCandidateSdpMlineIndexName[] = "sdpMLineIndex";
const char kCandidateSdpName[] = "candidate";
MainWindow::MainWindow(QWidget *parent)
: QMainWindow(parent)
, ui(new Ui::MainWindow)
@ -23,47 +25,21 @@ MainWindow::MainWindow(QWidget *parent)
,mModel(nullptr)
,mCalling(false)
{
mHandler->SetParent(this);
ui->setupUi(this);
ui->openGLWidget->SetDataType(CPlayWidget::TYPE_I420);
ui->openGLWidget->SetImgSize(2560,1600);
ui->openGLWidget->show();
mHandler->InitWebrtc();
TCHAR username[UNLEN + 1];
DWORD size = UNLEN + 1;
GetUserName((TCHAR*)username, &size);
mModel = new QStandardItemModel(this);
ui->treeView->setModel(mModel);
mHandler->InitWebrtc();
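// Wire the WebRTC handler's signals to GUI slots; they are emitted from WebRTC's signaling thread,
// so Qt's default AutoConnection delivers them as queued events on the GUI thread.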
connect(ui->treeView,SIGNAL(doubleClicked(QModelIndex)),
this,SLOT(itemClicked(QModelIndex)));
connect((WebrtcHanlder*)(mHandler.get()),SIGNAL(OnOfferSdp(QString)),
this,SLOT(on_local_sdp(QString)));
connect((WebrtcHanlder*)(mHandler.get()),SIGNAL(OnRemoteTrack(webrtc::MediaStreamTrackInterface*)),
this,SLOT(on_track_add(webrtc::MediaStreamTrackInterface*)));
connect((WebrtcHanlder*)(mHandler.get()),SIGNAL(OnLocalTrack(webrtc::VideoTrackInterface* )),
this,SLOT(on_local_track_add(webrtc::VideoTrackInterface* )));
std::unique_ptr<webrtc::VideoCaptureModule::DeviceInfo> info(
webrtc::VideoCaptureFactory::CreateDeviceInfo());
if (!info) {
RTC_LOG(LERROR) << "CreateDeviceInfo failed";
}
int num_devices = info->NumberOfDevices();
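// Log each capture device's name and first reported capability, and add it to the combo box
// with its unique id stored as the item's user data.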
for (int i = 0; i < num_devices; ++i) {
char devicename[200];
char device_unique[200];
char product_unique[200];
info->GetDeviceName(i,devicename,200,device_unique,200,product_unique,200);
qDebug()<< "device name : "<< devicename<<"unique device: "<<device_unique<<"produce unique: "<<product_unique;
webrtc::VideoCaptureCapability p;
info->GetCapability(device_unique,0,p);
qDebug()<<devicename<<" capability: width"<<p.width << p.height<<int(p.videoType);
ui->comboBox->addItem(QString(devicename),device_unique);
}
}
MainWindow::~MainWindow()
@ -71,21 +47,6 @@ MainWindow::~MainWindow()
delete ui;
}
CPlayWidget *MainWindow::OpenglWidget()
{
return this->ui->openGLWidget;
}
void MainWindow::OnUpdateFrame( rtc::scoped_refptr<webrtc::I420BufferInterface>& buffer)
{
qDebug()<<"1234";
// ui->openGLWidget->OnCameraData(buffer);
}
void MainWindow::OnUpdateFrame1(uint8_t *dat)
{
ui->openGLWidget->OnCameraData(dat);
}
const char kAudioLabel[] = "audio_label";
const char kVideoLabel[] = "video_label";
@ -108,7 +69,7 @@ int WebrtcHanlder::InitWebrtc()
nullptr /* audio_processing */);
if (!m_peer_connection_factory_) {
auto x = new QMessageBox(nullptr);
auto x = new QMessageBox(nullptr);
x->setText("创建peerconnection factory失败");
x->show();
exit(0);
@ -130,6 +91,38 @@ int WebrtcHanlder::InitWebrtc()
x->show();
exit(0);
}
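// Create the local audio track from an audio source and add it to the PeerConnection right after
// the connection exists; the video capturer is started before its track is created below.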
rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
m_peer_connection_factory_->CreateAudioTrack(
kAudioLabel, m_peer_connection_factory_->CreateAudioSource(
cricket::AudioOptions())));
auto result_or_error = m_peer_connection_->AddTrack(audio_track, {kStreamId});
if (!result_or_error.ok()) {
qDebug() << "Failed to add audio track to PeerConnection: "
<< result_or_error.error().message();
}
//rtc::scoped_refptr<CapturerTrackSource> video_device =
// CapturerTrackSource::Create();
rtc::scoped_refptr<MyCapturer> video_device = new rtc::RefCountedObject<MyCapturer>();
if (video_device) {
video_device->startCapturer();
rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track_(
m_peer_connection_factory_->CreateVideoTrack(kVideoLabel, video_device));
qDebug()<<"local track is "<<video_track_->id().c_str();
result_or_error = m_peer_connection_->AddTrack(video_track_, { kStreamId });
if (!result_or_error.ok()) {
qDebug() << "Failed to add video track to PeerConnection: "
<< result_or_error.error().message();
}
} else {
qDebug()<< "OpenVideoCaptureDevice failed";
}
}
int WebrtcHanlder::AddTrack()
{
if (!m_peer_connection_->GetSenders().empty()) {
return -1; // Already added tracks.
}
@ -144,13 +137,13 @@ int WebrtcHanlder::InitWebrtc()
RTC_LOG(LS_ERROR) << "Failed to add audio track to PeerConnection: "
<< result_or_error.error().message();
}
//rtc::scoped_refptr<CapturerTrackSource> video_device =
// CapturerTrackSource::Create();
rtc::scoped_refptr<MyCapturer> video_device = new rtc::RefCountedObject<MyCapturer>();
if (video_device) {
video_device->startCapturer();
rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track_(
m_peer_connection_factory_->CreateVideoTrack(kVideoLabel, video_device));
auto p = video_device.get();
result_or_error = m_peer_connection_->AddTrack(video_track_, { kStreamId });
if (!result_or_error.ok()) {
@ -160,10 +153,9 @@ int WebrtcHanlder::InitWebrtc()
} else {
RTC_LOG(LS_ERROR) << "OpenVideoCaptureDevice failed";
}
webrtc::DataChannelInit config;
}
void WebrtcHanlder::SetSignalClient(SignalClient * cli)
{
this->mClient = cli;
@ -176,7 +168,6 @@ void WebrtcHanlder::CreateOffer()
m_peer_connection_->CreateOffer(this,
webrtc::PeerConnectionInterface::RTCOfferAnswerOptions());
}
}
class DummySetSessionDescriptionObserver
@ -194,7 +185,7 @@ public:
void WebrtcHanlder::SetRemoteSdp(QString sdp)
{
qDebug()<<"SetRemoteSdp" << sdp;
qDebug()<<"SetRemoteSdp " << sdp;
auto doc = QJsonDocument::fromJson(sdp.toUtf8());
auto obj = doc.object();
auto ssdp = obj["sdp"].toString();
@ -215,21 +206,17 @@ void WebrtcHanlder::SetRemoteSdp(QString sdp)
<< "SdpParseError was: " << error.description;
return;
}
if(nullptr != m_peer_connection_){
m_peer_connection_->SetRemoteDescription(
DummySetSessionDescriptionObserver::Create(),
session_description.release());
}
}
void WebrtcHanlder::SetRemoteCandidate(QString scandidate)
{
qDebug()<<scandidate;
auto doc = QJsonDocument::fromJson(scandidate.toUtf8());
void WebrtcHanlder::SetRemoteCandidate(QString sdp)
{
auto doc = QJsonDocument::fromJson(sdp.toUtf8());
auto obj = doc.object();
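// Extract sdpMid, sdpMLineIndex and the candidate string from the JSON envelope sent over the signaling channel.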
auto ssdp = obj["candidate"].toString();
std::string sdp_mid,sdps;
@ -238,7 +225,6 @@ void WebrtcHanlder::SetRemoteCandidate(QString scandidate)
sdp_mlineindex = obj[kCandidateSdpMlineIndexName].toInt();
sdp_mid = obj[kCandidateSdpMidName].toString().toStdString();
sdps = obj[kCandidateSdpName].toString().toStdString();
qDebug()<<"remote candidate"<<sdp_mlineindex<<sdp_mid.c_str()<<sdps.c_str();
webrtc::SdpParseError error;
std::unique_ptr<webrtc::IceCandidateInterface> candidate(
webrtc::CreateIceCandidate(sdp_mid, sdp_mlineindex, sdps, &error));
@ -248,11 +234,6 @@ void WebrtcHanlder::SetRemoteCandidate(QString scandidate)
}
}
void WebrtcHanlder::SetParent(MainWindow *p)
{
mParent = p;
}
void WebrtcHanlder::CreateAnwer()
{
qDebug()<<"create answer";
@ -269,15 +250,9 @@ void WebrtcHanlder::OnSignalingChange(webrtc::PeerConnectionInterface::Signaling
}
// The receiver has created the track successfully
void WebrtcHanlder::OnAddTrack(rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver,
const std::vector<rtc::scoped_refptr<webrtc::MediaStreamInterface> > &streams)
void WebrtcHanlder::OnAddTrack(rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver, const std::vector<rtc::scoped_refptr<webrtc::MediaStreamInterface> > &streams)
{
qDebug() << __FUNCTION__ << " " << receiver->id().c_str();
auto srcs = receiver->GetSources();
for (auto x : srcs){
qDebug()<<"sources "<<x.source_id();
}
qDebug()<<"OnAddTrack"<<receiver->id().c_str();
if (receiver->track().release()->kind() == webrtc::MediaStreamTrackInterface::kVideoKind)
OnRemoteTrack(receiver->track().release());
}
@ -301,7 +276,7 @@ void WebrtcHanlder::OnIceGatheringChange(webrtc::PeerConnectionInterface::IceGat
void WebrtcHanlder::OnIceCandidate(const webrtc::IceCandidateInterface *candidate)
{
qDebug() << __FUNCTION__ << " " <<candidate->sdp_mid().c_str()<< " " <<candidate->sdp_mline_index();
qDebug()<<"on condidate\r\n";
QJsonObject addr;
addr.insert(kCandidateSdpMidName, candidate->sdp_mid().c_str());
@ -312,7 +287,7 @@ void WebrtcHanlder::OnIceCandidate(const webrtc::IceCandidateInterface *candidat
return;
}
addr.insert(kCandidateSdpName,sdp.c_str());
qDebug()<<"OnIceCandidate:\r\n"<< QString(QJsonDocument(addr).toJson());
qDebug()<<"condidate:\r\n"<< QString(QJsonDocument(addr).toJson());
this->mClient->SendICECandidate(mRemoteName,QString(QJsonDocument(addr).toJson()));
}
@ -325,14 +300,14 @@ const char kSessionDescriptionTypeName[] = "type";
const char kSessionDescriptionSdpName[] = "sdp";
void WebrtcHanlder::OnSuccess(webrtc::SessionDescriptionInterface *desc)
{
qDebug()<<desc->type().c_str();
qDebug()<<__FUNCTION__<<desc->type().c_str();
std::string sdp;
desc->ToString(&sdp);
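// Serialize the description into a {"type", "sdp"} JSON object for the signaling channel.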
QJsonObject addr;
addr.insert(kSessionDescriptionTypeName,QString(webrtc::SdpTypeToString(desc->GetType())));
addr.insert(kSessionDescriptionSdpName,sdp.c_str());
qDebug()<<"sdp : \r\n "<<QJsonDocument(addr).toJson();
qDebug()<<"OnSuccess sdp : "<<QJsonDocument(addr).toJson()<<" "<<desc->type().c_str();
// Set the local SDP
m_peer_connection_->SetLocalDescription(
DummySetSessionDescriptionObserver::Create(), desc);
if(QString(desc->type().c_str()) == "offer"){
@ -364,7 +339,6 @@ void MainWindow::signal_conneted()
ui->label_5->setText("信令服务器已连接");
this->mSignalClient->SendLogin();
ui->label_6->setText(QString("本节点名称: " + mSignalClient->PeerName()));
this->mPeerName = mSignalClient->PeerName();
}
void MainWindow::on_pushButton_2_clicked()
@ -387,40 +361,38 @@ void MainWindow::signal_response(int type,QJsonObject data)
qDebug()<<type<<data;
switch (type) {
case 2004:
{
qDebug()<<"2004";
this->mModel->clear();
for (auto itr = data.begin();itr != data.end();itr++){
auto item = new QStandardItem(itr.key());
item->setEditable(false);
mModel->appendRow(item);
}
break;
this->mModel->clear();
for (auto itr = data.begin();itr != data.end();itr++){
auto item = new QStandardItem(itr.key());
item->setEditable(false);
mModel->appendRow(item);
}
case 2005:
{
qDebug()<<"2005";
auto sdp = data["sdp"].toString();
auto remote_name = data["remote_name"].toString();
mHandler->SetRemotePeerName(remote_name);
break;
case 2005:
{
auto sdp = data["sdp"].toString();
auto remote_name = data["remote_name"].toString();
mHandler->SetRemotePeerName(remote_name);
this->mHandler.get()->SetRemoteSdp(sdp);
mRemoteName = remote_name;
if(!mCalling)
mHandler->CreateAnwer();
break;
}
this->mHandler.get()->SetRemoteSdp(sdp);
mRemoteName = remote_name;
if(!mCalling)
mHandler->CreateAnwer();
break;
}
case 2006:
{
qDebug()<<"2006";
auto candidate = data["candidate"].toString();
auto remote_names = data["remote_name"].toString();
mHandler->SetRemotePeerName(remote_names);
this->mHandler.get()->SetRemoteCandidate(candidate);
mRemoteName = remote_names;
qDebug()<<"recv candidate"<<candidate;
break;
}
{
auto candidate = data["candidate"].toString();
auto remote_names = data["remote_name"].toString();
mHandler->SetRemotePeerName(remote_names);
mRemoteName = remote_names;
this->mHandler.get()->SetRemoteCandidate(candidate);
qDebug()<<"recv candidate"<<candidate;
break;
}
default:
break;
}
}
@ -428,19 +400,13 @@ void MainWindow::itemClicked(QModelIndex index)
{
if(!mSignalClient->Connected()){
qDebug()<<"请先连接信令服务";
return;
}
mHandler->CreateOffer();
//mSignalClient->SendSDPOffer();
qDebug()<<mModel->item(index.row())->text();
mRemoteName = mModel->item(index.row())->text();
if(mRemoteName == mPeerName){
auto x = new QMessageBox(nullptr);
x->setText("无法与自己通信");
x->show();
return;
}
mHandler->SetRemotePeerName(mRemoteName);
mCalling = true;
mHandler->CreateOffer();
}
@ -454,30 +420,30 @@ void MainWindow::on_track_add(webrtc::MediaStreamTrackInterface *data)
{
qDebug()<<"on_track_add"<<data->kind().c_str();
auto* track = reinterpret_cast<webrtc::MediaStreamTrackInterface*>(data);
if (track->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) {
auto* video_track = static_cast<webrtc::VideoTrackInterface*>(track);
qDebug()<<"remote trackid is "<<video_track->id().c_str();
remote_renderer_.reset(new VideoRenderer( 1, 1, video_track));
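// Queued connections marshal the renderer's UpdateFrame and NotifySize signals
// from the WebRTC callback thread onto the GUI thread.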
connect((VideoRenderer*)(remote_renderer_.get()),SIGNAL(UpdateFrame(rtc::scoped_refptr<webrtc::I420BufferInterface>&)),
ui->openGLWidget,SLOT(CameraData(rtc::scoped_refptr<webrtc::I420BufferInterface> &)),
connect((VideoRenderer*)(remote_renderer_.get()),
SIGNAL(UpdateFrame(rtc::scoped_refptr<webrtc::I420BufferInterface>&)),
(CPlayWidget*)ui->openGLWidget,
SLOT(OnCameraData(rtc::scoped_refptr<webrtc::I420BufferInterface> &)),
Qt::ConnectionType::QueuedConnection);
connect((VideoRenderer*)(remote_renderer_.get()),SIGNAL(NotifySize(uint32_t ,uint32_t )),
this,SLOT(on_notify_size(uint32_t ,uint32_t)),
Qt::ConnectionType::QueuedConnection);
}
track->Release();
}
void MainWindow::on_local_track_add(webrtc::VideoTrackInterface* data)
void MainWindow::on_notify_size(uint32_t width, uint32_t height)
{
qDebug()<<"on_local_track_add";
local_renderer_.reset(new VideoRenderer( 1, 1, data));
ui->openGLWidget->SetImgSize(width,height);
ui->openGLWidget->SetDataType(CPlayWidget::IMG_TYPE::TYPE_I420);
}
void MainWindow::rgba_data(uint8_t *buffer, uint64_t)
{
ui->openGLWidget->OnCameraDataRgb(buffer);
}
VideoRenderer::VideoRenderer(int width, int height, webrtc::VideoTrackInterface *track_to_render)
: rendered_track_(track_to_render)
{
@ -582,6 +548,7 @@ static void RawToBmp(unsigned char *pRawImage, int ImageHeight, int ImageWidth,
void VideoRenderer::OnFrame(const webrtc::VideoFrame &video_frame)
{
{
static bool first = true;
AutoLock<VideoRenderer> lock(this);
rtc::scoped_refptr<webrtc::I420BufferInterface> buffer(
video_frame.video_frame_buffer()->ToI420());
@ -589,26 +556,17 @@ void VideoRenderer::OnFrame(const webrtc::VideoFrame &video_frame)
buffer = webrtc::I420Buffer::Rotate(*buffer, video_frame.rotation());
}
RTC_DCHECK(image_ != NULL);
// libyuv::I420ToARGB(buffer->DataY(), buffer->StrideY(), buffer->DataU(),
// buffer->StrideU(), buffer->DataV(), buffer->StrideV(),
// image_,
// bmi_.bmiHeader.biWidth * bmi_.bmiHeader.biBitCount / 8,
// buffer->width(), buffer->height());
// rgbadata(image_,
// (uint64_t)(buffer->width()*buffer->height()*4));
auto i420_buffer_ = video_frame.video_frame_buffer()->ToI420();
auto type = video_frame.video_frame_buffer()->type();
SetSize(i420_buffer_->width(), i420_buffer_->height());
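// NotifySize fires only for the very first frame (the flag is static), so the GL widget is sized once.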
if(first){
NotifySize(i420_buffer_->width(),
i420_buffer_->height());
first = false;
}
qDebug()<<"onframe "<< i420_buffer_->width()
<< i420_buffer_->height();
UpdateFrame(buffer);
// libyuv::I420ToARGB(i420_buffer_->DataY(), i420_buffer_->StrideY(), i420_buffer_->DataU(),
// i420_buffer_->StrideU(), i420_buffer_->DataV(), i420_buffer_->StrideV(),
// image_,
// i420_buffer_->width()*4,
// i420_buffer_->width(), i420_buffer_->height());
// RawToBmp(image_,i420_buffer_->height(),i420_buffer_->width(),32,"d://sss2.bmp");
// qDebug()<<"local size: "<<i420_buffer_->width() << i420_buffer_->height();
}
}
@ -619,7 +577,7 @@ void VideoRenderer::SetSize(int width, int height)
}
bmi_.bmiHeader.biWidth = width;
bmi_.bmiHeader.biHeight = -height;
bmi_.bmiHeader.biHeight = height;
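// In a BITMAPINFOHEADER, a positive biHeight describes a bottom-up DIB; a negative value means top-down.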
bmi_.bmiHeader.biSizeImage =
width * height * (bmi_.bmiHeader.biBitCount >> 3);
image_ = new uint8_t[bmi_.bmiHeader.biSizeImage];

View File

@ -29,7 +29,6 @@
#include "api/video/i420_buffer.h"
#include "signal_client.h"
#include <QStandardItemModel>
#include "cplaywidget.h"
QT_BEGIN_NAMESPACE
namespace Ui { class MainWindow; }
@ -62,7 +61,7 @@ public:
signals:
void rgbadata(uint8_t *data,uint64_t len);
void UpdateFrame(rtc::scoped_refptr<webrtc::I420BufferInterface>& buffer);
void NotifySize(uint32_t width,uint32_t height);
protected:
void SetSize(int width, int height);
@ -89,31 +88,26 @@ class AutoLock {
protected:
T* obj_;
};
class MainWindow;
class WebrtcHanlder :public QObject,
public webrtc::PeerConnectionObserver,
public webrtc::CreateSessionDescriptionObserver{
Q_OBJECT
public:
signals:
void OnOfferSdp(QString);
void OnAnswerSdp(QString);
void OnRemoteTrack(webrtc::MediaStreamTrackInterface*);
void OnLocalTrack(webrtc::VideoTrackInterface* );
public:
void SetRemotePeerName(QString);
int InitWebrtc();
int AddTrack();
void SetSignalClient(SignalClient *);
void CreateOffer();
void CreateAnswer();
void SetRemotePeerName(QString remote);
void SetRemoteSdp(QString);
void SetRemoteCandidate(QString);
void SetParent(MainWindow *p);
void CreateAnwer();
protected:
~WebrtcHanlder();
//
@ -145,26 +139,17 @@ private:
m_peer_connection_factory_;
SignalClient *mClient;
QString mRemoteName;
MainWindow *mParent;
};
class MainWindow :public QMainWindow
{
Q_OBJECT
public:
Ui::MainWindow *ui;
MainWindow(QWidget *parent = nullptr);
~MainWindow();
CPlayWidget* OpenglWidget();
protected:
public slots:
void OnUpdateFrame( rtc::scoped_refptr<webrtc::I420BufferInterface>& buffer);
void OnUpdateFrame1( uint8_t *);
private slots:
void on_pushButton_clicked();
void signal_conneted();
void on_pushButton_2_clicked();
@ -173,20 +158,17 @@ public slots:
void itemClicked(QModelIndex);
void on_local_sdp(QString);
void on_track_add(webrtc::MediaStreamTrackInterface*);
void on_local_track_add(webrtc::VideoTrackInterface* );
void rgba_data(uint8_t *,uint64_t );
void on_notify_size(uint32_t,uint32_t);
private:
Ui::MainWindow *ui;
rtc::scoped_refptr<WebrtcHanlder> mHandler;
SignalClient *mSignalClient;
QStandardItemModel *mModel;
QString mPeerName;
QString mRemoteName;
QString mRemoteNameCalled;
bool mCalling;
std::unique_ptr<VideoRenderer> local_renderer_;
std::unique_ptr<VideoRenderer> remote_renderer_;
};
#endif // MAINWINDOW_H