Show outgoing video stream.

This commit is contained in:
John Preston 2020-07-31 18:36:35 +04:00
parent b692242012
commit 35dfb9fab3
6 changed files with 98 additions and 76 deletions

View File

@ -18,8 +18,8 @@ CallSignalBars {
inactiveOpacity: double;
}
callWidth: 300px;
callHeight: 470px;
callWidth: 480px;
callHeight: 640px;
callRadius: 6px;
callShadow: Shadow {
left: icon {{ "call_shadow_left", windowShadowFg }};
@ -33,6 +33,7 @@ callShadow: Shadow {
extend: margins(9px, 8px, 9px, 10px);
fallback: windowShadowFgFallback;
}
callPhotoSize: 180px;
callButton: IconButton {
width: 72px;

View File

@ -44,12 +44,11 @@ namespace {
constexpr auto kMinLayer = 65;
constexpr auto kHangupTimeoutMs = 5000;
constexpr auto kSha256Size = 32;
constexpr auto kDropFramesWhileInactive = 5 * crl::time(1000);
const auto kDefaultVersion = "2.4.4"_q;
const auto RegisterTag = tgcalls::Register<tgcalls::InstanceImpl>();
const auto RegisterTagLegacy = tgcalls::Register<tgcalls::InstanceImplLegacy>();
const auto RegisterTagReference = tgcalls::Register<tgcalls::InstanceImplReference>();
//const auto RegisterTagReference = tgcalls::Register<tgcalls::InstanceImplReference>();
void AppendEndpoint(
std::vector<tgcalls::Endpoint> &list,
@ -153,14 +152,17 @@ Call::Call(
: _delegate(delegate)
, _user(user)
, _api(&_user->session().mtp())
, _type(type) {
_discardByTimeoutTimer.setCallback([this] { hangup(); });
, _type(type)
, _videoIncoming(std::make_unique<webrtc::VideoTrack>())
, _videoOutgoing(std::make_unique<webrtc::VideoTrack>()) {
_discardByTimeoutTimer.setCallback([=] { hangup(); });
if (_type == Type::Outgoing) {
setState(State::Requesting);
} else {
startWaitingTrack();
}
setupOutgoingVideo();
}
void Call::generateModExpFirst(bytes::const_span randomSeed) {
@ -308,6 +310,7 @@ void Call::actuallyAnswer() {
MTP_vector(CollectVersionsForApi()))
)).done([=](const MTPphone_PhoneCall &result) {
Expects(result.type() == mtpc_phone_phoneCall);
auto &call = result.c_phone_phoneCall();
_user->session().data().processUsers(call.vusers());
if (call.vphone_call().type() != mtpc_phoneCallWaiting) {
@ -330,24 +333,37 @@ void Call::setMuted(bool mute) {
}
}
void Call::setVideoEnabled(bool enabled) {
if (_state.current() != State::Established) {
return;
}
_videoEnabled = enabled;
if (enabled) {
if (!_videoCapture) {
_videoCapture = tgcalls::VideoCaptureInterface::Create();
void Call::setupOutgoingVideo() {
const auto started = _videoOutgoing->enabled();
_videoOutgoing->enabledValue(
) | rpl::start_with_next([=](bool enabled) {
if (_state.current() != State::Established
&& enabled != started
&& !(_type == Type::Incoming && !_id)) {
_videoOutgoing->setEnabled(started);
} else if (enabled) {
if (!_videoCapture) {
_videoCapture = tgcalls::VideoCaptureInterface::Create();
_videoCapture->setVideoOutput(_videoOutgoing->sink());
}
if (_instance) {
_instance->requestVideo(_videoCapture);
} else {
_videoState = VideoState::OutgoingRequested;
}
_videoCapture->setIsVideoEnabled(true);
} else if (_videoCapture) {
_videoCapture->setIsVideoEnabled(false);
}
if (_instance) {
_instance->requestVideo(_videoCapture);
} else {
_videoState = VideoState::OutgoingRequested;
}
_videoCapture->setIsVideoEnabled(true);
} else if (_videoCapture) {
_videoCapture->setIsVideoEnabled(false);
}
}, _lifetime);
}
not_null<webrtc::VideoTrack*> Call::videoIncoming() const {
	// Non-owning accessor: the track is owned by this Call
	// through the const unique_ptr member.
	const auto track = _videoIncoming.get();
	return track;
}
not_null<webrtc::VideoTrack*> Call::videoOutgoing() const {
	// Non-owning accessor: the track is owned by this Call
	// through the const unique_ptr member.
	const auto track = _videoOutgoing.get();
	return track;
}
crl::time Call::getDurationMs() const {
@ -409,10 +425,6 @@ void Call::sendSignalingData(const QByteArray &data) {
}).send();
}
// Publishes a decoded video frame to subscribers of the _frames
// event stream (consumed by the panel via frames()).
void Call::displayNextFrame(QImage frame) {
_frames.fire(std::move(frame));
}
float64 Call::getWaitingSoundPeakValue() const {
if (_waitingTrack) {
auto when = crl::now() + kSoundSampleMs / 4;
@ -451,9 +463,13 @@ bool Call::handleUpdate(const MTPPhoneCall &call) {
finish(FinishType::Failed);
return true;
}
// We are allowed to change it for non-established call
// only in case `incoming && !_id`, only when we just received it.
_videoOutgoing->setEnabled(data.is_video());
_id = data.vid().v;
_accessHash = data.vaccess_hash().v;
setVideoEnabled(data.is_video());
auto gaHashBytes = bytes::make_span(data.vg_a_hash().v);
if (gaHashBytes.size() != kSha256Size) {
LOG(("Call Error: Wrong g_a_hash size %1, expected %2."
@ -671,7 +687,7 @@ void Call::createAndStartController(const MTPDphoneCall &call) {
.encryptionKey = tgcalls::EncryptionKey(
std::move(encryptionKeyValue),
(_type == Type::Outgoing)),
.videoCapture = _videoEnabled.current() ? _videoCapture : nullptr,
.videoCapture = _videoOutgoing->enabled() ? _videoCapture : nullptr,
.stateUpdated = [=](tgcalls::State state, tgcalls::VideoState videoState) {
crl::on_main(weak, [=] {
handleControllerStateChange(state, videoState);
@ -684,12 +700,7 @@ void Call::createAndStartController(const MTPDphoneCall &call) {
},
.remoteVideoIsActiveUpdated = [=](bool active) {
crl::on_main(weak, [=] {
if (!active) {
_frames.fire(QImage());
_remoteVideoInactiveFrom = crl::now();
} else {
_remoteVideoInactiveFrom = 0;
}
_videoIncoming->setEnabled(active);
});
},
.signalingDataEmitted = [=](const std::vector<uint8_t> &data) {
@ -758,18 +769,8 @@ void Call::createAndStartController(const MTPDphoneCall &call) {
raw->setMuteMicrophone(_muted.current());
}
_videoTrack = std::make_shared<webrtc::VideoTrack>();
_videoTrack->renderNextFrame(
) | rpl::start_with_next([=] {
if (_remoteVideoInactiveFrom > 0
&& (_remoteVideoInactiveFrom + kDropFramesWhileInactive
> crl::now())) {
} else {
_frames.fire_copy(_videoTrack->frame(webrtc::FrameRequest()));
_videoTrack->markFrameShown();
}
}, lifetime());
raw->setIncomingVideoOutput(_videoTrack->sink());
_videoIncoming->setEnabled(_videoOutgoing->enabled());
raw->setIncomingVideoOutput(_videoIncoming->sink());
const auto &settings = Core::App().settings();
raw->setAudioOutputDevice(

View File

@ -131,17 +131,8 @@ public:
return _muted.value();
}
void setVideoEnabled(bool enabled);
[[nodiscard]] bool videoEnabled() const {
return _videoEnabled.current();
}
[[nodiscard]] rpl::producer<bool> videoEnabledValue() const {
return _videoEnabled.value();
}
[[nodiscard]] rpl::producer<QImage> frames() const {
return _frames.events();
}
[[nodiscard]] not_null<webrtc::VideoTrack*> videoIncoming() const;
[[nodiscard]] not_null<webrtc::VideoTrack*> videoOutgoing() const;
crl::time getDurationMs() const;
float64 getWaitingSoundPeakValue() const;
@ -178,7 +169,6 @@ private:
void startIncoming();
void startWaitingTrack();
void sendSignalingData(const QByteArray &data);
void displayNextFrame(QImage frame);
void generateModExpFirst(bytes::const_span randomSeed);
void handleControllerStateChange(tgcalls::State state, tgcalls::VideoState videoState);
@ -199,8 +189,10 @@ private:
void setSignalBarCount(int count);
void destroyController();
not_null<Delegate*> _delegate;
not_null<UserData*> _user;
void setupOutgoingVideo();
const not_null<Delegate*> _delegate;
const not_null<UserData*> _user;
MTP::Sender _api;
Type _type = Type::Outgoing;
rpl::variable<State> _state = State::Starting;
@ -213,9 +205,6 @@ private:
base::Timer _discardByTimeoutTimer;
rpl::variable<bool> _muted = false;
rpl::variable<bool> _videoEnabled = false;
rpl::event_stream<QImage> _frames;
crl::time _remoteVideoInactiveFrom = 0;
DhConfig _dhConfig;
bytes::vector _ga;
@ -231,7 +220,8 @@ private:
std::unique_ptr<tgcalls::Instance> _instance;
std::shared_ptr<tgcalls::VideoCaptureInterface> _videoCapture;
std::shared_ptr<webrtc::VideoTrack> _videoTrack;
const std::unique_ptr<webrtc::VideoTrack> _videoIncoming;
const std::unique_ptr<webrtc::VideoTrack> _videoOutgoing;
std::unique_ptr<Media::Audio::Track> _waitingTrack;

View File

@ -33,6 +33,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "window/main_window.h"
#include "layout.h"
#include "app.h"
#include "webrtc/webrtc_video_track.h"
#include "styles/style_calls.h"
#include "styles/style_history.h"
@ -366,10 +367,11 @@ void Panel::initControls() {
}, lifetime());
_camera->setClickedCallback([=] {
if (_call) {
_call->setVideoEnabled(!_call->videoEnabled());
_call->videoOutgoing()->setEnabled(
!_call->videoOutgoing()->enabled());
}
});
_call->videoEnabledValue(
_call->videoOutgoing()->enabledValue(
) | rpl::start_with_next([=](bool enabled) {
_camera->setIconOverride(enabled ? nullptr : &st::callNoCameraIcon);
}, lifetime());
@ -413,8 +415,10 @@ void Panel::initControls() {
_decline->finishAnimating();
_cancel->finishAnimating();
_call->frames() | rpl::start_with_next([=](QImage frame) {
_videoFrame = std::move(frame);
rpl::merge(
_call->videoIncoming()->renderNextFrame(),
_call->videoOutgoing()->renderNextFrame()
) | rpl::start_with_next([=] {
update();
}, lifetime());
}
@ -760,16 +764,44 @@ void Panel::paintEvent(QPaintEvent *e) {
p.fillRect(0, _contentTop, width(), height() - _contentTop, brush);
}
if (!_videoFrame.isNull()) {
const auto incomingFrame = _call
? _call->videoIncoming()->frame(webrtc::FrameRequest())
: QImage();
if (!incomingFrame.isNull()) {
const auto to = rect().marginsRemoved(_padding);
p.save();
p.setClipRect(to);
const auto big = _videoFrame.size().scaled(to.size(), Qt::KeepAspectRatioByExpanding);
const auto pos = QPoint((to.width() - big.width()) / 2, (to.height() - big.height()) / 2);
const auto big = incomingFrame.size().scaled(to.size(), Qt::KeepAspectRatioByExpanding);
const auto pos = QPoint(
to.left() + (to.width() - big.width()) / 2,
to.top() + (to.height() - big.height()) / 2);
auto hq = PainterHighQualityEnabler(p);
p.drawImage(QRect(pos, big), _videoFrame);
p.drawImage(QRect(pos, big), incomingFrame);
p.restore();
}
_call->videoIncoming()->markFrameShown();
const auto outgoingFrame = _call
? _call->videoOutgoing()->frame(webrtc::FrameRequest())
: QImage();
if (!outgoingFrame.isNull()) {
const auto size = QSize(width() / 3, height() / 3);
const auto to = QRect(
width() - 2 * _padding.right() - size.width(),
2 * _padding.bottom(),
size.width(),
size.height());
p.save();
p.setClipRect(to);
const auto big = outgoingFrame.size().scaled(to.size(), Qt::KeepAspectRatioByExpanding);
const auto pos = QPoint(
to.left() + (to.width() - big.width()) / 2,
to.top() + (to.height() - big.height()) / 2);
auto hq = PainterHighQualityEnabler(p);
p.drawImage(QRect(pos, big), outgoingFrame);
p.restore();
}
_call->videoOutgoing()->markFrameShown();
if (_signalBars->isDisplayed()) {
paintSignalBarsBg(p);

View File

@ -156,8 +156,6 @@ private:
QPixmap _bottomCache;
QPixmap _cache;
QImage _videoFrame;
};
} // namespace Calls

@ -1 +1 @@
Subproject commit 29026bab2166c746b51c088de4a218f9b7a5921e
Subproject commit 78e8e4ae7746f0ce5e56a15c4087d4ec997bec84