From 9339db900fd2c1f5fa47ddbfd655aa7019d060b7 Mon Sep 17 00:00:00 2001
From: John Preston
Date: Wed, 11 Dec 2019 17:01:11 +0300
Subject: [PATCH] Pause GIFs in message history.

---
 .../history/view/media/history_view_gif.cpp   |  15 +-
 .../history/view/media/history_view_gif.h     |   4 +-
 .../media/streaming/media_streaming_common.h  |   2 +-
 .../streaming/media_streaming_instance.cpp    |   4 +
 .../streaming/media_streaming_instance.h      |   1 +
 .../streaming/media_streaming_player.cpp      |  27 +++-
 .../media/streaming/media_streaming_player.h  |   2 +
 .../streaming/media_streaming_video_track.cpp | 143 +++++++++++++++---
 .../streaming/media_streaming_video_track.h   |  12 ++
 Telegram/lib_lottie                           |   2 +-
 10 files changed, 176 insertions(+), 36 deletions(-)

diff --git a/Telegram/SourceFiles/history/view/media/history_view_gif.cpp b/Telegram/SourceFiles/history/view/media/history_view_gif.cpp
index d71544000c..32801fe2dc 100644
--- a/Telegram/SourceFiles/history/view/media/history_view_gif.cpp
+++ b/Telegram/SourceFiles/history/view/media/history_view_gif.cpp
@@ -253,7 +253,6 @@ void Gif::draw(Painter &p, const QRect &r, TextSelection selection, crl::time ms
 	const auto isRound = _data->isVideoMessage();
 	auto displayMute = false;
 	const auto streamed = activeCurrentStreamed();
-	const auto player = streamed ? &streamed->player() : nullptr;
 
 	if ((!streamed || item->id < 0) && displayLoading) {
 		ensureAnimation();
@@ -310,7 +309,10 @@
 		request.resize = QSize(_thumbw, _thumbh) * cIntRetinaFactor();
 		request.corners = roundCorners;
 		request.radius = roundRadius;
-		p.drawImage(rthumb, player->frame(request));
+		p.drawImage(rthumb, streamed->frame(request));
+		if (!paused) {
+			streamed->markFrameShown();
+		}
 
 		if (const auto playback = videoPlayback()) {
 			const auto value = playback->value();
@@ -367,7 +369,7 @@
 	}
 
 	if (radial
-		|| (!player
+		|| (!streamed
 			&& !startPlayAsync
 			&& ((_streamed && _streamed->player().failed())
 				|| (!_data->loaded() && !_data->loading())
@@ -434,7 +436,7 @@
 		}
 	}
 
-	if (!isRound && (!player || item->id < 0)) {
+	if (!isRound && (!streamed || item->id < 0)) {
 		auto statusX = paintx + st::msgDateImgDelta + st::msgDateImgPadding.x();
 		auto statusY = painty + st::msgDateImgDelta + st::msgDateImgPadding.y();
 		auto statusW = st::normalFont->width(_statusText) + 2 * st::msgDateImgPadding.x();
@@ -829,7 +831,7 @@ int Gif::additionalWidth(const HistoryMessageVia *via, const HistoryMessageReply
 	return ::Media::Player::instance()->roundVideoStreamed(_parent->data());
 }
 
-const ::Media::Streaming::Instance *Gif::activeOwnStreamed() const {
+::Media::Streaming::Instance *Gif::activeOwnStreamed() const {
 	return (_streamed
 		&& _streamed->player().ready()
 		&& !_streamed->player().videoSize().isEmpty())
@@ -837,7 +839,7 @@ const ::Media::Streaming::Instance *Gif::activeOwnStreamed() const {
 		: nullptr;
 }
 
-const ::Media::Streaming::Instance *Gif::activeCurrentStreamed() const {
+::Media::Streaming::Instance *Gif::activeCurrentStreamed() const {
 	if (const auto streamed = activeRoundStreamed()) {
 		return streamed;
 	}
@@ -903,6 +905,7 @@ void Gif::playAnimation(bool autoplay) {
 	}
 	auto options = ::Media::Streaming::PlaybackOptions();
 	options.audioId = AudioMsgId(_data, _realParent->fullId());
+	options.waitForMarkAsShown = true;
 	//if (!_streamed->withSound) {
 	options.mode = ::Media::Streaming::Mode::Video;
 	options.loop = true;
diff --git a/Telegram/SourceFiles/history/view/media/history_view_gif.h b/Telegram/SourceFiles/history/view/media/history_view_gif.h
index f045b2ec6d..7632c1f856 100644
--- a/Telegram/SourceFiles/history/view/media/history_view_gif.h
+++ b/Telegram/SourceFiles/history/view/media/history_view_gif.h
@@ -92,8 +92,8 @@ private:
 	QSize countCurrentSize(int newWidth) override;
 	QSize videoSize() const;
 	::Media::Streaming::Instance *activeRoundStreamed() const;
-	const ::Media::Streaming::Instance *activeOwnStreamed() const;
-	const ::Media::Streaming::Instance *activeCurrentStreamed() const;
+	::Media::Streaming::Instance *activeOwnStreamed() const;
+	::Media::Streaming::Instance *activeCurrentStreamed() const;
 	::Media::View::PlaybackProgress *videoPlayback() const;
 
 	bool createStreamedPlayer();
diff --git a/Telegram/SourceFiles/media/streaming/media_streaming_common.h b/Telegram/SourceFiles/media/streaming/media_streaming_common.h
index fcc7480752..48833ccd6e 100644
--- a/Telegram/SourceFiles/media/streaming/media_streaming_common.h
+++ b/Telegram/SourceFiles/media/streaming/media_streaming_common.h
@@ -43,7 +43,7 @@ struct PlaybackOptions {
 	float64 speed = 1.; // Valid values between 0.5 and 2.
 	AudioMsgId audioId;
 	bool syncVideoByAudio = true;
-	bool dropStaleFrames = true;
+	bool waitForMarkAsShown = false;
 	bool loop = false;
 };
 
diff --git a/Telegram/SourceFiles/media/streaming/media_streaming_instance.cpp b/Telegram/SourceFiles/media/streaming/media_streaming_instance.cpp
index b5e4f2667b..82e1b2e04f 100644
--- a/Telegram/SourceFiles/media/streaming/media_streaming_instance.cpp
+++ b/Telegram/SourceFiles/media/streaming/media_streaming_instance.cpp
@@ -140,6 +140,10 @@ QImage Instance::frame(const FrameRequest &request) const {
 	return player().frame(request);
 }
 
+bool Instance::markFrameShown() {
+	return _shared->player().markFrameShown();
+}
+
 rpl::lifetime &Instance::lifetime() {
 	return _lifetime;
 }
diff --git a/Telegram/SourceFiles/media/streaming/media_streaming_instance.h b/Telegram/SourceFiles/media/streaming/media_streaming_instance.h
index dc24b0b0b5..479691d4cc 100644
--- a/Telegram/SourceFiles/media/streaming/media_streaming_instance.h
+++ b/Telegram/SourceFiles/media/streaming/media_streaming_instance.h
@@ -60,6 +60,7 @@ public:
 	void callWaitingCallback();
 
 	[[nodiscard]] QImage frame(const FrameRequest &request) const;
+	bool markFrameShown();
 
 	rpl::lifetime &lifetime();
 
diff --git a/Telegram/SourceFiles/media/streaming/media_streaming_player.cpp b/Telegram/SourceFiles/media/streaming/media_streaming_player.cpp
index a39b18aff5..ee9c18f4fa 100644
--- a/Telegram/SourceFiles/media/streaming/media_streaming_player.cpp
+++ b/Telegram/SourceFiles/media/streaming/media_streaming_player.cpp
@@ -101,7 +101,6 @@ void Player::checkNextFrameRender() {
 		}
 	} else {
 		_renderFrameTimer.cancel();
-		_nextFrameTime = kTimeUnknown;
 		renderFrame(now);
 	}
 }
@@ -110,6 +109,7 @@ void Player::checkNextFrameAvailability() {
 	Expects(_video != nullptr);
 
 	_nextFrameTime = _video->nextFrameDisplayTime();
+	Assert(_nextFrameTime != kFrameDisplayTimeAlreadyDone);
 	if (_nextFrameTime != kTimeUnknown) {
 		checkNextFrameRender();
 	}
@@ -117,13 +117,32 @@
 
 void Player::renderFrame(crl::time now) {
 	Expects(_video != nullptr);
+	Expects(_nextFrameTime != kTimeUnknown);
+	Expects(_nextFrameTime != kFrameDisplayTimeAlreadyDone);
 
 	const auto position = _video->markFrameDisplayed(now);
-	Assert(position != kTimeUnknown);
+	if (_options.waitForMarkAsShown) {
+		_currentFrameTime = _nextFrameTime;
+		_nextFrameTime = kFrameDisplayTimeAlreadyDone;
+	} else {
+		_video->markFrameShown();
+		_nextFrameTime = kTimeUnknown;
+	}
+	Assert(position != kTimeUnknown);
 
 	videoPlayedTill(position);
 }
 
+bool Player::markFrameShown() {
+	Expects(_video != nullptr);
+
+	if (_nextFrameTime == kFrameDisplayTimeAlreadyDone) {
+		_nextFrameTime = kTimeUnknown;
+		_video->addTimelineDelay(crl::now() - _currentFrameTime);
+	}
+	return _video->markFrameShown();
+}
+
 template <typename Track>
 void Player::trackReceivedTill(
 		const Track &track,
@@ -690,7 +709,9 @@ void Player::start() {
 }
 
 void Player::checkVideoStep() {
-	if (_nextFrameTime != kTimeUnknown) {
+	if (_nextFrameTime == kFrameDisplayTimeAlreadyDone) {
+		return;
+	} else if (_nextFrameTime != kTimeUnknown) {
 		checkNextFrameRender();
 	} else {
 		checkNextFrameAvailability();
diff --git a/Telegram/SourceFiles/media/streaming/media_streaming_player.h b/Telegram/SourceFiles/media/streaming/media_streaming_player.h
index 0261bfa7d9..38a6bef7a0 100644
--- a/Telegram/SourceFiles/media/streaming/media_streaming_player.h
+++ b/Telegram/SourceFiles/media/streaming/media_streaming_player.h
@@ -61,6 +61,7 @@
 	[[nodiscard]] QSize videoSize() const;
 
 	[[nodiscard]] QImage frame(const FrameRequest &request) const;
+	bool markFrameShown();
 
 	[[nodiscard]] Media::Player::TrackState prepareLegacyState() const;
 
@@ -175,6 +176,7 @@
 
 	crl::time _startedTime = kTimeUnknown;
 	crl::time _pausedTime = kTimeUnknown;
+	crl::time _currentFrameTime = kTimeUnknown;
 	crl::time _nextFrameTime = kTimeUnknown;
 	base::Timer _renderFrameTimer;
 	rpl::event_stream<Update, Error> _updates;
diff --git a/Telegram/SourceFiles/media/streaming/media_streaming_video_track.cpp b/Telegram/SourceFiles/media/streaming/media_streaming_video_track.cpp
index d267f9cf93..a928555cef 100644
--- a/Telegram/SourceFiles/media/streaming/media_streaming_video_track.cpp
+++ b/Telegram/SourceFiles/media/streaming/media_streaming_video_track.cpp
@@ -43,7 +43,8 @@ public:
 	void resume(crl::time time);
 	void setSpeed(float64 speed);
 	void interrupt();
-	void frameDisplayed();
+	void frameShown();
+	void addTimelineDelay(crl::time delayed);
 	void updateFrameRequest(const FrameRequest &request);
 
 private:
@@ -222,7 +223,7 @@ void VideoTrackObject::readFrames() {
 
 auto VideoTrackObject::readEnoughFrames(crl::time trackTime)
 -> ReadEnoughState {
-	const auto dropStaleFrames = _options.dropStaleFrames;
+	const auto dropStaleFrames = !_options.waitForMarkAsShown;
 	const auto state = _shared->prepareState(trackTime, dropStaleFrames);
 	return state.match([&](Shared::PrepareFrame frame) -> ReadEnoughState {
 		while (true) {
@@ -300,7 +301,6 @@ void VideoTrackObject::presentFrameIfNeeded() {
 	if (_pausedTime != kTimeUnknown || _resumedTime == kTimeUnknown) {
 		return;
 	}
-	const auto time = trackTime();
 	const auto rasterize = [&](not_null<Frame*> frame) {
 		Expects(frame->position != kFinishedPosition);
 
@@ -320,11 +320,13 @@ void VideoTrackObject::presentFrameIfNeeded() {
 
 		Ensures(VideoTrack::IsRasterized(frame));
 	};
+	const auto dropStaleFrames = !_options.waitForMarkAsShown;
 	const auto presented = _shared->presentFrame(
-		time,
+		trackTime(),
 		_options.speed,
-		_options.dropStaleFrames,
+		dropStaleFrames,
 		rasterize);
+	addTimelineDelay(presented.addedWorldTimeDelay);
 	if (presented.displayPosition == kFinishedPosition) {
 		interrupt();
 		_checkNextFrame = rpl::event_stream<>();
@@ -384,13 +386,25 @@ bool VideoTrackObject::interrupted() const {
 	return (_shared == nullptr);
 }
 
-void VideoTrackObject::frameDisplayed() {
+void VideoTrackObject::frameShown() {
 	if (interrupted()) {
 		return;
 	}
 	queueReadFrames();
 }
 
+void VideoTrackObject::addTimelineDelay(crl::time delayed) {
+	Expects(_syncTimePoint.valid());
+
+	if (!delayed) {
+		return;
+	}
+	if (delayed > 1000) {
+		int a = 0;
+	}
+	_syncTimePoint.worldTime += delayed;
+}
+
 void VideoTrackObject::updateFrameRequest(const FrameRequest &request) {
 	_request = request;
 }
@@ -529,6 +543,7 @@ void VideoTrack::Shared::init(QImage &&cover, crl::time position) {
 	// But in this case we update _counter, so we set a fake displayed time.
 	_frames[0].displayed = kDisplaySkipped;
 
+	_delay = 0;
 	_counter.store(0, std::memory_order_release);
 }
 
@@ -617,23 +632,25 @@ auto VideoTrack::Shared::presentFrame(
 	const auto present = [&](int counter, int index) -> PresentFrame {
 		const auto frame = getFrame(index);
 		const auto position = frame->position;
+		const auto addedWorldTimeDelay = base::take(_delay);
 		if (position == kFinishedPosition) {
-			return { kFinishedPosition, kTimeUnknown };
+			return { kFinishedPosition, kTimeUnknown, addedWorldTimeDelay };
 		}
 		rasterize(frame);
 		if (!IsRasterized(frame)) {
 			// Error happened during frame prepare.
-			return { kTimeUnknown, kTimeUnknown };
+			return { kTimeUnknown, kTimeUnknown, addedWorldTimeDelay };
 		}
 		const auto trackLeft = position - time.trackTime;
 		frame->display = time.worldTime
+			+ addedWorldTimeDelay
 			+ crl::time(std::round(trackLeft / playbackSpeed));
 
 		// Release this frame to the main thread for rendering.
 		_counter.store(
 			(counter + 1) % (2 * kFramesCount),
 			std::memory_order_release);
-		return { position, crl::time(0) };
+		return { position, crl::time(0), addedWorldTimeDelay };
 	};
 	const auto nextCheckDelay = [&](int index) -> PresentFrame {
 		const auto frame = getFrame(index);
@@ -669,6 +686,10 @@ crl::time VideoTrack::Shared::nextFrameDisplayTime() const {
 		const auto next = (counter + 1) % (2 * kFramesCount);
 		const auto index = next / 2;
 		const auto frame = getFrame(index);
+		if (frame->displayed != kTimeUnknown) {
+			// Frame already displayed, but not yet shown.
+			return kFrameDisplayTimeAlreadyDone;
+		}
 		Assert(IsRasterized(frame));
 		Assert(frame->display != kTimeUnknown);
 
@@ -689,33 +710,92 @@ crl::time VideoTrack::Shared::nextFrameDisplayTime() const {
 }
 
 crl::time VideoTrack::Shared::markFrameDisplayed(crl::time now) {
-	const auto markAndJump = [&](int counter) {
+	const auto mark = [&](int counter) {
 		const auto next = (counter + 1) % (2 * kFramesCount);
 		const auto index = next / 2;
 		const auto frame = getFrame(index);
 		Assert(frame->position != kTimeUnknown);
-		Assert(frame->displayed == kTimeUnknown);
-
-		frame->displayed = now;
-		_counter.store(
-			next,
-			std::memory_order_release);
+		if (frame->displayed == kTimeUnknown) {
+			frame->displayed = now;
+		}
 		return frame->position;
 	};
 
 	switch (counter()) {
 	case 0: Unexpected("Value 0 in VideoTrack::Shared::markFrameDisplayed.");
-	case 1: return markAndJump(1);
+	case 1: return mark(1);
 	case 2: Unexpected("Value 2 in VideoTrack::Shared::markFrameDisplayed.");
-	case 3: return markAndJump(3);
+	case 3: return mark(3);
 	case 4: Unexpected("Value 4 in VideoTrack::Shared::markFrameDisplayed.");
-	case 5: return markAndJump(5);
+	case 5: return mark(5);
 	case 6: Unexpected("Value 6 in VideoTrack::Shared::markFrameDisplayed.");
-	case 7: return markAndJump(7);
+	case 7: return mark(7);
 	}
 	Unexpected("Counter value in VideoTrack::Shared::markFrameDisplayed.");
 }
 
+void VideoTrack::Shared::addTimelineDelay(crl::time delayed) {
+	if (!delayed) {
+		return;
+	}
+	const auto recountCurrentFrame = [&](int counter) {
+		_delay += delayed;
+		if (delayed > 1000) {
+			int a = 0;
+		}
+
+		//const auto next = (counter + 1) % (2 * kFramesCount);
+		//const auto index = next / 2;
+		//const auto frame = getFrame(index);
+		//if (frame->displayed != kTimeUnknown) {
+		//	// Frame already displayed.
+		//	return;
+		//}
+		//Assert(IsRasterized(frame));
+		//Assert(frame->display != kTimeUnknown);
+		//frame->display = countFrameDisplayTime(frame->index);
+	};
+
+	switch (counter()) {
+	case 0: Unexpected("Value 0 in VideoTrack::Shared::addTimelineDelay.");
+	case 1: return recountCurrentFrame(1);
+	case 2: Unexpected("Value 2 in VideoTrack::Shared::addTimelineDelay.");
+	case 3: return recountCurrentFrame(3);
+	case 4: Unexpected("Value 4 in VideoTrack::Shared::addTimelineDelay.");
+	case 5: return recountCurrentFrame(5);
+	case 6: Unexpected("Value 6 in VideoTrack::Shared::addTimelineDelay.");
+	case 7: return recountCurrentFrame(7);
+	}
+	Unexpected("Counter value in VideoTrack::Shared::addTimelineDelay.");
+}
+
+bool VideoTrack::Shared::markFrameShown() {
+	const auto jump = [&](int counter) {
+		const auto next = (counter + 1) % (2 * kFramesCount);
+		const auto index = next / 2;
+		const auto frame = getFrame(index);
+		if (frame->displayed == kTimeUnknown) {
+			return false;
+		}
+		_counter.store(
+			next,
+			std::memory_order_release);
+		return true;
+	};
+
+	switch (counter()) {
+	case 0: return false;
+	case 1: return jump(1);
+	case 2: return false;
+	case 3: return jump(3);
+	case 4: return false;
+	case 5: return jump(5);
+	case 6: return false;
+	case 7: return jump(7);
+	}
+	Unexpected("Counter value in VideoTrack::Shared::markFrameShown.");
+}
+
 not_null<VideoTrack::Frame*> VideoTrack::Shared::frameForPaint() {
 	const auto result = getFrame(counter() / 2);
 	Assert(!result->original.isNull());
@@ -793,14 +873,31 @@ crl::time VideoTrack::nextFrameDisplayTime() const {
 
 crl::time VideoTrack::markFrameDisplayed(crl::time now) {
 	const auto result = _shared->markFrameDisplayed(now);
-	_wrapped.with([](Implementation &unwrapped) {
-		unwrapped.frameDisplayed();
-	});
 
 	Ensures(result != kTimeUnknown);
 	return result;
 }
 
+void VideoTrack::addTimelineDelay(crl::time delayed) {
+	_shared->addTimelineDelay(delayed);
+	//if (!delayed) {
+	//	return;
+	//}
+	//_wrapped.with([=](Implementation &unwrapped) mutable {
+	//	unwrapped.addTimelineDelay(delayed);
+	//});
+}
+
+bool VideoTrack::markFrameShown() {
+	if (!_shared->markFrameShown()) {
+		return false;
+	}
+	_wrapped.with([](Implementation &unwrapped) {
+		unwrapped.frameShown();
+	});
+	return true;
+}
+
 QImage VideoTrack::frame(const FrameRequest &request) {
 	const auto frame = _shared->frameForPaint();
 	const auto changed = (frame->request != request)
diff --git a/Telegram/SourceFiles/media/streaming/media_streaming_video_track.h b/Telegram/SourceFiles/media/streaming/media_streaming_video_track.h
index ff704f8171..593d69682d 100644
--- a/Telegram/SourceFiles/media/streaming/media_streaming_video_track.h
+++ b/Telegram/SourceFiles/media/streaming/media_streaming_video_track.h
@@ -14,6 +14,9 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
 namespace Media {
 namespace Streaming {
 
+constexpr auto kFrameDisplayTimeAlreadyDone
+	= std::numeric_limits<crl::time>::max();
+
 class VideoTrackObject;
 
 class VideoTrack final {
@@ -47,6 +50,8 @@ public:
 	// Called from the main thread.
 	// Returns the position of the displayed frame.
 	[[nodiscard]] crl::time markFrameDisplayed(crl::time now);
+	void addTimelineDelay(crl::time delayed);
+	bool markFrameShown();
 	[[nodiscard]] crl::time nextFrameDisplayTime() const;
 	[[nodiscard]] QImage frame(const FrameRequest &request);
 	[[nodiscard]] rpl::producer<> checkNextFrame() const;
@@ -79,6 +84,7 @@ private:
 	struct PresentFrame {
 		crl::time displayPosition = kTimeUnknown;
 		crl::time nextCheckDelay = 0;
+		crl::time addedWorldTimeDelay = 0;
 	};
 
 	// Called from the wrapped object queue.
@@ -101,6 +107,8 @@ private:
 		// Called from the main thread.
 		// Returns the position of the displayed frame.
 		[[nodiscard]] crl::time markFrameDisplayed(crl::time now);
+		void addTimelineDelay(crl::time delayed);
+		bool markFrameShown();
 		[[nodiscard]] crl::time nextFrameDisplayTime() const;
 		[[nodiscard]] not_null<Frame*> frameForPaint();
 
@@ -115,6 +123,10 @@ private:
 
 		static constexpr auto kFramesCount = 4;
 		std::array<Frame, kFramesCount> _frames;
+
+		// (_counter % 2) == 1 main thread can write _delay.
+		// (_counter % 2) == 0 crl::queue can read _delay.
+		crl::time _delay = kTimeUnknown;
 	};
 
 	static QImage PrepareFrameByRequest(
diff --git a/Telegram/lib_lottie b/Telegram/lib_lottie
index a2f749fab1..a0a0269ffa 160000
--- a/Telegram/lib_lottie
+++ b/Telegram/lib_lottie
@@ -1 +1 @@
-Subproject commit a2f749fab192ebe7601bfb46399e271bf057c690
+Subproject commit a0a0269ffa44d1e23f0911eaeb286004a075b089
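
Usage note (editorial sketch, not part of the patch above): with options.waitForMarkAsShown set, the player stops dropping stale frames (readEnoughFrames() now derives dropStaleFrames from it) and does not advance past a decoded frame until the consumer reports that the frame actually reached the screen. The consumer side is the pattern the patch adds to Gif::draw(): paint the frame returned by Instance::frame(), then call Instance::markFrameShown() only while the GIF is not paused. A minimal sketch of that call sequence, assuming a Painter p, a target QRect rthumb, the streamed instance and the paused flag that Gif::draw() already has in scope; the resize expression is an assumption (the real code uses the cached thumbnail size):

	auto request = ::Media::Streaming::FrameRequest();
	request.resize = rthumb.size() * cIntRetinaFactor(); // assumed size
	p.drawImage(rthumb, streamed->frame(request));
	if (!paused) {
		// Releases the frame: Shared::markFrameShown() advances the frame
		// counter, and Player::markFrameShown() passes the display delay to
		// VideoTrack::addTimelineDelay(), shifting the timeline forward.
		streamed->markFrameShown();
	}

While paused, frame() keeps returning the same frame and the counter stays put, so the animation freezes without the decoder racing ahead; once painting resumes, the accumulated world-time delay is folded into the next frame's display time.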