Pause GIFs in message history.

John Preston 2019-12-11 17:01:11 +03:00
parent 2d7adbc68a
commit 9339db900f
10 changed files with 176 additions and 36 deletions
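In effect: streaming playback gains a waitForMarkAsShown option, so a history view can keep painting the current frame while the GIF is paused and only call markFrameShown() to let the player advance once the view is animating again; the time spent waiting is then fed back through addTimelineDelay() so playback does not jump forward. A minimal sketch of that calling pattern, assuming the Instance, FrameRequest and Painter types from the hunks below (the paintFrame() wrapper and the paused flag are illustrative, not part of this commit):

// Sketch only: how a history view is expected to drive the player after
// this change. Instance::frame(), Instance::markFrameShown() and
// PlaybackOptions::waitForMarkAsShown come from this commit; the wrapper
// itself is an assumption for illustration.
void paintFrame(
		Painter &p,
		const QRect &rect,
		::Media::Streaming::Instance &streamed,
		const ::Media::Streaming::FrameRequest &request,
		bool paused) {
	// Always paint whatever frame is currently prepared.
	p.drawImage(rect, streamed.frame(request));

	// Advance only when the view is actually animating. While paused the
	// player keeps returning the same frame, accumulating the delay that
	// it later compensates via addTimelineDelay().
	if (!paused) {
		streamed.markFrameShown();
	}
}

Playback has to be started with options.waitForMarkAsShown = true for this to matter (as Gif::playAnimation() now does); with the option off, the player marks each frame shown by itself, exactly as before.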


@@ -253,7 +253,6 @@ void Gif::draw(Painter &p, const QRect &r, TextSelection selection, crl::time ms
const auto isRound = _data->isVideoMessage();
auto displayMute = false;
const auto streamed = activeCurrentStreamed();
const auto player = streamed ? &streamed->player() : nullptr;
if ((!streamed || item->id < 0) && displayLoading) {
ensureAnimation();
@@ -310,7 +309,10 @@ void Gif::draw(Painter &p, const QRect &r, TextSelection selection, crl::time ms
request.resize = QSize(_thumbw, _thumbh) * cIntRetinaFactor();
request.corners = roundCorners;
request.radius = roundRadius;
p.drawImage(rthumb, player->frame(request));
p.drawImage(rthumb, streamed->frame(request));
if (!paused) {
streamed->markFrameShown();
}
if (const auto playback = videoPlayback()) {
const auto value = playback->value();
@@ -367,7 +369,7 @@ void Gif::draw(Painter &p, const QRect &r, TextSelection selection, crl::time ms
}
if (radial
|| (!player
|| (!streamed
&& !startPlayAsync
&& ((_streamed && _streamed->player().failed())
|| (!_data->loaded() && !_data->loading())
@@ -434,7 +436,7 @@ void Gif::draw(Painter &p, const QRect &r, TextSelection selection, crl::time ms
}
}
if (!isRound && (!player || item->id < 0)) {
if (!isRound && (!streamed || item->id < 0)) {
auto statusX = paintx + st::msgDateImgDelta + st::msgDateImgPadding.x();
auto statusY = painty + st::msgDateImgDelta + st::msgDateImgPadding.y();
auto statusW = st::normalFont->width(_statusText) + 2 * st::msgDateImgPadding.x();
@@ -829,7 +831,7 @@ int Gif::additionalWidth(const HistoryMessageVia *via, const HistoryMessageReply
return ::Media::Player::instance()->roundVideoStreamed(_parent->data());
}
const ::Media::Streaming::Instance *Gif::activeOwnStreamed() const {
::Media::Streaming::Instance *Gif::activeOwnStreamed() const {
return (_streamed
&& _streamed->player().ready()
&& !_streamed->player().videoSize().isEmpty())
@@ -837,7 +839,7 @@ const ::Media::Streaming::Instance *Gif::activeOwnStreamed() const {
: nullptr;
}
const ::Media::Streaming::Instance *Gif::activeCurrentStreamed() const {
::Media::Streaming::Instance *Gif::activeCurrentStreamed() const {
if (const auto streamed = activeRoundStreamed()) {
return streamed;
}
@@ -903,6 +905,7 @@ void Gif::playAnimation(bool autoplay) {
}
auto options = ::Media::Streaming::PlaybackOptions();
options.audioId = AudioMsgId(_data, _realParent->fullId());
options.waitForMarkAsShown = true;
//if (!_streamed->withSound) {
options.mode = ::Media::Streaming::Mode::Video;
options.loop = true;


@@ -92,8 +92,8 @@ private:
QSize countCurrentSize(int newWidth) override;
QSize videoSize() const;
::Media::Streaming::Instance *activeRoundStreamed() const;
const ::Media::Streaming::Instance *activeOwnStreamed() const;
const ::Media::Streaming::Instance *activeCurrentStreamed() const;
::Media::Streaming::Instance *activeOwnStreamed() const;
::Media::Streaming::Instance *activeCurrentStreamed() const;
::Media::View::PlaybackProgress *videoPlayback() const;
bool createStreamedPlayer();


@@ -43,7 +43,7 @@ struct PlaybackOptions {
float64 speed = 1.; // Valid values between 0.5 and 2.
AudioMsgId audioId;
bool syncVideoByAudio = true;
bool dropStaleFrames = true;
bool waitForMarkAsShown = false;
bool loop = false;
};


@@ -140,6 +140,10 @@ QImage Instance::frame(const FrameRequest &request) const {
return player().frame(request);
}
bool Instance::markFrameShown() {
return _shared->player().markFrameShown();
}
rpl::lifetime &Instance::lifetime() {
return _lifetime;
}


@@ -60,6 +60,7 @@ public:
void callWaitingCallback();
[[nodiscard]] QImage frame(const FrameRequest &request) const;
bool markFrameShown();
rpl::lifetime &lifetime();


@@ -101,7 +101,6 @@ void Player::checkNextFrameRender() {
}
} else {
_renderFrameTimer.cancel();
_nextFrameTime = kTimeUnknown;
renderFrame(now);
}
}
@@ -110,6 +109,7 @@ void Player::checkNextFrameAvailability() {
Expects(_video != nullptr);
_nextFrameTime = _video->nextFrameDisplayTime();
Assert(_nextFrameTime != kFrameDisplayTimeAlreadyDone);
if (_nextFrameTime != kTimeUnknown) {
checkNextFrameRender();
}
@@ -117,13 +117,32 @@ void Player::checkNextFrameAvailability() {
void Player::renderFrame(crl::time now) {
Expects(_video != nullptr);
Expects(_nextFrameTime != kTimeUnknown);
Expects(_nextFrameTime != kFrameDisplayTimeAlreadyDone);
const auto position = _video->markFrameDisplayed(now);
Assert(position != kTimeUnknown);
if (_options.waitForMarkAsShown) {
_currentFrameTime = _nextFrameTime;
_nextFrameTime = kFrameDisplayTimeAlreadyDone;
} else {
_video->markFrameShown();
_nextFrameTime = kTimeUnknown;
}
Assert(position != kTimeUnknown);
videoPlayedTill(position);
}
bool Player::markFrameShown() {
Expects(_video != nullptr);
if (_nextFrameTime == kFrameDisplayTimeAlreadyDone) {
_nextFrameTime = kTimeUnknown;
_video->addTimelineDelay(crl::now() - _currentFrameTime);
}
return _video->markFrameShown();
}
template <typename Track>
void Player::trackReceivedTill(
const Track &track,
@@ -690,7 +709,9 @@ void Player::start() {
}
void Player::checkVideoStep() {
if (_nextFrameTime != kTimeUnknown) {
if (_nextFrameTime == kFrameDisplayTimeAlreadyDone) {
return;
} else if (_nextFrameTime != kTimeUnknown) {
checkNextFrameRender();
} else {
checkNextFrameAvailability();


@@ -61,6 +61,7 @@ public:
[[nodiscard]] QSize videoSize() const;
[[nodiscard]] QImage frame(const FrameRequest &request) const;
bool markFrameShown();
[[nodiscard]] Media::Player::TrackState prepareLegacyState() const;
@@ -175,6 +176,7 @@ private:
crl::time _startedTime = kTimeUnknown;
crl::time _pausedTime = kTimeUnknown;
crl::time _currentFrameTime = kTimeUnknown;
crl::time _nextFrameTime = kTimeUnknown;
base::Timer _renderFrameTimer;
rpl::event_stream<Update, Error> _updates;


@@ -43,7 +43,8 @@ public:
void resume(crl::time time);
void setSpeed(float64 speed);
void interrupt();
void frameDisplayed();
void frameShown();
void addTimelineDelay(crl::time delayed);
void updateFrameRequest(const FrameRequest &request);
private:
@@ -222,7 +223,7 @@ void VideoTrackObject::readFrames() {
auto VideoTrackObject::readEnoughFrames(crl::time trackTime)
-> ReadEnoughState {
const auto dropStaleFrames = _options.dropStaleFrames;
const auto dropStaleFrames = !_options.waitForMarkAsShown;
const auto state = _shared->prepareState(trackTime, dropStaleFrames);
return state.match([&](Shared::PrepareFrame frame) -> ReadEnoughState {
while (true) {
@@ -300,7 +301,6 @@ void VideoTrackObject::presentFrameIfNeeded() {
if (_pausedTime != kTimeUnknown || _resumedTime == kTimeUnknown) {
return;
}
const auto time = trackTime();
const auto rasterize = [&](not_null<Frame*> frame) {
Expects(frame->position != kFinishedPosition);
@@ -320,11 +320,13 @@ void VideoTrackObject::presentFrameIfNeeded() {
Ensures(VideoTrack::IsRasterized(frame));
};
const auto dropStaleFrames = !_options.waitForMarkAsShown;
const auto presented = _shared->presentFrame(
time,
trackTime(),
_options.speed,
_options.dropStaleFrames,
dropStaleFrames,
rasterize);
addTimelineDelay(presented.addedWorldTimeDelay);
if (presented.displayPosition == kFinishedPosition) {
interrupt();
_checkNextFrame = rpl::event_stream<>();
@@ -384,13 +386,25 @@ bool VideoTrackObject::interrupted() const {
return (_shared == nullptr);
}
void VideoTrackObject::frameDisplayed() {
void VideoTrackObject::frameShown() {
if (interrupted()) {
return;
}
queueReadFrames();
}
void VideoTrackObject::addTimelineDelay(crl::time delayed) {
Expects(_syncTimePoint.valid());
if (!delayed) {
return;
}
if (delayed > 1000) {
int a = 0; // no-op; presumably a leftover debugging breakpoint anchor
}
_syncTimePoint.worldTime += delayed;
}
void VideoTrackObject::updateFrameRequest(const FrameRequest &request) {
_request = request;
}
@@ -529,6 +543,7 @@ void VideoTrack::Shared::init(QImage &&cover, crl::time position) {
// But in this case we update _counter, so we set a fake displayed time.
_frames[0].displayed = kDisplaySkipped;
_delay = 0;
_counter.store(0, std::memory_order_release);
}
@@ -617,23 +632,25 @@ auto VideoTrack::Shared::presentFrame(
const auto present = [&](int counter, int index) -> PresentFrame {
const auto frame = getFrame(index);
const auto position = frame->position;
const auto addedWorldTimeDelay = base::take(_delay);
if (position == kFinishedPosition) {
return { kFinishedPosition, kTimeUnknown };
return { kFinishedPosition, kTimeUnknown, addedWorldTimeDelay };
}
rasterize(frame);
if (!IsRasterized(frame)) {
// Error happened during frame prepare.
return { kTimeUnknown, kTimeUnknown };
return { kTimeUnknown, kTimeUnknown, addedWorldTimeDelay };
}
const auto trackLeft = position - time.trackTime;
frame->display = time.worldTime
+ addedWorldTimeDelay
+ crl::time(std::round(trackLeft / playbackSpeed));
// Release this frame to the main thread for rendering.
_counter.store(
(counter + 1) % (2 * kFramesCount),
std::memory_order_release);
return { position, crl::time(0) };
return { position, crl::time(0), addedWorldTimeDelay };
};
const auto nextCheckDelay = [&](int index) -> PresentFrame {
const auto frame = getFrame(index);
@@ -669,6 +686,10 @@ crl::time VideoTrack::Shared::nextFrameDisplayTime() const {
const auto next = (counter + 1) % (2 * kFramesCount);
const auto index = next / 2;
const auto frame = getFrame(index);
if (frame->displayed != kTimeUnknown) {
// Frame already displayed, but not yet shown.
return kFrameDisplayTimeAlreadyDone;
}
Assert(IsRasterized(frame));
Assert(frame->display != kTimeUnknown);
@@ -689,33 +710,92 @@ crl::time VideoTrack::Shared::nextFrameDisplayTime() const {
}
crl::time VideoTrack::Shared::markFrameDisplayed(crl::time now) {
const auto markAndJump = [&](int counter) {
const auto mark = [&](int counter) {
const auto next = (counter + 1) % (2 * kFramesCount);
const auto index = next / 2;
const auto frame = getFrame(index);
Assert(frame->position != kTimeUnknown);
Assert(frame->displayed == kTimeUnknown);
frame->displayed = now;
_counter.store(
next,
std::memory_order_release);
if (frame->displayed == kTimeUnknown) {
frame->displayed = now;
}
return frame->position;
};
switch (counter()) {
case 0: Unexpected("Value 0 in VideoTrack::Shared::markFrameDisplayed.");
case 1: return markAndJump(1);
case 1: return mark(1);
case 2: Unexpected("Value 2 in VideoTrack::Shared::markFrameDisplayed.");
case 3: return markAndJump(3);
case 3: return mark(3);
case 4: Unexpected("Value 4 in VideoTrack::Shared::markFrameDisplayed.");
case 5: return markAndJump(5);
case 5: return mark(5);
case 6: Unexpected("Value 6 in VideoTrack::Shared::markFrameDisplayed.");
case 7: return markAndJump(7);
case 7: return mark(7);
}
Unexpected("Counter value in VideoTrack::Shared::markFrameDisplayed.");
}
void VideoTrack::Shared::addTimelineDelay(crl::time delayed) {
if (!delayed) {
return;
}
const auto recountCurrentFrame = [&](int counter) {
_delay += delayed;
if (delayed > 1000) {
int a = 0; // no-op; presumably a leftover debugging breakpoint anchor
}
//const auto next = (counter + 1) % (2 * kFramesCount);
//const auto index = next / 2;
//const auto frame = getFrame(index);
//if (frame->displayed != kTimeUnknown) {
// // Frame already displayed.
// return;
//}
//Assert(IsRasterized(frame));
//Assert(frame->display != kTimeUnknown);
//frame->display = countFrameDisplayTime(frame->index);
};
switch (counter()) {
case 0: Unexpected("Value 0 in VideoTrack::Shared::addTimelineDelay.");
case 1: return recountCurrentFrame(1);
case 2: Unexpected("Value 2 in VideoTrack::Shared::addTimelineDelay.");
case 3: return recountCurrentFrame(3);
case 4: Unexpected("Value 4 in VideoTrack::Shared::addTimelineDelay.");
case 5: return recountCurrentFrame(5);
case 6: Unexpected("Value 6 in VideoTrack::Shared::addTimelineDelay.");
case 7: return recountCurrentFrame(7);
}
Unexpected("Counter value in VideoTrack::Shared::addTimelineDelay.");
}
bool VideoTrack::Shared::markFrameShown() {
const auto jump = [&](int counter) {
const auto next = (counter + 1) % (2 * kFramesCount);
const auto index = next / 2;
const auto frame = getFrame(index);
if (frame->displayed == kTimeUnknown) {
return false;
}
_counter.store(
next,
std::memory_order_release);
return true;
};
switch (counter()) {
case 0: return false;
case 1: return jump(1);
case 2: return false;
case 3: return jump(3);
case 4: return false;
case 5: return jump(5);
case 6: return false;
case 7: return jump(7);
}
Unexpected("Counter value in VideoTrack::Shared::markFrameShown.");
}
not_null<VideoTrack::Frame*> VideoTrack::Shared::frameForPaint() {
const auto result = getFrame(counter() / 2);
Assert(!result->original.isNull());
@@ -793,14 +873,31 @@ crl::time VideoTrack::nextFrameDisplayTime() const {
crl::time VideoTrack::markFrameDisplayed(crl::time now) {
const auto result = _shared->markFrameDisplayed(now);
_wrapped.with([](Implementation &unwrapped) {
unwrapped.frameDisplayed();
});
Ensures(result != kTimeUnknown);
return result;
}
void VideoTrack::addTimelineDelay(crl::time delayed) {
_shared->addTimelineDelay(delayed);
//if (!delayed) {
// return;
//}
//_wrapped.with([=](Implementation &unwrapped) mutable {
// unwrapped.addTimelineDelay(delayed);
//});
}
bool VideoTrack::markFrameShown() {
if (!_shared->markFrameShown()) {
return false;
}
_wrapped.with([](Implementation &unwrapped) {
unwrapped.frameShown();
});
return true;
}
QImage VideoTrack::frame(const FrameRequest &request) {
const auto frame = _shared->frameForPaint();
const auto changed = (frame->request != request)


@@ -14,6 +14,9 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
namespace Media {
namespace Streaming {
constexpr auto kFrameDisplayTimeAlreadyDone
= std::numeric_limits<crl::time>::max();
class VideoTrackObject;
class VideoTrack final {
@@ -47,6 +50,8 @@ public:
// Called from the main thread.
// Returns the position of the displayed frame.
[[nodiscard]] crl::time markFrameDisplayed(crl::time now);
void addTimelineDelay(crl::time delayed);
bool markFrameShown();
[[nodiscard]] crl::time nextFrameDisplayTime() const;
[[nodiscard]] QImage frame(const FrameRequest &request);
[[nodiscard]] rpl::producer<> checkNextFrame() const;
@@ -79,6 +84,7 @@ private:
struct PresentFrame {
crl::time displayPosition = kTimeUnknown;
crl::time nextCheckDelay = 0;
crl::time addedWorldTimeDelay = 0;
};
// Called from the wrapped object queue.
@@ -101,6 +107,8 @@ private:
// Called from the main thread.
// Returns the position of the displayed frame.
[[nodiscard]] crl::time markFrameDisplayed(crl::time now);
void addTimelineDelay(crl::time delayed);
bool markFrameShown();
[[nodiscard]] crl::time nextFrameDisplayTime() const;
[[nodiscard]] not_null<Frame*> frameForPaint();
@@ -115,6 +123,10 @@ private:
static constexpr auto kFramesCount = 4;
std::array<Frame, kFramesCount> _frames;
// (_counter % 2) == 1 main thread can write _delay.
// (_counter % 2) == 0 crl::queue can read _delay.
crl::time _delay = kTimeUnknown;
};
static QImage PrepareFrameByRequest(

@@ -1 +1 @@
Subproject commit a2f749fab192ebe7601bfb46399e271bf057c690
Subproject commit a0a0269ffa44d1e23f0911eaeb286004a075b089