Support streaming speed changing.

John Preston 2019-02-21 20:01:55 +04:00
parent a7d9281768
commit 93c548c013
12 changed files with 296 additions and 116 deletions

View File

@@ -295,6 +295,12 @@ void StartStreaming(
using namespace Media::Streaming;
if (auto loader = document->createStreamingLoader(origin)) {
static auto player = std::unique_ptr<Player>();
static auto pauseOnSeek = false;
static auto duration = crl::time(0);
static auto options = Media::Streaming::PlaybackOptions();
static auto subscribe = Fn<void()>();
static auto speed = 1.;
static auto step = pow(2., 1. / 12);
class Panel
#if defined Q_OS_MAC && !defined OS_MAC_OLD
@@ -319,68 +325,97 @@ void StartStreaming(
} else {
player->pause();
}
} else if (e->key() == Qt::Key_Plus) {
speed = std::min(speed * step, 2.);
player->setSpeed(speed);
} else if (e->key() == Qt::Key_Minus) {
speed = std::max(speed / step, 0.5);
player->setSpeed(speed);
}
}
void mousePressEvent(QMouseEvent *e) override {
pauseOnSeek = player->paused();
player->pause();
}
void mouseReleaseEvent(QMouseEvent *e) override {
options.position = std::clamp(
(duration * e->pos().x()) / width(),
crl::time(0),
crl::time(duration));
player->play(options);
subscribe();
}
};
static auto video = base::unique_qptr<Panel>();
subscribe = [] {
player->updates(
) | rpl::start_with_next_error_done([=](Update &&update) {
update.data.match([&](Information &update) {
duration = update.video.state.duration;
if (!video && !update.video.cover.isNull()) {
video = base::make_unique_q<Panel>();
video->setAttribute(Qt::WA_OpaquePaintEvent);
video->paintRequest(
) | rpl::start_with_next([=](QRect rect) {
if (player->ready()) {
Painter(video.get()).drawImage(
video->rect(),
player->frame(FrameRequest()));
} else {
Painter(video.get()).fillRect(
rect,
Qt::black);
}
}, video->lifetime());
const auto size = QSize(
ConvertScale(update.video.size.width()),
ConvertScale(update.video.size.height()));
const auto center = App::wnd()->geometry().center();
video->setGeometry(QRect(
center - QPoint(size.width(), size.height()) / 2,
size));
video->show();
video->shownValue(
) | rpl::start_with_next([=](bool shown) {
if (!shown) {
base::take(player) = nullptr;
}
}, video->lifetime());
}
}, [&](PreloadedVideo &update) {
}, [&](UpdateVideo &update) {
Expects(video != nullptr);
video->update();
}, [&](PreloadedAudio &update) {
}, [&](UpdateAudio &update) {
}, [&](WaitingForData &update) {
}, [&](MutedByOther &update) {
});
}, [=](const Error &error) {
base::take(video) = nullptr;
}, [=] {
}, player->lifetime());
};
player = std::make_unique<Player>(
&document->owner(),
std::move(loader));
video = nullptr;
document->session().lifetime().add([] {
base::take(player) = nullptr;
base::take(video) = nullptr;
player->lifetime().add([] {
base::take(video) = nullptr;
});
document->session().lifetime().add([] {
base::take(player) = nullptr;
});
auto options = Media::Streaming::PlaybackOptions();
options.speed = 1.7;
options.speed = speed;
//options.syncVideoByAudio = false;
options.position = (document->duration() / 2) * crl::time(1000);
options.position = 0;
player->play(options);
player->updates(
) | rpl::start_with_next_error_done([=](Update &&update) {
update.data.match([&](Information &update) {
if (!update.video.cover.isNull()) {
video = base::make_unique_q<Panel>();
video->setAttribute(Qt::WA_OpaquePaintEvent);
video->paintRequest(
) | rpl::start_with_next([=](QRect rect) {
Painter(video.get()).drawImage(
video->rect(),
player->frame(FrameRequest()));
}, video->lifetime());
const auto size = QSize(
ConvertScale(update.video.size.width()),
ConvertScale(update.video.size.height()));
const auto center = App::wnd()->geometry().center();
video->setGeometry(QRect(
center - QPoint(size.width(), size.height()) / 2,
size));
video->show();
video->shownValue(
) | rpl::start_with_next([=](bool shown) {
if (!shown) {
base::take(player) = nullptr;
}
}, video->lifetime());
}
}, [&](PreloadedVideo &update) {
}, [&](UpdateVideo &update) {
Expects(video != nullptr);
video->update();
}, [&](PreloadedAudio &update) {
}, [&](UpdateAudio &update) {
}, [&](WaitingForData &update) {
}, [&](MutedByOther &update) {
});
}, [=](const Error &error) {
base::take(video) = nullptr;
}, [=] {
base::take(video) = nullptr;
}, player->lifetime());
subscribe();
}
}
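A note on the step constant in the test panel above: pow(2., 1. / 12) is the equal-temperament semitone ratio, so each Plus/Minus key press changes the speed by one semitone and twelve presses land exactly on the 2.0 / 0.5 clamp limits. Illustrative check only, not part of the commit (assumes <cmath>):

// Demonstrates why twelve semitone steps line up with the clamp limits.
const auto step = std::pow(2., 1. / 12);
const auto twelveUp = std::pow(step, 12.);    // == 2.0 (up to rounding)
const auto twelveDown = std::pow(step, -12.); // == 0.5 (up to rounding)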

View File

@@ -303,6 +303,14 @@ void StopDetachIfNotUsedSafe() {
});
}
bool SupportsSpeedControl() {
#ifndef TDESKTOP_DISABLE_OPENAL_EFFECTS
return true;
#else // TDESKTOP_DISABLE_OPENAL_EFFECTS
return false;
#endif // TDESKTOP_DISABLE_OPENAL_EFFECTS
}
} // namespace Audio
namespace Player {
@@ -347,24 +355,39 @@ void Mixer::Track::createStream(AudioMsgId::Type type) {
alGenBuffers(3, stream.buffers);
if (type == AudioMsgId::Type::Voice) {
mixer()->updatePlaybackSpeed(this);
#ifndef TDESKTOP_DISABLE_OPENAL_EFFECTS
} else if (speedEffect) {
applySourceSpeedEffect();
} else {
removeSourceSpeedEffect();
#endif // TDESKTOP_DISABLE_OPENAL_EFFECTS
}
}
#ifndef TDESKTOP_DISABLE_OPENAL_EFFECTS
void Mixer::Track::removeSourceSpeedEffect() {
alSource3i(stream.source, AL_AUXILIARY_SEND_FILTER, AL_EFFECTSLOT_NULL, 0, 0);
alSourcei(stream.source, AL_DIRECT_FILTER, AL_FILTER_NULL);
}
void Mixer::Track::applySourceSpeedEffect() {
Expects(speedEffect != nullptr);
if (!speedEffect->effect || !alIsEffect(speedEffect->effect)) {
alGenAuxiliaryEffectSlots(1, &speedEffect->effectSlot);
alGenEffects(1, &speedEffect->effect);
alGenFilters(1, &speedEffect->filter);
alEffecti(speedEffect->effect, AL_EFFECT_TYPE, AL_EFFECT_PITCH_SHIFTER);
alEffecti(speedEffect->effect, AL_PITCH_SHIFTER_COARSE_TUNE, speedEffect->coarseTune);
alAuxiliaryEffectSloti(speedEffect->effectSlot, AL_EFFECTSLOT_EFFECT, speedEffect->effect);
alFilteri(speedEffect->filter, AL_FILTER_TYPE, AL_FILTER_LOWPASS);
alFilterf(speedEffect->filter, AL_LOWPASS_GAIN, 0.f);
alSourcef(stream.source, AL_PITCH, speedEffect->speed);
alSource3i(stream.source, AL_AUXILIARY_SEND_FILTER, speedEffect->effectSlot, 0, 0);
alSourcei(stream.source, AL_DIRECT_FILTER, speedEffect->filter);
} else {
alSource3i(stream.source, AL_AUXILIARY_SEND_FILTER, AL_EFFECTSLOT_NULL, 0, 0);
alSourcei(stream.source, AL_DIRECT_FILTER, AL_FILTER_NULL);
}
alEffecti(speedEffect->effect, AL_PITCH_SHIFTER_COARSE_TUNE, speedEffect->coarseTune);
alAuxiliaryEffectSloti(speedEffect->effectSlot, AL_EFFECTSLOT_EFFECT, speedEffect->effect);
alSourcef(stream.source, AL_PITCH, speedEffect->speed);
alSource3i(stream.source, AL_AUXILIARY_SEND_FILTER, speedEffect->effectSlot, 0, 0);
alSourcei(stream.source, AL_DIRECT_FILTER, speedEffect->filter);
}
#endif // TDESKTOP_DISABLE_OPENAL_EFFECTS
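CoarseTuneForSpeed() is called further down but not shown in this diff. The idea behind the effect chain above: AL_PITCH speeds the source up and raises its pitch at the same time, so the dry path is muted by the zero-gain lowpass filter while the wet path goes through an AL_EFFECT_PITCH_SHIFTER that tunes the audio back down by the same number of semitones. A sketch of a matching tune computation (an assumption, not the actual implementation; assumes <cmath>):

// Hypothetical sketch: compensate the pitch change caused by AL_PITCH.
// Doubling the speed raises pitch by 12 semitones, so tune by -12.
int CoarseTuneForSpeed(float64 speed) {
	return -int(std::round(12. * std::log2(speed)));
}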
void Mixer::Track::destroyStream() {
if (isStreamCreated()) {
@@ -375,19 +398,28 @@ void Mixer::Track::destroyStream() {
for (auto i = 0; i != 3; ++i) {
stream.buffers[i] = 0;
}
#ifndef TDESKTOP_DISABLE_OPENAL_EFFECTS
resetSpeedEffect();
#endif // TDESKTOP_DISABLE_OPENAL_EFFECTS
}
#ifndef TDESKTOP_DISABLE_OPENAL_EFFECTS
void Mixer::Track::resetSpeedEffect() {
if (!speedEffect) {
return;
} else if (alIsEffect(speedEffect->effect)) {
} else if (speedEffect->effect && alIsEffect(speedEffect->effect)) {
if (isStreamCreated()) {
removeSourceSpeedEffect();
}
#ifndef TDESKTOP_DISABLE_OPENAL_EFFECTS
alDeleteEffects(1, &speedEffect->effect);
alDeleteAuxiliaryEffectSlots(1, &speedEffect->effectSlot);
alDeleteFilters(1, &speedEffect->filter);
#endif // TDESKTOP_DISABLE_OPENAL_EFFECTS
}
speedEffect->effect = speedEffect->effectSlot = speedEffect->filter = 0;
}
#endif // TDESKTOP_DISABLE_OPENAL_EFFECTS
void Mixer::Track::reattach(AudioMsgId::Type type) {
if (isStreamCreated() || !samplesCount[0]) {
@@ -519,17 +551,30 @@ int Mixer::Track::getNotQueuedBufferIndex() {
}
void Mixer::Track::setVideoData(std::unique_ptr<VideoSoundData> data) {
resetSpeedEffect();
if (data && data->speed != 1.) {
speedEffect = std::make_unique<SpeedEffect>();
speedEffect->speed = data->speed;
speedEffect->coarseTune = CoarseTuneForSpeed(data->speed);
} else {
speedEffect = nullptr;
}
#ifndef TDESKTOP_DISABLE_OPENAL_EFFECTS
changeSpeedEffect(data ? data->speed : 1.);
#endif // TDESKTOP_DISABLE_OPENAL_EFFECTS
videoData = std::move(data);
}
#ifndef TDESKTOP_DISABLE_OPENAL_EFFECTS
void Mixer::Track::changeSpeedEffect(float64 speed) {
if (speed != 1.) {
if (!speedEffect) {
speedEffect = std::make_unique<SpeedEffect>();
}
speedEffect->speed = speed;
speedEffect->coarseTune = CoarseTuneForSpeed(speed);
if (isStreamCreated()) {
applySourceSpeedEffect();
}
} else if (speedEffect) {
resetSpeedEffect();
speedEffect = nullptr;
}
}
#endif // TDESKTOP_DISABLE_OPENAL_EFFECTS
void Mixer::Track::resetStream() {
if (isStreamCreated()) {
alSourceStop(stream.source);
@@ -831,12 +876,22 @@ void Mixer::forceToBufferVideo(const AudioMsgId &audioId) {
_loader->forceToBufferVideo(audioId);
}
Streaming::TimeCorrection Mixer::getVideoTimeCorrection(
void Mixer::setSpeedFromVideo(const AudioMsgId &audioId, float64 speed) {
#ifndef TDESKTOP_DISABLE_OPENAL_EFFECTS
QMutexLocker lock(&AudioMutex);
const auto track = trackForType(AudioMsgId::Type::Video);
if (track->state.id == audioId) {
track->changeSpeedEffect(speed);
}
#endif // TDESKTOP_DISABLE_OPENAL_EFFECTS
}
Streaming::TimePoint Mixer::getVideoSyncTimePoint(
const AudioMsgId &audio) const {
Expects(audio.type() == AudioMsgId::Type::Video);
Expects(audio.playId() != 0);
auto result = Streaming::TimeCorrection();
auto result = Streaming::TimePoint();
const auto playId = audio.playId();
QMutexLocker lock(&AudioMutex);

View File

@@ -15,7 +15,7 @@ struct VideoSoundPart;
namespace Media {
namespace Streaming {
struct TimeCorrection;
struct TimePoint;
} // namespace Streaming
namespace Audio {
@@ -36,6 +36,7 @@ bool AttachToDevice();
void ScheduleDetachFromDeviceSafe();
void ScheduleDetachIfNotUsedSafe();
void StopDetachIfNotUsedSafe();
bool SupportsSpeedControl();
} // namespace Audio
@@ -126,7 +127,8 @@ public:
// Video player audio stream interface.
void feedFromVideo(const VideoSoundPart &part);
void forceToBufferVideo(const AudioMsgId &audioId);
Streaming::TimeCorrection getVideoTimeCorrection(
void setSpeedFromVideo(const AudioMsgId &audioId, float64 speed);
Streaming::TimePoint getVideoSyncTimePoint(
const AudioMsgId &audio) const;
crl::time getVideoCorrectedTime(
const AudioMsgId &id,
@@ -201,6 +203,9 @@ private:
int getNotQueuedBufferIndex();
void setVideoData(std::unique_ptr<VideoSoundData> data);
#ifndef TDESKTOP_DISABLE_OPENAL_EFFECTS
void changeSpeedEffect(float64 speed);
#endif // TDESKTOP_DISABLE_OPENAL_EFFECTS
~Track();
@@ -226,6 +231,7 @@ private:
Stream stream;
std::unique_ptr<VideoSoundData> videoData;
#ifndef TDESKTOP_DISABLE_OPENAL_EFFECTS
struct SpeedEffect {
uint32 effect = 0;
uint32 effectSlot = 0;
@@ -234,14 +240,19 @@ private:
float64 speed = 1.;
};
std::unique_ptr<SpeedEffect> speedEffect;
#endif // TDESKTOP_DISABLE_OPENAL_EFFECTS
crl::time lastUpdateWhen = 0;
crl::time lastUpdatePosition = 0;
private:
void createStream(AudioMsgId::Type type);
void destroyStream();
void resetSpeedEffect();
void resetStream();
#ifndef TDESKTOP_DISABLE_OPENAL_EFFECTS
void resetSpeedEffect();
void applySourceSpeedEffect();
void removeSourceSpeedEffect();
#endif // TDESKTOP_DISABLE_OPENAL_EFFECTS
};

View File

@@ -395,11 +395,8 @@ void Widget::checkForTypeChange() {
}
bool Widget::hasPlaybackSpeedControl() const {
#ifndef TDESKTOP_DISABLE_OPENAL_EFFECTS
return (_type == AudioMsgId::Type::Voice);
#else // TDESKTOP_DISABLE_OPENAL_EFFECTS
return false;
#endif // TDESKTOP_DISABLE_OPENAL_EFFECTS
return (_type == AudioMsgId::Type::Voice)
&& Media::Audio::SupportsSpeedControl();
}
void Widget::setType(AudioMsgId::Type type) {

View File

@@ -137,6 +137,11 @@ void AudioTrack::resume(crl::time time) {
Media::Player::mixer()->resume(_audioId, true);
}
void AudioTrack::setSpeed(float64 speed) {
_options.speed = speed;
Media::Player::mixer()->setSpeedFromVideo(_audioId, speed);
}
rpl::producer<crl::time> AudioTrack::playPosition() {
Expects(_ready == nullptr);

View File

@@ -10,7 +10,6 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "media/streaming/media_streaming_utility.h"
namespace Media {
namespace Streaming {
class AudioTrack final {
@@ -29,6 +28,9 @@ public:
void pause(crl::time time);
void resume(crl::time time);
// Called from the main thread.
void setSpeed(float64 speed);
// Called from the main thread.
// Non-const, because we subscribe to changes on the first call.
// Must be called after 'ready' was invoked.
@@ -55,7 +57,7 @@ private:
void mixerForceToBuffer();
void callReady();
const PlaybackOptions _options;
PlaybackOptions _options;
// Accessed from the same unspecified thread.
Stream _stream;

View File

@@ -11,8 +11,16 @@ namespace Media {
constexpr auto kTimeUnknown = std::numeric_limits<crl::time>::min();
namespace Audio {
bool SupportsSpeedControl();
} // namespace Audio
namespace Streaming {
inline bool SupportsSpeedControl() {
return Media::Audio::SupportsSpeedControl();
}
class VideoTrack;
class AudioTrack;

View File

@@ -11,6 +11,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "media/streaming/media_streaming_loader.h"
#include "media/streaming/media_streaming_audio_track.h"
#include "media/streaming/media_streaming_video_track.h"
#include "media/audio/media_audio.h" // for SupportsSpeedControl()
namespace Media {
namespace Streaming {
@@ -315,12 +316,15 @@ void Player::play(const PlaybackOptions &options) {
stop();
_options = options;
if (!Media::Audio::SupportsSpeedControl()) {
_options.speed = 1.;
}
_stage = Stage::Initializing;
_file->start(delegate(), _options.position);
}
void Player::pause() {
Expects(_stage != Stage::Uninitialized && _stage != Stage::Failed);
Expects(valid());
if (_paused) {
return;
@@ -338,7 +342,7 @@ }
}
void Player::resume() {
Expects(_stage != Stage::Uninitialized && _stage != Stage::Failed);
Expects(valid());
if (!_paused) {
return;
@@ -386,6 +390,7 @@ void Player::stop() {
_audio = nullptr;
_video = nullptr;
_paused = false;
_information = Information();
invalidate_weak_ptrs(&_sessionGuard);
if (_stage != Stage::Failed) {
_stage = Stage::Uninitialized;
@@ -405,6 +410,32 @@ bool Player::paused() const {
return _paused;
}
void Player::setSpeed(float64 speed) {
Expects(valid());
Expects(speed >= 0.5 && speed <= 2.);
if (!Media::Audio::SupportsSpeedControl()) {
speed = 1.;
}
if (_options.speed != speed) {
_options.speed = speed;
if (_audio) {
_audio->setSpeed(speed);
}
if (_video) {
_video->setSpeed(speed);
}
}
}
bool Player::valid() const {
return (_stage != Stage::Uninitialized) && (_stage != Stage::Failed);
}
bool Player::ready() const {
return valid() && (_stage != Stage::Initializing);
}
rpl::producer<Update, Error> Player::updates() const {
return _updates.events();
}
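With the new valid() / ready() pair, callers can guard speed changes and frame access without tracking the stage themselves. A hedged usage sketch (the surrounding handler is hypothetical; Player::setSpeed() itself asserts the 0.5..2.0 range):

if (player && player->valid()) {
	player->setSpeed(1.5); // caller keeps the value inside [0.5, 2.0]
}
if (player && player->ready()) {
	const auto frame = player->frame(FrameRequest()); // safe only once initialized
}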

View File

@@ -38,6 +38,12 @@ public:
void resume();
void stop();
bool valid() const;
bool ready() const;
float64 speed() const;
void setSpeed(float64 speed); // 0.5 <= speed <= 2.
[[nodiscard]] bool failed() const;
[[nodiscard]] bool playing() const;
[[nodiscard]] bool paused() const;

View File

@@ -20,7 +20,7 @@ namespace Streaming {
constexpr auto kUniversalTimeBase = AVRational{ 1, AV_TIME_BASE };
struct TimeCorrection {
struct TimePoint {
crl::time trackTime = kTimeUnknown;
crl::time worldTime = kTimeUnknown;

View File

@@ -39,6 +39,7 @@ public:
void pause(crl::time time);
void resume(crl::time time);
void setSpeed(float64 speed);
void interrupt();
void frameDisplayed();
@@ -55,14 +56,10 @@ private:
// Force frame position to be clamped to [0, duration] and monotonic.
[[nodiscard]] crl::time currentFramePosition() const;
struct TrackTime {
crl::time worldNow = kTimeUnknown;
crl::time trackNow = kTimeUnknown;
};
[[nodiscard]] TrackTime trackTime() const;
[[nodiscard]] TimePoint trackTime() const;
const crl::weak_on_queue<VideoTrackObject> _weak;
const PlaybackOptions _options;
PlaybackOptions _options;
// Main thread wrapper destructor will set _shared back to nullptr.
// All queued method calls after that should be discarded.
@@ -75,7 +72,7 @@ private:
Fn<void()> _error;
crl::time _pausedTime = kTimeUnknown;
crl::time _resumedTime = kTimeUnknown;
mutable TimeCorrection _timeCorrection;
mutable TimePoint _syncTimePoint;
mutable crl::time _previousFramePosition = kTimeUnknown;
rpl::variable<crl::time> _nextFrameDisplayTime = kTimeUnknown;
@@ -109,10 +106,11 @@ rpl::producer<crl::time> VideoTrackObject::displayFrameAt() const {
}
void VideoTrackObject::process(Packet &&packet) {
_noMoreData = packet.empty();
if (interrupted()) {
return;
} else if (_shared->initialized()) {
}
_noMoreData = packet.empty();
if (_shared->initialized()) {
_stream.queue.push_back(std::move(packet));
queueReadFrames();
} else if (!tryReadFirstFrame(std::move(packet))) {
@@ -136,7 +134,7 @@ void VideoTrackObject::readFrames() {
if (interrupted()) {
return;
}
const auto state = _shared->prepareState(trackTime().trackNow);
const auto state = _shared->prepareState(trackTime().trackTime);
state.match([&](Shared::PrepareFrame frame) {
if (readFrame(frame)) {
presentFrameIfNeeded();
@@ -185,38 +183,55 @@ void VideoTrackObject::presentFrameIfNeeded() {
return;
}
const auto time = trackTime();
const auto presented = _shared->presentFrame(time.trackNow);
const auto presented = _shared->presentFrame(time.trackTime);
if (presented.displayPosition != kTimeUnknown) {
const auto trackLeft = presented.displayPosition - time.trackNow;
_nextFrameDisplayTime = time.worldNow
const auto trackLeft = presented.displayPosition - time.trackTime;
_nextFrameDisplayTime = time.worldTime
+ crl::time(std::round(trackLeft / _options.speed));
}
queueReadFrames(presented.nextCheckDelay);
}
void VideoTrackObject::pause(crl::time time) {
Expects(_timeCorrection.valid());
Expects(_syncTimePoint.valid());
if (_pausedTime == kTimeUnknown) {
if (interrupted()) {
return;
} else if (_pausedTime == kTimeUnknown) {
_pausedTime = time;
}
}
void VideoTrackObject::resume(crl::time time) {
Expects(_timeCorrection.trackTime != kTimeUnknown);
Expects(_syncTimePoint.trackTime != kTimeUnknown);
if (interrupted()) {
return;
}
// Resumed time used to validate sync to audio.
_resumedTime = time;
if (_pausedTime != kTimeUnknown) {
Assert(_pausedTime <= time);
_timeCorrection.worldTime += (time - _pausedTime);
_syncTimePoint.worldTime += (time - _pausedTime);
_pausedTime = kTimeUnknown;
} else {
_timeCorrection.worldTime = time;
_syncTimePoint.worldTime = time;
}
queueReadFrames();
Ensures(_timeCorrection.valid());
Ensures(_syncTimePoint.valid());
Ensures(_pausedTime == kTimeUnknown);
}
void VideoTrackObject::setSpeed(float64 speed) {
if (interrupted()) {
return;
}
if (_syncTimePoint.valid()) {
_syncTimePoint = trackTime();
}
_options.speed = speed;
}
bool VideoTrackObject::interrupted() const {
@@ -224,6 +239,9 @@ bool VideoTrackObject::interrupted() const {
}
void VideoTrackObject::frameDisplayed() {
if (interrupted()) {
return;
}
queueReadFrames();
}
@@ -246,7 +264,7 @@ bool VideoTrackObject::tryReadFirstFrame(Packet &&packet) {
if (frame.isNull()) {
return false;
}
_shared->init(std::move(frame), _timeCorrection.trackTime);
_shared->init(std::move(frame), _syncTimePoint.trackTime);
callReady();
if (!_stream.queue.empty()) {
queueReadFrames();
@@ -267,13 +285,13 @@ crl::time VideoTrackObject::currentFramePosition() const {
}
bool VideoTrackObject::fillStateFromFrame() {
Expects(_timeCorrection.trackTime == kTimeUnknown);
Expects(_syncTimePoint.trackTime == kTimeUnknown);
const auto position = currentFramePosition();
if (position == kTimeUnknown) {
return false;
}
_nextFrameDisplayTime = _timeCorrection.trackTime = position;
_nextFrameDisplayTime = _syncTimePoint.trackTime = position;
return true;
}
@@ -291,31 +309,34 @@ void VideoTrackObject::callReady() {
data.cover = frame->original;
data.rotation = _stream.rotation;
data.state.duration = _stream.duration;
data.state.position = _timeCorrection.trackTime;
data.state.position = _syncTimePoint.trackTime;
data.state.receivedTill = _noMoreData
? _stream.duration
: _timeCorrection.trackTime;
: _syncTimePoint.trackTime;
base::take(_ready)({ data });
}
VideoTrackObject::TrackTime VideoTrackObject::trackTime() const {
auto result = TrackTime();
result.worldNow = crl::now();
if (!_timeCorrection) {
result.trackNow = _timeCorrection.trackTime;
TimePoint VideoTrackObject::trackTime() const {
auto result = TimePoint();
result.worldTime = (_pausedTime != kTimeUnknown)
? _pausedTime
: crl::now();
if (!_syncTimePoint) {
result.trackTime = _syncTimePoint.trackTime;
return result;
}
Assert(_resumedTime != kTimeUnknown);
if (_options.syncVideoByAudio && _audioId.playId()) {
const auto mixer = Media::Player::mixer();
const auto correction = mixer->getVideoTimeCorrection(_audioId);
if (correction && correction.worldTime > _resumedTime) {
_timeCorrection = correction;
const auto point = mixer->getVideoSyncTimePoint(_audioId);
if (point && point.worldTime > _resumedTime) {
_syncTimePoint = point;
}
}
const auto sinceKnown = (result.worldNow - _timeCorrection.worldTime);
result.trackNow = _timeCorrection.trackTime
+ crl::time(std::round(sinceKnown * _options.speed));
const auto adjust = (result.worldTime - _syncTimePoint.worldTime);
result.trackTime = _syncTimePoint.trackTime
+ crl::time(std::round(adjust * _options.speed));
return result;
}
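The rewritten trackTime() reduces to a linear mapping through the sync point: the track position advances speed times faster than wall-clock time since the last sync, and setSpeed() above re-anchors the sync point so the mapping stays continuous across speed changes. A minimal sketch of that mapping using the names from the diff (illustrative only, assumes <cmath>):

crl::time TrackPositionAt(
		const TimePoint &sync,
		crl::time worldNow,
		float64 speed) {
	// Same arithmetic as trackTime(): scale elapsed world time by the speed.
	const auto sinceSync = worldNow - sync.worldTime;
	return sync.trackTime + crl::time(std::round(sinceSync * speed));
}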
@@ -517,6 +538,12 @@ void VideoTrack::resume(crl::time time) {
});
}
void VideoTrack::setSpeed(float64 speed) {
_wrapped.with([=](Implementation &unwrapped) {
unwrapped.setSpeed(speed);
});
}
crl::time VideoTrack::markFrameDisplayed(crl::time now) {
const auto position = _shared->markFrameDisplayed(now);
if (position != kTimeUnknown) {

View File

@@ -40,6 +40,9 @@ public:
void pause(crl::time time);
void resume(crl::time time);
// Called from the main thread.
void setSpeed(float64 speed);
// Called from the main thread.
// Returns the position of the displayed frame.
[[nodiscard]] crl::time markFrameDisplayed(crl::time now);