Mirror of https://github.com/telegramdesktop/tdesktop (synced 2025-03-23 20:01:35 +00:00)
If stuck wait for three seconds of packets.
This commit is contained in:
parent 3e9b811875, commit d37b65e624
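
In short: when playback stalls because streamed data has not arrived yet, the mixer marks the track as waitingForData, the streaming Player pauses itself (_pausedByWaitingForData) and fires a WaitingForData update, and it resumes automatically once roughly three seconds of packets are buffered again. A condensed sketch of the resume rule added below (kBufferFor, FullTrackReceived and trackReceivedEnough come straight from the diff; the surrounding types are assumed from the existing streaming code):

    // A track has "received enough" when it is fully downloaded or has
    // at least kBufferFor (3 s) buffered ahead of the playback position.
    constexpr auto kBufferFor = crl::time(3000);

    bool Player::trackReceivedEnough(const TrackState &state) const {
        return FullTrackReceived(state)
            || (state.position + kBufferFor <= state.receivedTill);
    }

    // checkResumeFromWaitingForData() clears the buffering pause only when
    // every active track satisfies this condition, and updatePausedState()
    // then restarts playback unless the user paused it explicitly.

The full changes follow.
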
@@ -297,9 +297,10 @@ void StartStreaming(
static auto player = std::unique_ptr<Player>();
static auto pauseOnSeek = false;
static auto position = crl::time(0);
static auto preloaded = crl::time(0);
static auto preloadedAudio = crl::time(0);
static auto preloadedVideo = crl::time(0);
static auto duration = crl::time(0);
static auto options = Media::Streaming::PlaybackOptions();
static auto options = PlaybackOptions();
static auto speed = 1.;
static auto step = pow(2., 1. / 12);
static auto frame = QImage();
@@ -343,10 +344,14 @@ void StartStreaming(
if (player->ready()) {
frame = player->frame({});
}
preloaded = position = options.position = std::clamp(
(duration * e->pos().x()) / width(),
crl::time(0),
crl::time(duration));
preloadedAudio
= preloadedVideo
= position
= options.position
= std::clamp(
(duration * e->pos().x()) / width(),
crl::time(0),
crl::time(duration));
player->play(options);
}
@@ -367,7 +372,7 @@ void StartStreaming(
options.speed = speed;
//options.syncVideoByAudio = false;
preloaded = position = options.position = 0;
preloadedAudio = preloadedVideo = position = options.position = 0;
frame = QImage();
player->play(options);
player->updates(
@@ -391,7 +396,9 @@ void StartStreaming(
? (position * video->width() / duration)
: 0;
const auto till2 = duration
? (preloaded * video->width() / duration)
? (std::min(preloadedAudio, preloadedVideo)
* video->width()
/ duration)
: 0;
if (player->ready()) {
Painter(video.get()).drawImage(
@@ -437,9 +444,11 @@ void StartStreaming(
}, video->lifetime());
}
}, [&](PreloadedVideo &update) {
if (preloaded < update.till) {
preloaded = update.till;
video->update();
if (preloadedVideo < update.till) {
if (preloadedVideo < preloadedAudio) {
video->update();
}
preloadedVideo = update.till;
}
}, [&](UpdateVideo &update) {
Expects(video != nullptr);
@@ -449,11 +458,11 @@ void StartStreaming(
}
video->update();
}, [&](PreloadedAudio &update) {
if (preloaded < update.till) {
preloaded = update.till;
if (video) {
if (preloadedAudio < update.till) {
if (video && preloadedAudio < preloadedVideo) {
video->update();
}
preloadedAudio = update.till;
}
}, [&](UpdateAudio &update) {
if (position < update.position) {
@@ -645,7 +645,7 @@ bool HistoryDocument::updateStatusText() const {
statusSize = -1 - (state.position / state.frequency);
realDuration = (state.length / state.frequency);
showPause = (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
showPause = Media::Player::ShowPauseIcon(state.state);
} else {
if (auto voice = Get<HistoryDocumentVoice>()) {
voice->checkPlaybackFinished();
@@ -660,7 +660,7 @@ bool HistoryDocument::updateStatusText() const {
&& !Media::Player::IsStoppedOrStopping(state.state)) {
statusSize = -1 - (state.position / state.frequency);
realDuration = (state.length / state.frequency);
showPause = (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
showPause = Media::Player::ShowPauseIcon(state.state);
} else {
}
if (!showPause && (state.id == AudioMsgId(_data, _parent->data()->fullId()))) {
@@ -901,7 +901,7 @@ bool File::updateStatusText() const {
if (state.id == AudioMsgId(_document, FullMsgId()) && !Media::Player::IsStoppedOrStopping(state.state)) {
statusSize = -1 - (state.position / state.frequency);
realDuration = (state.length / state.frequency);
showPause = (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
showPause = Media::Player::ShowPauseIcon(state.state);
}
} else if (_document->isAudioFile()) {
statusSize = FileStatusSizeLoaded;
@@ -909,7 +909,7 @@ bool File::updateStatusText() const {
if (state.id == AudioMsgId(_document, FullMsgId()) && !Media::Player::IsStoppedOrStopping(state.state)) {
statusSize = -1 - (state.position / state.frequency);
realDuration = (state.length / state.frequency);
showPause = (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
showPause = Media::Player::ShowPauseIcon(state.state);
}
if (!showPause && (state.id == AudioMsgId(_document, FullMsgId())) && Media::Player::instance()->isSeeking(AudioMsgId::Type::Song)) {
showPause = true;
@@ -436,7 +436,9 @@ void Mixer::Track::reattach(AudioMsgId::Type type) {
}

alSourcei(stream.source, AL_SAMPLE_OFFSET, qMax(state.position - bufferedPosition, 0LL));
if (!IsStopped(state.state) && state.state != State::PausedAtEnd) {
if (!IsStopped(state.state)
&& (state.state != State::PausedAtEnd)
&& !state.waitingForData) {
alSourcef(stream.source, AL_GAIN, ComputeVolume(type));
alSourcePlay(stream.source);
if (IsPaused(state.state)) {
@@ -449,6 +451,7 @@ void Mixer::Track::reattach(AudioMsgId::Type type) {
}

void Mixer::Track::detach() {
getNotQueuedBufferIndex();
resetStream();
destroyStream();
}
@@ -711,20 +714,28 @@ void Mixer::resetFadeStartPosition(AudioMsgId::Type type, int positionInBuffered
if (positionInBuffered < 0) {
Audio::AttachToDevice();
if (track->isStreamCreated()) {
ALint currentPosition = 0;
alGetSourcei(track->stream.source, AL_SAMPLE_OFFSET, &currentPosition);
ALint alSampleOffset = 0;
ALint alState = AL_INITIAL;
alGetSourcei(track->stream.source, AL_SAMPLE_OFFSET, &alSampleOffset);
alGetSourcei(track->stream.source, AL_SOURCE_STATE, &alState);
if (Audio::PlaybackErrorHappened()) {
setStoppedState(track, State::StoppedAtError);
onError(track->state.id);
return;
}

if (currentPosition == 0 && !internal::CheckAudioDeviceConnected()) {
} else if ((alState == AL_STOPPED)
&& (alSampleOffset == 0)
&& !internal::CheckAudioDeviceConnected()) {
track->fadeStartPosition = track->state.position;
return;
}

positionInBuffered = currentPosition;
const auto stoppedAtEnd = (alState == AL_STOPPED)
&& (!IsStopped(track->state.state)
|| IsStoppedAtEnd(track->state.state))
|| track->state.waitingForData;
positionInBuffered = stoppedAtEnd
? track->bufferedLength
: alSampleOffset;
} else {
positionInBuffered = 0;
}
@@ -1406,10 +1417,7 @@ void Fader::onTimer() {
}

int32 Fader::updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasFading, float64 volumeMultiplier, bool volumeChanged) {
auto playing = false;
auto fading = false;

auto errorHappened = [this, track] {
const auto errorHappened = [&] {
if (Audio::PlaybackErrorHappened()) {
setStoppedState(track, State::StoppedAtError);
return true;
@@ -1417,32 +1425,34 @@ int32 Fader::updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasF
return false;
};

ALint positionInBuffered = 0;
ALint state = AL_INITIAL;
alGetSourcei(track->stream.source, AL_SAMPLE_OFFSET, &positionInBuffered);
alGetSourcei(track->stream.source, AL_SOURCE_STATE, &state);
if (errorHappened()) return EmitError;
ALint alSampleOffset = 0;
ALint alState = AL_INITIAL;
alGetSourcei(track->stream.source, AL_SAMPLE_OFFSET, &alSampleOffset);
alGetSourcei(track->stream.source, AL_SOURCE_STATE, &alState);
if (errorHappened()) {
return EmitError;
} else if ((alState == AL_STOPPED)
&& (alSampleOffset == 0)
&& !internal::CheckAudioDeviceConnected()) {
return 0;
}

int32 emitSignals = 0;
const auto stoppedAtEnd = (alState == AL_STOPPED)
&& (!IsStopped(track->state.state)
|| IsStoppedAtEnd(track->state.state))
|| track->state.waitingForData;
const auto positionInBuffered = stoppedAtEnd
? track->bufferedLength
: alSampleOffset;
const auto waitingForDataOld = track->state.waitingForData;
track->state.waitingForData = stoppedAtEnd
&& (track->state.state != State::Stopping);
const auto fullPosition = track->bufferedPosition + positionInBuffered;

if (state == AL_STOPPED && positionInBuffered == 0 && !internal::CheckAudioDeviceConnected()) {
return emitSignals;
}

switch (track->state.state) {
case State::Stopping:
case State::Pausing:
case State::Starting:
case State::Resuming: {
fading = true;
} break;
case State::Playing: {
playing = true;
} break;
}

auto fullPosition = track->bufferedPosition + positionInBuffered;
if (state != AL_PLAYING && !track->loading) {
auto playing = (track->state.state == State::Playing);
auto fading = IsFading(track->state.state);
if (alState != AL_PLAYING && !track->loading) {
if (fading || playing) {
fading = false;
playing = false;
@@ -1456,7 +1466,7 @@ int32 Fader::updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasF
if (errorHappened()) return EmitError;
emitSignals |= EmitStopped;
}
} else if (fading && state == AL_PLAYING) {
} else if (fading && alState == AL_PLAYING) {
auto fadingForSamplesCount = (fullPosition - track->fadeStartPosition);
if (crl::time(1000) * fadingForSamplesCount >= kFadeDuration * track->state.frequency) {
fading = false;
@@ -1466,7 +1476,7 @@ int32 Fader::updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasF
switch (track->state.state) {
case State::Stopping: {
setStoppedState(track);
state = AL_STOPPED;
alState = AL_STOPPED;
} break;
case State::Pausing: {
alSourcePause(track->stream.source);
@@ -1488,15 +1498,22 @@ int32 Fader::updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasF
alSourcef(track->stream.source, AL_GAIN, newGain * volumeMultiplier);
if (errorHappened()) return EmitError;
}
} else if (playing && state == AL_PLAYING) {
} else if (playing && alState == AL_PLAYING) {
if (volumeChanged) {
alSourcef(track->stream.source, AL_GAIN, 1. * volumeMultiplier);
if (errorHappened()) return EmitError;
}
}
if (state == AL_PLAYING && fullPosition >= track->state.position + kCheckPlaybackPositionDelta) {
if (alState == AL_PLAYING && fullPosition >= track->state.position + kCheckPlaybackPositionDelta) {
track->state.position = fullPosition;
emitSignals |= EmitPositionUpdated;
} else if (track->state.waitingForData && !waitingForDataOld) {
if (fullPosition > track->state.position) {
track->state.position = fullPosition;
}
// When stopped because of insufficient data while streaming,
// inform the player about the last position we were at.
emitSignals |= EmitPositionUpdated;
}
if (playing || track->state.state == State::Starting || track->state.state == State::Resuming) {
if (!track->loaded && !track->loading) {
@@ -88,6 +88,10 @@ inline bool IsPaused(State state) {
|| (state == State::PausedAtEnd);
}

inline bool IsPausedOrPausing(State state) {
return IsPaused(state) || (state == State::Pausing);
}

inline bool IsFading(State state) {
return (state == State::Starting)
|| (state == State::Stopping)
@@ -99,12 +103,18 @@ inline bool IsActive(State state) {
return !IsStopped(state) && !IsPaused(state);
}

inline bool ShowPauseIcon(State state) {
return !IsStoppedOrStopping(state)
&& !IsPausedOrPausing(state);
}

struct TrackState {
AudioMsgId id;
State state = State::Stopped;
int64 position = 0;
int64 length = 0;
int frequency = kDefaultFrequency;
bool waitingForData = false;
};

class Mixer : public QObject, private base::Subscriber {
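
The header hunk above introduces IsPausedOrPausing() and ShowPauseIcon(), plus the waitingForData flag on TrackState. ShowPauseIcon() lets the many call sites below drop their hand-rolled state checks; roughly (condensed from the diff, not a new API):

    // Before: every widget repeated the same test.
    showPause = (state.state == State::Playing
        || state.state == State::Resuming
        || state.state == State::Starting);

    // After: one shared helper in Media::Player.
    showPause = Media::Player::ShowPauseIcon(state.state);
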
@@ -227,9 +227,10 @@ void Loaders::loadData(AudioMsgId audio, crl::time positionMs) {
return;
}

if (started) {
if (started || samplesCount) {
Audio::AttachToDevice();
}
if (started) {
track->started();
if (!internal::audioCheckError()) {
setStoppedState(track, State::StoppedAtStart);
@@ -263,12 +264,6 @@ void Loaders::loadData(AudioMsgId audio, crl::time positionMs) {
l->setForceToBuffer(false);
}

//LOG(("[%4] PUSHING %1 SAMPLES (%2 BYTES) %3ms"
// ).arg(samplesCount
// ).arg(samples.size()
// ).arg((samplesCount * 1000LL) / track->frequency
// ).arg(crl::now() % 10000, 4, 10, QChar('0')));

track->bufferSamples[bufferIndex] = samples;
track->samplesCount[bufferIndex] = samplesCount;
track->bufferedLength += samplesCount;
@@ -287,6 +282,7 @@ void Loaders::loadData(AudioMsgId audio, crl::time positionMs) {
}
finished = true;
}
track->state.waitingForData = false;

if (finished) {
track->loaded = true;
@@ -295,44 +291,47 @@ void Loaders::loadData(AudioMsgId audio, crl::time positionMs) {
}

track->loading = false;
if (track->state.state == State::Resuming || track->state.state == State::Playing || track->state.state == State::Starting) {
ALint state = AL_INITIAL;
alGetSourcei(track->stream.source, AL_SOURCE_STATE, &state);
if (internal::audioCheckError()) {
if (state != AL_PLAYING) {
if (state == AL_STOPPED && !internal::CheckAudioDeviceConnected()) {
return;
}
if (IsPausedOrPausing(track->state.state)
|| IsStoppedOrStopping(track->state.state)) {
return;
}
ALint state = AL_INITIAL;
alGetSourcei(track->stream.source, AL_SOURCE_STATE, &state);
if (!internal::audioCheckError()) {
setStoppedState(track, State::StoppedAtError);
emitError(type);
return;
}

alSourcef(track->stream.source, AL_GAIN, ComputeVolume(type));
if (!internal::audioCheckError()) {
setStoppedState(track, State::StoppedAtError);
emitError(type);
return;
}
if (state == AL_PLAYING) {
return;
} else if (state == AL_STOPPED && !internal::CheckAudioDeviceConnected()) {
return;
}

if (state == AL_STOPPED) {
alSourcei(track->stream.source, AL_SAMPLE_OFFSET, qMax(track->state.position - track->bufferedPosition, 0LL));
if (!internal::audioCheckError()) {
setStoppedState(track, State::StoppedAtError);
emitError(type);
return;
}
}
alSourcePlay(track->stream.source);
if (!internal::audioCheckError()) {
setStoppedState(track, State::StoppedAtError);
emitError(type);
return;
}
alSourcef(track->stream.source, AL_GAIN, ComputeVolume(type));
if (!internal::audioCheckError()) {
setStoppedState(track, State::StoppedAtError);
emitError(type);
return;
}

emit needToCheck();
}
} else {
if (state == AL_STOPPED) {
alSourcei(track->stream.source, AL_SAMPLE_OFFSET, qMax(track->state.position - track->bufferedPosition, 0LL));
if (!internal::audioCheckError()) {
setStoppedState(track, State::StoppedAtError);
emitError(type);
return;
}
}
alSourcePlay(track->stream.source);
if (!internal::audioCheckError()) {
setStoppedState(track, State::StoppedAtError);
emitError(type);
return;
}

emit needToCheck();
}

AudioPlayerLoader *Loaders::setupLoader(
@@ -246,7 +246,7 @@ void CoverWidget::handleSongUpdate(const TrackState &state) {
}

auto stopped = IsStoppedOrStopping(state.state);
auto showPause = !stopped && (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
auto showPause = ShowPauseIcon(state.state);
if (instance()->isSeeking(AudioMsgId::Type::Song)) {
showPause = true;
}
@@ -403,7 +403,7 @@ void Instance::playPauseCancelClicked(AudioMsgId::Type type) {

auto state = mixer()->currentState(type);
auto stopped = IsStoppedOrStopping(state.state);
auto showPause = !stopped && (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
auto showPause = ShowPauseIcon(state.state);
auto audio = state.id.audio();
if (audio && audio->loading()) {
audio->cancel();
@@ -436,7 +436,7 @@ void Widget::handleSongUpdate(const TrackState &state) {
}

auto stopped = IsStoppedOrStopping(state.state);
auto showPause = !stopped && (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
auto showPause = ShowPauseIcon(state.state);
if (instance()->isSeeking(_type)) {
showPause = true;
}
@@ -164,6 +164,10 @@ void AudioTrack::setSpeed(float64 speed) {
Media::Player::mixer()->setSpeedFromVideo(_audioId, speed);
}

rpl::producer<> AudioTrack::waitingForData() const {
return _waitingForData.events();
}

rpl::producer<crl::time> AudioTrack::playPosition() {
Expects(_ready == nullptr);

@@ -194,6 +198,9 @@ rpl::producer<crl::time> AudioTrack::playPosition() {
case State::Stopping:
case State::Pausing:
case State::Resuming:
if (state.waitingForData) {
_waitingForData.fire({});
}
_playPosition = state.position * 1000 / state.frequency;
return;
case State::Paused:
@@ -30,6 +30,7 @@ public:

// Called from the main thread.
void setSpeed(float64 speed);
[[nodiscard]] rpl::producer<> waitingForData() const;

// Called from the main thread.
// Non-const, because we subscribe to changes on the first call.
@@ -75,6 +76,7 @@ private:

// Accessed from the main thread.
base::Subscription _subscription;
rpl::event_stream<> _waitingForData;
// First set from the same unspecified thread before _ready is called.
// After that accessed from the main thread.
rpl::variable<crl::time> _playPosition;
@@ -17,6 +17,24 @@ namespace Media {
namespace Streaming {
namespace {

constexpr auto kReceivedTillEnd = std::numeric_limits<crl::time>::max();
constexpr auto kBufferFor = crl::time(3000);

[[nodiscard]] crl::time TrackClampReceivedTill(
crl::time position,
const TrackState &state) {
return (state.duration == kTimeUnknown || position == kTimeUnknown)
? position
: (position == kReceivedTillEnd)
? state.duration
: std::clamp(position, 0LL, state.duration - 1);
}

[[nodiscard]] bool FullTrackReceived(const TrackState &state) {
return (state.duration != kTimeUnknown)
&& (state.receivedTill == state.duration);
}

void SaveValidStateInformation(TrackState &to, TrackState &&from) {
Expects(from.position != kTimeUnknown);
Expects(from.receivedTill != kTimeUnknown);
@@ -74,6 +92,7 @@ not_null<FileDelegate*> Player::delegate() {

void Player::checkNextFrame() {
Expects(_nextFrameTime != kTimeUnknown);
Expects(!_renderFrameTimer.isActive());

const auto now = crl::now();
if (now < _nextFrameTime) {
@@ -85,12 +104,12 @@ void Player::checkNextFrame() {
}

void Player::renderFrame(crl::time now) {
if (_video) {
const auto position = _video->markFrameDisplayed(now);
if (position != kTimeUnknown) {
videoPlayedTill(position);
}
}
Expects(_video != nullptr);

const auto position = _video->markFrameDisplayed(now);
Assert(position != kTimeUnknown);

videoPlayedTill(position);
}

template <typename Track>
@@ -104,7 +123,7 @@ void Player::trackReceivedTill(
position = std::clamp(position, 0LL, state.duration);
if (state.receivedTill < position) {
state.receivedTill = position;
_updates.fire({ PreloadedUpdate<Track>{ position } });
trackSendReceivedTill(track, state);
}
} else {
state.receivedTill = position;
@@ -125,10 +144,22 @@ void Player::trackPlayedTill(
}
}

template <typename Track>
void Player::trackSendReceivedTill(
const Track &track,
TrackState &state) {
Expects(state.duration != kTimeUnknown);
Expects(state.receivedTill != kTimeUnknown);

_updates.fire({ PreloadedUpdate<Track>{ state.receivedTill } });
}

void Player::audioReceivedTill(crl::time position) {
Expects(_audio != nullptr);

position = TrackClampReceivedTill(position, _information.audio.state);
trackReceivedTill(*_audio, _information.audio.state, position);
checkResumeFromWaitingForData();
}

void Player::audioPlayedTill(crl::time position) {
@@ -140,6 +171,7 @@ void Player::audioPlayedTill(crl::time position) {
void Player::videoReceivedTill(crl::time position) {
Expects(_video != nullptr);

position = TrackClampReceivedTill(position, _information.video.state);
trackReceivedTill(*_video, _information.video.state, position);
}

@@ -298,12 +330,13 @@ void Player::provideStartInformation() {

if (_stage == Stage::Ready && !_paused) {
_paused = true;
resume();
updatePausedState();
}
}
}

void Player::fail() {
_sessionLifetime = rpl::lifetime();
const auto stopGuarded = crl::guard(&_sessionGuard, [=] { stop(); });
_stage = Stage::Failed;
_updates.fire_error({});
@@ -326,11 +359,35 @@ void Player::play(const PlaybackOptions &options) {
void Player::pause() {
Expects(valid());

if (_paused) {
_pausedByUser = true;
updatePausedState();
}

void Player::resume() {
Expects(valid());

_pausedByUser = false;
updatePausedState();
}

void Player::updatePausedState() {
const auto paused = _pausedByUser || _pausedByWaitingForData;
if (_paused == paused) {
return;
}
_paused = true;
if (_stage == Stage::Started) {
_paused = paused;
if (!_paused && _stage == Stage::Ready) {
const auto guard = base::make_weak(&_sessionGuard);
start();
if (!guard) {
return;
}
}

if (_stage != Stage::Started) {
return;
}
if (_paused) {
_pausedTime = crl::now();
if (_audio) {
_audio->pause(_pausedTime);
@@ -338,20 +395,7 @@ void Player::pause() {
if (_video) {
_video->pause(_pausedTime);
}
}
}

void Player::resume() {
Expects(valid());

if (!_paused) {
return;
}
_paused = false;
if (_stage == Stage::Ready) {
start();
}
if (_stage == Stage::Started) {
} else {
_startedTime = crl::now();
if (_audio) {
_audio->resume(_startedTime);
@@ -362,48 +406,85 @@
}
}

bool Player::trackReceivedEnough(const TrackState &state) const {
return FullTrackReceived(state)
|| (state.position + kBufferFor <= state.receivedTill);
}

void Player::checkResumeFromWaitingForData() {
if (_pausedByWaitingForData
&& (!_audio || trackReceivedEnough(_information.audio.state))
&& (!_video || trackReceivedEnough(_information.video.state))) {
_pausedByWaitingForData = false;
updatePausedState();
}
}

void Player::start() {
Expects(_stage == Stage::Ready);

_stage = Stage::Started;
if (_audio) {
const auto guard = base::make_weak(&_sessionGuard);

rpl::merge(
_audio ? _audio->waitingForData() : rpl::never(),
_video ? _video->waitingForData() : rpl::never()
) | rpl::filter([=] {
return !FullTrackReceived(_information.video.state)
|| !FullTrackReceived(_information.audio.state);
}) | rpl::start_with_next([=] {
_pausedByWaitingForData = true;
updatePausedState();
_updates.fire({ WaitingForData() });
}, _sessionLifetime);

if (guard && _audio) {
_audio->playPosition(
) | rpl::start_with_next_done([=](crl::time position) {
audioPlayedTill(position);
}, [=] {
if (_stage == Stage::Started) {
_audioFinished = true;
if (!_video || _videoFinished) {
_updates.fire({ Finished() });
}
Expects(_stage == Stage::Started);

_audioFinished = true;
if (!_video || _videoFinished) {
_updates.fire({ Finished() });
}
}, _lifetime);
}, _sessionLifetime);
}
if (_video) {

if (guard && _video) {
_video->renderNextFrame(
) | rpl::start_with_next_done([=](crl::time when) {
_nextFrameTime = when;
checkNextFrame();
}, [=] {
if (_stage == Stage::Started) {
_videoFinished = true;
if (!_audio || _audioFinished) {
_updates.fire({ Finished() });
}
}
}, _lifetime);
}
Expects(_stage == Stage::Started);

_videoFinished = true;
if (!_audio || _audioFinished) {
_updates.fire({ Finished() });
}
}, _sessionLifetime);
}
if (guard && _audio) {
trackSendReceivedTill(*_audio, _information.audio.state);
}
if (guard && _video) {
trackSendReceivedTill(*_video, _information.video.state);
}
}

void Player::stop() {
_file->stop();
_sessionLifetime = rpl::lifetime();
if (_stage != Stage::Failed) {
_stage = Stage::Uninitialized;
}
_audio = nullptr;
_video = nullptr;
invalidate_weak_ptrs(&_sessionGuard);
_paused = false;
_pausedByUser = _pausedByWaitingForData = _paused = false;
_renderFrameTimer.cancel();
_audioFinished = false;
_videoFinished = false;
_readTillEnd = false;
@@ -418,8 +499,12 @@ bool Player::playing() const {
return (_stage == Stage::Started) && !_paused;
}

bool Player::buffering() const {
return _pausedByWaitingForData;
}

bool Player::paused() const {
return _paused;
return _pausedByUser;
}

void Player::setSpeed(float64 speed) {
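
Note how pause() and resume() above now only flip _pausedByUser; the effective paused state is derived in updatePausedState() from both the user flag and _pausedByWaitingForData, so a buffering pause and a user pause no longer overwrite each other. A minimal sketch of that derivation (taken from the hunk above, with the track pausing/resuming elided):

    void Player::updatePausedState() {
        const auto paused = _pausedByUser || _pausedByWaitingForData;
        if (_paused == paused) {
            return;
        }
        _paused = paused;
        // Lazily start playback if resuming from Ready, then pause or
        // resume the audio and video tracks at the current time.
    }
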
@@ -46,6 +46,7 @@ public:

[[nodiscard]] bool failed() const;
[[nodiscard]] bool playing() const;
[[nodiscard]] bool buffering() const;
[[nodiscard]] bool paused() const;

[[nodiscard]] rpl::producer<Update, Error> updates() const;
@@ -57,9 +58,6 @@ public:
~Player();

private:
static constexpr auto kReceivedTillEnd
= std::numeric_limits<crl::time>::max();

enum class Stage {
Uninitialized,
Initializing,
@@ -91,12 +89,21 @@ private:
void videoReceivedTill(crl::time position);
void videoPlayedTill(crl::time position);

void updatePausedState();
[[nodiscard]] bool trackReceivedEnough(const TrackState &state) const;
void checkResumeFromWaitingForData();

template <typename Track>
void trackReceivedTill(
const Track &track,
TrackState &state,
crl::time position);

template <typename Track>
void trackSendReceivedTill(
const Track &track,
TrackState &state);

template <typename Track>
void trackPlayedTill(
const Track &track,
@@ -121,6 +128,8 @@ private:
// Belongs to the main thread.
Information _information;
Stage _stage = Stage::Uninitialized;
bool _pausedByUser = false;
bool _pausedByWaitingForData = false;
bool _paused = false;
bool _audioFinished = false;
bool _videoFinished = false;
@@ -130,7 +139,9 @@ private:
crl::time _nextFrameTime = kTimeUnknown;
base::Timer _renderFrameTimer;
rpl::event_stream<Update, Error> _updates;

rpl::lifetime _lifetime;
rpl::lifetime _sessionLifetime;

};
@@ -36,6 +36,7 @@ public:
void process(Packet &&packet);

[[nodiscard]] rpl::producer<crl::time> displayFrameAt() const;
[[nodiscard]] rpl::producer<> waitingForData() const;

void pause(crl::time time);
void resume(crl::time time);
@@ -76,6 +77,7 @@ private:
mutable TimePoint _syncTimePoint;
mutable crl::time _previousFramePosition = kTimeUnknown;
rpl::variable<crl::time> _nextFrameDisplayTime = kTimeUnknown;
rpl::event_stream<> _waitingForData;

bool _queued = false;
base::ConcurrentTimer _readFramesTimer;
@@ -111,6 +113,10 @@ rpl::producer<crl::time> VideoTrackObject::displayFrameAt() const {
: _nextFrameDisplayTime.value();
}

rpl::producer<> VideoTrackObject::waitingForData() const {
return interrupted() ? rpl::never() : _waitingForData.events();
}

void VideoTrackObject::process(Packet &&packet) {
if (interrupted()) {
return;
@@ -162,6 +168,8 @@ bool VideoTrackObject::readFrame(not_null<Frame*> frame) {
} else if (error.code() != AVERROR(EAGAIN) || _noMoreData) {
interrupt();
_error();
} else if (_stream.queue.empty()) {
_waitingForData.fire({});
}
return false;
}
@@ -604,6 +612,12 @@ rpl::producer<crl::time> VideoTrack::renderNextFrame() const {
});
}

rpl::producer<> VideoTrack::waitingForData() const {
return _wrapped.producer_on_main([](const Implementation &unwrapped) {
return unwrapped.waitingForData();
});
}

VideoTrack::~VideoTrack() {
_wrapped.with([shared = std::move(_shared)](Implementation &unwrapped) {
unwrapped.interrupt();
@@ -48,6 +48,7 @@ public:
[[nodiscard]] crl::time markFrameDisplayed(crl::time now);
[[nodiscard]] QImage frame(const FrameRequest &request) const;
[[nodiscard]] rpl::producer<crl::time> renderNextFrame() const;
[[nodiscard]] rpl::producer<> waitingForData() const;

// Called from the main thread.
~VideoTrack();
@@ -131,7 +131,7 @@ void Controller::updatePlayback(const Player::TrackState &state) {
}

void Controller::updatePlayPauseResumeState(const Player::TrackState &state) {
auto showPause = (state.state == Player::State::Playing || state.state == Player::State::Resuming || _seekPositionMs >= 0);
auto showPause = ShowPauseIcon(state.state) || (_seekPositionMs >= 0);
if (showPause != _showPause) {
disconnect(_playPauseResume, SIGNAL(clicked()), this, _showPause ? SIGNAL(pausePressed()) : SIGNAL(playPressed()));
_showPause = showPause;
@@ -854,7 +854,7 @@ bool Voice::updateStatusText() {
if (state.id == AudioMsgId(_data, parent()->fullId(), state.id.playId()) && !Media::Player::IsStoppedOrStopping(state.state)) {
statusSize = -1 - (state.position / state.frequency);
realDuration = (state.length / state.frequency);
showPause = (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
showPause = Media::Player::ShowPauseIcon(state.state);
}
} else {
statusSize = FileStatusSizeReady;
@@ -1225,7 +1225,7 @@ bool Document::updateStatusText() {
if (state.id == AudioMsgId(_data, parent()->fullId()) && !Media::Player::IsStoppedOrStopping(state.state)) {
statusSize = -1 - (state.position / state.frequency);
realDuration = (state.length / state.frequency);
showPause = (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
showPause = Media::Player::ShowPauseIcon(state.state);
}
if (!showPause && (state.id == AudioMsgId(_data, parent()->fullId())) && Media::Player::instance()->isSeeking(AudioMsgId::Type::Song)) {
showPause = true;