Mirror of https://github.com/telegramdesktop/tdesktop (synced 2025-04-01)
If stuck wait for three seconds of packets.
Commit d37b65e624 (parent 3e9b811875)
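In brief, the change below teaches the streaming Player to pause itself when a track runs out of decoded packets (a new `waitingForData` flag on `TrackState`, surfaced through `waitingForData()` producers on the audio and video tracks) and to resume only after enough of the stream has been received again. The resume condition is the "three seconds" from the commit title: playback continues once `receivedTill` is at least `kBufferFor = crl::time(3000)` ahead of the current position, or the whole track has already been received. A minimal, self-contained sketch of that decision follows — plain C++ with simplified stand-in types, not the project's actual `TrackState`/`Player` API:

```cpp
#include <cstdint>

// Simplified stand-in for the fields the patch consults on each track.
struct TrackState {
    int64_t position = 0;      // current playback position, ms
    int64_t receivedTill = 0;  // how far the stream has been downloaded, ms
    int64_t duration = -1;     // -1 while still unknown
};

constexpr int64_t kBufferFor = 3000; // "wait for three seconds of packets"

// True when the whole track is already available locally.
bool FullTrackReceived(const TrackState &state) {
    return (state.duration >= 0) && (state.receivedTill == state.duration);
}

// True when playback may (re)start: either everything is here, or at least
// kBufferFor milliseconds beyond the current position have been buffered.
bool trackReceivedEnough(const TrackState &state) {
    return FullTrackReceived(state)
        || (state.position + kBufferFor <= state.receivedTill);
}

// The player stays paused-by-buffering until every present track
// (audio and/or video) reports enough data.
bool canResumeFromWaitingForData(const TrackState *audio, const TrackState *video) {
    return (!audio || trackReceivedEnough(*audio))
        && (!video || trackReceivedEnough(*video));
}
```

In the actual patch these checks live on `Media::Streaming::Player` (`trackReceivedEnough()`, `checkResumeFromWaitingForData()`) and are driven by the `waitingForData()` signals from the audio and video tracks, as the hunks below show.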
@@ -297,9 +297,10 @@ void StartStreaming(
     static auto player = std::unique_ptr<Player>();
     static auto pauseOnSeek = false;
     static auto position = crl::time(0);
-    static auto preloaded = crl::time(0);
+    static auto preloadedAudio = crl::time(0);
+    static auto preloadedVideo = crl::time(0);
     static auto duration = crl::time(0);
-    static auto options = Media::Streaming::PlaybackOptions();
+    static auto options = PlaybackOptions();
     static auto speed = 1.;
     static auto step = pow(2., 1. / 12);
     static auto frame = QImage();
@@ -343,10 +344,14 @@ void StartStreaming(
         if (player->ready()) {
             frame = player->frame({});
         }
-        preloaded = position = options.position = std::clamp(
-            (duration * e->pos().x()) / width(),
-            crl::time(0),
-            crl::time(duration));
+        preloadedAudio
+            = preloadedVideo
+            = position
+            = options.position
+            = std::clamp(
+                (duration * e->pos().x()) / width(),
+                crl::time(0),
+                crl::time(duration));
         player->play(options);
     }
 
@@ -367,7 +372,7 @@ void StartStreaming(
 
     options.speed = speed;
     //options.syncVideoByAudio = false;
-    preloaded = position = options.position = 0;
+    preloadedAudio = preloadedVideo = position = options.position = 0;
     frame = QImage();
     player->play(options);
     player->updates(
@@ -391,7 +396,9 @@ void StartStreaming(
             ? (position * video->width() / duration)
             : 0;
         const auto till2 = duration
-            ? (preloaded * video->width() / duration)
+            ? (std::min(preloadedAudio, preloadedVideo)
+                * video->width()
+                / duration)
             : 0;
         if (player->ready()) {
             Painter(video.get()).drawImage(
@@ -437,9 +444,11 @@ void StartStreaming(
             }, video->lifetime());
         }
     }, [&](PreloadedVideo &update) {
-        if (preloaded < update.till) {
-            preloaded = update.till;
+        if (preloadedVideo < update.till) {
+            if (preloadedVideo < preloadedAudio) {
             video->update();
+            }
+            preloadedVideo = update.till;
         }
     }, [&](UpdateVideo &update) {
         Expects(video != nullptr);
@@ -449,11 +458,11 @@ void StartStreaming(
         }
         video->update();
     }, [&](PreloadedAudio &update) {
-        if (preloaded < update.till) {
-            preloaded = update.till;
-            if (video) {
+        if (preloadedAudio < update.till) {
+            if (video && preloadedAudio < preloadedVideo) {
                 video->update();
             }
+            preloadedAudio = update.till;
         }
     }, [&](UpdateAudio &update) {
         if (position < update.position) {
@@ -645,7 +645,7 @@ bool HistoryDocument::updateStatusText() const {
 
             statusSize = -1 - (state.position / state.frequency);
             realDuration = (state.length / state.frequency);
-            showPause = (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
+            showPause = Media::Player::ShowPauseIcon(state.state);
         } else {
             if (auto voice = Get<HistoryDocumentVoice>()) {
                 voice->checkPlaybackFinished();
@@ -660,7 +660,7 @@ bool HistoryDocument::updateStatusText() const {
             && !Media::Player::IsStoppedOrStopping(state.state)) {
             statusSize = -1 - (state.position / state.frequency);
             realDuration = (state.length / state.frequency);
-            showPause = (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
+            showPause = Media::Player::ShowPauseIcon(state.state);
         } else {
         }
         if (!showPause && (state.id == AudioMsgId(_data, _parent->data()->fullId()))) {
@@ -901,7 +901,7 @@ bool File::updateStatusText() const {
         if (state.id == AudioMsgId(_document, FullMsgId()) && !Media::Player::IsStoppedOrStopping(state.state)) {
             statusSize = -1 - (state.position / state.frequency);
             realDuration = (state.length / state.frequency);
-            showPause = (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
+            showPause = Media::Player::ShowPauseIcon(state.state);
         }
     } else if (_document->isAudioFile()) {
         statusSize = FileStatusSizeLoaded;
@@ -909,7 +909,7 @@ bool File::updateStatusText() const {
         if (state.id == AudioMsgId(_document, FullMsgId()) && !Media::Player::IsStoppedOrStopping(state.state)) {
             statusSize = -1 - (state.position / state.frequency);
             realDuration = (state.length / state.frequency);
-            showPause = (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
+            showPause = Media::Player::ShowPauseIcon(state.state);
         }
         if (!showPause && (state.id == AudioMsgId(_document, FullMsgId())) && Media::Player::instance()->isSeeking(AudioMsgId::Type::Song)) {
             showPause = true;
@@ -436,7 +436,9 @@ void Mixer::Track::reattach(AudioMsgId::Type type) {
     }
 
     alSourcei(stream.source, AL_SAMPLE_OFFSET, qMax(state.position - bufferedPosition, 0LL));
-    if (!IsStopped(state.state) && state.state != State::PausedAtEnd) {
+    if (!IsStopped(state.state)
+        && (state.state != State::PausedAtEnd)
+        && !state.waitingForData) {
         alSourcef(stream.source, AL_GAIN, ComputeVolume(type));
         alSourcePlay(stream.source);
         if (IsPaused(state.state)) {
@@ -449,6 +451,7 @@ void Mixer::Track::reattach(AudioMsgId::Type type) {
 }
 
 void Mixer::Track::detach() {
+    getNotQueuedBufferIndex();
     resetStream();
     destroyStream();
 }
@@ -711,20 +714,28 @@ void Mixer::resetFadeStartPosition(AudioMsgId::Type type, int positionInBuffered
     if (positionInBuffered < 0) {
         Audio::AttachToDevice();
         if (track->isStreamCreated()) {
-            ALint currentPosition = 0;
-            alGetSourcei(track->stream.source, AL_SAMPLE_OFFSET, &currentPosition);
+            ALint alSampleOffset = 0;
+            ALint alState = AL_INITIAL;
+            alGetSourcei(track->stream.source, AL_SAMPLE_OFFSET, &alSampleOffset);
+            alGetSourcei(track->stream.source, AL_SOURCE_STATE, &alState);
             if (Audio::PlaybackErrorHappened()) {
                 setStoppedState(track, State::StoppedAtError);
                 onError(track->state.id);
                 return;
-            }
-            if (currentPosition == 0 && !internal::CheckAudioDeviceConnected()) {
+            } else if ((alState == AL_STOPPED)
+                && (alSampleOffset == 0)
+                && !internal::CheckAudioDeviceConnected()) {
                 track->fadeStartPosition = track->state.position;
                 return;
             }
 
-            positionInBuffered = currentPosition;
+            const auto stoppedAtEnd = (alState == AL_STOPPED)
+                && (!IsStopped(track->state.state)
+                    || IsStoppedAtEnd(track->state.state))
+                || track->state.waitingForData;
+            positionInBuffered = stoppedAtEnd
+                ? track->bufferedLength
+                : alSampleOffset;
         } else {
             positionInBuffered = 0;
         }
@@ -1406,10 +1417,7 @@ void Fader::onTimer() {
 }
 
 int32 Fader::updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasFading, float64 volumeMultiplier, bool volumeChanged) {
-    auto playing = false;
-    auto fading = false;
-
-    auto errorHappened = [this, track] {
+    const auto errorHappened = [&] {
         if (Audio::PlaybackErrorHappened()) {
             setStoppedState(track, State::StoppedAtError);
             return true;
@@ -1417,32 +1425,34 @@ int32 Fader::updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasF
         return false;
     };
 
-    ALint positionInBuffered = 0;
-    ALint state = AL_INITIAL;
-    alGetSourcei(track->stream.source, AL_SAMPLE_OFFSET, &positionInBuffered);
-    alGetSourcei(track->stream.source, AL_SOURCE_STATE, &state);
-    if (errorHappened()) return EmitError;
+    ALint alSampleOffset = 0;
+    ALint alState = AL_INITIAL;
+    alGetSourcei(track->stream.source, AL_SAMPLE_OFFSET, &alSampleOffset);
+    alGetSourcei(track->stream.source, AL_SOURCE_STATE, &alState);
+    if (errorHappened()) {
+        return EmitError;
+    } else if ((alState == AL_STOPPED)
+        && (alSampleOffset == 0)
+        && !internal::CheckAudioDeviceConnected()) {
+        return 0;
+    }
 
     int32 emitSignals = 0;
+    const auto stoppedAtEnd = (alState == AL_STOPPED)
+        && (!IsStopped(track->state.state)
+            || IsStoppedAtEnd(track->state.state))
+        || track->state.waitingForData;
+    const auto positionInBuffered = stoppedAtEnd
+        ? track->bufferedLength
+        : alSampleOffset;
+    const auto waitingForDataOld = track->state.waitingForData;
+    track->state.waitingForData = stoppedAtEnd
+        && (track->state.state != State::Stopping);
+    const auto fullPosition = track->bufferedPosition + positionInBuffered;
 
-    if (state == AL_STOPPED && positionInBuffered == 0 && !internal::CheckAudioDeviceConnected()) {
-        return emitSignals;
-    }
-
-    switch (track->state.state) {
-    case State::Stopping:
-    case State::Pausing:
-    case State::Starting:
-    case State::Resuming: {
-        fading = true;
-    } break;
-    case State::Playing: {
-        playing = true;
-    } break;
-    }
-
-    auto fullPosition = track->bufferedPosition + positionInBuffered;
-    if (state != AL_PLAYING && !track->loading) {
+    auto playing = (track->state.state == State::Playing);
+    auto fading = IsFading(track->state.state);
+    if (alState != AL_PLAYING && !track->loading) {
         if (fading || playing) {
             fading = false;
             playing = false;
@@ -1456,7 +1466,7 @@ int32 Fader::updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasF
             if (errorHappened()) return EmitError;
             emitSignals |= EmitStopped;
         }
-    } else if (fading && state == AL_PLAYING) {
+    } else if (fading && alState == AL_PLAYING) {
         auto fadingForSamplesCount = (fullPosition - track->fadeStartPosition);
         if (crl::time(1000) * fadingForSamplesCount >= kFadeDuration * track->state.frequency) {
             fading = false;
@@ -1466,7 +1476,7 @@ int32 Fader::updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasF
             switch (track->state.state) {
             case State::Stopping: {
                 setStoppedState(track);
-                state = AL_STOPPED;
+                alState = AL_STOPPED;
             } break;
             case State::Pausing: {
                 alSourcePause(track->stream.source);
@@ -1488,15 +1498,22 @@ int32 Fader::updateOnePlayback(Mixer::Track *track, bool &hasPlaying, bool &hasF
             alSourcef(track->stream.source, AL_GAIN, newGain * volumeMultiplier);
             if (errorHappened()) return EmitError;
         }
-    } else if (playing && state == AL_PLAYING) {
+    } else if (playing && alState == AL_PLAYING) {
         if (volumeChanged) {
             alSourcef(track->stream.source, AL_GAIN, 1. * volumeMultiplier);
             if (errorHappened()) return EmitError;
         }
     }
-    if (state == AL_PLAYING && fullPosition >= track->state.position + kCheckPlaybackPositionDelta) {
+    if (alState == AL_PLAYING && fullPosition >= track->state.position + kCheckPlaybackPositionDelta) {
         track->state.position = fullPosition;
         emitSignals |= EmitPositionUpdated;
+    } else if (track->state.waitingForData && !waitingForDataOld) {
+        if (fullPosition > track->state.position) {
+            track->state.position = fullPosition;
+        }
+        // When stopped because of insufficient data while streaming,
+        // inform the player about the last position we were at.
+        emitSignals |= EmitPositionUpdated;
     }
     if (playing || track->state.state == State::Starting || track->state.state == State::Resuming) {
         if (!track->loaded && !track->loading) {
@@ -88,6 +88,10 @@ inline bool IsPaused(State state) {
         || (state == State::PausedAtEnd);
 }
 
+inline bool IsPausedOrPausing(State state) {
+    return IsPaused(state) || (state == State::Pausing);
+}
+
 inline bool IsFading(State state) {
     return (state == State::Starting)
         || (state == State::Stopping)
@@ -99,12 +103,18 @@ inline bool IsActive(State state) {
     return !IsStopped(state) && !IsPaused(state);
 }
 
+inline bool ShowPauseIcon(State state) {
+    return !IsStoppedOrStopping(state)
+        && !IsPausedOrPausing(state);
+}
+
 struct TrackState {
     AudioMsgId id;
     State state = State::Stopped;
    int64 position = 0;
    int64 length = 0;
    int frequency = kDefaultFrequency;
+    bool waitingForData = false;
 };
 
 class Mixer : public QObject, private base::Subscriber {
@@ -227,9 +227,10 @@ void Loaders::loadData(AudioMsgId audio, crl::time positionMs) {
         return;
     }
 
-    if (started) {
+    if (started || samplesCount) {
         Audio::AttachToDevice();
+    }
+    if (started) {
         track->started();
         if (!internal::audioCheckError()) {
             setStoppedState(track, State::StoppedAtStart);
@@ -263,12 +264,6 @@ void Loaders::loadData(AudioMsgId audio, crl::time positionMs) {
         l->setForceToBuffer(false);
     }
 
-    //LOG(("[%4] PUSHING %1 SAMPLES (%2 BYTES) %3ms"
-    //    ).arg(samplesCount
-    //    ).arg(samples.size()
-    //    ).arg((samplesCount * 1000LL) / track->frequency
-    //    ).arg(crl::now() % 10000, 4, 10, QChar('0')));
-
     track->bufferSamples[bufferIndex] = samples;
     track->samplesCount[bufferIndex] = samplesCount;
     track->bufferedLength += samplesCount;
@@ -287,6 +282,7 @@ void Loaders::loadData(AudioMsgId audio, crl::time positionMs) {
         }
         finished = true;
     }
+    track->state.waitingForData = false;
 
     if (finished) {
         track->loaded = true;
@@ -295,44 +291,47 @@ void Loaders::loadData(AudioMsgId audio, crl::time positionMs) {
     }
 
     track->loading = false;
-    if (track->state.state == State::Resuming || track->state.state == State::Playing || track->state.state == State::Starting) {
-        ALint state = AL_INITIAL;
-        alGetSourcei(track->stream.source, AL_SOURCE_STATE, &state);
-        if (internal::audioCheckError()) {
-            if (state != AL_PLAYING) {
-                if (state == AL_STOPPED && !internal::CheckAudioDeviceConnected()) {
-                    return;
-                }
-
-                alSourcef(track->stream.source, AL_GAIN, ComputeVolume(type));
-                if (!internal::audioCheckError()) {
-                    setStoppedState(track, State::StoppedAtError);
-                    emitError(type);
-                    return;
-                }
-
-                if (state == AL_STOPPED) {
-                    alSourcei(track->stream.source, AL_SAMPLE_OFFSET, qMax(track->state.position - track->bufferedPosition, 0LL));
-                    if (!internal::audioCheckError()) {
-                        setStoppedState(track, State::StoppedAtError);
-                        emitError(type);
-                        return;
-                    }
-                }
-                alSourcePlay(track->stream.source);
-                if (!internal::audioCheckError()) {
-                    setStoppedState(track, State::StoppedAtError);
-                    emitError(type);
-                    return;
-                }
-
-                emit needToCheck();
-            }
-        } else {
-            setStoppedState(track, State::StoppedAtError);
-            emitError(type);
-        }
-    }
+    if (IsPausedOrPausing(track->state.state)
+        || IsStoppedOrStopping(track->state.state)) {
+        return;
+    }
+    ALint state = AL_INITIAL;
+    alGetSourcei(track->stream.source, AL_SOURCE_STATE, &state);
+    if (!internal::audioCheckError()) {
+        setStoppedState(track, State::StoppedAtError);
+        emitError(type);
+        return;
+    }
+
+    if (state == AL_PLAYING) {
+        return;
+    } else if (state == AL_STOPPED && !internal::CheckAudioDeviceConnected()) {
+        return;
+    }
+
+    alSourcef(track->stream.source, AL_GAIN, ComputeVolume(type));
+    if (!internal::audioCheckError()) {
+        setStoppedState(track, State::StoppedAtError);
+        emitError(type);
+        return;
+    }
+
+    if (state == AL_STOPPED) {
+        alSourcei(track->stream.source, AL_SAMPLE_OFFSET, qMax(track->state.position - track->bufferedPosition, 0LL));
+        if (!internal::audioCheckError()) {
+            setStoppedState(track, State::StoppedAtError);
+            emitError(type);
+            return;
+        }
+    }
+    alSourcePlay(track->stream.source);
+    if (!internal::audioCheckError()) {
+        setStoppedState(track, State::StoppedAtError);
+        emitError(type);
+        return;
+    }
+
+    emit needToCheck();
 }
 
 AudioPlayerLoader *Loaders::setupLoader(
@@ -246,7 +246,7 @@ void CoverWidget::handleSongUpdate(const TrackState &state) {
     }
 
     auto stopped = IsStoppedOrStopping(state.state);
-    auto showPause = !stopped && (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
+    auto showPause = ShowPauseIcon(state.state);
     if (instance()->isSeeking(AudioMsgId::Type::Song)) {
         showPause = true;
     }
@@ -403,7 +403,7 @@ void Instance::playPauseCancelClicked(AudioMsgId::Type type) {
 
     auto state = mixer()->currentState(type);
     auto stopped = IsStoppedOrStopping(state.state);
-    auto showPause = !stopped && (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
+    auto showPause = ShowPauseIcon(state.state);
     auto audio = state.id.audio();
     if (audio && audio->loading()) {
         audio->cancel();
@@ -436,7 +436,7 @@ void Widget::handleSongUpdate(const TrackState &state) {
     }
 
     auto stopped = IsStoppedOrStopping(state.state);
-    auto showPause = !stopped && (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
+    auto showPause = ShowPauseIcon(state.state);
     if (instance()->isSeeking(_type)) {
         showPause = true;
     }
@@ -164,6 +164,10 @@ void AudioTrack::setSpeed(float64 speed) {
     Media::Player::mixer()->setSpeedFromVideo(_audioId, speed);
 }
 
+rpl::producer<> AudioTrack::waitingForData() const {
+    return _waitingForData.events();
+}
+
 rpl::producer<crl::time> AudioTrack::playPosition() {
     Expects(_ready == nullptr);
 
@@ -194,6 +198,9 @@ rpl::producer<crl::time> AudioTrack::playPosition() {
         case State::Stopping:
        case State::Pausing:
        case State::Resuming:
+            if (state.waitingForData) {
+                _waitingForData.fire({});
+            }
             _playPosition = state.position * 1000 / state.frequency;
             return;
         case State::Paused:
@@ -30,6 +30,7 @@ public:
 
     // Called from the main thread.
     void setSpeed(float64 speed);
+    [[nodiscard]] rpl::producer<> waitingForData() const;
 
     // Called from the main thread.
     // Non-const, because we subscribe to changes on the first call.
@@ -75,6 +76,7 @@ private:
 
     // Accessed from the main thread.
     base::Subscription _subscription;
+    rpl::event_stream<> _waitingForData;
     // First set from the same unspecified thread before _ready is called.
     // After that accessed from the main thread.
     rpl::variable<crl::time> _playPosition;
@@ -17,6 +17,24 @@ namespace Media {
 namespace Streaming {
 namespace {
 
+constexpr auto kReceivedTillEnd = std::numeric_limits<crl::time>::max();
+constexpr auto kBufferFor = crl::time(3000);
+
+[[nodiscard]] crl::time TrackClampReceivedTill(
+        crl::time position,
+        const TrackState &state) {
+    return (state.duration == kTimeUnknown || position == kTimeUnknown)
+        ? position
+        : (position == kReceivedTillEnd)
+        ? state.duration
+        : std::clamp(position, 0LL, state.duration - 1);
+}
+
+[[nodiscard]] bool FullTrackReceived(const TrackState &state) {
+    return (state.duration != kTimeUnknown)
+        && (state.receivedTill == state.duration);
+}
+
 void SaveValidStateInformation(TrackState &to, TrackState &&from) {
     Expects(from.position != kTimeUnknown);
     Expects(from.receivedTill != kTimeUnknown);
@@ -74,6 +92,7 @@ not_null<FileDelegate*> Player::delegate() {
 
 void Player::checkNextFrame() {
     Expects(_nextFrameTime != kTimeUnknown);
+    Expects(!_renderFrameTimer.isActive());
 
     const auto now = crl::now();
     if (now < _nextFrameTime) {
@@ -85,12 +104,12 @@ void Player::checkNextFrame() {
 }
 
 void Player::renderFrame(crl::time now) {
-    if (_video) {
-        const auto position = _video->markFrameDisplayed(now);
-        if (position != kTimeUnknown) {
-            videoPlayedTill(position);
-        }
-    }
+    Expects(_video != nullptr);
+
+    const auto position = _video->markFrameDisplayed(now);
+    Assert(position != kTimeUnknown);
+
+    videoPlayedTill(position);
 }
 
 template <typename Track>
@@ -104,7 +123,7 @@ void Player::trackReceivedTill(
         position = std::clamp(position, 0LL, state.duration);
         if (state.receivedTill < position) {
             state.receivedTill = position;
-            _updates.fire({ PreloadedUpdate<Track>{ position } });
+            trackSendReceivedTill(track, state);
         }
     } else {
         state.receivedTill = position;
@@ -125,10 +144,22 @@ void Player::trackPlayedTill(
     }
 }
 
+template <typename Track>
+void Player::trackSendReceivedTill(
+        const Track &track,
+        TrackState &state) {
+    Expects(state.duration != kTimeUnknown);
+    Expects(state.receivedTill != kTimeUnknown);
+
+    _updates.fire({ PreloadedUpdate<Track>{ state.receivedTill } });
+}
+
 void Player::audioReceivedTill(crl::time position) {
     Expects(_audio != nullptr);
 
+    position = TrackClampReceivedTill(position, _information.audio.state);
     trackReceivedTill(*_audio, _information.audio.state, position);
+    checkResumeFromWaitingForData();
 }
 
 void Player::audioPlayedTill(crl::time position) {
@@ -140,6 +171,7 @@ void Player::audioPlayedTill(crl::time position) {
 void Player::videoReceivedTill(crl::time position) {
     Expects(_video != nullptr);
 
+    position = TrackClampReceivedTill(position, _information.video.state);
     trackReceivedTill(*_video, _information.video.state, position);
 }
 
@@ -298,12 +330,13 @@ void Player::provideStartInformation() {
 
         if (_stage == Stage::Ready && !_paused) {
             _paused = true;
-            resume();
+            updatePausedState();
         }
     }
 }
 
 void Player::fail() {
+    _sessionLifetime = rpl::lifetime();
     const auto stopGuarded = crl::guard(&_sessionGuard, [=] { stop(); });
     _stage = Stage::Failed;
     _updates.fire_error({});
@@ -326,11 +359,35 @@ void Player::play(const PlaybackOptions &options) {
 void Player::pause() {
     Expects(valid());
 
-    if (_paused) {
+    _pausedByUser = true;
+    updatePausedState();
+}
+
+void Player::resume() {
+    Expects(valid());
+
+    _pausedByUser = false;
+    updatePausedState();
+}
+
+void Player::updatePausedState() {
+    const auto paused = _pausedByUser || _pausedByWaitingForData;
+    if (_paused == paused) {
         return;
     }
-    _paused = true;
-    if (_stage == Stage::Started) {
+    _paused = paused;
+    if (!_paused && _stage == Stage::Ready) {
+        const auto guard = base::make_weak(&_sessionGuard);
+        start();
+        if (!guard) {
+            return;
+        }
+    }
+
+    if (_stage != Stage::Started) {
+        return;
+    }
+    if (_paused) {
         _pausedTime = crl::now();
         if (_audio) {
             _audio->pause(_pausedTime);
@@ -338,20 +395,7 @@ void Player::pause() {
         if (_video) {
             _video->pause(_pausedTime);
         }
-    }
-}
-
-void Player::resume() {
-    Expects(valid());
-
-    if (!_paused) {
-        return;
-    }
-    _paused = false;
-    if (_stage == Stage::Ready) {
-        start();
-    }
-    if (_stage == Stage::Started) {
+    } else {
         _startedTime = crl::now();
         if (_audio) {
             _audio->resume(_startedTime);
@@ -362,48 +406,85 @@ void Player::resume() {
    }
 }
 
+bool Player::trackReceivedEnough(const TrackState &state) const {
+    return FullTrackReceived(state)
+        || (state.position + kBufferFor <= state.receivedTill);
+}
+
+void Player::checkResumeFromWaitingForData() {
+    if (_pausedByWaitingForData
+        && (!_audio || trackReceivedEnough(_information.audio.state))
+        && (!_video || trackReceivedEnough(_information.video.state))) {
+        _pausedByWaitingForData = false;
+        updatePausedState();
+    }
+}
+
 void Player::start() {
     Expects(_stage == Stage::Ready);
 
     _stage = Stage::Started;
-    if (_audio) {
+    const auto guard = base::make_weak(&_sessionGuard);
+
+    rpl::merge(
+        _audio ? _audio->waitingForData() : rpl::never(),
+        _video ? _video->waitingForData() : rpl::never()
+    ) | rpl::filter([=] {
+        return !FullTrackReceived(_information.video.state)
+            || !FullTrackReceived(_information.audio.state);
+    }) | rpl::start_with_next([=] {
+        _pausedByWaitingForData = true;
+        updatePausedState();
+        _updates.fire({ WaitingForData() });
+    }, _sessionLifetime);
+
+    if (guard && _audio) {
         _audio->playPosition(
         ) | rpl::start_with_next_done([=](crl::time position) {
             audioPlayedTill(position);
         }, [=] {
-            if (_stage == Stage::Started) {
-                _audioFinished = true;
-                if (!_video || _videoFinished) {
-                    _updates.fire({ Finished() });
-                }
-            }
-        }, _lifetime);
+            Expects(_stage == Stage::Started);
+
+            _audioFinished = true;
+            if (!_video || _videoFinished) {
+                _updates.fire({ Finished() });
+            }
+        }, _sessionLifetime);
     }
-    if (_video) {
+
+    if (guard && _video) {
         _video->renderNextFrame(
         ) | rpl::start_with_next_done([=](crl::time when) {
             _nextFrameTime = when;
             checkNextFrame();
         }, [=] {
-            if (_stage == Stage::Started) {
-                _videoFinished = true;
-                if (!_audio || _audioFinished) {
-                    _updates.fire({ Finished() });
-                }
-            }
-        }, _lifetime);
-    }
+            Expects(_stage == Stage::Started);
+
+            _videoFinished = true;
+            if (!_audio || _audioFinished) {
+                _updates.fire({ Finished() });
+            }
+        }, _sessionLifetime);
+    }
+    if (guard && _audio) {
+        trackSendReceivedTill(*_audio, _information.audio.state);
+    }
+    if (guard && _video) {
+        trackSendReceivedTill(*_video, _information.video.state);
+    }
 }
 
 void Player::stop() {
     _file->stop();
+    _sessionLifetime = rpl::lifetime();
     if (_stage != Stage::Failed) {
         _stage = Stage::Uninitialized;
     }
     _audio = nullptr;
     _video = nullptr;
     invalidate_weak_ptrs(&_sessionGuard);
-    _paused = false;
+    _pausedByUser = _pausedByWaitingForData = _paused = false;
+    _renderFrameTimer.cancel();
     _audioFinished = false;
     _videoFinished = false;
     _readTillEnd = false;
@@ -418,8 +499,12 @@ bool Player::playing() const {
     return (_stage == Stage::Started) && !_paused;
 }
 
+bool Player::buffering() const {
+    return _pausedByWaitingForData;
+}
+
 bool Player::paused() const {
-    return _paused;
+    return _pausedByUser;
 }
 
 void Player::setSpeed(float64 speed) {
@@ -46,6 +46,7 @@ public:
 
     [[nodiscard]] bool failed() const;
     [[nodiscard]] bool playing() const;
+    [[nodiscard]] bool buffering() const;
     [[nodiscard]] bool paused() const;
 
     [[nodiscard]] rpl::producer<Update, Error> updates() const;
@@ -57,9 +58,6 @@ public:
     ~Player();
 
 private:
-    static constexpr auto kReceivedTillEnd
-        = std::numeric_limits<crl::time>::max();
-
     enum class Stage {
         Uninitialized,
         Initializing,
@@ -91,12 +89,21 @@ private:
     void videoReceivedTill(crl::time position);
     void videoPlayedTill(crl::time position);
 
+    void updatePausedState();
+    [[nodiscard]] bool trackReceivedEnough(const TrackState &state) const;
+    void checkResumeFromWaitingForData();
+
     template <typename Track>
     void trackReceivedTill(
         const Track &track,
         TrackState &state,
         crl::time position);
 
+    template <typename Track>
+    void trackSendReceivedTill(
+        const Track &track,
+        TrackState &state);
+
     template <typename Track>
     void trackPlayedTill(
         const Track &track,
@@ -121,6 +128,8 @@ private:
     // Belongs to the main thread.
     Information _information;
     Stage _stage = Stage::Uninitialized;
+    bool _pausedByUser = false;
+    bool _pausedByWaitingForData = false;
    bool _paused = false;
    bool _audioFinished = false;
    bool _videoFinished = false;
@@ -130,7 +139,9 @@ private:
     crl::time _nextFrameTime = kTimeUnknown;
     base::Timer _renderFrameTimer;
     rpl::event_stream<Update, Error> _updates;
+
     rpl::lifetime _lifetime;
+    rpl::lifetime _sessionLifetime;
 
 };
 
@@ -36,6 +36,7 @@ public:
     void process(Packet &&packet);
 
     [[nodisacrd]] rpl::producer<crl::time> displayFrameAt() const;
+    [[nodisacrd]] rpl::producer<> waitingForData() const;
 
     void pause(crl::time time);
     void resume(crl::time time);
@@ -76,6 +77,7 @@ private:
     mutable TimePoint _syncTimePoint;
     mutable crl::time _previousFramePosition = kTimeUnknown;
     rpl::variable<crl::time> _nextFrameDisplayTime = kTimeUnknown;
+    rpl::event_stream<> _waitingForData;
 
     bool _queued = false;
     base::ConcurrentTimer _readFramesTimer;
@@ -111,6 +113,10 @@ rpl::producer<crl::time> VideoTrackObject::displayFrameAt() const {
         : _nextFrameDisplayTime.value();
 }
 
+rpl::producer<> VideoTrackObject::waitingForData() const {
+    return interrupted() ? rpl::never() : _waitingForData.events();
+}
+
 void VideoTrackObject::process(Packet &&packet) {
     if (interrupted()) {
         return;
@@ -162,6 +168,8 @@ bool VideoTrackObject::readFrame(not_null<Frame*> frame) {
     } else if (error.code() != AVERROR(EAGAIN) || _noMoreData) {
         interrupt();
         _error();
+    } else if (_stream.queue.empty()) {
+        _waitingForData.fire({});
     }
     return false;
 }
@@ -604,6 +612,12 @@ rpl::producer<crl::time> VideoTrack::renderNextFrame() const {
     });
 }
 
+rpl::producer<> VideoTrack::waitingForData() const {
+    return _wrapped.producer_on_main([](const Implementation &unwrapped) {
+        return unwrapped.waitingForData();
+    });
+}
+
 VideoTrack::~VideoTrack() {
     _wrapped.with([shared = std::move(_shared)](Implementation &unwrapped) {
         unwrapped.interrupt();
@@ -48,6 +48,7 @@ public:
     [[nodiscard]] crl::time markFrameDisplayed(crl::time now);
     [[nodiscard]] QImage frame(const FrameRequest &request) const;
     [[nodiscard]] rpl::producer<crl::time> renderNextFrame() const;
+    [[nodiscard]] rpl::producer<> waitingForData() const;
 
     // Called from the main thread.
     ~VideoTrack();
@@ -131,7 +131,7 @@ void Controller::updatePlayback(const Player::TrackState &state) {
 }
 
 void Controller::updatePlayPauseResumeState(const Player::TrackState &state) {
-    auto showPause = (state.state == Player::State::Playing || state.state == Player::State::Resuming || _seekPositionMs >= 0);
+    auto showPause = ShowPauseIcon(state.state) || (_seekPositionMs >= 0);
     if (showPause != _showPause) {
         disconnect(_playPauseResume, SIGNAL(clicked()), this, _showPause ? SIGNAL(pausePressed()) : SIGNAL(playPressed()));
         _showPause = showPause;
@@ -854,7 +854,7 @@ bool Voice::updateStatusText() {
         if (state.id == AudioMsgId(_data, parent()->fullId(), state.id.playId()) && !Media::Player::IsStoppedOrStopping(state.state)) {
             statusSize = -1 - (state.position / state.frequency);
             realDuration = (state.length / state.frequency);
-            showPause = (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
+            showPause = Media::Player::ShowPauseIcon(state.state);
         }
     } else {
         statusSize = FileStatusSizeReady;
@@ -1225,7 +1225,7 @@ bool Document::updateStatusText() {
         if (state.id == AudioMsgId(_data, parent()->fullId()) && !Media::Player::IsStoppedOrStopping(state.state)) {
             statusSize = -1 - (state.position / state.frequency);
             realDuration = (state.length / state.frequency);
-            showPause = (state.state == State::Playing || state.state == State::Resuming || state.state == State::Starting);
+            showPause = Media::Player::ShowPauseIcon(state.state);
         }
         if (!showPause && (state.id == AudioMsgId(_data, parent()->fullId())) && Media::Player::instance()->isSeeking(AudioMsgId::Type::Song)) {
            showPause = true;