Implement precise seek in streaming.

This commit is contained in:
John Preston 2019-02-22 16:39:32 +04:00
parent 44c562d8ba
commit 3e9b811875
6 changed files with 70 additions and 46 deletions

View File

@ -302,6 +302,7 @@ void StartStreaming(
static auto options = Media::Streaming::PlaybackOptions();
static auto speed = 1.;
static auto step = pow(2., 1. / 12);
static auto frame = QImage();
class Panel
#if defined Q_OS_MAC && !defined OS_MAC_OLD
@ -339,6 +340,9 @@ void StartStreaming(
player->pause();
}
void mouseReleaseEvent(QMouseEvent *e) override {
if (player->ready()) {
frame = player->frame({});
}
preloaded = position = options.position = std::clamp(
(duration * e->pos().x()) / width(),
crl::time(0),
@ -364,6 +368,7 @@ void StartStreaming(
options.speed = speed;
//options.syncVideoByAudio = false;
preloaded = position = options.position = 0;
frame = QImage();
player->play(options);
player->updates(
) | rpl::start_with_next_error_done([=](Update &&update) {
@ -391,7 +396,11 @@ void StartStreaming(
if (player->ready()) {
Painter(video.get()).drawImage(
video->rect(),
player->frame(FrameRequest()));
player->frame({}));
} else if (!frame.isNull()) {
Painter(video.get()).drawImage(
video->rect(),
frame);
} else {
Painter(video.get()).fillRect(
rect,

View File

@ -61,29 +61,51 @@ bool AudioTrack::initialized() const {
}
// Tries to decode the first audio frame after a seek.
// Returns false on a hard failure, true when either the first frame was
// accepted (track becomes ready) or more packets are still required.
//
// NOTE(review): the stripped diff left a stray closing brace here (old
// removed line) that unbalanced the else-chain; removed it so the
// EAGAIN "wait for more packets" branch parses as intended.
bool AudioTrack::tryReadFirstFrame(Packet &&packet) {
	// #TODO streaming fix seek to the end.
	if (ProcessPacket(_stream, std::move(packet)).failed()) {
		return false;
	}
	if (const auto error = ReadNextFrame(_stream)) {
		if (error.code() == AVERROR_EOF) {
			// #TODO streaming fix seek to the end.
			if (!_initialSkippingFrame) {
				return false;
			}
			// Return the last valid frame if we seek too far.
			_stream.frame = std::move(_initialSkippingFrame);
			return processFirstFrame();
		} else if (error.code() != AVERROR(EAGAIN) || _noMoreData) {
			return false;
		} else {
			// Waiting for more packets.
			return true;
		}
	} else if (!fillStateFromFrame()) {
		return false;
	} else if (_startedPosition < _options.position) {
		// Seek was with AVSEEK_FLAG_BACKWARD so first we get old frames.
		// Try skipping frames until one is after the requested position.
		std::swap(_initialSkippingFrame, _stream.frame);
		if (!_stream.frame) {
			_stream.frame = MakeFramePointer();
		}
		return true;
	} else {
		return processFirstFrame();
	}
}
// Accepts the already-decoded first frame: hands the track state to the
// audio mixer and fires the ready callback. Always reports success.
// NOTE(review): order matters — presumably callReady() observes state set
// up by mixerInit(); confirm before reordering.
bool AudioTrack::processFirstFrame() {
mixerInit();
callReady();
return true;
}
// Reads the position of the current decoded frame into _startedPosition.
// Returns false (leaving _startedPosition untouched) when the frame
// carries no usable timestamp.
//
// NOTE(review): the stripped diff had left the old two-line body here as
// unreachable code ahead of this one; removed the dead lines.
bool AudioTrack::fillStateFromFrame() {
	const auto position = FramePosition(_stream);
	if (position == kTimeUnknown) {
		return false;
	}
	_startedPosition = position;
	return true;
}
void AudioTrack::mixerInit() {

View File

@ -52,6 +52,7 @@ private:
[[nodiscard]] bool initialized() const;
[[nodiscard]] bool tryReadFirstFrame(Packet &&packet);
[[nodiscard]] bool fillStateFromFrame();
[[nodiscard]] bool processFirstFrame();
void mixerInit();
void mixerEnqueue(Packet &&packet);
void mixerForceToBuffer();
@ -78,6 +79,9 @@ private:
// After that accessed from the main thread.
rpl::variable<crl::time> _playPosition;
// For initial frame skipping for an exact seek.
FramePointer _initialSkippingFrame;
};
} // namespace Streaming

View File

@ -110,15 +110,8 @@ Stream File::Context::initStream(AVMediaType type) {
}
const auto info = _formatContext->streams[index];
result.codec = MakeCodecPointer(info);
if (!result.codec) {
return {};
}
if (type == AVMEDIA_TYPE_VIDEO) {
const auto codec = result.codec.get();
result.rotation = ReadRotationFromMetadata(info);
result.dimensions = QSize(codec->width, codec->height);
} else if (type == AVMEDIA_TYPE_AUDIO) {
result.frequency = info->codecpar->sample_rate;
if (!result.frequency) {
@ -126,6 +119,11 @@ Stream File::Context::initStream(AVMediaType type) {
}
}
result.codec = MakeCodecPointer(info);
if (!result.codec) {
return {};
}
result.frame = MakeFramePointer();
if (!result.frame) {
return {};

View File

@ -154,7 +154,6 @@ struct Stream {
// Video only.
int rotation = 0;
QSize dimensions;
SwsContextPointer swsContext;
};

View File

@ -47,10 +47,7 @@ private:
[[nodiscard]] bool interrupted() const;
[[nodiscard]] bool tryReadFirstFrame(Packet &&packet);
[[nodiscard]] bool fillStateFromFrame();
[[nodiscard]] bool fillStateFromFakeLastFrame();
[[nodiscard]] bool fillStateFromFrameTime(crl::time frameTime);
[[nodiscard]] QImage createFakeLastFrame() const;
[[nodiscard]] bool processFirstFrame(QImage frame);
[[nodiscard]] bool processFirstFrame();
void queueReadFrames(crl::time delay = 0);
void readFrames();
[[nodiscard]] bool readFrame(not_null<Frame*> frame);
@ -83,6 +80,9 @@ private:
bool _queued = false;
base::ConcurrentTimer _readFramesTimer;
// For initial frame skipping for an exact seek.
FramePointer _initialSkippingFrame;
};
VideoTrackObject::VideoTrackObject(
@ -259,10 +259,12 @@ bool VideoTrackObject::tryReadFirstFrame(Packet &&packet) {
auto frame = QImage();
if (const auto error = ReadNextFrame(_stream)) {
if (error.code() == AVERROR_EOF) {
if (!fillStateFromFakeLastFrame()) {
if (!_initialSkippingFrame) {
return false;
}
return processFirstFrame(createFakeLastFrame());
// Return the last valid frame if we seek too far.
_stream.frame = std::move(_initialSkippingFrame);
return processFirstFrame();
} else if (error.code() != AVERROR(EAGAIN) || _noMoreData) {
return false;
} else {
@ -271,22 +273,21 @@ bool VideoTrackObject::tryReadFirstFrame(Packet &&packet) {
}
} else if (!fillStateFromFrame()) {
return false;
} else if (_syncTimePoint.trackTime < _options.position) {
// Seek was with AVSEEK_FLAG_BACKWARD so first we get old frames.
// Try skipping frames until one is after the requested position.
std::swap(_initialSkippingFrame, _stream.frame);
if (!_stream.frame) {
_stream.frame = MakeFramePointer();
}
return true;
} else {
return processFirstFrame();
}
return processFirstFrame(ConvertFrame(_stream, QSize(), QImage()));
}
// Builds a solid black stand-in frame at the stream's reported size, for
// seeks that land past the last decodable frame. Returns a null image
// when the codec supplied no valid dimensions.
QImage VideoTrackObject::createFakeLastFrame() const {
	const auto size = _stream.dimensions;
	if (size.isEmpty()) {
		LOG(("Streaming Error: Can't seek to the end of the video "
			"in case the codec doesn't provide valid dimensions."));
		return QImage();
	}
	auto black = CreateImageForOriginalFrame(size);
	black.fill(Qt::black);
	return black;
}
bool VideoTrackObject::processFirstFrame(QImage frame) {
bool VideoTrackObject::processFirstFrame() {
auto frame = ConvertFrame(_stream, QSize(), QImage());
if (frame.isNull()) {
return false;
}
@ -312,20 +313,11 @@ crl::time VideoTrackObject::currentFramePosition() const {
}
bool VideoTrackObject::fillStateFromFrame() {
return fillStateFromFrameTime(currentFramePosition());
}
bool VideoTrackObject::fillStateFromFakeLastFrame() {
return fillStateFromFrameTime(_stream.duration);
}
bool VideoTrackObject::fillStateFromFrameTime(crl::time frameTime) {
Expects(_syncTimePoint.trackTime == kTimeUnknown);
if (frameTime == kTimeUnknown) {
const auto position = currentFramePosition();
if (position == kTimeUnknown) {
return false;
}
_syncTimePoint.trackTime = frameTime;
_syncTimePoint.trackTime = position;
return true;
}