/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.
For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#include "media/streaming/media_streaming_video_track.h"
#include "ffmpeg/ffmpeg_utility.h"
#include "media/audio/media_audio.h"
#include "base/concurrent_timer.h"
#include "core/crash_reports.h"
#include "base/debug_log.h"
namespace Media {
namespace Streaming {
namespace {
constexpr auto kMaxFrameArea = 3840 * 2160; // usual 4K
constexpr auto kDisplaySkipped = crl::time(-1);
constexpr auto kFinishedPosition = std::numeric_limits<crl::time>::max();
static_assert(kDisplaySkipped != kTimeUnknown);
[[nodiscard]] QImage ConvertToARGB32(
FrameFormat format,
const FrameYUV &data) {
Expects(data.y.data != nullptr);
Expects(data.u.data != nullptr);
Expects((format == FrameFormat::NV12) || (data.v.data != nullptr));
Expects(!data.size.isEmpty());
//if (FFmpeg::RotationSwapWidthHeight(stream.rotation)) {
// resize.transpose();
//}
auto result = FFmpeg::CreateFrameStorage(data.size);
const auto swscale = FFmpeg::MakeSwscalePointer(
data.size,
(format == FrameFormat::YUV420
? AV_PIX_FMT_YUV420P
: AV_PIX_FMT_NV12),
data.size,
AV_PIX_FMT_BGRA);
if (!swscale) {
return QImage();
}
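// For NV12 the u field holds the interleaved UV plane and data.v.data is
// null, which matches the plane layout sws_scale expects for that format.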
// AV_NUM_DATA_POINTERS is the size of the data/linesize arrays in AVFrame.
const uint8_t *srcData[AV_NUM_DATA_POINTERS] = {
static_cast<const uint8_t*>(data.y.data),
static_cast<const uint8_t*>(data.u.data),
static_cast<const uint8_t*>(data.v.data),
nullptr,
};
int srcLinesize[AV_NUM_DATA_POINTERS] = {
data.y.stride,
data.u.stride,
data.v.stride,
0,
};
uint8_t *dstData[AV_NUM_DATA_POINTERS] = { result.bits(), nullptr };
int dstLinesize[AV_NUM_DATA_POINTERS] = { int(result.bytesPerLine()), 0 };
sws_scale(
swscale.get(),
srcData,
srcLinesize,
0,
data.size.height(),
dstData,
dstLinesize);
return result;
}
} // namespace
class VideoTrackObject final {
public:
using Frame = VideoTrack::Frame;
using Shared = VideoTrack::Shared;
VideoTrackObject(
crl::weak_on_queue<VideoTrackObject> weak,
const PlaybackOptions &options,
not_null<Shared*> shared,
Stream &&stream,
const AudioMsgId &audioId,
FnMut<void(const Information &)> ready,
Fn<void(Error)> error);
void process(std::vector<FFmpeg::Packet> &&packets);
[[nodiscard]] rpl::producer<> checkNextFrame() const;
[[nodiscard]] rpl::producer<> waitingForData() const;
void pause(crl::time time);
void resume(crl::time time);
void setSpeed(float64 speed);
void setWaitForMarkAsShown(bool wait);
void interrupt();
void frameShown();
void addTimelineDelay(crl::time delayed);
void updateFrameRequest(
const Instance *instance,
const FrameRequest &request);
void removeFrameRequest(const Instance *instance);
void rasterizeFrame(not_null<Frame*> frame);
[[nodiscard]] bool requireARGB32() const;
private:
enum class FrameResult {
Done,
Error,
Waiting,
Looped,
Finished,
};
using ReadEnoughState = std::variant<
v::null_t,
FrameResult,
Shared::PrepareNextCheck>;
void fail(Error error);
[[nodiscard]] bool interrupted() const;
[[nodiscard]] bool tryReadFirstFrame(FFmpeg::Packet &&packet);
[[nodiscard]] bool fillStateFromFrame();
[[nodiscard]] bool processFirstFrame();
void queueReadFrames(crl::time delay = 0);
void readFrames();
[[nodiscard]] ReadEnoughState readEnoughFrames(crl::time trackTime);
[[nodiscard]] FrameResult readFrame(not_null<Frame*> frame);
void fillRequests(not_null<Frame*> frame) const;
[[nodiscard]] QSize chooseOriginalResize(QSize encoded) const;
void presentFrameIfNeeded();
void callReady();
[[nodiscard]] bool loopAround();
[[nodiscard]] crl::time computeDuration() const;
[[nodiscard]] crl::time durationByPacket(const FFmpeg::Packet &packet);
// Force frame position to be clamped to [0, duration] and monotonic.
[[nodiscard]] crl::time currentFramePosition() const;
[[nodiscard]] TimePoint trackTime() const;
const crl::weak_on_queue<VideoTrackObject> _weak;
PlaybackOptions _options;
// Main thread wrapper destructor will set _shared back to nullptr.
// All queued method calls after that should be discarded.
Shared *_shared = nullptr;
Stream _stream;
AudioMsgId _audioId;
bool _readTillEnd = false;
FnMut<void(const Information &)> _ready;
Fn<void(Error)> _error;
crl::time _pausedTime = kTimeUnknown;
crl::time _resumedTime = kTimeUnknown;
int _frameIndex = 0;
crl::time _durationByLastPacket = 0;
mutable TimePoint _syncTimePoint;
crl::time _loopingShift = 0;
rpl::event_stream<> _checkNextFrame;
rpl::event_stream<> _waitingForData;
base::flat_map<const Instance*, FrameRequest> _requests;
bool _queued = false;
base::ConcurrentTimer _readFramesTimer;
// For initial frame skipping for an exact seek.
FFmpeg::FramePointer _initialSkippingFrame;
};
VideoTrackObject::VideoTrackObject(
crl::weak_on_queue<VideoTrackObject> weak,
const PlaybackOptions &options,
not_null<Shared*> shared,
Stream &&stream,
const AudioMsgId &audioId,
FnMut<void(const Information &)> ready,
Fn<void(Error)> error)
: _weak(std::move(weak))
, _options(options)
, _shared(shared)
, _stream(std::move(stream))
, _audioId(audioId)
, _ready(std::move(ready))
, _error(std::move(error))
, _readFramesTimer(_weak, [=] { readFrames(); }) {
Expects(_stream.duration > 1);
Expects(_ready != nullptr);
Expects(_error != nullptr);
}
rpl::producer<> VideoTrackObject::checkNextFrame() const {
return interrupted()
? (rpl::complete<>() | rpl::type_erased())
: !_shared->firstPresentHappened()
? (_checkNextFrame.events() | rpl::type_erased())
: _checkNextFrame.events_starting_with({});
}
rpl::producer<> VideoTrackObject::waitingForData() const {
return interrupted()
? (rpl::never() | rpl::type_erased())
: _waitingForData.events();
}
void VideoTrackObject::process(std::vector<FFmpeg::Packet> &&packets) {
if (interrupted() || packets.empty()) {
return;
}
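// An empty packet is the end-of-stream marker, so it always comes alone.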
if (packets.front().empty()) {
Assert(packets.size() == 1);
_readTillEnd = true;
} else if (!_readTillEnd) {
//for (const auto &packet : packets) {
// // Maybe it is enough to count by list.back()?.. hope so.
// accumulate_max(
// _durationByLastPacket,
// durationByPacket(packet));
// if (interrupted()) {
// return;
// }
//}
accumulate_max(
_durationByLastPacket,
durationByPacket(packets.back()));
if (interrupted()) {
return;
}
}
for (auto i = begin(packets), e = end(packets); i != e; ++i) {
if (_shared->initialized()) {
_stream.queue.insert(
end(_stream.queue),
std::make_move_iterator(i),
std::make_move_iterator(e));
queueReadFrames();
break;
} else if (!tryReadFirstFrame(std::move(*i))) {
fail(Error::InvalidData);
break;
}
}
}
crl::time VideoTrackObject::durationByPacket(const FFmpeg::Packet &packet) {
// We've set this value on the first cycle.
if (_loopingShift || _stream.duration != kDurationUnavailable) {
return 0;
}
const auto result = FFmpeg::DurationByPacket(packet, _stream.timeBase);
if (result < 0) {
fail(Error::InvalidData);
return 0;
}
Ensures(result > 0);
return result;
}
void VideoTrackObject::queueReadFrames(crl::time delay) {
if (delay > 0) {
_readFramesTimer.callOnce(delay);
} else if (!_queued) {
_queued = true;
_weak.with([](VideoTrackObject &that) {
that._queued = false;
that.readFrames();
});
}
}
void VideoTrackObject::readFrames() {
if (interrupted()) {
return;
}
auto time = trackTime().trackTime;
while (true) {
const auto result = readEnoughFrames(time);
v::match(result, [&](FrameResult result) {
if (result == FrameResult::Done
|| result == FrameResult::Finished) {
presentFrameIfNeeded();
} else if (result == FrameResult::Looped) {
const auto duration = computeDuration();
Assert(duration != kDurationUnavailable);
time -= duration;
}
}, [&](Shared::PrepareNextCheck delay) {
Expects(delay == kTimeUnknown || delay > 0);
if (delay != kTimeUnknown) {
queueReadFrames(delay);
}
}, [](v::null_t) {
});
if (!v::is_null(result)) {
break;
}
}
}
auto VideoTrackObject::readEnoughFrames(crl::time trackTime)
-> ReadEnoughState {
const auto dropStaleFrames = !_options.waitForMarkAsShown;
const auto state = _shared->prepareState(trackTime, dropStaleFrames);
return v::match(state, [&](Shared::PrepareFrame frame)
-> ReadEnoughState {
while (true) {
const auto result = readFrame(frame);
if (result != FrameResult::Done) {
return result;
} else if (!dropStaleFrames
|| !VideoTrack::IsStale(frame, trackTime)) {
return v::null;
}
}
}, [&](Shared::PrepareNextCheck delay) -> ReadEnoughState {
return delay;
}, [&](v::null_t) -> ReadEnoughState {
return FrameResult::Done;
});
}
bool VideoTrackObject::loopAround() {
const auto duration = computeDuration();
if (duration == kDurationUnavailable) {
LOG(("Streaming Error: "
"Couldn't find out the real video stream duration."));
return false;
}
avcodec_flush_buffers(_stream.codec.get());
_frameIndex = 0;
_loopingShift += duration;
_readTillEnd = false;
return true;
}
crl::time VideoTrackObject::computeDuration() const {
if (_stream.duration != kDurationUnavailable) {
return _stream.duration;
} else if ((_loopingShift || _readTillEnd) && _durationByLastPacket) {
// We looped or read till the end, so it holds the full stream duration.
return _durationByLastPacket;
}
return kDurationUnavailable;
}
auto VideoTrackObject::readFrame(not_null<Frame*> frame) -> FrameResult {
if (const auto error = ReadNextFrame(_stream)) {
if (error.code() == AVERROR_EOF) {
if (!_options.loop) {
frame->position = kFinishedPosition;
frame->displayed = kTimeUnknown;
return FrameResult::Finished;
} else if (loopAround()) {
return FrameResult::Looped;
} else {
fail(Error::InvalidData);
return FrameResult::Error;
}
} else if (error.code() != AVERROR(EAGAIN) || _readTillEnd) {
fail(Error::InvalidData);
return FrameResult::Error;
}
Assert(_stream.queue.empty());
_waitingForData.fire({});
return FrameResult::Waiting;
}
const auto position = currentFramePosition();
if (position == kTimeUnknown) {
fail(Error::InvalidData);
return FrameResult::Error;
}
std::swap(frame->decoded, _stream.decodedFrame);
std::swap(frame->transferred, _stream.transferredFrame);
frame->index = _frameIndex++;
frame->position = position;
frame->displayed = kTimeUnknown;
return FrameResult::Done;
}
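// Merge the current _requests into frame->prepared. Both maps are ordered
// by instance pointer, so one linear pass can erase entries for removed
// instances and insert entries for new ones.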
void VideoTrackObject::fillRequests(not_null<Frame*> frame) const {
auto i = frame->prepared.begin();
for (const auto &[instance, request] : _requests) {
while (i != frame->prepared.end() && i->first < instance) {
i = frame->prepared.erase(i);
}
if (i == frame->prepared.end() || i->first > instance) {
i = frame->prepared.emplace(instance, request).first;
}
++i;
}
while (i != frame->prepared.end()) {
i = frame->prepared.erase(i);
}
}
QSize VideoTrackObject::chooseOriginalResize(QSize encoded) const {
auto chosen = QSize();
if (FFmpeg::RotationSwapWidthHeight(_stream.rotation)) {
encoded.transpose();
}
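// Pick a single resize target that covers every request. If two requests
// conflict (one wider, the other taller), return an empty size so the
// frame is converted at its original dimensions.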
for (const auto &[_, request] : _requests) {
const auto resize = request.blurredBackground
? CalculateResizeFromOuter(request.outer, encoded)
: request.resize;
if (resize.isEmpty()) {
return QSize();
}
const auto byWidth = (resize.width() >= chosen.width());
const auto byHeight = (resize.height() >= chosen.height());
if (byWidth && byHeight) {
chosen = resize;
} else if (byWidth || byHeight) {
return QSize();
}
}
return chosen;
}
bool VideoTrackObject::requireARGB32() const {
for (const auto &[_, request] : _requests) {
if (!request.requireARGB32) {
return false;
}
}
return true;
}
void VideoTrackObject::rasterizeFrame(not_null<Frame*> frame) {
Expects(frame->position != kFinishedPosition);
fillRequests(frame);
frame->format = FrameFormat::None;
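// Hardware-accelerated decoding leaves the frame in GPU memory, so it
// must be transferred to a CPU-side frame before conversion.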
if (frame->decoded->hw_frames_ctx) {
if (!frame->transferred) {
frame->transferred = FFmpeg::MakeFramePointer();
}
const auto success = TransferFrame(
_stream,
frame->decoded.get(),
frame->transferred.get());
if (!success) {
frame->prepared.clear();
fail(Error::InvalidData);
return;
}
} else {
frame->transferred = nullptr;
}
const auto frameWithData = frame->transferred
? frame->transferred.get()
: frame->decoded.get();
if ((frameWithData->format == AV_PIX_FMT_YUV420P
|| frameWithData->format == AV_PIX_FMT_NV12) && !requireARGB32()) {
const auto nv12 = (frameWithData->format == AV_PIX_FMT_NV12);
frame->alpha = false;
frame->yuv = ExtractYUV(_stream, frameWithData);
if (frame->yuv.size.isEmpty()
|| frame->yuv.chromaSize.isEmpty()
|| !frame->yuv.y.data
|| !frame->yuv.u.data
|| (!nv12 && !frame->yuv.v.data)) {
frame->prepared.clear();
fail(Error::InvalidData);
return;
}
if (!frame->original.isNull()) {
frame->original = QImage();
for (auto &[_, prepared] : frame->prepared) {
prepared.image = QImage();
}
}
frame->format = nv12 ? FrameFormat::NV12 : FrameFormat::YUV420;
} else {
frame->alpha = (frameWithData->format == AV_PIX_FMT_BGRA)
|| (frameWithData->format == AV_PIX_FMT_YUVA420P);
frame->yuv.size = {
frameWithData->width,
frameWithData->height
};
frame->original = ConvertFrame(
_stream,
frameWithData,
chooseOriginalResize(
{ frameWithData->width, frameWithData->height }),
std::move(frame->original));
if (frame->original.isNull()) {
frame->prepared.clear();
fail(Error::InvalidData);
return;
}
frame->format = FrameFormat::ARGB32;
}
VideoTrack::PrepareFrameByRequests(
frame,
_stream.aspect,
_stream.rotation);
Ensures(VideoTrack::IsRasterized(frame));
}
void VideoTrackObject::presentFrameIfNeeded() {
if (_pausedTime != kTimeUnknown || _resumedTime == kTimeUnknown) {
return;
}
const auto dropStaleFrames = !_options.waitForMarkAsShown;
const auto time = trackTime();
const auto presented = _shared->presentFrame(
this,
time,
_options.speed,
dropStaleFrames);
addTimelineDelay(presented.addedWorldTimeDelay);
if (presented.displayPosition == kFinishedPosition) {
interrupt();
_checkNextFrame = rpl::event_stream<>();
return;
} else if (presented.displayPosition != kTimeUnknown) {
_checkNextFrame.fire({});
}
if (presented.nextCheckDelay != kTimeUnknown) {
Assert(presented.nextCheckDelay >= 0);
queueReadFrames(presented.nextCheckDelay);
}
}
void VideoTrackObject::pause(crl::time time) {
Expects(_syncTimePoint.valid());
if (interrupted()) {
return;
} else if (_pausedTime == kTimeUnknown) {
2019-02-21 14:57:00 +00:00
_pausedTime = time;
}
}
void VideoTrackObject::resume(crl::time time) {
Expects(_syncTimePoint.trackTime != kTimeUnknown);
if (interrupted()) {
return;
}
// Resumed time used to validate sync to audio.
_resumedTime = time;
if (_pausedTime != kTimeUnknown) {
Assert(_pausedTime <= time);
_syncTimePoint.worldTime += (time - _pausedTime);
_pausedTime = kTimeUnknown;
} else {
_syncTimePoint.worldTime = time;
}
queueReadFrames();
Ensures(_syncTimePoint.valid());
Ensures(_pausedTime == kTimeUnknown);
}
void VideoTrackObject::setSpeed(float64 speed) {
if (interrupted()) {
return;
}
if (_syncTimePoint.valid()) {
const auto time = trackTime();
_syncTimePoint = time;
}
_options.speed = speed;
}
void VideoTrackObject::setWaitForMarkAsShown(bool wait) {
if (interrupted()) {
return;
}
_options.waitForMarkAsShown = wait;
}
bool VideoTrackObject::interrupted() const {
return !_shared;
}
void VideoTrackObject::frameShown() {
if (interrupted()) {
return;
}
queueReadFrames();
}
void VideoTrackObject::addTimelineDelay(crl::time delayed) {
Expects(_syncTimePoint.valid());
if (!delayed) {
return;
}
_syncTimePoint.worldTime += delayed;
}
void VideoTrackObject::updateFrameRequest(
const Instance *instance,
const FrameRequest &request) {
_requests[instance] = request;
}
void VideoTrackObject::removeFrameRequest(const Instance *instance) {
_requests.remove(instance);
}
bool VideoTrackObject::tryReadFirstFrame(FFmpeg::Packet &&packet) {
if (ProcessPacket(_stream, std::move(packet)).failed()) {
return false;
}
while (true) {
if (const auto error = ReadNextFrame(_stream)) {
if (error.code() == AVERROR_EOF) {
if (!_initialSkippingFrame) {
return false;
}
// Return the last valid frame if we seek too far.
_stream.decodedFrame = std::move(_initialSkippingFrame);
return processFirstFrame();
} else if (error.code() != AVERROR(EAGAIN) || _readTillEnd) {
return false;
} else {
// Waiting for more packets.
return true;
}
} else if (!fillStateFromFrame()) {
return false;
} else if (_syncTimePoint.trackTime >= _options.position) {
return processFirstFrame();
}
// Seek was with AVSEEK_FLAG_BACKWARD so first we get old frames.
// Try skipping frames until one is after the requested position.
std::swap(_initialSkippingFrame, _stream.decodedFrame);
if (!_stream.decodedFrame) {
_stream.decodedFrame = FFmpeg::MakeFramePointer();
}
}
}
bool VideoTrackObject::processFirstFrame() {
const auto decodedFrame = _stream.decodedFrame.get();
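// Reject video with a frame area above kMaxFrameArea (roughly 4K).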
if (decodedFrame->width * decodedFrame->height > kMaxFrameArea) {
return false;
} else if (decodedFrame->hw_frames_ctx) {
if (!_stream.transferredFrame) {
_stream.transferredFrame = FFmpeg::MakeFramePointer();
}
const auto success = TransferFrame(
_stream,
decodedFrame,
_stream.transferredFrame.get());
if (!success) {
LOG(("Video Error: Failed accelerated decoding from format %1."
).arg(int(decodedFrame->format)));
return false;
}
DEBUG_LOG(("Video Info: "
"Using accelerated decoding from format %1 to format %2."
).arg(int(decodedFrame->format)
).arg(int(_stream.transferredFrame->format)));
} else {
_stream.transferredFrame = nullptr;
}
const auto frameWithData = _stream.transferredFrame
? _stream.transferredFrame.get()
: decodedFrame;
const auto alpha = (frameWithData->format == AV_PIX_FMT_BGRA)
|| (frameWithData->format == AV_PIX_FMT_YUVA420P);
auto frame = ConvertFrame(
_stream,
frameWithData,
QSize(),
QImage());
if (frame.isNull()) {
return false;
}
_shared->init(std::move(frame), alpha, _syncTimePoint.trackTime);
callReady();
queueReadFrames();
return true;
}
crl::time VideoTrackObject::currentFramePosition() const {
const auto position = FramePosition(_stream);
if (position == kTimeUnknown || position == kFinishedPosition) {
return kTimeUnknown;
}
return _loopingShift + std::clamp(
position,
crl::time(0),
computeDuration() - 1);
}
bool VideoTrackObject::fillStateFromFrame() {
const auto position = currentFramePosition();
if (position == kTimeUnknown) {
2019-02-21 14:57:00 +00:00
return false;
}
_syncTimePoint.trackTime = position;
return true;
}
void VideoTrackObject::callReady() {
Expects(_ready != nullptr);
const auto frame = _shared->frameForPaint();
++_frameIndex;
base::take(_ready)({ VideoInformation{
.state = {
.position = _syncTimePoint.trackTime,
.receivedTill = (_readTillEnd
? _stream.duration
: _syncTimePoint.trackTime),
.duration = _stream.duration,
},
.size = FFmpeg::TransposeSizeByRotation(
FFmpeg::CorrectByAspect(frame->original.size(), _stream.aspect),
_stream.rotation),
.cover = frame->original,
.rotation = _stream.rotation,
.alpha = frame->alpha,
} });
}
TimePoint VideoTrackObject::trackTime() const {
auto result = TimePoint();
result.worldTime = (_pausedTime != kTimeUnknown)
? _pausedTime
: crl::now();
if (!_syncTimePoint) {
result.trackTime = _syncTimePoint.trackTime;
return result;
}
Assert(_resumedTime != kTimeUnknown);
if (_options.syncVideoByAudio && _audioId.externalPlayId()) {
const auto mixer = Media::Player::mixer();
const auto point = mixer->getExternalSyncTimePoint(_audioId);
if (point && point.worldTime > _resumedTime) {
_syncTimePoint = point;
}
}
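// Project the track position forward from the sync point, scaling the
// elapsed world time by the playback speed:
// trackTime = syncTrack + round((worldNow - syncWorld) * speed).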
const auto adjust = (result.worldTime - _syncTimePoint.worldTime);
const auto adjustSpeed = adjust * _options.speed;
const auto roundAdjustSpeed = base::SafeRound(adjustSpeed);
const auto timeRoundAdjustSpeed = crl::time(roundAdjustSpeed);
result.trackTime = _syncTimePoint.trackTime + timeRoundAdjustSpeed;
return result;
}
void VideoTrackObject::interrupt() {
_shared = nullptr;
}
void VideoTrackObject::fail(Error error) {
interrupt();
_error(error);
}
void VideoTrack::Shared::init(
QImage &&cover,
bool hasAlpha,
crl::time position) {
Expects(!initialized());
_frames[0].original = std::move(cover);
_frames[0].position = position;
_frames[0].format = FrameFormat::ARGB32;
_frames[0].alpha = hasAlpha;
// Usually main thread sets displayed time before _counter increment.
// But in this case we update _counter, so we set a fake displayed time.
_frames[0].displayed = kDisplaySkipped;
_delay = 0;
_counter.store(0, std::memory_order_release);
}
int VideoTrack::Shared::counter() const {
return _counter.load(std::memory_order_acquire);
}
bool VideoTrack::Shared::initialized() const {
return (counter() != kCounterUninitialized);
}
not_null<VideoTrack::Frame*> VideoTrack::Shared::getFrame(int index) {
Expects(index >= 0 && index < kFramesCount);
return &_frames[index];
}
not_null<const VideoTrack::Frame*> VideoTrack::Shared::getFrame(
int index) const {
Expects(index >= 0 && index < kFramesCount);
return &_frames[index];
}
auto VideoTrack::Shared::prepareState(
crl::time trackTime,
bool dropStaleFrames)
-> PrepareState {
const auto prepareNext = [&](int index) -> PrepareState {
const auto frame = getFrame(index);
const auto next = getFrame((index + 1) % kFramesCount);
if (!IsDecoded(frame)) {
return frame;
} else if (!IsDecoded(next)) {
return next;
} else if (next->position < frame->position) {
std::swap(*frame, *next);
}
if (next->position == kFinishedPosition || !dropStaleFrames) {
return PrepareNextCheck(kTimeUnknown);
} else if (IsStale(frame, trackTime)) {
std::swap(*frame, *next);
next->displayed = kDisplaySkipped;
return next;
} else {
if (frame->position - trackTime + 1 <= 0) { // Debugging crash.
CrashReports::SetAnnotation(
"DelayValues",
(QString::number(frame->position)
+ " + 1 <= "
+ QString::number(trackTime)));
}
Assert(frame->position >= trackTime);
Assert(frame->position - trackTime + 1 > 0);
return PrepareNextCheck(frame->position - trackTime + 1);
}
};
const auto finishPrepare = [&](int index) -> PrepareState {
// If the player already awaits the next frame, don't drop it as stale.
dropStaleFrames = false;
const auto result = prepareNext(index);
return v::is<PrepareNextCheck>(result) ? PrepareState() : result;
};
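// _counter runs modulo 2 * kFramesCount. An even value 2 * i means the
// frame at ring index i is the current one for painting; the odd value
// 2 * i + 1 means the next frame was handed to the main thread and we
// wait for it to be marked as displayed and shown.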
switch (counter()) {
case 0: return finishPrepare(1);
case 1: return prepareNext(2);
case 2: return finishPrepare(2);
case 3: return prepareNext(3);
case 4: return finishPrepare(3);
case 5: return prepareNext(0);
case 6: return finishPrepare(0);
case 7: return prepareNext(1);
}
Unexpected("Counter value in VideoTrack::Shared::prepareState.");
}
// Sometimes main thread subscribes to check frame requests before
// the first frame is ready and presented and sometimes after.
bool VideoTrack::Shared::firstPresentHappened() const {
switch (counter()) {
case 0: return false;
case 1: return true;
}
Unexpected("Counter value in VideoTrack::Shared::firstPresentHappened.");
}
2019-02-27 11:36:19 +00:00
auto VideoTrack::Shared::presentFrame(
not_null<VideoTrackObject*> object,
TimePoint time,
float64 playbackSpeed,
bool dropStaleFrames)
-> PresentFrame {
const auto present = [&](int counter, int index) -> PresentFrame {
const auto frame = getFrame(index);
const auto position = frame->position;
const auto addedWorldTimeDelay = base::take(_delay);
if (position == kFinishedPosition) {
return { kFinishedPosition, kTimeUnknown, addedWorldTimeDelay };
}
object->rasterizeFrame(frame);
if (!IsRasterized(frame)) {
// Error happened during frame prepare.
return { kTimeUnknown, kTimeUnknown, addedWorldTimeDelay };
}
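// Convert the remaining track time into world time by dividing by the
// playback speed, then schedule the frame display at that world time.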
const auto trackLeft = position - time.trackTime;
const auto adjustedBySpeed = trackLeft / playbackSpeed;
const auto roundedAdjustedBySpeed = base::SafeRound(adjustedBySpeed);
frame->display = time.worldTime
+ addedWorldTimeDelay
+ crl::time(roundedAdjustedBySpeed);
// Release this frame to the main thread for rendering.
_counter.store(
counter + 1,
std::memory_order_release);
return { position, crl::time(0), addedWorldTimeDelay };
};
const auto nextCheckDelay = [&](int index) -> PresentFrame {
const auto frame = getFrame(index);
if (frame->position == kFinishedPosition) {
return { kFinishedPosition, kTimeUnknown };
}
const auto next = getFrame((index + 1) % kFramesCount);
if (!IsDecoded(frame) || !IsDecoded(next)) {
return { kTimeUnknown, crl::time(0) };
} else if (next->position == kFinishedPosition
|| !dropStaleFrames
|| IsStale(frame, time.trackTime)) {
return { kTimeUnknown, kTimeUnknown };
}
return { kTimeUnknown, (frame->position - time.trackTime + 1) };
};
switch (counter()) {
case 0: return present(0, 1);
case 1: return nextCheckDelay(2);
case 2: return present(2, 2);
case 3: return nextCheckDelay(3);
case 4: return present(4, 3);
case 5: return nextCheckDelay(0);
case 6: return present(6, 0);
case 7: return nextCheckDelay(1);
}
Unexpected("Counter value in VideoTrack::Shared::prepareState.");
}
crl::time VideoTrack::Shared::nextFrameDisplayTime() const {
const auto frameDisplayTime = [&](int counter) {
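// At odd counter 2 * i + 1 the frame awaiting display is the one the
// counter will point at after markFrameShown, i.e. ring index
// ((counter + 1) % (2 * kFramesCount)) / 2.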
const auto next = (counter + 1) % (2 * kFramesCount);
const auto index = next / 2;
const auto frame = getFrame(index);
if (frame->displayed != kTimeUnknown) {
// Frame already displayed, but not yet shown.
return kFrameDisplayTimeAlreadyDone;
}
Assert(IsRasterized(frame));
Assert(frame->display != kTimeUnknown);
return frame->display;
};
switch (counter()) {
case 0: return kTimeUnknown;
case 1: return frameDisplayTime(1);
case 2: return kTimeUnknown;
case 3: return frameDisplayTime(3);
case 4: return kTimeUnknown;
case 5: return frameDisplayTime(5);
case 6: return kTimeUnknown;
case 7: return frameDisplayTime(7);
}
Unexpected("Counter value in VideoTrack::Shared::nextFrameDisplayTime.");
}
crl::time VideoTrack::Shared::markFrameDisplayed(crl::time now) {
const auto mark = [&](int counter) {
const auto next = (counter + 1) % (2 * kFramesCount);
const auto index = next / 2;
const auto frame = getFrame(index);
Assert(frame->position != kTimeUnknown);
if (frame->displayed == kTimeUnknown) {
frame->displayed = now;
}
return frame->position;
};
switch (counter()) {
case 0: Unexpected("Value 0 in VideoTrack::Shared::markFrameDisplayed.");
case 1: return mark(1);
case 2: Unexpected("Value 2 in VideoTrack::Shared::markFrameDisplayed.");
case 3: return mark(3);
case 4: Unexpected("Value 4 in VideoTrack::Shared::markFrameDisplayed.");
case 5: return mark(5);
case 6: Unexpected("Value 6 in VideoTrack::Shared::markFrameDisplayed.");
case 7: return mark(7);
}
Unexpected("Counter value in VideoTrack::Shared::markFrameDisplayed.");
}
void VideoTrack::Shared::addTimelineDelay(crl::time delayed) {
if (!delayed) {
return;
}
const auto recountCurrentFrame = [&](int counter) {
_delay += delayed;
//const auto next = (counter + 1) % (2 * kFramesCount);
//const auto index = next / 2;
//const auto frame = getFrame(index);
//if (frame->displayed != kTimeUnknown) {
// // Frame already displayed.
// return;
//}
//Assert(IsRasterized(frame));
//Assert(frame->display != kTimeUnknown);
//frame->display = countFrameDisplayTime(frame->index);
};
switch (counter()) {
case 0: Unexpected("Value 0 in VideoTrack::Shared::addTimelineDelay.");
case 1: return recountCurrentFrame(1);
case 2: Unexpected("Value 2 in VideoTrack::Shared::addTimelineDelay.");
case 3: return recountCurrentFrame(3);
case 4: Unexpected("Value 4 in VideoTrack::Shared::addTimelineDelay.");
case 5: return recountCurrentFrame(5);
case 6: Unexpected("Value 6 in VideoTrack::Shared::addTimelineDelay.");
case 7: return recountCurrentFrame(7);
}
Unexpected("Counter value in VideoTrack::Shared::addTimelineDelay.");
}
bool VideoTrack::Shared::markFrameShown() {
const auto jump = [&](int counter) {
const auto next = (counter + 1) % (2 * kFramesCount);
const auto index = next / 2;
const auto frame = getFrame(index);
if (frame->displayed == kTimeUnknown) {
return false;
}
_counter.store(
next,
std::memory_order_release);
return true;
};
switch (counter()) {
case 0: return false;
case 1: return jump(1);
case 2: return false;
case 3: return jump(3);
case 4: return false;
case 5: return jump(5);
case 6: return false;
case 7: return jump(7);
}
Unexpected("Counter value in VideoTrack::Shared::markFrameShown.");
}
not_null<VideoTrack::Frame*> VideoTrack::Shared::frameForPaint() {
return frameForPaintWithIndex().frame;
}
VideoTrack::FrameWithIndex VideoTrack::Shared::frameForPaintWithIndex() {
const auto index = counter() / 2;
const auto frame = getFrame(index);
Assert(frame->format != FrameFormat::None);
Assert(frame->position != kTimeUnknown);
Assert(frame->displayed != kTimeUnknown);
return {
.frame = frame,
.index = frame->index,
};
}
VideoTrack::VideoTrack(
const PlaybackOptions &options,
Stream &&stream,
const AudioMsgId &audioId,
FnMut<void(const Information &)> ready,
Fn<void(Error)> error)
: _streamIndex(stream.index)
, _streamTimeBase(stream.timeBase)
, _streamDuration(stream.duration)
, _streamRotation(stream.rotation)
, _streamAspect(stream.aspect)
, _shared(std::make_unique<Shared>())
, _wrapped(
options,
_shared.get(),
std::move(stream),
audioId,
std::move(ready),
std::move(error)) {
}
int VideoTrack::streamIndex() const {
return _streamIndex;
}
AVRational VideoTrack::streamTimeBase() const {
return _streamTimeBase;
}
crl::time VideoTrack::streamDuration() const {
return _streamDuration;
}
void VideoTrack::process(std::vector<FFmpeg::Packet> &&packets) {
_wrapped.with([
packets = std::move(packets)
](Implementation &unwrapped) mutable {
unwrapped.process(std::move(packets));
});
}
void VideoTrack::waitForData() {
}
void VideoTrack::pause(crl::time time) {
_wrapped.with([=](Implementation &unwrapped) {
unwrapped.pause(time);
});
}
void VideoTrack::resume(crl::time time) {
_wrapped.with([=](Implementation &unwrapped) {
unwrapped.resume(time);
});
}
void VideoTrack::setSpeed(float64 speed) {
_wrapped.with([=](Implementation &unwrapped) {
unwrapped.setSpeed(speed);
});
}
void VideoTrack::setWaitForMarkAsShown(bool wait) {
_wrapped.with([=](Implementation &unwrapped) {
unwrapped.setWaitForMarkAsShown(wait);
});
}
crl::time VideoTrack::nextFrameDisplayTime() const {
return _shared->nextFrameDisplayTime();
}
crl::time VideoTrack::markFrameDisplayed(crl::time now) {
const auto result = _shared->markFrameDisplayed(now);
Ensures(result != kTimeUnknown);
return result;
}
void VideoTrack::addTimelineDelay(crl::time delayed) {
_shared->addTimelineDelay(delayed);
//if (!delayed) {
// return;
//}
//_wrapped.with([=](Implementation &unwrapped) mutable {
// unwrapped.addTimelineDelay(delayed);
//});
}
bool VideoTrack::markFrameShown() {
if (!_shared->markFrameShown()) {
return false;
}
_wrapped.with([](Implementation &unwrapped) {
unwrapped.frameShown();
});
return true;
}
QImage VideoTrack::frame(
const FrameRequest &request,
const Instance *instance) {
return frameImage(_shared->frameForPaint(), request, instance);
}
FrameWithInfo VideoTrack::frameWithInfo(
const FrameRequest &request,
const Instance *instance) {
const auto data = _shared->frameForPaintWithIndex();
return {
.image = frameImage(data.frame, request, instance),
.format = FrameFormat::ARGB32,
.index = data.index,
};
}
FrameWithInfo VideoTrack::frameWithInfo(const Instance *instance) {
const auto data = _shared->frameForPaintWithIndex();
const auto i = data.frame->prepared.find(instance);
const auto none = (i == data.frame->prepared.end());
if (none || i->second.request.requireARGB32) {
_wrapped.with([=](Implementation &unwrapped) {
unwrapped.updateFrameRequest(
instance,
{ .requireARGB32 = false });
});
}
return {
.image = data.frame->original,
.yuv = &data.frame->yuv,
.format = data.frame->format,
.index = data.index,
.alpha = data.frame->alpha,
};
}
QImage VideoTrack::frameImage(
not_null<Frame*> frame,
const FrameRequest &request,
const Instance *instance) {
const auto i = frame->prepared.find(instance);
const auto none = (i == frame->prepared.end());
const auto preparedFor = frame->prepared.empty()
? FrameRequest::NonStrict()
: (none ? frame->prepared.begin() : i)->second.request;
const auto changed = !preparedFor.goodFor(request);
const auto useRequest = changed ? request : preparedFor;
if (changed) {
_wrapped.with([=](Implementation &unwrapped) {
unwrapped.updateFrameRequest(instance, useRequest);
});
}
if (frame->original.isNull()
&& (frame->format == FrameFormat::YUV420
|| frame->format == FrameFormat::NV12)) {
frame->original = ConvertToARGB32(frame->format, frame->yuv);
}
if (GoodForRequest(
frame->original,
frame->alpha,
_streamRotation,
useRequest)) {
return frame->original;
} else if (changed || none || i->second.image.isNull()) {
const auto j = none
? frame->prepared.emplace(instance, useRequest).first
: i;
if (changed && !none) {
i->second.request = useRequest;
}
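// Before converting, try to reuse an image another instance already
// prepared for an identical request.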
if (frame->prepared.size() > 1) {
for (auto &[alreadyInstance, prepared] : frame->prepared) {
if (alreadyInstance != instance
&& prepared.request == useRequest
&& !prepared.image.isNull()) {
return prepared.image;
}
}
}
j->second.image = PrepareByRequest(
frame->original,
frame->alpha,
_streamAspect,
_streamRotation,
useRequest,
std::move(j->second.image));
return j->second.image;
}
return i->second.image;
}
QImage VideoTrack::currentFrameImage() {
const auto frame = _shared->frameForPaint();
if (frame->original.isNull()
&& (frame->format == FrameFormat::YUV420
|| frame->format == FrameFormat::NV12)) {
frame->original = ConvertToARGB32(frame->format, frame->yuv);
}
return frame->original;
}
void VideoTrack::unregisterInstance(not_null<const Instance*> instance) {
_wrapped.with([=](Implementation &unwrapped) {
unwrapped.removeFrameRequest(instance);
});
}
void VideoTrack::PrepareFrameByRequests(
not_null<Frame*> frame,
const AVRational &aspect,
int rotation) {
Expects(frame->format != FrameFormat::ARGB32
|| !frame->original.isNull());
if (frame->format != FrameFormat::ARGB32) {
return;
}
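// Prepare an image per distinct request. If an earlier instance already
// covers the same request, drop this duplicate image; frameImage() can
// then find and reuse the earlier instance's result at paint time.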
const auto begin = frame->prepared.begin();
const auto end = frame->prepared.end();
for (auto i = begin; i != end; ++i) {
auto &prepared = i->second;
if (!GoodForRequest(
frame->original,
frame->alpha,
rotation,
prepared.request)) {
auto j = begin;
for (; j != i; ++j) {
if (j->second.request == prepared.request) {
prepared.image = QImage();
break;
}
}
if (j == i) {
prepared.image = PrepareByRequest(
frame->original,
frame->alpha,
aspect,
rotation,
prepared.request,
std::move(prepared.image));
}
}
}
}
bool VideoTrack::IsDecoded(not_null<const Frame*> frame) {
return (frame->position != kTimeUnknown)
&& (frame->displayed == kTimeUnknown);
}
bool VideoTrack::IsRasterized(not_null<const Frame*> frame) {
return IsDecoded(frame)
&& (!frame->original.isNull()
|| frame->format == FrameFormat::YUV420
|| frame->format == FrameFormat::NV12);
}
bool VideoTrack::IsStale(not_null<const Frame*> frame, crl::time trackTime) {
Expects(IsDecoded(frame));
return (frame->position < trackTime);
}
rpl::producer<> VideoTrack::checkNextFrame() const {
return _wrapped.producer_on_main([](const Implementation &unwrapped) {
return unwrapped.checkNextFrame();
});
}
rpl::producer<> VideoTrack::waitingForData() const {
return _wrapped.producer_on_main([](const Implementation &unwrapped) {
return unwrapped.waitingForData();
});
}
VideoTrack::~VideoTrack() {
_wrapped.with([shared = std::move(_shared)](Implementation &unwrapped) {
unwrapped.interrupt();
});
}
} // namespace Streaming
} // namespace Media