Mirror of https://github.com/telegramdesktop/tdesktop
Apply sample_aspect_ratio in streaming.
Commit: e2eb9cea00
Parent: 99e96a5b13
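Background for the change: video streams may use non-square pixels, described by FFmpeg's sample aspect ratio (SAR), so the coded frame width has to be rescaled before the frame is shown. This commit reads AVStream::sample_aspect_ratio when the video stream is initialized, validates it, stores it on the Stream, and uses it to correct the size reported to the player and to the overlay. As a minimal sketch of the underlying idea (a hypothetical helper, not code from this commit), the display size of a stream can be derived like this:

// Minimal sketch (hypothetical helper, not tdesktop code): derive the display
// size of a video stream from its coded size and its sample (pixel) aspect
// ratio. A non-positive SAR means "unknown" and is treated as square pixels.
extern "C" {
#include <libavformat/avformat.h>
#include <libavutil/mathematics.h>
}

struct DisplaySize {
	int width = 0;
	int height = 0;
};

DisplaySize DisplaySizeFor(const AVStream *stream) {
	const auto par = stream->codecpar;
	auto sar = stream->sample_aspect_ratio;
	if (sar.num <= 0 || sar.den <= 0) {
		sar = AVRational{ 1, 1 }; // unknown or invalid: assume square pixels.
	}
	// Stretch the width by num/den; av_rescale avoids intermediate overflow.
	const auto width = static_cast<int>(
		av_rescale(par->width, sar.num, sar.den));
	return { width, par->height };
}

The commit itself keeps the ratio as an AVRational (ValidateAspectRatio / CorrectByAspect below) and applies it once, when VideoTrackObject::callReady() reports the video size.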
@@ -111,6 +111,7 @@ Stream File::Context::initStream(AVMediaType type) {
 	const auto info = _format->streams[index];
 	if (type == AVMEDIA_TYPE_VIDEO) {
 		result.rotation = ReadRotationFromMetadata(info);
+		result.aspect = ValidateAspectRatio(info->sample_aspect_ratio);
 	} else if (type == AVMEDIA_TYPE_AUDIO) {
 		result.frequency = info->codecpar->sample_rate;
 		if (!result.frequency) {
@@ -59,6 +59,7 @@ public:
 	[[nodiscard]] rpl::producer<Update, Error> updates() const;
 
 	[[nodiscard]] QImage frame(const FrameRequest &request) const;
+	//[[nodiscard]] int videoRotation() const;
 
 	[[nodiscard]] Media::Player::TrackState prepareLegacyState() const;
 
@@ -23,17 +23,25 @@ constexpr auto kAlignImageBy = 16;
 constexpr auto kPixelBytesSize = 4;
 constexpr auto kImageFormat = QImage::Format_ARGB32_Premultiplied;
 constexpr auto kAvioBlockSize = 4096;
+constexpr auto kMaxScaleByAspectRatio = 16;
 
 void AlignedImageBufferCleanupHandler(void* data) {
 	const auto buffer = static_cast<uchar*>(data);
 	delete[] buffer;
 }
 
-bool IsAlignedImage(const QImage &image) {
+[[nodiscard]] bool IsAlignedImage(const QImage &image) {
 	return !(reinterpret_cast<uintptr_t>(image.bits()) % kAlignImageBy)
 		&& !(image.bytesPerLine() % kAlignImageBy);
 }
 
+[[nodiscard]] bool IsValidAspectRatio(AVRational aspect) {
+	return (aspect.num > 0)
+		&& (aspect.den > 0)
+		&& (aspect.num <= aspect.den * kMaxScaleByAspectRatio)
+		&& (aspect.den <= aspect.num * kMaxScaleByAspectRatio);
+}
+
 } // namespace
 
 bool GoodStorageForFrame(const QImage &storage, QSize size) {
@@ -303,6 +311,16 @@ int ReadRotationFromMetadata(not_null<AVStream*> stream) {
 	return 0;
 }
 
+AVRational ValidateAspectRatio(AVRational aspect) {
+	return IsValidAspectRatio(aspect) ? aspect : kNormalAspect;
+}
+
+QSize CorrectByAspect(QSize size, AVRational aspect) {
+	Expects(IsValidAspectRatio(aspect));
+
+	return QSize(size.width() * aspect.num / aspect.den, size.height());
+}
+
 bool RotationSwapWidthHeight(int rotation) {
 	return (rotation == 90 || rotation == 270);
 }
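CorrectByAspect() rescales only the width, matching FFmpeg's definition of the sample aspect ratio as pixel width over pixel height, and ValidateAspectRatio() falls back to kNormalAspect (1:1) for missing or implausible values (anything more extreme than kMaxScaleByAspectRatio, i.e. 16:1 or 1:16). A worked example with illustrative numbers that are not taken from this commit, assuming the helpers above are in scope:

// Illustrative only; assumes CorrectByAspect() from the hunk above is in scope.
// Anamorphic NTSC DVD-style video is commonly coded as 720x480 with a sample
// aspect ratio of 32:27.
QSize ExampleDisplaySize() {
	const auto coded = QSize(720, 480);
	const auto aspect = AVRational{ 32, 27 };
	// width = 720 * 32 / 27 = 853 (integer division), height stays 480,
	// so the 3:2 coded frame is reported as 853x480, roughly 16:9.
	return CorrectByAspect(coded, aspect);
}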
@@ -19,6 +19,7 @@ namespace Media {
 namespace Streaming {
 
 constexpr auto kUniversalTimeBase = AVRational{ 1, AV_TIME_BASE };
+constexpr auto kNormalAspect = AVRational{ 1, 1 };
 
 struct TimePoint {
 	crl::time trackTime = kTimeUnknown;
@@ -176,6 +177,7 @@ struct Stream {
 
 	// Video only.
 	int rotation = 0;
+	AVRational aspect = kNormalAspect;
 	SwscalePointer swscale;
 };
 
@@ -191,7 +193,9 @@ void LogError(QLatin1String method, AvErrorWrap error);
 	AVRational timeBase);
 [[nodiscard]] crl::time FramePosition(const Stream &stream);
 [[nodiscard]] int ReadRotationFromMetadata(not_null<AVStream*> stream);
+[[nodiscard]] AVRational ValidateAspectRatio(AVRational aspect);
 [[nodiscard]] bool RotationSwapWidthHeight(int rotation);
+[[nodiscard]] QSize CorrectByAspect(QSize size, AVRational aspect);
 [[nodiscard]] AvErrorWrap ProcessPacket(Stream &stream, Packet &&packet);
 [[nodiscard]] AvErrorWrap ReadNextFrame(Stream &stream);
 
@@ -394,7 +394,7 @@ void VideoTrackObject::callReady() {
 	Assert(frame != nullptr);
 
 	auto data = VideoInformation();
-	data.size = frame->original.size();
+	data.size = CorrectByAspect(frame->original.size(), _stream.aspect);
 	if (RotationSwapWidthHeight(_stream.rotation)) {
 		data.size.transpose();
 	}
@@ -586,6 +586,7 @@ VideoTrack::VideoTrack(
 , _streamTimeBase(stream.timeBase)
 , _streamDuration(stream.duration)
 //, _streamRotation(stream.rotation)
+//, _streamAspect(stream.aspect)
 , _shared(std::make_unique<Shared>())
 , _wrapped(
 	options,
@@ -123,6 +123,7 @@ private:
 	const AVRational _streamTimeBase;
 	const crl::time _streamDuration = 0;
 	//const int _streamRotation = 0;
+	//AVRational _streamAspect = kNormalAspect;
 	std::unique_ptr<Shared> _shared;
 
 	using Implementation = VideoTrackObject;
@@ -1199,7 +1199,15 @@ void OverlayWidget::onCopy() {
 		if (!_current.isNull()) {
 			QApplication::clipboard()->setPixmap(_current);
 		} else if (videoShown()) {
-			QApplication::clipboard()->setImage(videoFrame());
+			// #TODO streaming later apply rotation
+			auto image = videoFrame();
+			if (image.size() != _streamed->info.video.size) {
+				image = image.scaled(
+					_streamed->info.video.size,
+					Qt::IgnoreAspectRatio,
+					Qt::SmoothTransformation);
+			}
+			QApplication::clipboard()->setImage(std::move(image));
 		}
 	} else {
 		if (!_photo || !_photo->loaded()) return;
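The scaling here uses Qt::IgnoreAspectRatio on purpose: _streamed->info.video.size is already the aspect-corrected size produced by CorrectByAspect() in callReady(), while the decoded frame still has the coded (uncorrected) width, so the frame must be stretched exactly to the target rather than fitted with its own ratio preserved. A minimal sketch of the same pattern, using a hypothetical helper name rather than tdesktop's code:

// Minimal sketch, not tdesktop code: stretch a decoded frame to a target
// size that already encodes the display aspect ratio.
#include <QtGui/QImage>

QImage StretchToDisplaySize(QImage frame, QSize displaySize) {
	if (frame.size() == displaySize) {
		return frame;
	}
	// IgnoreAspectRatio is intentional: displaySize is the corrected size,
	// so preserving the (wrong) source ratio would reintroduce the distortion.
	return frame.scaled(
		displaySize,
		Qt::IgnoreAspectRatio,
		Qt::SmoothTransformation);
}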
@@ -1900,13 +1908,14 @@ void OverlayWidget::initStreamingThumbnail() {
 	} else if (thumb && !useThumb) {
 		thumb->load(fileOrigin());
 	}
+	const auto size = useGood ? good->size() : _doc->dimensions;
 	if (!useGood && !thumb && !blurred) {
 		return;
-	} else if (_doc->dimensions.isEmpty()) {
+	} else if (size.isEmpty()) {
 		return;
 	}
-	const auto w = _doc->dimensions.width();
-	const auto h = _doc->dimensions.height();
+	const auto w = size.width();
+	const auto h = size.height();
 	const auto options = VideoThumbOptions(_doc);
 	const auto goodOptions = (options & ~Images::Option::Blurred);
 	_current = (useGood
@@ -1962,10 +1971,17 @@ void OverlayWidget::validateStreamedGoodThumbnail() {
 	Expects(_doc != nullptr);
 
 	const auto good = _doc->goodThumbnail();
-	const auto &image = _streamed->info.video.cover;
+	auto image = _streamed->info.video.cover;
 	if (image.isNull() || (good && good->loaded()) || _doc->uploading()) {
 		return;
 	}
+	// #TODO streaming later apply rotation
+	if (image.size() != _streamed->info.video.size) {
+		image = image.scaled(
+			_streamed->info.video.size,
+			Qt::IgnoreAspectRatio,
+			Qt::SmoothTransformation);
+	}
 	auto bytes = QByteArray();
 	{
 		auto buffer = QBuffer(&bytes);
@@ -1976,7 +1992,7 @@ void OverlayWidget::validateStreamedGoodThumbnail() {
 		LOG(("App Error: Bad thumbnail data for saving to cache."));
 	} else if (_doc->uploading()) {
 		_doc->setGoodThumbnailOnUpload(
-			base::duplicate(image),
+			std::move(image),
 			std::move(bytes));
 	} else {
 		_doc->owner().cache().putIfEmpty(
@@ -2187,7 +2203,7 @@ void OverlayWidget::restartAtSeekPosition(crl::time position) {
 	}
 	auto options = Streaming::PlaybackOptions();
 	options.position = position;
-	if (_doc->isAnimation() || true) {
+	if (_doc->isAnimation()) {
 		options.mode = Streaming::Mode::Video;
 		options.loop = true;
 	}
@@ -2348,17 +2364,13 @@ void OverlayWidget::paintEvent(QPaintEvent *e) {
 	if (rect.intersects(r)) {
 		if (videoShown()) {
 			const auto image = videoFrame();
-			if (image.width() != _w) {
-				//if (_fullScreenVideo) {
-				//	const auto fill = rect.intersected(this->rect());
-				//	PaintImageProfile(p, image, rect, fill);
-				//} else {
-					PainterHighQualityEnabler hq(p);
-					p.drawImage(rect, image);
-				//}
-			} else {
-				p.drawImage(rect.topLeft(), image);
-			}
+			//if (_fullScreenVideo) {
+			//	const auto fill = rect.intersected(this->rect());
+			//	PaintImageProfile(p, image, rect, fill);
+			//} else {
+				PainterHighQualityEnabler hq(p);
+				p.drawImage(rect, image);
+			//}
 		} else if (!_current.isNull()) {
 			if ((!_doc || !_doc->getStickerLarge()) && _current.hasAlpha()) {
 				p.fillRect(rect, _transparentBrush);