/*
This file is part of Telegram Desktop,
the official desktop application for the Telegram messaging service.

For license and copyright information please follow this link:
https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
*/
#include "media/streaming/media_streaming_utility.h"

#include "media/streaming/media_streaming_common.h"
#include "ui/image/image_prepare.h"
#include "ffmpeg/ffmpeg_utility.h"

namespace Media {
namespace Streaming {
namespace {

constexpr auto kSkipInvalidDataPackets = 10;

} // namespace

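// Returns the position of the currently decoded frame, preferring
// best_effort_timestamp, then pts, then pkt_dts, converted to crl::time.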
crl::time FramePosition(const Stream &stream) {
	const auto pts = !stream.frame
		? AV_NOPTS_VALUE
		: (stream.frame->best_effort_timestamp != AV_NOPTS_VALUE)
		? stream.frame->best_effort_timestamp
		: (stream.frame->pts != AV_NOPTS_VALUE)
		? stream.frame->pts
		: stream.frame->pkt_dts;
	return FFmpeg::PtsToTime(pts, stream.timeBase);
}

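// Sends one packet to the stream codec. An empty packet switches the
// decoder into draining mode (end of file). Up to kSkipInvalidDataPackets
// consecutive AVERROR_INVALIDDATA errors are tolerated and skipped,
// except for Opus, where skipping is known to crash in swr_convert().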
FFmpeg::AvErrorWrap ProcessPacket(Stream &stream, FFmpeg::Packet &&packet) {
	Expects(stream.codec != nullptr);

	auto error = FFmpeg::AvErrorWrap();

	const auto native = &packet.fields();
	const auto guard = gsl::finally([
		&,
		size = native->size,
		data = native->data
	] {
		native->size = size;
		native->data = data;
		packet = FFmpeg::Packet();
	});

	error = avcodec_send_packet(
		stream.codec.get(),
		native->data ? native : nullptr); // Drain on eof.
	if (error) {
		LogError(qstr("avcodec_send_packet"), error);
		if (error.code() == AVERROR_INVALIDDATA
			// There is a sample voice message where skipping such a packet
			// results in a crash (read_access to nullptr) in swr_convert().
			&& stream.codec->codec_id != AV_CODEC_ID_OPUS) {
			if (++stream.invalidDataPackets < kSkipInvalidDataPackets) {
				return FFmpeg::AvErrorWrap(); // Try to skip a bad packet.
			}
		}
	} else {
		stream.invalidDataPackets = 0;
	}
	return error;
}

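// Standard send/receive decoding loop: asks the codec for the next frame
// and, while it reports AVERROR(EAGAIN), feeds it packets from the queue.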
FFmpeg::AvErrorWrap ReadNextFrame(Stream &stream) {
	Expects(stream.frame != nullptr);

	auto error = FFmpeg::AvErrorWrap();

	do {
		error = avcodec_receive_frame(
			stream.codec.get(),
			stream.frame.get());
		if (!error
			|| error.code() != AVERROR(EAGAIN)
			|| stream.queue.empty()) {
			return error;
		}

		error = ProcessPacket(stream, std::move(stream.queue.front()));
		stream.queue.pop_front();
	} while (!error);

	return error;
}

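// Checks whether an already converted image can be used for the request
// as-is, without an extra PrepareByRequest() pass (no tinting, rounding,
// rotation or resizing required).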
bool GoodForRequest(
		const QImage &image,
		bool hasAlpha,
		int rotation,
		const FrameRequest &request) {
	if (image.isNull()
		|| (hasAlpha && !request.keepAlpha)
		|| request.colored.alpha() != 0) {
		return false;
	} else if (request.resize.isEmpty()) {
		return true;
	} else if (rotation != 0) {
		return false;
	} else if ((request.radius != ImageRoundRadius::None)
		&& ((request.corners & RectPart::AllCorners) != 0)) {
		return false;
	}
	return (request.resize == request.outer)
		&& (request.resize == image.size());
}

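// Converts a decoded frame into a BGRA QImage of the requested size.
// If the frame is already BGRA and matches the storage size, the pixels
// are copied row by row with the alpha byte forced to opaque; otherwise
// the frame goes through swscale (with premultiplication for YUVA420P).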
QImage ConvertFrame(
		Stream &stream,
		AVFrame *frame,
		QSize resize,
		QImage storage) {
	Expects(frame != nullptr);

	const auto frameSize = QSize(frame->width, frame->height);
	if (frameSize.isEmpty()) {
		LOG(("Streaming Error: Bad frame size %1,%2"
			).arg(frameSize.width()
			).arg(frameSize.height()));
		return QImage();
	} else if (!FFmpeg::FrameHasData(frame)) {
		LOG(("Streaming Error: Bad frame data."));
		return QImage();
	}
	if (resize.isEmpty()) {
		resize = frameSize;
	} else if (FFmpeg::RotationSwapWidthHeight(stream.rotation)) {
		resize.transpose();
	}

	if (!FFmpeg::GoodStorageForFrame(storage, resize)) {
		storage = FFmpeg::CreateFrameStorage(resize);
	}
	const auto format = AV_PIX_FMT_BGRA;
	const auto hasDesiredFormat = (frame->format == format);
	if (frameSize == storage.size() && hasDesiredFormat) {
		static_assert(sizeof(uint32) == FFmpeg::kPixelBytesSize);
		auto to = reinterpret_cast<uint32*>(storage.bits());
		auto from = reinterpret_cast<const uint32*>(frame->data[0]);
		const auto deltaTo = (storage.bytesPerLine() / sizeof(uint32))
			- storage.width();
		const auto deltaFrom = (frame->linesize[0] / sizeof(uint32))
			- frame->width;
		for ([[maybe_unused]] const auto y : ranges::views::ints(0, frame->height)) {
			for ([[maybe_unused]] const auto x : ranges::views::ints(0, frame->width)) {
				// Wipe out possible alpha values.
				*to++ = 0xFF000000U | *from++;
			}
			to += deltaTo;
			from += deltaFrom;
		}
	} else {
		stream.swscale = MakeSwscalePointer(
			frame,
			resize,
			&stream.swscale);
		if (!stream.swscale) {
			return QImage();
		}

		// AV_NUM_DATA_POINTERS is defined in the AVFrame struct.
		uint8_t *data[AV_NUM_DATA_POINTERS] = { storage.bits(), nullptr };
		int linesize[AV_NUM_DATA_POINTERS] = { int(storage.bytesPerLine()), 0 };

		sws_scale(
			stream.swscale.get(),
			frame->data,
			frame->linesize,
			0,
			frame->height,
			data,
			linesize);

		if (frame->format == AV_PIX_FMT_YUVA420P) {
			FFmpeg::PremultiplyInplace(storage);
		}
	}

	FFmpeg::ClearFrameMemory(frame);
	return storage;
}

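// Exposes the Y/U/V planes of a YUV420 frame together with their strides;
// the chroma planes are half the luma size, rounded up.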
FrameYUV420 ExtractYUV420(Stream &stream, AVFrame *frame) {
	return {
		.size = { frame->width, frame->height },
		.chromaSize = {
			AV_CEIL_RSHIFT(frame->width, 1), // SWScale does that.
			AV_CEIL_RSHIFT(frame->height, 1)
		},
		.y = { .data = frame->data[0], .stride = frame->linesize[0] },
		.u = { .data = frame->data[1], .stride = frame->linesize[1] },
		.v = { .data = frame->data[2], .stride = frame->linesize[2] },
	};
}

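// Fills the parts of the outer rectangle not covered by the inner frame
// rectangle with the image background color.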
void PaintFrameOuter(QPainter &p, const QRect &inner, QSize outer) {
	const auto left = inner.x();
	const auto right = outer.width() - inner.width() - left;
	const auto top = inner.y();
	const auto bottom = outer.height() - inner.height() - top;
	if (left > 0) {
		p.fillRect(0, 0, left, outer.height(), st::imageBg);
	}
	if (right > 0) {
		p.fillRect(
			outer.width() - right,
			0,
			right,
			outer.height(),
			st::imageBg);
	}
	if (top > 0) {
		p.fillRect(left, 0, inner.width(), top, st::imageBg);
	}
	if (bottom > 0) {
		p.fillRect(
			left,
			outer.height() - bottom,
			inner.width(),
			bottom,
			st::imageBg);
	}
}

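// Draws the original image into the target rectangle, applying the given
// rotation through the painter transform (the rectangle is remapped into
// the rotated coordinate system). When the alpha flag is set, the area is
// filled with white first, so transparency gets flattened.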
void PaintFrameInner(
		QPainter &p,
		QRect to,
		const QImage &original,
		bool alpha,
		int rotation) {
	const auto rotated = [](QRect rect, int rotation) {
		switch (rotation) {
		case 0: return rect;
		case 90: return QRect(
			rect.y(),
			-rect.x() - rect.width(),
			rect.height(),
			rect.width());
		case 180: return QRect(
			-rect.x() - rect.width(),
			-rect.y() - rect.height(),
			rect.width(),
			rect.height());
		case 270: return QRect(
			-rect.y() - rect.height(),
			rect.x(),
			rect.height(),
			rect.width());
		}
		Unexpected("Rotation in PaintFrameInner.");
	};

	PainterHighQualityEnabler hq(p);
	if (rotation) {
		p.rotate(rotation);
	}
	const auto rect = rotated(to, rotation);
	if (alpha) {
		p.fillRect(rect, Qt::white);
	}
	p.drawImage(rect, original);
}

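// Paints the frame centered inside request.outer: fills the surrounding
// background (unless alpha is kept) and then draws the image itself.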
void PaintFrameContent(
		QPainter &p,
		const QImage &original,
		bool alpha,
		int rotation,
		const FrameRequest &request) {
	const auto full = request.outer.isEmpty()
		? original.size()
		: request.outer;
	const auto size = request.resize.isEmpty()
		? original.size()
		: request.resize;
	const auto to = QRect(
		(full.width() - size.width()) / 2,
		(full.height() - size.height()) / 2,
		size.width(),
		size.height());
	if (!alpha || !request.keepAlpha) {
		PaintFrameOuter(p, to, full);
	}
	const auto deAlpha = alpha && !request.keepAlpha;
	PaintFrameInner(p, to, original, deAlpha, rotation);
}

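// Rounds the requested corners of the storage image, if any rounding
// was requested.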
void ApplyFrameRounding(QImage &storage, const FrameRequest &request) {
	if (!(request.corners & RectPart::AllCorners)
		|| (request.radius == ImageRoundRadius::None)) {
		return;
	}
	storage = Images::Round(
		std::move(storage),
		request.radius,
		request.corners);
}

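// Produces the final image for a frame request: allocates (or reuses)
// storage of the outer size, paints the frame content with rotation,
// then applies corner rounding and, if requested, a color tint.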
QImage PrepareByRequest(
		const QImage &original,
		bool alpha,
		int rotation,
		const FrameRequest &request,
		QImage storage) {
	Expects(!request.outer.isEmpty() || alpha);

	const auto outer = request.outer.isEmpty()
		? original.size()
		: request.outer;
	if (!FFmpeg::GoodStorageForFrame(storage, outer)) {
		storage = FFmpeg::CreateFrameStorage(outer);
	}

	if (alpha && request.keepAlpha) {
		storage.fill(Qt::transparent);
	}

	QPainter p(&storage);
	PaintFrameContent(p, original, alpha, rotation, request);
	p.end();

	ApplyFrameRounding(storage, request);
	if (request.colored.alpha() != 0) {
		storage = Images::Colored(std::move(storage), request.colored);
	}
	return storage;
}

} // namespace Streaming
} // namespace Media