Mirror of https://github.com/telegramdesktop/tdesktop (synced 2025-01-24 00:03:13 +00:00)
Send mp4 and quicktime files as videos.

This commit is contained in:
parent eaae662b7d
commit 9ed8cbe2d1
@@ -761,11 +761,14 @@ void HistoryVideo::draw(Painter &p, const QRect &r, TextSelection selection, Tim
 		p.setOpacity(1);
 	}

-	auto icon = ([loaded, radial, this, selected] {
-		if (loaded) {
+	auto icon = ([this, radial, selected, loaded]() -> const style::icon * {
+		if (loaded && !radial) {
 			return &(selected ? st::historyFileThumbPlaySelected : st::historyFileThumbPlay);
 		} else if (radial || _data->loading()) {
-			return &(selected ? st::historyFileThumbCancelSelected : st::historyFileThumbCancel);
+			if (_parent->id > 0 || _data->uploading()) {
+				return &(selected ? st::historyFileThumbCancelSelected : st::historyFileThumbCancel);
+			}
+			return nullptr;
 		}
 		return &(selected ? st::historyFileThumbDownloadSelected : st::historyFileThumbDownload);
 	})();
@@ -775,9 +778,9 @@ void HistoryVideo::draw(Painter &p, const QRect &r, TextSelection selection, Tim
 		_animation->radial.draw(p, rinner, st::msgFileRadialLine, selected ? st::historyFileThumbRadialFgSelected : st::historyFileThumbRadialFg);
 	}

-	int32 statusX = skipx + st::msgDateImgDelta + st::msgDateImgPadding.x(), statusY = skipy + st::msgDateImgDelta + st::msgDateImgPadding.y();
-	int32 statusW = st::normalFont->width(_statusText) + 2 * st::msgDateImgPadding.x();
-	int32 statusH = st::normalFont->height + 2 * st::msgDateImgPadding.y();
+	auto statusX = skipx + st::msgDateImgDelta + st::msgDateImgPadding.x(), statusY = skipy + st::msgDateImgDelta + st::msgDateImgPadding.y();
+	auto statusW = st::normalFont->width(_statusText) + 2 * st::msgDateImgPadding.x();
+	auto statusH = st::normalFont->height + 2 * st::msgDateImgPadding.y();
 	App::roundRect(p, rtlrect(statusX - st::msgDateImgPadding.x(), statusY - st::msgDateImgPadding.y(), statusW, statusH, _width), selected ? st::msgDateImgBgSelected : st::msgDateImgBg, selected ? DateSelectedCorners : DateCorners);
 	p.setFont(st::normalFont);
 	p.setPen(st::msgDateImgFg);
@@ -823,7 +826,11 @@ HistoryTextState HistoryVideo::getState(int x, int y, HistoryStateRequest reques
 		height -= skipy + st::mediaPadding.bottom();
 	}
 	if (x >= skipx && y >= skipy && x < skipx + width && y < skipy + height) {
-		result.link = loaded ? _openl : (_data->loading() ? _cancell : _savel);
+		if (_data->uploading()) {
+			result.link = _cancell;
+		} else {
+			result.link = loaded ? _openl : (_data->loading() ? _cancell : _savel);
+		}
 		if (_caption.isEmpty() && _parent->getMedia() == this) {
 			int32 fullRight = skipx + width, fullBottom = skipy + height;
 			bool inDate = _parent->pointInTime(fullRight, fullBottom, x, y, InfoDisplayOverImage);
@@ -879,6 +886,12 @@ void HistoryVideo::detachFromParent() {
 	App::unregDocumentItem(_data, _parent);
 }

+void HistoryVideo::updateSentMedia(const MTPMessageMedia &media) {
+	if (media.type() == mtpc_messageMediaDocument) {
+		App::feedDocument(media.c_messageMediaDocument().vdocument, _data);
+	}
+}
+
 bool HistoryVideo::needReSetInlineResultMedia(const MTPMessageMedia &media) {
 	return needReSetInlineResultDocument(media, _data);
 }
@@ -237,6 +237,7 @@ public:
 	void attachToParent() override;
 	void detachFromParent() override;

+	void updateSentMedia(const MTPMessageMedia &media) override;
 	bool needReSetInlineResultMedia(const MTPMessageMedia &media) override;

 	bool hasReplyPreview() const override {
@@ -368,17 +368,9 @@ bool FFMpegReaderImplementation::start(Mode mode, TimeMs &positionMs) {
 	_codec = avcodec_find_decoder(_codecContext->codec_id);

 	_audioStreamId = av_find_best_stream(_fmtContext, AVMEDIA_TYPE_AUDIO, -1, -1, 0, 0);
-	if (_mode == Mode::OnlyGifv) {
-		if (_audioStreamId >= 0) { // should be no audio stream
-			_audioStreamId = -1;
-			return false;
-		}
-		if (dataSize() > AnimationInMemory) {
-			return false;
-		}
-		if (_codecContext->codec_id != AV_CODEC_ID_H264) {
-			return false;
-		}
+	if (_mode == Mode::Inspecting) {
+		_hasAudioStream = (_audioStreamId >= 0);
+		_audioStreamId = -1;
 	} else if (_mode == Mode::Silent || !_playId) {
 		_audioStreamId = -1;
 	}
@@ -390,7 +382,7 @@ bool FFMpegReaderImplementation::start(Mode mode, TimeMs &positionMs) {

 	std::unique_ptr<VideoSoundData> soundData;
 	if (_audioStreamId >= 0) {
-		AVCodecContext *audioContext = avcodec_alloc_context3(nullptr);
+		auto audioContext = avcodec_alloc_context3(nullptr);
 		if (!audioContext) {
 			LOG(("Audio Error: Unable to avcodec_alloc_context3 %1").arg(logData()));
 			return false;
@@ -418,9 +410,8 @@ bool FFMpegReaderImplementation::start(Mode mode, TimeMs &positionMs) {
 			}
 		}
 	}
-
 	if (positionMs > 0) {
-		int64 ts = (positionMs * _fmtContext->streams[_streamId]->time_base.den) / (1000LL * _fmtContext->streams[_streamId]->time_base.num);
+		auto ts = (positionMs * _fmtContext->streams[_streamId]->time_base.den) / (1000LL * _fmtContext->streams[_streamId]->time_base.num);
 		if (av_seek_frame(_fmtContext, _streamId, ts, 0) < 0) {
 			if (av_seek_frame(_fmtContext, _streamId, ts, AVSEEK_FLAG_BACKWARD) < 0) {
 				return false;
@@ -446,6 +437,44 @@ bool FFMpegReaderImplementation::start(Mode mode, TimeMs &positionMs) {
 	return true;
 }

+bool FFMpegReaderImplementation::inspectAt(TimeMs &positionMs) {
+	if (positionMs > 0) {
+		auto ts = (positionMs * _fmtContext->streams[_streamId]->time_base.den) / (1000LL * _fmtContext->streams[_streamId]->time_base.num);
+		if (av_seek_frame(_fmtContext, _streamId, ts, 0) < 0) {
+			if (av_seek_frame(_fmtContext, _streamId, ts, AVSEEK_FLAG_BACKWARD) < 0) {
+				return false;
+			}
+		}
+	}
+
+	_packetQueue.clear();
+
+	AVPacket packet;
+	auto readResult = readPacket(&packet);
+	if (readResult == PacketResult::Ok && positionMs > 0) {
+		positionMs = countPacketMs(&packet);
+	}
+
+	if (readResult == PacketResult::Ok) {
+		processPacket(&packet);
+	}
+
+	return true;
+}
+
+bool FFMpegReaderImplementation::isGifv() const {
+	if (_hasAudioStream) {
+		return false;
+	}
+	if (dataSize() > AnimationInMemory) {
+		return false;
+	}
+	if (_codecContext->codec_id != AV_CODEC_ID_H264) {
+		return false;
+	}
+	return true;
+}
+
 QString FFMpegReaderImplementation::logData() const {
 	return qsl("for file '%1', data size '%2'").arg(_location ? _location->name() : QString()).arg(_data->size());
 }
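Side note, not part of the commit: both start() and the new inspectAt() above convert positionMs from milliseconds into the stream's own time base before calling av_seek_frame(). A minimal standalone sketch of that conversion; the 1/90000 time base in the worked example is an assumption for illustration, not taken from the diff:

	// ts = positionMs * time_base.den / (1000 * time_base.num)
	// e.g. with an assumed time base of 1/90000: 2500 ms -> 2500 * 90000 / (1000 * 1) = 225000 ticks
	inline int64_t MillisecondsToStreamTicks(int64_t positionMs, int num, int den) {
		return (positionMs * den) / (1000LL * num);
	}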
@@ -501,8 +530,8 @@ FFMpegReaderImplementation::PacketResult FFMpegReaderImplementation::readPacket(
 }

 void FFMpegReaderImplementation::processPacket(AVPacket *packet) {
-	bool videoPacket = (packet->stream_index == _streamId);
-	bool audioPacket = (_audioStreamId >= 0 && packet->stream_index == _audioStreamId);
+	auto videoPacket = (packet->stream_index == _streamId);
+	auto audioPacket = (_audioStreamId >= 0 && packet->stream_index == _audioStreamId);
 	if (audioPacket || videoPacket) {
 		if (videoPacket) {
 			_lastReadVideoMs = countPacketMs(packet);
@@ -51,10 +51,13 @@ public:
 	void pauseAudio() override;
 	void resumeAudio() override;

-	bool start(Mode mode, int64 &positionMs) override;
+	bool start(Mode mode, TimeMs &positionMs) override;
+	bool inspectAt(TimeMs &positionMs);

 	QString logData() const;

+	bool isGifv() const;
+
 	~FFMpegReaderImplementation();

 private:
@@ -105,6 +108,7 @@ private:
 	bool _frameRead = false;
 	int _skippedInvalidDataPackets = 0;

+	bool _hasAudioStream = false;
 	int _audioStreamId = -1;
 	uint64 _playId = 0;
 	TimeMs _lastReadVideoMs = 0;
@@ -33,9 +33,9 @@ public:
 	, _data(data) {
 	}
 	enum class Mode {
-		OnlyGifv,
 		Silent,
 		Normal,
+		Inspecting, // Not playing video, but reading data.
 	};

 	enum class ReadResult {
@@ -59,6 +59,7 @@ public:
 	virtual void resumeAudio() = 0;

 	virtual bool start(Mode mode, TimeMs &positionMs) = 0;
+
 	virtual ~ReaderImplementation() {
 	}
 	int64 dataSize() const {
@@ -98,15 +98,15 @@ TimeMs QtGifReaderImplementation::durationMs() const {
 	return 0; // not supported
 }

-bool QtGifReaderImplementation::start(Mode mode, int64 &positionMs) {
-	if (mode == Mode::OnlyGifv) return false;
+bool QtGifReaderImplementation::start(Mode mode, TimeMs &positionMs) {
+	if (mode == Mode::Inspecting) {
+		return false;
+	}
 	_mode = mode;
 	return jumpToStart();
 }

-QtGifReaderImplementation::~QtGifReaderImplementation() {
-	delete base::take(_reader);
-}
+QtGifReaderImplementation::~QtGifReaderImplementation() = default;

 bool QtGifReaderImplementation::jumpToStart() {
 	if (_reader && _reader->jumpToImage(0)) {
@@ -114,9 +114,9 @@ bool QtGifReaderImplementation::jumpToStart() {
 		return true;
 	}

-	delete _reader;
+	_reader = nullptr;
 	initDevice();
-	_reader = new QImageReader(_device);
+	_reader = std::make_unique<QImageReader>(_device);
#ifndef OS_MAC_OLD
 	_reader->setAutoTransform(true);
#endif // OS_MAC_OLD
@@ -57,7 +57,7 @@ private:

 	Mode _mode = Mode::Normal;

-	QImageReader *_reader = nullptr;
+	std::unique_ptr<QImageReader> _reader;
 	int _framesLeft = 0;
 	TimeMs _frameRealTime = 0;
 	TimeMs _frameTime = 0;
@@ -39,47 +39,54 @@ namespace {
 QVector<QThread*> threads;
 QVector<Manager*> managers;

-QPixmap _prepareFrame(const FrameRequest &request, const QImage &original, bool hasAlpha, QImage &cache) {
-	bool badSize = (original.width() != request.framew) || (original.height() != request.frameh);
-	bool needOuter = (request.outerw != request.framew) || (request.outerh != request.frameh);
-	if (badSize || needOuter || hasAlpha || request.radius != ImageRoundRadius::None) {
-		int32 factor(request.factor);
-		bool newcache = (cache.width() != request.outerw || cache.height() != request.outerh);
-		if (newcache) {
-			cache = QImage(request.outerw, request.outerh, QImage::Format_ARGB32_Premultiplied);
-			cache.setDevicePixelRatio(factor);
-		}
-		{
-			Painter p(&cache);
-			if (newcache) {
-				if (request.framew < request.outerw) {
-					p.fillRect(0, 0, (request.outerw - request.framew) / (2 * factor), cache.height() / factor, st::imageBg);
-					p.fillRect((request.outerw - request.framew) / (2 * factor) + (request.framew / factor), 0, (cache.width() / factor) - ((request.outerw - request.framew) / (2 * factor) + (request.framew / factor)), cache.height() / factor, st::imageBg);
-				}
-				if (request.frameh < request.outerh) {
-					p.fillRect(qMax(0, (request.outerw - request.framew) / (2 * factor)), 0, qMin(cache.width(), request.framew) / factor, (request.outerh - request.frameh) / (2 * factor), st::imageBg);
-					p.fillRect(qMax(0, (request.outerw - request.framew) / (2 * factor)), (request.outerh - request.frameh) / (2 * factor) + (request.frameh / factor), qMin(cache.width(), request.framew) / factor, (cache.height() / factor) - ((request.outerh - request.frameh) / (2 * factor) + (request.frameh / factor)), st::imageBg);
-				}
-			}
-			if (hasAlpha) {
-				p.fillRect(qMax(0, (request.outerw - request.framew) / (2 * factor)), qMax(0, (request.outerh - request.frameh) / (2 * factor)), qMin(cache.width(), request.framew) / factor, qMin(cache.height(), request.frameh) / factor, st::imageBgTransparent);
-			}
-			QPoint position((request.outerw - request.framew) / (2 * factor), (request.outerh - request.frameh) / (2 * factor));
-			if (badSize) {
-				p.setRenderHint(QPainter::SmoothPixmapTransform);
-				QRect to(position, QSize(request.framew / factor, request.frameh / factor));
-				QRect from(0, 0, original.width(), original.height());
-				p.drawImage(to, original, from, Qt::ColorOnly);
-			} else {
-				p.drawImage(position, original);
-			}
-		}
-		if (request.radius != ImageRoundRadius::None) {
-			Images::prepareRound(cache, request.radius, request.corners);
-		}
-		return QPixmap::fromImage(cache, Qt::ColorOnly);
+QImage PrepareFrameImage(const FrameRequest &request, const QImage &original, bool hasAlpha, QImage &cache) {
+	auto needResize = (original.width() != request.framew) || (original.height() != request.frameh);
+	auto needOuterFill = (request.outerw != request.framew) || (request.outerh != request.frameh);
+	auto needRounding = (request.radius != ImageRoundRadius::None);
+	if (!needResize && !needOuterFill && !hasAlpha && !needRounding) {
+		return original;
 	}
-	return QPixmap::fromImage(original, Qt::ColorOnly);
+
+	auto factor = request.factor;
+	auto needNewCache = (cache.width() != request.outerw || cache.height() != request.outerh);
+	if (needNewCache) {
+		cache = QImage(request.outerw, request.outerh, QImage::Format_ARGB32_Premultiplied);
+		cache.setDevicePixelRatio(factor);
+	}
+	{
+		Painter p(&cache);
+		if (needNewCache) {
+			if (request.framew < request.outerw) {
+				p.fillRect(0, 0, (request.outerw - request.framew) / (2 * factor), cache.height() / factor, st::imageBg);
+				p.fillRect((request.outerw - request.framew) / (2 * factor) + (request.framew / factor), 0, (cache.width() / factor) - ((request.outerw - request.framew) / (2 * factor) + (request.framew / factor)), cache.height() / factor, st::imageBg);
+			}
+			if (request.frameh < request.outerh) {
+				p.fillRect(qMax(0, (request.outerw - request.framew) / (2 * factor)), 0, qMin(cache.width(), request.framew) / factor, (request.outerh - request.frameh) / (2 * factor), st::imageBg);
+				p.fillRect(qMax(0, (request.outerw - request.framew) / (2 * factor)), (request.outerh - request.frameh) / (2 * factor) + (request.frameh / factor), qMin(cache.width(), request.framew) / factor, (cache.height() / factor) - ((request.outerh - request.frameh) / (2 * factor) + (request.frameh / factor)), st::imageBg);
+			}
+		}
+		if (hasAlpha) {
+			p.fillRect(qMax(0, (request.outerw - request.framew) / (2 * factor)), qMax(0, (request.outerh - request.frameh) / (2 * factor)), qMin(cache.width(), request.framew) / factor, qMin(cache.height(), request.frameh) / factor, st::imageBgTransparent);
+		}
+		auto position = QPoint((request.outerw - request.framew) / (2 * factor), (request.outerh - request.frameh) / (2 * factor));
+		if (needResize) {
+			PainterHighQualityEnabler hq(p);
+
+			auto dst = QRect(position, QSize(request.framew / factor, request.frameh / factor));
+			auto src = QRect(0, 0, original.width(), original.height());
+			p.drawImage(dst, original, src, Qt::ColorOnly);
+		} else {
+			p.drawImage(position, original);
+		}
+	}
+	if (needRounding) {
+		Images::prepareRound(cache, request.radius, request.corners);
+	}
+	return cache;
 }

+QPixmap PrepareFrame(const FrameRequest &request, const QImage &original, bool hasAlpha, QImage &cache) {
+	return QPixmap::fromImage(PrepareFrameImage(request, original, hasAlpha, cache), Qt::ColorOnly);
+}
+
 } // namespace
@@ -239,7 +246,7 @@ QPixmap Reader::current(int32 framew, int32 frameh, int32 outerw, int32 outerh,
 	QImage cacheForResize;
 	frame->original.setDevicePixelRatio(factor);
 	frame->pix = QPixmap();
-	frame->pix = _prepareFrame(frame->request, frame->original, true, cacheForResize);
+	frame->pix = PrepareFrame(frame->request, frame->original, true, cacheForResize);

 	auto other = frameToWriteNext(true);
 	if (other) other->request = frame->request;
@@ -446,7 +453,7 @@ public:
 		}
 		frame()->original.setDevicePixelRatio(_request.factor);
 		frame()->pix = QPixmap();
-		frame()->pix = _prepareFrame(_request, frame()->original, frame()->alpha, frame()->cache);
+		frame()->pix = PrepareFrame(_request, frame()->original, frame()->alpha, frame()->cache);
 		frame()->when = _nextFrameWhen;
 		frame()->positionMs = _nextFramePositionMs;
 		return true;
@@ -832,33 +839,39 @@ Manager::~Manager() {
 	clear();
 }

-MTPDocumentAttribute readAttributes(const QString &fname, const QByteArray &data, QImage &cover) {
-	FileLocation localloc(fname);
-	QByteArray localdata(data);
+SendData PrepareForSending(const QString &fname, const QByteArray &data) {
+	auto result = SendData();
+	auto localLocation = FileLocation(fname);
+	auto localData = QByteArray(data);

 	auto playId = 0ULL;
 	auto seekPositionMs = 0LL;
-	auto reader = std::make_unique<internal::FFMpegReaderImplementation>(&localloc, &localdata, playId);
-	if (reader->start(internal::ReaderImplementation::Mode::OnlyGifv, seekPositionMs)) {
-		bool hasAlpha = false;
-		auto readResult = reader->readFramesTill(-1, getms());
-		auto readFrame = (readResult == internal::ReaderImplementation::ReadResult::Success);
-		if (readFrame && reader->renderFrame(cover, hasAlpha, QSize())) {
-			if (cover.width() > 0 && cover.height() > 0 && cover.width() < cover.height() * 10 && cover.height() < cover.width() * 10) {
-				if (hasAlpha) {
-					QImage cacheForResize;
-					FrameRequest request;
-					request.framew = request.outerw = cover.width();
-					request.frameh = request.outerh = cover.height();
-					request.factor = 1;
-					cover = _prepareFrame(request, cover, hasAlpha, cacheForResize).toImage();
-				}
-				int duration = reader->durationMs() / 1000;
-				return MTP_documentAttributeVideo(MTP_int(duration), MTP_int(cover.width()), MTP_int(cover.height()));
+	auto reader = std::make_unique<internal::FFMpegReaderImplementation>(&localLocation, &localData, playId);
+	if (reader->start(internal::ReaderImplementation::Mode::Inspecting, seekPositionMs)) {
+		auto durationMs = reader->durationMs();
+		result.isGifv = reader->isGifv();
+		if (!result.isGifv) {
+			auto middleMs = durationMs / 2;
+			if (!reader->inspectAt(middleMs)) {
+				return result;
+			}
+		}
+		auto hasAlpha = false;
+		auto readResult = reader->readFramesTill(-1, getms());
+		auto readFrame = (readResult == internal::ReaderImplementation::ReadResult::Success);
+		if (readFrame && reader->renderFrame(result.cover, hasAlpha, QSize())) {
+			if (hasAlpha) {
+				auto cacheForResize = QImage();
+				auto request = FrameRequest();
+				request.framew = request.outerw = result.cover.width();
+				request.frameh = request.outerh = result.cover.height();
+				request.factor = 1;
+				result.cover = PrepareFrameImage(request, result.cover, hasAlpha, cacheForResize);
 			}
+			result.duration = static_cast<int>(durationMs / 1000);
 		}
 	}
-	return MTP_documentAttributeFilename(MTP_string(fname));
+	return result;
 }

 void Finish() {
@@ -243,7 +243,12 @@ private:

 };

-MTPDocumentAttribute readAttributes(const QString &fname, const QByteArray &data, QImage &cover);
+struct SendData {
+	QImage cover;
+	int duration = 0;
+	bool isGifv = false;
+};
+SendData PrepareForSending(const QString &fname, const QByteArray &data);

 void Finish();

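A minimal sketch of how the declarations added above are meant to be consumed, mirroring the call site in FileLoadTask::process() further down in this diff; filepath and content are hypothetical placeholders for the local file path and its raw bytes:

	auto sendVideoData = Media::Clip::PrepareForSending(filepath, content);
	if (sendVideoData.duration > 0 && !sendVideoData.cover.isNull()) {
		if (sendVideoData.isGifv) {
			// short, silent H.264 clip: also tagged as an animation (gifv)
		} else {
			// regular video: video attribute plus the extracted cover thumbnail
		}
	}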
@@ -29,6 +29,14 @@ Copyright (c) 2014-2017 John Preston, https://desktop.telegram.org
#include "lang.h"
#include "boxes/confirmbox.h"

+namespace {
+
+bool ValidateThumbDimensions(int width, int height) {
+	return (width > 0) && (height > 0) && (width < 20 * height) && (height < 20 * width);
+}
+
+} // namespace
+
 TaskQueue::TaskQueue(QObject *parent, int32 stopTimeoutMs) : QObject(parent), _thread(0), _worker(0), _stopTimer(0) {
 	if (stopTimeoutMs > 0) {
 		_stopTimer = new QTimer(this);
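For reference, a few example evaluations of the ValidateThumbDimensions() helper added above (illustration only, not from the commit):

	ValidateThumbDimensions(1280, 720);  // true: both sides positive, aspect ratio well under 1:20
	ValidateThumbDimensions(4000, 100);  // false: width is 40 times the height
	ValidateThumbDimensions(0, 500);     // false: degenerate width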
@@ -211,7 +219,7 @@ void FileLoadTask::process() {

 	auto animated = false;
 	auto song = false;
-	auto gif = false;
+	auto video = false;
 	auto voice = (_type == SendMediaType::Audio);
 	auto fullimage = base::take(_image);
 	auto info = _filepath.isEmpty() ? QFileInfo() : QFileInfo(_filepath);
@@ -251,13 +259,12 @@ void FileLoadTask::process() {
 		}
 	} else if (!fullimage.isNull() && fullimage.width() > 0) {
 		if (_type == SendMediaType::Photo) {
-			auto w = fullimage.width(), h = fullimage.height();
-			if (w >= 20 * h || h >= 20 * w) {
-				_type = SendMediaType::File;
-			} else {
+			if (ValidateThumbDimensions(fullimage.width(), fullimage.height())) {
 				filesize = -1; // Fill later.
 				filemime = mimeTypeForName("image/jpeg").name();
 				filename = filedialogDefaultName(qsl("image"), qsl(".jpg"), QString(), true);
+			} else {
+				_type = SendMediaType::File;
 			}
 		}
 		if (_type == SendMediaType::File) {
@@ -294,17 +301,18 @@ void FileLoadTask::process() {
 		filename.endsWith(qstr(".flac"), Qt::CaseInsensitive)) {
 		QImage cover;
 		QByteArray coverBytes, coverFormat;
-		MTPDocumentAttribute audioAttribute = audioReadSongAttributes(_filepath, _content, cover, coverBytes, coverFormat);
+		auto audioAttribute = audioReadSongAttributes(_filepath, _content, cover, coverBytes, coverFormat);
 		if (audioAttribute.type() == mtpc_documentAttributeAudio) {
 			attributes.push_back(audioAttribute);
 			song = true;
 			if (!cover.isNull()) { // cover to thumb
-				int32 cw = cover.width(), ch = cover.height();
-				if (cw < 20 * ch && ch < 20 * cw) {
-					QPixmap full = (cw > 90 || ch > 90) ? App::pixmapFromImageInPlace(cover.scaled(90, 90, Qt::KeepAspectRatio, Qt::SmoothTransformation)) : App::pixmapFromImageInPlace(std::move(cover));
+				auto coverWidth = cover.width();
+				auto coverHeight = cover.height();
+				if (ValidateThumbDimensions(coverWidth, coverHeight)) {
+					auto full = (coverWidth > 90 || coverHeight > 90) ? App::pixmapFromImageInPlace(cover.scaled(90, 90, Qt::KeepAspectRatio, Qt::SmoothTransformation)) : App::pixmapFromImageInPlace(std::move(cover));
 					{
-						QByteArray thumbFormat = "JPG";
-						int32 thumbQuality = 87;
+						auto thumbFormat = QByteArray("JPG");
+						auto thumbQuality = 87;

 						QBuffer buffer(&thumbdata);
 						full.save(&buffer, thumbFormat, thumbQuality);
@@ -318,27 +326,32 @@ void FileLoadTask::process() {
 			}
 		}
 	}
-	if (filemime == qstr("video/mp4") || filename.endsWith(qstr(".mp4"), Qt::CaseInsensitive) || animated) {
-		QImage cover;
-		MTPDocumentAttribute animatedAttribute = Media::Clip::readAttributes(_filepath, _content, cover);
-		if (animatedAttribute.type() == mtpc_documentAttributeVideo) {
-			int32 cw = cover.width(), ch = cover.height();
-			if (cw < 20 * ch && ch < 20 * cw) {
-				attributes.push_back(MTP_documentAttributeAnimated());
-				attributes.push_back(animatedAttribute);
-				gif = true;
+	if (filemime == qstr("video/mp4") || filemime == qstr("video/quicktime")
+		|| filename.endsWith(qstr(".mp4"), Qt::CaseInsensitive) || filename.endsWith(qstr(".mov"), Qt::CaseInsensitive)) {
+		auto sendVideoData = Media::Clip::PrepareForSending(_filepath, _content);
+		if (sendVideoData.duration > 0) {
+			auto coverWidth = sendVideoData.cover.width();
+			auto coverHeight = sendVideoData.cover.height();
+			if (ValidateThumbDimensions(coverWidth, coverHeight)) {
+				if (sendVideoData.isGifv) {
+					attributes.push_back(MTP_documentAttributeAnimated());
+				}
+				attributes.push_back(MTP_documentAttributeVideo(MTP_int(sendVideoData.duration), MTP_int(coverWidth), MTP_int(coverHeight)));
+				video = true;

-				QPixmap full = (cw > 90 || ch > 90) ? App::pixmapFromImageInPlace(cover.scaled(90, 90, Qt::KeepAspectRatio, Qt::SmoothTransformation)) : App::pixmapFromImageInPlace(std::move(cover));
+				auto cover = (coverWidth > 90 || coverHeight > 90)
+					? sendVideoData.cover.scaled(90, 90, Qt::KeepAspectRatio, Qt::SmoothTransformation)
+					: std::move(sendVideoData.cover);
 				{
-					QByteArray thumbFormat = "JPG";
-					int32 thumbQuality = 87;
+					auto thumbFormat = QByteArray("JPG");
+					auto thumbQuality = 87;

 					QBuffer buffer(&thumbdata);
-					full.save(&buffer, thumbFormat, thumbQuality);
+					cover.save(&buffer, thumbFormat, thumbQuality);
 				}

-				thumb = full;
-				thumbSize = MTP_photoSize(MTP_string(""), MTP_fileLocationUnavailable(MTP_long(0), MTP_int(0), MTP_long(0)), MTP_int(full.width()), MTP_int(full.height()), MTP_int(0));
+				thumb = App::pixmapFromImageInPlace(std::move(cover));
+				thumbSize = MTP_photoSize(MTP_string(""), MTP_fileLocationUnavailable(MTP_long(0), MTP_int(0), MTP_long(0)), MTP_int(thumb.width()), MTP_int(thumb.height()), MTP_int(0));

 				thumbId = rand_value<uint64>();

@@ -350,11 +363,11 @@ void FileLoadTask::process() {
 		}
 	}

-	if (!fullimage.isNull() && fullimage.width() > 0 && !song && !gif && !voice) {
+	if (!fullimage.isNull() && fullimage.width() > 0 && !song && !video && !voice) {
 		auto w = fullimage.width(), h = fullimage.height();
 		attributes.push_back(MTP_documentAttributeImageSize(MTP_int(w), MTP_int(h)));

-		if (w < 20 * h && h < 20 * w) {
+		if (ValidateThumbDimensions(w, h)) {
 			if (animated) {
 				attributes.push_back(MTP_documentAttributeAnimated());
 			} else if (_type != SendMediaType::File) {