ffmpeg rendering inline animations

This commit is contained in:
John Preston 2015-12-17 20:31:28 +03:00
parent 1416e4e277
commit 840ffa6482
12 changed files with 498 additions and 101 deletions

View File

@ -141,7 +141,7 @@ Open **VS2015 x86 Native Tools Command Prompt.bat** (should be in **Start Menu >
PKG_CONFIG_PATH="/mingw64/lib/pkgconfig:$PKG_CONFIG_PATH"
./configure --toolchain=msvc --disable-programs --disable-everything --enable-libopus --enable-decoder=aac --enable-decoder=aac_latm --enable-decoder=aasc --enable-decoder=mp1 --enable-decoder=mp1float --enable-decoder=mp2 --enable-decoder=mp2float --enable-decoder=mp3 --enable-decoder=mp3adu --enable-decoder=mp3adufloat --enable-decoder=mp3float --enable-decoder=mp3on4 --enable-decoder=mp3on4float --enable-decoder=wavpack --enable-decoder=opus --enable-decoder=vorbis --enable-decoder=wmalossless --enable-decoder=wmapro --enable-decoder=wmav1 --enable-decoder=wmav2 --enable-decoder=wmavoice --enable-decoder=flac --enable-encoder=libopus --enable-parser=aac --enable-parser=aac_latm --enable-parser=mpegaudio --enable-parser=opus --enable-parser=vorbis --enable-parser=flac --enable-demuxer=aac --enable-demuxer=wav --enable-demuxer=mp3 --enable-demuxer=ogg --enable-demuxer=mov --enable-demuxer=flac --enable-muxer=ogg --enable-muxer=opus --extra-ldflags="-libpath:/d/TBuild/Libraries/opus/win32/VS2010/Win32/Release celt.lib silk_common.lib silk_float.lib"
./configure --toolchain=msvc --disable-programs --disable-everything --enable-libopus --enable-decoder=aac --enable-decoder=aac_latm --enable-decoder=aasc --enable-decoder=gif --enable-decoder=h264 --enable-decoder=h264_crystalhd --enable-decoder=h264_qsv --enable-decoder=h264_vda --enable-decoder=h264_vdpau --enable-decoder=mp1 --enable-decoder=mp1float --enable-decoder=mp2 --enable-decoder=mp2float --enable-decoder=mp3 --enable-decoder=mp3adu --enable-decoder=mp3adufloat --enable-decoder=mp3float --enable-decoder=mp3on4 --enable-decoder=mp3on4float --enable-decoder=mpeg4 --enable-decoder=mpeg4_crystalhd --enable-decoder=mpeg4_vdpau --enable-decoder=msmpeg4_crystalhd --enable-decoder=msmpeg4_crystalhd --enable-decoder=msmpeg4v2 --enable-decoder=msmpeg4v3 --enable-decoder=wavpack --enable-decoder=opus --enable-decoder=vorbis --enable-decoder=wmalossless --enable-decoder=wmapro --enable-decoder=wmav1 --enable-decoder=wmav2 --enable-decoder=wmavoice --enable-decoder=flac --enable-encoder=libopus --enable-demuxer=aac --enable-demuxer=gif --enable-demuxer=h264 --enable-demuxer=wav --enable-demuxer=mp3 --enable-demuxer=ogg --enable-demuxer=mov --enable-demuxer=flac --enable-parser=aac --enable-parser=aac_latm --enable-parser=h264 --enable-parser=mpeg4video --enable-parser=mpegaudio --enable-parser=opus --enable-parser=vorbis --enable-parser=flac --enable-muxer=ogg --enable-muxer=opus --enable-hwaccel=mpeg4_vaapi --enable-hwaccel=mpeg4_vdpau --enable-hwaccel=mpeg4_videotoolbox --enable-hwaccel=h264_d3d11va --enable-hwaccel=h264_dxva2 --enable-hwaccel=h264_mmal --enable-hwaccel=h264_qsv --enable-hwaccel=h264_vaapi --enable-hwaccel=h264_vda --enable-hwaccel=h264_vda_old --enable-hwaccel=h264_vdpau --enable-hwaccel=h264_videotoolbox --extra-ldflags="-libpath:/d/TBuild/Libraries/opus/win32/VS2010/Win32/Release celt.lib silk_common.lib silk_float.lib"
make
make install
@ -169,7 +169,7 @@ and run
#####Apply the patch
cd qtbase && git apply ../../../tdesktop/Telegram/_qtbase_5_5_1_patch.diff && cd ..
#####Install Windows SDKs

View File

@ -27,15 +27,6 @@ Copyright (c) 2014-2015 John Preston, https://desktop.telegram.org
#define AL_ALEXT_PROTOTYPES
#include <AL/alext.h>
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/opt.h>
#include <libswresample/swresample.h>
}
#ifdef Q_OS_MAC
extern "C" {
@ -106,15 +97,16 @@ bool _checkALError() {
Q_DECLARE_METATYPE(AudioMsgId);
Q_DECLARE_METATYPE(SongMsgId);
void audioInit() {
av_register_all();
avcodec_register_all();
if (!audioDevice) {
av_register_all();
avcodec_register_all();
}
if (!capture) {
capture = new AudioCapture();
cSetHasAudioCapture(capture->check());
}
uint64 ms = getms();
if (audioDevice) return;
audioDevice = alcOpenDevice(0);
@ -223,7 +215,6 @@ void audioInit() {
player = new AudioPlayer();
alcDevicePauseSOFT(audioDevice);
LOG(("Audio init time: %1").arg(getms() - ms));
cSetHasAudioPlayer(true);
}
@ -1126,7 +1117,6 @@ protected:
};
static const uint32 AVBlockSize = 4096; // 4Kb
static const AVSampleFormat _toFormat = AV_SAMPLE_FMT_S16;
static const int64_t _toChannelLayout = AV_CH_LAYOUT_STEREO;
static const int32 _toChannels = 2;
@ -1189,14 +1179,14 @@ public:
return false;
}
freq = fmtContext->streams[streamId]->codec->sample_rate;
freq = codecContext->sample_rate;
if (fmtContext->streams[streamId]->duration == AV_NOPTS_VALUE) {
len = (fmtContext->duration * freq) / AV_TIME_BASE;
} else {
len = (fmtContext->streams[streamId]->duration * freq * fmtContext->streams[streamId]->time_base.num) / fmtContext->streams[streamId]->time_base.den;
}
uint64_t layout = fmtContext->streams[streamId]->codec->channel_layout;
inputFormat = fmtContext->streams[streamId]->codec->sample_fmt;
uint64_t layout = codecContext->channel_layout;
inputFormat = codecContext->sample_fmt;
switch (layout) {
case AV_CH_LAYOUT_MONO:
switch (inputFormat) {

View File

@ -87,6 +87,8 @@ enum {
AverageGifSize = 320 * 240,
WaitBeforeGifPause = 200, // wait 200ms for gif draw before pausing it
AVBlockSize = 4096, // 4Kb for ffmpeg blocksize
SaveRecentEmojisTimeout = 3000, // 3 secs
SaveWindowPositionTimeout = 1000, // 1 sec

View File

@ -132,13 +132,12 @@ void AnimationManager::clipReinit(ClipReader *reader) {
}
void AnimationManager::clipRedraw(ClipReader *reader) {
if (reader->currentDisplayed()) {
return;
}
const GifItems &items(App::gifItems());
GifItems::const_iterator it = items.constFind(reader);
if (it != items.cend()) {
if (reader->currentDisplayed()) {
return;
}
Ui::redrawHistoryItem(it.value());
}
}
@ -442,6 +441,386 @@ ClipReader::~ClipReader() {
stop();
}
// Abstract base for inline-animation decoders (Qt GIF reader / ffmpeg).
// A reader is backed either by a file on disk (_location) or by an
// in-memory copy of the media (_data); exactly one of them is used.
class ClipReaderImplementation {
public:
// Non-owning: |location| and |data| are owned by the caller (ClipReaderPrivate)
// and must outlive this implementation.
ClipReaderImplementation(FileLocation *location, QByteArray *data) : _location(location), _data(data), _device(0) {
}
// Decodes the next frame into |to|; returns false on error / no more frames.
virtual bool readNextFrame(QImage &to) = 0;
// Delay in milliseconds before the frame returned by the last readNextFrame().
virtual int32 nextFrameDelay() = 0;
// Opens the underlying media; must be called before readNextFrame().
virtual bool start() = 0;
virtual ~ClipReaderImplementation() {
}
protected:
FileLocation *_location;
QByteArray *_data;
QFile _file;
QBuffer _buffer;
QIODevice *_device; // points at _file or _buffer, never owned
// (Re)binds _device: the in-memory buffer when _data is non-empty,
// otherwise the file at _location. Closes any previously open device
// so the reader can be restarted from scratch.
void initDevice() {
if (_data->isEmpty()) {
if (_file.isOpen()) _file.close();
_file.setFileName(_location->name());
} else {
if (_buffer.isOpen()) _buffer.close();
_buffer.setBuffer(_data);
}
_device = _data->isEmpty() ? static_cast<QIODevice*>(&_file) : static_cast<QIODevice*>(&_buffer);
}
};
// GIF decoder built on QImageReader / QGifHandler. Loops the animation by
// re-opening (or jumping to image 0 of) the reader when all frames are spent.
class QtGifReaderImplementation : public ClipReaderImplementation{
public:
QtGifReaderImplementation(FileLocation *location, QByteArray *data) : ClipReaderImplementation(location, data)
, _reader(0)
, _framesLeft(0)
, _frameDelay(0) {
}
// Reads the next frame into |to|, rewinding to the first frame when the
// animation ends. Reuses |to|'s pixel buffer when geometry/format match
// to avoid a reallocation per frame.
bool readNextFrame(QImage &to) {
if (_framesLeft < 1 && !jumpToStart()) {
return false;
}
// nextImageDelay() must be queried before read() consumes the frame.
_frameDelay = _reader->nextImageDelay();
QImage frame; // QGifHandler always reads first to internal QImage and returns it
if (!_reader->read(&frame)) {
return false;
}
--_framesLeft;
int32 w = frame.width(), h = frame.height();
if (to.width() == w && to.height() == h && to.format() == frame.format()) {
if (to.byteCount() != frame.byteCount()) {
// same geometry but different stride - copy row by row
int bpl = qMin(to.bytesPerLine(), frame.bytesPerLine());
for (int i = 0; i < h; ++i) {
memcpy(to.scanLine(i), frame.constScanLine(i), bpl);
}
} else {
memcpy(to.bits(), frame.constBits(), frame.byteCount());
}
} else {
// geometry/format changed - take a deep copy (frame is the handler's
// internal image, so a shallow assignment would alias it)
to = frame.copy();
}
return true;
}
int32 nextFrameDelay() {
return _frameDelay;
}
bool start() {
return jumpToStart();
}
~QtGifReaderImplementation() {
delete _reader;
setBadPointer(_reader);
}
private:
QImageReader *_reader; // owned; recreated by jumpToStart() when jumping fails
int32 _framesLeft, _frameDelay;
// Rewinds to frame 0. Tries a cheap jumpToImage(0) first; if that fails
// (or there is no reader yet) recreates the QImageReader on a freshly
// initialized device. Returns false if the media is not a readable animation.
bool jumpToStart() {
if (_reader && _reader->jumpToImage(0)) {
_framesLeft = _reader->imageCount();
return true;
}
delete _reader;
initDevice();
_reader = new QImageReader(_device);
if (!_reader->canRead() || !_reader->supportsAnimation()) {
return false;
}
_framesLeft = _reader->imageCount();
if (_framesLeft < 1) {
return false;
}
return true;
}
};
// ffmpeg-based animation decoder: demuxes and decodes the best video stream
// of a GIF / silent-video file through a custom AVIOContext (so the source
// can be a QFile or an in-memory QByteArray) and converts each frame to
// ARGB32, using swscale when the decoded format is not already BGRA.
class FFMpegReaderImplementation : public ClipReaderImplementation {
public:
	FFMpegReaderImplementation(FileLocation *location, QByteArray *data) : ClipReaderImplementation(location, data)
	, _ioBuffer(0)
	, _ioContext(0)
	, _fmtContext(0)
	, _codec(0)
	, _codecContext(0)
	, _streamId(0)
	, _frame(0)
	, _opened(false)
	, _hadFrame(false)
	, _packetSize(0)
	, _packetData(0)
	, _packetWas(false)
	, _width(0)
	, _height(0)
	, _swsContext(0)
	, _nextFrameDelay(0)
	, _currentFrameDelay(0)
	, _frameMs(0) {
		_frame = av_frame_alloc();
		av_init_packet(&_avpkt);
		_avpkt.data = NULL;
		_avpkt.size = 0;
	}

	// Decodes the next frame into |to| (allocated/resized to ARGB32 on demand).
	// Loops forever: on EOF (after at least one decoded frame) it seeks back to
	// the start, flushes the decoder and keeps going. Returns false only on an
	// unrecoverable read / decode / scale / seek error.
	bool readNextFrame(QImage &to) {
		int res;
		while (true) {
			if (_avpkt.size > 0) { // previous packet not finished
				res = 0;
			} else if ((res = av_read_frame(_fmtContext, &_avpkt)) < 0) {
				if (res != AVERROR_EOF || !_hadFrame) {
					char err[AV_ERROR_MAX_STRING_SIZE] = { 0 };
					LOG(("Gif Error: Unable to av_read_frame() %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
					return false;
				}
			}

			bool finished = (res < 0);
			if (finished) {
				// NULL flush packet: drains frames buffered inside the decoder.
				_avpkt.data = NULL;
				_avpkt.size = 0;
			} else {
				rememberPacket();
			}

			int32 got_frame = 0;
			int32 decoded = _avpkt.size;
			if (_avpkt.stream_index == _streamId) {
				if ((res = avcodec_decode_video2(_codecContext, _frame, &got_frame, &_avpkt)) < 0) {
					char err[AV_ERROR_MAX_STRING_SIZE] = { 0 };
					LOG(("Gif Error: Unable to avcodec_decode_video2() %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
					if (res == AVERROR_INVALIDDATA) { // try to skip bad packet
						freePacket();
						_avpkt.data = NULL;
						_avpkt.size = 0;
						continue;
					}
					return false;
				}
				if (res > 0) decoded = res;
			}
			if (!finished) {
				// The decoder may consume the packet only partially - advance
				// inside it and come back to the same packet on the next pass.
				_avpkt.data += decoded;
				_avpkt.size -= decoded;
				if (_avpkt.size <= 0) freePacket();
			}

			if (got_frame) {
				_hadFrame = true;

				if (!_width || !_height) {
					// Output size is fixed by the first decoded frame.
					_width = _frame->width;
					_height = _frame->height;
					if (!_width || !_height) {
						LOG(("Gif Error: Bad frame size %1").arg(logData()));
						return false;
					}
				}
				if (to.isNull() || to.width() != _width || to.height() != _height) {
					to = QImage(_width, _height, QImage::Format_ARGB32);
				}
				if (_frame->width == _width && _frame->height == _height && (_frame->format == AV_PIX_FMT_BGRA || (_frame->format == -1 && _codecContext->pix_fmt == AV_PIX_FMT_BGRA))) {
					// Frame is already BGRA at the right size - plain row copy,
					// honoring possibly different source/destination strides.
					int32 sbpl = _frame->linesize[0], dbpl = to.bytesPerLine(), bpl = qMin(sbpl, dbpl);
					uchar *s = _frame->data[0], *d = to.bits();
					for (int32 i = 0, l = _frame->height; i < l; ++i) {
						memcpy(d + i * dbpl, s + i * sbpl, bpl);
					}
				} else {
					if (_frame->width != _width || _frame->height != _height || (_frame->format != -1 && _frame->format != _codecContext->pix_fmt) || !_swsContext) {
						_swsContext = sws_getCachedContext(_swsContext, _frame->width, _frame->height, AVPixelFormat(_frame->format), _width, _height, AV_PIX_FMT_BGRA, 0, 0, 0, 0);
					}
					uint8_t * toData[1] = { to.bits() };
					int	toLinesize[1] = { to.bytesPerLine() };
					if ((res = sws_scale(_swsContext, _frame->data, _frame->linesize, 0, _frame->height, toData, toLinesize)) != _height) {
						LOG(("Gif Error: Unable to sws_scale to good size %1, height %2, should be %3").arg(logData()).arg(res).arg(_height));
						return false;
					}
				}

				// Frame delay: prefer the packet duration; fall back to the
				// pts/dts delta from the previous frame when it is unknown.
				int64 duration = av_frame_get_pkt_duration(_frame);
				if (duration == AV_NOPTS_VALUE) {
					int64 framePts = (_frame->pkt_pts == AV_NOPTS_VALUE) ? _frame->pkt_dts : _frame->pkt_pts;
					int64 frameMs = (framePts * 1000LL * _fmtContext->streams[_streamId]->time_base.num) / _fmtContext->streams[_streamId]->time_base.den;
					if (frameMs > _frameMs) {
						_currentFrameDelay = int32(frameMs - _frameMs);
						_frameMs = frameMs;
					} else {
						_currentFrameDelay = 0;
					}
					_nextFrameDelay = _currentFrameDelay;
				} else {
					_currentFrameDelay = _nextFrameDelay;
					_nextFrameDelay = (duration * 1000LL * _fmtContext->streams[_streamId]->time_base.num) / _fmtContext->streams[_streamId]->time_base.den;
					_frameMs += _nextFrameDelay;
				}

				av_frame_unref(_frame);
				return true;
			}

			if (finished) {
				// Loop: seek back to the start, trying progressively cruder
				// seek modes until one succeeds.
				if ((res = avformat_seek_file(_fmtContext, _streamId, std::numeric_limits<int64_t>::min(), 0, std::numeric_limits<int64_t>::max(), 0)) < 0) {
					if ((res = av_seek_frame(_fmtContext, _streamId, 0, AVSEEK_FLAG_BYTE)) < 0) {
						if ((res = av_seek_frame(_fmtContext, _streamId, 0, AVSEEK_FLAG_FRAME)) < 0) {
							if ((res = av_seek_frame(_fmtContext, _streamId, 0, 0)) < 0) {
								char err[AV_ERROR_MAX_STRING_SIZE] = { 0 };
								LOG(("Gif Error: Unable to av_seek_frame() to the start %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
								return false;
							}
						}
					}
				}
				avcodec_flush_buffers(_codecContext);
				_hadFrame = false;
				_frameMs = 0;
			}
		}

		return false;
	}

	// Delay (ms) of the frame returned by the last readNextFrame().
	int32 nextFrameDelay() {
		return _currentFrameDelay;
	}

	QString logData() const {
		return qsl("for file '%1', data size '%2'").arg(_location ? _location->name() : QString()).arg(_data->size());
	}

	// Opens the media: wires the QIODevice into an AVIOContext, opens the
	// input, finds the best video stream and opens its decoder.
	bool start() {
		initDevice();
		if (!_device->open(QIODevice::ReadOnly)) {
			LOG(("Gif Error: Unable to open device %1").arg(logData()));
			return false;
		}
		_ioBuffer = (uchar*)av_malloc(AVBlockSize);
		_ioContext = avio_alloc_context(_ioBuffer, AVBlockSize, 0, static_cast<void*>(this), &FFMpegReaderImplementation::_read, 0, &FFMpegReaderImplementation::_seek);
		_fmtContext = avformat_alloc_context();
		if (!_fmtContext) {
			LOG(("Gif Error: Unable to avformat_alloc_context %1").arg(logData()));
			return false;
		}
		_fmtContext->pb = _ioContext;

		int res = 0;
		char err[AV_ERROR_MAX_STRING_SIZE] = { 0 };
		if ((res = avformat_open_input(&_fmtContext, 0, 0, 0)) < 0) {
			// NOTE(review): on failure ffmpeg presumably disposed of the io
			// buffer already, so drop our pointer to avoid a double free in
			// the destructor - confirm against the avformat docs.
			_ioBuffer = 0;
			LOG(("Gif Error: Unable to avformat_open_input %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
			return false;
		}
		_opened = true;

		if ((res = avformat_find_stream_info(_fmtContext, 0)) < 0) {
			LOG(("Gif Error: Unable to avformat_find_stream_info %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
			return false;
		}

		_streamId = av_find_best_stream(_fmtContext, AVMEDIA_TYPE_VIDEO, -1, -1, 0, 0);
		if (_streamId < 0) {
			LOG(("Gif Error: Unable to av_find_best_stream %1, error %2, %3").arg(logData()).arg(_streamId).arg(av_make_error_string(err, sizeof(err), _streamId)));
			return false;
		}

		// Get a pointer to the codec context for the video stream.
		_codecContext = _fmtContext->streams[_streamId]->codec;
		_codec = avcodec_find_decoder(_codecContext->codec_id);
		av_opt_set_int(_codecContext, "refcounted_frames", 1, 0);
		if ((res = avcodec_open2(_codecContext, _codec, 0)) < 0) {
			LOG(("Gif Error: Unable to avcodec_open2 %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
			return false;
		}

		return true;
	}

	~FFMpegReaderImplementation() {
		if (_ioContext) av_free(_ioContext);
		if (_codecContext) avcodec_close(_codecContext);
		if (_swsContext) sws_freeContext(_swsContext);
		if (_opened) {
			// close_input nulls _fmtContext, so the free below is a no-op then.
			avformat_close_input(&_fmtContext);
		} else if (_ioBuffer) {
			av_free(_ioBuffer);
		}
		if (_fmtContext) avformat_free_context(_fmtContext);
		av_frame_free(&_frame);
		freePacket();
	}

private:
	uchar *_ioBuffer;
	AVIOContext *_ioContext;
	AVFormatContext *_fmtContext;
	AVCodec *_codec;
	AVCodecContext *_codecContext;
	int32 _streamId;
	AVFrame *_frame;
	bool _opened, _hadFrame;

	AVPacket _avpkt;
	// avcodec may consume a packet partially while we advance _avpkt.data /
	// _avpkt.size; the original pointer and size are remembered here so that
	// av_free_packet() can be called on the untouched packet.
	int _packetSize;
	uint8_t *_packetData;
	bool _packetWas;
	void rememberPacket() {
		if (!_packetWas) {
			_packetSize = _avpkt.size;
			_packetData = _avpkt.data;
			_packetWas = true;
		}
	}
	void freePacket() {
		if (_packetWas) {
			_avpkt.size = _packetSize;
			_avpkt.data = _packetData;
			_packetWas = false;
			av_free_packet(&_avpkt);
		}
	}

	int32 _width, _height;
	SwsContext *_swsContext;

	int64 _frameMs; // pts of the last decoded frame, in milliseconds
	int32 _nextFrameDelay, _currentFrameDelay;

	// AVIOContext callbacks: forward reads/seeks to the QIODevice.
	static int _read(void *opaque, uint8_t *buf, int buf_size) {
		FFMpegReaderImplementation *l = reinterpret_cast<FFMpegReaderImplementation*>(opaque);
		return int(l->_device->read((char*)(buf), buf_size));
	}

	static int64_t _seek(void *opaque, int64_t offset, int whence) {
		FFMpegReaderImplementation *l = reinterpret_cast<FFMpegReaderImplementation*>(opaque);
		switch (whence) {
		case SEEK_SET: return l->_device->seek(offset) ? l->_device->pos() : -1;
		case SEEK_CUR: return l->_device->seek(l->_device->pos() + offset) ? l->_device->pos() : -1;
		case SEEK_END: return l->_device->seek(l->_device->size() + offset) ? l->_device->pos() : -1;
		}
		return -1;
	}
};
class ClipReaderPrivate {
public:
@ -450,13 +829,11 @@ public:
, _data(data)
, _location(_data.isEmpty() ? new FileLocation(location) : 0)
, _accessed(false)
, _buffer(_data.isEmpty() ? 0 : &_data)
, _reader(0)
, _implementation(0)
, _previousMs(0)
, _currentMs(0)
, _nextUpdateMs(0)
, _paused(false) {
if (_data.isEmpty() && !_location->accessEnable()) {
error();
return;
@ -466,14 +843,13 @@ public:
ClipProcessResult start(uint64 ms) {
_nextUpdateMs = ms + 86400 * 1000ULL;
if (!_reader && !restartReader(true)) {
if (!_implementation && !init()) {
return error();
}
if (_currentOriginal.isNull()) {
if (!readNextFrame(_currentOriginal)) {
if (!_implementation->readNextFrame(_currentOriginal)) {
return error();
}
--_framesLeft;
return ClipProcessReinit;
}
return ClipProcessWait;
@ -517,7 +893,7 @@ public:
}
uint64 nextFrameDelay() {
int delay = _reader->nextImageDelay();
int32 delay = _implementation->nextFrameDelay();
return qMax(delay, 5);
}
@ -529,75 +905,31 @@ public:
qSwap(_currentCache, _nextCache);
}
bool readNextFrame(QImage &to) {
QImage frame; // QGifHandler always reads first to internal QImage and returns it
if (!_reader->read(&frame)) {
return false;
}
int32 w = frame.width(), h = frame.height();
if (to.width() == w && to.height() == h && to.format() == frame.format()) {
if (to.byteCount() != frame.byteCount()) {
int bpl = qMin(to.bytesPerLine(), frame.bytesPerLine());
for (int i = 0; i < h; ++i) {
memcpy(to.scanLine(i), frame.constScanLine(i), bpl);
}
} else {
memcpy(to.bits(), frame.constBits(), frame.byteCount());
}
} else {
to = frame.copy();
}
return true;
}
bool prepareNextFrame() {
_nextUpdateMs = _currentMs + nextFrameDelay();
if (!_framesLeft) {
if (_reader->jumpToImage(0)) {
_framesLeft = _reader->imageCount();
} else if (!restartReader()) {
return false;
}
}
if (!readNextFrame(_nextOriginal)) {
if (!_implementation->readNextFrame(_nextOriginal)) {
return false;
}
_nextUpdateMs = _currentMs + nextFrameDelay();
_nextOriginal.setDevicePixelRatio(_request.factor);
--_framesLeft;
_next = QPixmap();
_next = _prepareFrame(_request, _nextOriginal, _nextCache, true);
return true;
}
bool restartReader(bool first = false) {
if (first && _data.isEmpty() && QFileInfo(_location->name()).size() <= AnimationInMemory) {
bool init() {
if (_data.isEmpty() && QFileInfo(_location->name()).size() <= AnimationInMemory) {
QFile f(_location->name());
if (f.open(QIODevice::ReadOnly)) {
_data = f.readAll();
if (f.error() == QFile::NoError) {
_buffer.setBuffer(&_data);
} else {
if (f.error() != QFile::NoError) {
_data = QByteArray();
}
}
} else if (!_data.isEmpty()) {
_buffer.close();
}
delete _reader;
if (_data.isEmpty()) {
_reader = new QImageReader(_location->name());
} else {
_reader = new QImageReader(&_buffer);
}
if (!_reader->canRead() || !_reader->supportsAnimation()) {
return false;
}
_framesLeft = _reader->imageCount();
if (_framesLeft < 1) {
return false;
}
return true;
_implementation = new FFMpegReaderImplementation(_location, &_data);
// _implementation = new QtGifReaderImplementation(_location, &_data);
return _implementation->start();
}
ClipProcessResult error() {
@ -607,8 +939,8 @@ public:
}
void stop() {
delete _reader;
_reader = 0;
delete _implementation;
_implementation = 0;
if (_location) {
if (_accessed) {
@ -623,7 +955,7 @@ public:
~ClipReaderPrivate() {
stop();
setBadPointer(_location);
setBadPointer(_reader);
setBadPointer(_implementation);
}
private:
@ -636,13 +968,12 @@ private:
bool _accessed;
QBuffer _buffer;
QImageReader *_reader;
ClipReaderImplementation *_implementation;
ClipFrameRequest _request;
QPixmap _current, _next;
QImage _currentOriginal, _nextOriginal, _currentCache, _nextCache;
int32 _framesLeft;
uint64 _previousMs, _currentMs, _nextUpdateMs;
bool _paused;

View File

@ -230,6 +230,9 @@ public:
}
private:
Animation(const Animation &);
Animation &operator=(const Animation &);
AnimationCallbacks *_cb;
bool _animating;

View File

@ -403,27 +403,35 @@ QPixmap Image::pixNoCache(int32 w, int32 h, bool smooth, bool blurred, bool roun
const QPixmap &p(pixData());
if (p.isNull()) return blank()->pix();
bool n = isNull();
QImage img = p.toImage();
if (blurred) img = imageBlur(img);
if (w <= 0 || !width() || !height() || (w == width() && (h <= 0 || h == height()))) {
} else if (h <= 0) {
img = img.scaledToWidth(w, smooth ? Qt::SmoothTransformation : Qt::FastTransformation);
} else {
img = img.scaled(w, h, Qt::IgnoreAspectRatio, smooth ? Qt::SmoothTransformation : Qt::FastTransformation);
if (!n || !(outerw > 0 && outerh > 0)) {
if (blurred) img = imageBlur(img);
if (w <= 0 || !width() || !height() || (w == width() && (h <= 0 || h == height()))) {
} else if (h <= 0) {
img = img.scaledToWidth(w, smooth ? Qt::SmoothTransformation : Qt::FastTransformation);
} else {
img = img.scaled(w, h, Qt::IgnoreAspectRatio, smooth ? Qt::SmoothTransformation : Qt::FastTransformation);
}
}
if (outerw > 0 && outerh > 0) {
outerw *= cIntRetinaFactor();
outerh *= cIntRetinaFactor();
if (outerw != w || outerh != h) {
if (outerw != w || outerh != h || n) {
img.setDevicePixelRatio(cRetinaFactor());
QImage result(outerw, outerh, QImage::Format_ARGB32_Premultiplied);
result.setDevicePixelRatio(cRetinaFactor());
{
if (n) {
QPainter p(&result);
if (w < outerw || h < outerh) {
p.fillRect(0, 0, result.width(), result.height(), st::black->b);
} else {
QPainter p(&result);
if (w < outerw || h < outerh || n) {
p.fillRect(0, 0, result.width(), result.height(), st::black->b);
}
p.drawImage((result.width() - img.width()) / (2 * cIntRetinaFactor()), (result.height() - img.height()) / (2 * cIntRetinaFactor()), img);
if (!n) {
p.drawImage((result.width() - img.width()) / (2 * cIntRetinaFactor()), (result.height() - img.height()) / (2 * cIntRetinaFactor()), img);
}
}
img = result;
}

View File

@ -3614,6 +3614,15 @@ HistoryVideo::HistoryVideo(const MTPDvideo &video, const QString &caption, Histo
_data->thumb->load();
}
// Copy constructor: shares the VideoData pointer and cached layout values,
// but builds fresh click-link objects for this media instance.
HistoryVideo::HistoryVideo(const HistoryVideo &other) : HistoryFileMedia()
, _data(other._data)
, _caption(other._caption)
, _thumbw(other._thumbw) {
setLinks(new VideoOpenLink(_data), new VideoSaveLink(_data), new VideoCancelLink(_data));
// Reuse the already-computed status size instead of recomputing it.
setStatusSize(other._statusSize);
}
void HistoryVideo::initDimensions(const HistoryItem *parent) {
bool bubble = parent->hasBubble();
@ -3923,6 +3932,13 @@ HistoryAudio::HistoryAudio(const MTPDaudio &audio) : HistoryFileMedia()
setStatusSize(FileStatusSizeReady);
}
// Copy constructor: shares the AudioData pointer, but builds fresh
// click-link objects for this media instance.
HistoryAudio::HistoryAudio(const HistoryAudio &other) : HistoryFileMedia()
, _data(other._data) {
setLinks(new AudioOpenLink(_data), new AudioSaveLink(_data), new AudioCancelLink(_data));
// Reuse the already-computed status size instead of recomputing it.
setStatusSize(other._statusSize);
}
void HistoryAudio::setStatusSize(int32 newSize, qint64 realDuration) const {
HistoryFileMedia::setStatusSize(newSize, _data->size, _data->duration, realDuration);
}
@ -4155,6 +4171,18 @@ HistoryDocument::HistoryDocument(DocumentData *document) : HistoryFileMedia()
}
}
// Copy constructor: shares the DocumentData pointer and cached name/layout
// values, but builds fresh click-link objects for this media instance.
HistoryDocument::HistoryDocument(const HistoryDocument &other) : HistoryFileMedia()
, _data(other._data)
, _linksavel(new DocumentSaveLink(_data))
, _linkcancell(new DocumentCancelLink(_data))
, _namew(other._namew)
, _name(other._name)
, _thumbw(other._thumbw) {
setLinks(new DocumentOpenLink(_data), new DocumentSaveLink(_data), new DocumentCancelLink(_data));
// Reuse the already-computed status size instead of recomputing it.
setStatusSize(other._statusSize);
}
void HistoryDocument::setStatusSize(int32 newSize, qint64 realDuration) const {
HistoryFileMedia::setStatusSize(newSize, _data->size, _data->song() ? _data->song()->duration : -1, realDuration);
@ -4556,6 +4584,16 @@ HistoryGif::HistoryGif(DocumentData *document) : HistoryFileMedia()
_data->thumb->load();
}
// Copy constructor: shares the DocumentData pointer and cached thumb sizes,
// but builds fresh click-link objects. The running clip reader (_gif) is NOT
// copied - the new instance starts with no active animation.
HistoryGif::HistoryGif(const HistoryGif &other) : HistoryFileMedia()
, _data(other._data)
, _thumbw(other._thumbw)
, _thumbh(other._thumbh)
, _gif(0) {
setLinks(new DocumentOpenLink(_data), new DocumentOpenLink(_data), new DocumentCancelLink(_data));
// Reuse the already-computed status size instead of recomputing it.
setStatusSize(other._statusSize);
}
void HistoryGif::initDimensions(const HistoryItem *parent) {
bool bubble = parent->hasBubble();
int32 tw = 0, th = 0;

View File

@ -784,6 +784,7 @@ public:
protected:
mutable int32 _height, _maxw, _minh;
HistoryElem &operator=(const HistoryElem &);
};
@ -1350,12 +1351,18 @@ protected:
};
mutable AnimationData *_animation;
private:
HistoryFileMedia(const HistoryFileMedia &other);
};
class HistoryVideo : public HistoryFileMedia {
public:
HistoryVideo(const MTPDvideo &video, const QString &caption, HistoryItem *parent);
HistoryVideo(const HistoryVideo &other);
void initDimensions(const HistoryItem *parent);
void draw(Painter &p, const HistoryItem *parent, const QRect &r, bool selected, uint64 ms) const;
@ -1420,6 +1427,8 @@ class HistoryAudio : public HistoryFileMedia {
public:
HistoryAudio(const MTPDaudio &audio);
HistoryAudio(const HistoryAudio &other);
void initDimensions(const HistoryItem *parent);
void draw(Painter &p, const HistoryItem *parent, const QRect &r, bool selected, uint64 ms) const;
@ -1475,6 +1484,8 @@ class HistoryDocument : public HistoryFileMedia {
public:
HistoryDocument(DocumentData *document);
HistoryDocument(const HistoryDocument &other);
void initDimensions(const HistoryItem *parent);
bool withThumb() const {
@ -1556,6 +1567,8 @@ class HistoryGif : public HistoryFileMedia {
public:
HistoryGif(DocumentData *document);
HistoryGif(const HistoryGif &other);
void initDimensions(const HistoryItem *parent);
void draw(Painter &p, const HistoryItem *parent, const QRect &r, bool selected, uint64 ms) const;

View File

@ -51,6 +51,16 @@ Copyright (c) 2014-2015 John Preston, https://desktop.telegram.org
#define _NEED_LINUX_GENERATE_DUMP
#endif
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/opt.h>
#include <libswresample/swresample.h>
#include <libswscale/swscale.h>
}
#include "types.h"
#include "config.h"

View File

@ -1008,7 +1008,7 @@ void DocumentData::setattributes(const QVector<MTPDocumentAttribute> &attributes
const MTPDdocumentAttributeImageSize &d(attributes[i].c_documentAttributeImageSize());
dimensions = QSize(d.vw.v, d.vh.v);
} break;
case mtpc_documentAttributeAnimated: if (type == FileDocument || type == StickerDocument) {
case mtpc_documentAttributeAnimated: if (type == FileDocument || type == StickerDocument || type == VideoDocument) {
type = AnimatedDocument;
delete _additional;
_additional = 0;
@ -1025,7 +1025,9 @@ void DocumentData::setattributes(const QVector<MTPDocumentAttribute> &attributes
} break;
case mtpc_documentAttributeVideo: {
const MTPDdocumentAttributeVideo &d(attributes[i].c_documentAttributeVideo());
type = VideoDocument;
if (type == FileDocument) {
type = VideoDocument;
}
// duration = d.vduration.v;
dimensions = QSize(d.vw.v, d.vh.v);
} break;

View File

@ -641,7 +641,7 @@ void Window::sendServiceHistoryRequest() {
UserData *user = App::userLoaded(ServiceUserId);
if (!user) {
int32 userFlags = MTPDuser::flag_first_name | MTPDuser::flag_phone | MTPDuser::flag_status | MTPDuser::flag_verified;
user = App::feedUsers(MTP_vector<MTPUser>(1, MTP_user(MTP_int(userFlags), MTP_int(ServiceUserId), MTPlong(), MTP_string("Telegram"), MTPstring(), MTPstring(), MTP_string("42777"), MTP_userProfilePhotoEmpty(), MTP_userStatusRecently(), MTPint())));
user = App::feedUsers(MTP_vector<MTPUser>(1, MTP_user(MTP_int(userFlags), MTP_int(ServiceUserId), MTPlong(), MTP_string("Telegram"), MTPstring(), MTPstring(), MTP_string("42777"), MTP_userProfilePhotoEmpty(), MTP_userStatusRecently(), MTPint(), MTPstring())));
}
_serviceHistoryRequest = MTP::send(MTPmessages_GetHistory(user->input, MTP_int(0), MTP_int(0), MTP_int(1), MTP_int(0), MTP_int(0)), main->rpcDone(&MainWidget::serviceHistoryDone), main->rpcFail(&MainWidget::serviceHistoryFail));
}

View File

@ -83,7 +83,7 @@
<SubSystem>Windows</SubSystem>
<OutputFile>$(OutDir)$(ProjectName).exe</OutputFile>
<AdditionalLibraryDirectories>.\..\..\Libraries\lzma\C\Util\LzmaLib\Debug;.\..\..\Libraries\libexif-0.6.20\win32\Debug;.\..\..\Libraries\ffmpeg;.\..\..\Libraries\opus\win32\VS2010\Win32\Debug;.\..\..\Libraries\openal-soft\build\Debug;.\..\..\Libraries\zlib-1.2.8\contrib\vstudio\vc11\x86\ZlibStatDebug;.\..\..\Libraries\openssl_debug\Debug\lib;$(QTDIR)\lib;$(QTDIR)\plugins;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
<AdditionalDependencies>kernel32.lib;user32.lib;shell32.lib;uuid.lib;ole32.lib;advapi32.lib;ws2_32.lib;gdi32.lib;comdlg32.lib;oleaut32.lib;Shlwapi.lib;Gdiplus.lib;imm32.lib;winmm.lib;qtmaind.lib;glu32.lib;opengl32.lib;Strmiids.lib;Qt5Cored.lib;Qt5Guid.lib;qtharfbuzzngd.lib;qtpcred.lib;qtfreetyped.lib;Qt5Widgetsd.lib;Qt5Networkd.lib;Qt5PlatformSupportd.lib;platforms\qwindowsd.lib;imageformats\qwebpd.lib;libeay32.lib;ssleay32.lib;Crypt32.lib;zlibstat.lib;LzmaLib.lib;lib_exif.lib;UxTheme.lib;DbgHelp.lib;OpenAL32.lib;common.lib;libavformat\libavformat.a;libavcodec\libavcodec.a;libavutil\libavutil.a;libswresample\libswresample.a;opus.lib;celt.lib;silk_common.lib;silk_float.lib;%(AdditionalDependencies)</AdditionalDependencies>
<AdditionalDependencies>kernel32.lib;user32.lib;shell32.lib;uuid.lib;ole32.lib;advapi32.lib;ws2_32.lib;gdi32.lib;comdlg32.lib;oleaut32.lib;Shlwapi.lib;Gdiplus.lib;imm32.lib;winmm.lib;qtmaind.lib;glu32.lib;opengl32.lib;Strmiids.lib;Qt5Cored.lib;Qt5Guid.lib;qtharfbuzzngd.lib;qtpcred.lib;qtfreetyped.lib;Qt5Widgetsd.lib;Qt5Networkd.lib;Qt5PlatformSupportd.lib;platforms\qwindowsd.lib;imageformats\qwebpd.lib;libeay32.lib;ssleay32.lib;Crypt32.lib;zlibstat.lib;LzmaLib.lib;lib_exif.lib;UxTheme.lib;DbgHelp.lib;OpenAL32.lib;common.lib;libavformat\libavformat.a;libavcodec\libavcodec.a;libavutil\libavutil.a;libswresample\libswresample.a;libswscale\libswscale.a;opus.lib;celt.lib;silk_common.lib;silk_float.lib;%(AdditionalDependencies)</AdditionalDependencies>
<GenerateDebugInformation>true</GenerateDebugInformation>
<ImageHasSafeExceptionHandlers />
<IgnoreSpecificDefaultLibraries>LIBCMT</IgnoreSpecificDefaultLibraries>