mirror of https://github.com/telegramdesktop/tdesktop (synced 2025-03-30 07:18:28 +00:00)
Enable video files overview in MediaView. Video restart supported.
This commit is contained in:
parent 8da39356dc
commit 647759f0d1
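The change below replaces the clip readers' bool frame-read results with a three-state ReadResult, so end-of-file can be told apart from a decode error; Eof is surfaced as a Finished state, which MediaView uses to offer a restart, and video files join the MediaView overview navigation. A minimal sketch of that return-code pattern, assuming an invented FakeReader and main() driver (only the ReadResult values mirror the diff):

```cpp
// Minimal sketch of the ReadResult pattern this diff introduces.
// Only the enum values mirror ReaderImplementation::ReadResult;
// FakeReader and main() are illustrative stand-ins, not tdesktop code.
#include <cstdio>

enum class ReadResult {
	Success, // a frame was decoded and can be rendered
	Error,   // decoding failed; the clip goes to an error state
	Eof,     // the stream ended normally; the clip finishes and may be restarted
};

struct FakeReader {
	int framesLeft = 3;

	ReadResult readNextFrame() {
		if (framesLeft <= 0) return ReadResult::Eof;
		--framesLeft;
		return ReadResult::Success;
	}
};

int main() {
	FakeReader reader;
	for (;;) {
		auto result = reader.readNextFrame();
		if (result == ReadResult::Success) {
			std::printf("frame decoded\n");
		} else if (result == ReadResult::Eof) {
			std::printf("finished\n"); // with a bool return this was indistinguishable from an error
			break;
		} else {
			std::printf("error\n");
			break;
		}
	}
	return 0;
}
```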
@@ -281,6 +281,7 @@ void AudioPlayer::AudioMsg::clear() {
 	nextBuffer = 0;

 	videoData = nullptr;
+	videoPlayId = 0;
 }

 AudioPlayer::AudioPlayer() : _audioCurrent(0), _songCurrent(0),
@@ -520,6 +521,24 @@ void AudioPlayer::playFromVideo(const AudioMsgId &audio, uint64 videoPlayId, std
 	if (stopped) emit updated(stopped);
 }

+void AudioPlayer::stopFromVideo(uint64 videoPlayId) {
+	AudioMsgId current;
+	{
+		QMutexLocker lock(&playerMutex);
+		auto data = dataForType(AudioMsgId::Type::Video);
+		t_assert(data != nullptr);
+
+		if (data->videoPlayId != videoPlayId) {
+			return;
+		}
+
+		current = data->audio;
+		fadedStop(AudioMsgId::Type::Video);
+		data->clear();
+	}
+	if (current) emit updated(current);
+}
+
 void AudioPlayer::feedFromVideo(VideoSoundPart &&part) {
 	_loader->feedFromVideo(std_::move(part));
 }
@@ -72,6 +72,7 @@ public:
 	int64 getVideoCorrectedTime(uint64 playId, uint64 systemMs);
 	void videoSoundProgress(const AudioMsgId &audio);
 	AudioPlaybackState currentVideoState(uint64 videoPlayId);
+	void stopFromVideo(uint64 videoPlayId);

 	void stopAndClear();

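The AudioPlayer::stopFromVideo() added above is keyed by the video play id, so a stop request arriving from an already-destroyed reader cannot silence audio that belongs to a newer playback. A rough sketch of that guard, with VideoAudioSlot and its members as hypothetical stand-ins for the player's internal state:

```cpp
// Illustrative guard only: VideoAudioSlot, currentPlayId and playing are
// hypothetical stand-ins for AudioPlayer's internal video slot, not real members.
#include <cstdint>

struct VideoAudioSlot {
	uint64_t currentPlayId = 0;
	bool playing = false;

	void stopFromVideo(uint64_t videoPlayId) {
		if (currentPlayId != videoPlayId) {
			return; // stale request from an old reader: a newer playback owns the slot
		}
		playing = false; // stand-in for the fadedStop() + clear() sequence in the diff
	}
};
```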
@@ -36,7 +36,7 @@ FFMpegReaderImplementation::FFMpegReaderImplementation(FileLocation *location, Q
 	_packetNull.size = 0;
 }

-bool FFMpegReaderImplementation::readNextFrame() {
+ReaderImplementation::ReadResult FFMpegReaderImplementation::readNextFrame() {
 	if (_frameRead) {
 		av_frame_unref(_frame);
 		_frameRead = false;
@@ -46,7 +46,7 @@ bool FFMpegReaderImplementation::readNextFrame() {
 	while (_packetQueue.isEmpty()) {
 		auto packetResult = readPacket();
 		if (packetResult == PacketResult::Error) {
-			return false;
+			return ReadResult::Error;
 		} else if (packetResult == PacketResult::EndOfFile) {
 			break;
 		}
@@ -75,7 +75,7 @@ bool FFMpegReaderImplementation::readNextFrame() {

 			eofReached = (res == AVERROR_EOF);
 			if (!eofReached || !_hadFrame) { // try to skip end of file
-				return false;
+				return ReadResult::Error;
 			}
 		}
 		if (res > 0) decoded = res;
@@ -105,13 +105,13 @@ bool FFMpegReaderImplementation::readNextFrame() {

 		_hadFrame = _frameRead = true;
 		_frameTime += _currentFrameDelay;
-		return true;
+		return ReadResult::Success;
 	}

 	if (eofReached) {
 		clearPacketQueue();
 		if (_mode == Mode::Normal) {
-			return false;
+			return ReadResult::Eof;
 		}

 		if ((res = avformat_seek_file(_fmtContext, _streamId, std::numeric_limits<int64_t>::min(), 0, std::numeric_limits<int64_t>::max(), 0)) < 0) {
@@ -120,7 +120,7 @@ bool FFMpegReaderImplementation::readNextFrame() {
 			if ((res = av_seek_frame(_fmtContext, _streamId, 0, 0)) < 0) {
 				char err[AV_ERROR_MAX_STRING_SIZE] = { 0 };
 				LOG(("Gif Error: Unable to av_seek_frame() to the start %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
-				return false;
+				return ReadResult::Error;
 			}
 		}
 	}
@@ -132,41 +132,42 @@ bool FFMpegReaderImplementation::readNextFrame() {
 		}
 	}

-	return false;
+	return ReadResult::Error;
 }

-bool FFMpegReaderImplementation::readFramesTill(int64 ms) {
-	if (_audioStreamId >= 0) { // sync by audio stream
-		auto correctMs = audioPlayer()->getVideoCorrectedTime(_playId, ms);
-
-		if (!_frameRead && !readNextFrame()) {
-			return false;
-		}
-		while (_frameTime <= correctMs) {
-			if (!readNextFrame()) {
-				return false;
-			}
-		}
-		_frameTimeCorrection = ms - correctMs;
-		return true;
-	} else { // just keep up
+ReaderImplementation::ReadResult FFMpegReaderImplementation::readFramesTill(int64 ms) {
+	if (_audioStreamId < 0) { // just keep up
 		if (_frameRead && _frameTime > ms) {
-			return true;
+			return ReadResult::Success;
 		}
-		if (!readNextFrame()) {
-			return false;
-		}
-		if (_frameTime > ms) {
-			return true;
-		}
-		if (!readNextFrame()) {
-			return false;
+		auto readResult = readNextFrame();
+		if (readResult != ReadResult::Success || _frameTime > ms) {
+			return readResult;
 		}
+		readResult = readNextFrame();
 		if (_frameTime <= ms) {
 			_frameTime = ms + 5; // keep up
 		}
-		return true;
+		return readResult;
 	}
+
+	// sync by audio stream
+	auto correctMs = audioPlayer()->getVideoCorrectedTime(_playId, ms);
+
+	if (!_frameRead) {
+		auto readResult = readNextFrame();
+		if (readResult != ReadResult::Success) {
+			return readResult;
+		}
+	}
+	while (_frameTime <= correctMs) {
+		auto readResult = readNextFrame();
+		if (readResult != ReadResult::Success) {
+			return readResult;
+		}
+	}
+	_frameTimeCorrection = ms - correctMs;
+	return ReadResult::Success;
 }

 int64 FFMpegReaderImplementation::frameRealTime() const {
@@ -333,7 +334,7 @@ QString FFMpegReaderImplementation::logData() const {

 FFMpegReaderImplementation::~FFMpegReaderImplementation() {
 	if (_mode == Mode::Normal && _audioStreamId >= 0) {
-		audioPlayer()->stop(AudioMsgId::Type::Video);
+		audioPlayer()->stopFromVideo(_playId);
 	}
 	if (_frameRead) {
 		av_frame_unref(_frame);
@@ -37,11 +37,14 @@ class FFMpegReaderImplementation : public ReaderImplementation {
 public:
 	FFMpegReaderImplementation(FileLocation *location, QByteArray *data, uint64 playId);

-	bool readFramesTill(int64 ms) override;
+	ReadResult readFramesTill(int64 ms) override;
 	int64 frameRealTime() const override;
 	uint64 framePresentationTime() const override;
 	bool renderFrame(QImage &to, bool &hasAlpha, const QSize &size) override;
 	int64 durationMs() const override;
+	bool hasAudio() const override {
+		return (_audioStreamId >= 0);
+	}
 	bool start(Mode mode) override;

 	QString logData() const;
@@ -49,7 +52,7 @@ public:
 	~FFMpegReaderImplementation();

 private:
-	bool readNextFrame();
+	ReadResult readNextFrame();

 	enum class PacketResult {
 		Ok,
@@ -38,8 +38,13 @@ public:
 		Normal,
 	};

+	enum class ReadResult {
+		Success,
+		Error,
+		Eof,
+	};
 	// Read frames till current frame will have presentation time > ms.
-	virtual bool readFramesTill(int64 ms) = 0;
+	virtual ReadResult readFramesTill(int64 ms) = 0;

 	// Get current frame real and presentation time.
 	virtual int64 frameRealTime() const = 0;
@@ -49,6 +54,7 @@ public:
 	virtual bool renderFrame(QImage &to, bool &hasAlpha, const QSize &size) = 0;

 	virtual int64 durationMs() const = 0;
+	virtual bool hasAudio() const = 0;

 	virtual bool start(Mode mode) = 0;
 	virtual ~ReaderImplementation() {
@@ -28,23 +28,19 @@ namespace internal {
 QtGifReaderImplementation::QtGifReaderImplementation(FileLocation *location, QByteArray *data) : ReaderImplementation(location, data) {
 }

-bool QtGifReaderImplementation::readFramesTill(int64 ms) {
+ReaderImplementation::ReadResult QtGifReaderImplementation::readFramesTill(int64 ms) {
 	if (!_frame.isNull() && _frameTime > ms) {
-		return true;
+		return ReadResult::Success;
 	}
-	if (!readNextFrame()) {
-		return false;
-	}
-	if (_frameTime > ms) {
-		return true;
-	}
-	if (!readNextFrame()) {
-		return false;
+	auto readResult = readNextFrame();
+	if (readResult != ReadResult::Success || _frameTime > ms) {
+		return readResult;
 	}
+	readResult = readNextFrame();
 	if (_frameTime <= ms) {
 		_frameTime = ms + 5; // keep up
 	}
-	return true;
+	return readResult;
 }

 int64 QtGifReaderImplementation::frameRealTime() const {
@@ -55,20 +51,24 @@ uint64 QtGifReaderImplementation::framePresentationTime() const {
 	return static_cast<uint64>(qMax(_frameTime, 0LL));
 }

-bool QtGifReaderImplementation::readNextFrame() {
+ReaderImplementation::ReadResult QtGifReaderImplementation::readNextFrame() {
 	if (_reader) _frameDelay = _reader->nextImageDelay();
-	if (_framesLeft < 1 && !jumpToStart()) {
-		return false;
+	if (_framesLeft < 1) {
+		if (_mode == Mode::Normal) {
+			return ReadResult::Eof;
+		} else if (!jumpToStart()) {
+			return ReadResult::Error;
+		}
 	}

 	_frame = QImage(); // QGifHandler always reads first to internal QImage and returns it
 	if (!_reader->read(&_frame) || _frame.isNull()) {
-		return false;
+		return ReadResult::Error;
 	}
 	--_framesLeft;
 	_frameTime += _frameDelay;
 	_frameRealTime += _frameDelay;
-	return true;
+	return ReadResult::Success;
 }

 bool QtGifReaderImplementation::renderFrame(QImage &to, bool &hasAlpha, const QSize &size) {
@@ -101,6 +101,7 @@ int64 QtGifReaderImplementation::durationMs() const {

 bool QtGifReaderImplementation::start(Mode mode) {
 	if (mode == Mode::OnlyGifv) return false;
+	_mode = mode;
 	return jumpToStart();
 }

@@ -31,18 +31,23 @@ public:

 	QtGifReaderImplementation(FileLocation *location, QByteArray *data);

-	bool readFramesTill(int64 ms) override;
+	ReadResult readFramesTill(int64 ms) override;
 	int64 frameRealTime() const override;
 	uint64 framePresentationTime() const override;
 	bool renderFrame(QImage &to, bool &hasAlpha, const QSize &size) override;
 	int64 durationMs() const override;
+	bool hasAudio() const override {
+		return false;
+	}
 	bool start(Mode mode) override;

 	~QtGifReaderImplementation();

 private:
 	bool jumpToStart();
-	bool readNextFrame();
+	ReadResult readNextFrame();

+	Mode _mode = Mode::Normal;
+
 	QImageReader *_reader = nullptr;
 	int _framesLeft = 0;
@@ -297,6 +297,10 @@ void Reader::error() {
 	_state = State::Error;
 }

+void Reader::finished() {
+	_state = State::Finished;
+}
+
 Reader::~Reader() {
 	stop();
 }
@@ -306,11 +310,13 @@ public:
 	ReaderPrivate(Reader *reader, const FileLocation &location, const QByteArray &data) : _interface(reader)
 	, _mode(reader->mode())
 	, _playId(reader->playId())
-	, _data(data)
-	, _location(_data.isEmpty() ? new FileLocation(location) : 0) {
-		if (_data.isEmpty() && !_location->accessEnable()) {
-			error();
-			return;
+	, _data(data) {
+		if (_data.isEmpty()) {
+			_location = std_::make_unique<FileLocation>(location);
+			if (!_location->accessEnable()) {
+				error();
+				return;
+			}
 		}
 		_accessed = true;
 	}
@@ -320,7 +326,8 @@ public:
 			return error();
 		}
 		if (frame() && frame()->original.isNull()) {
-			if (!_implementation->readFramesTill(-1)) { // Read the first frame.
+			auto readResult = _implementation->readFramesTill(-1);
+			if (readResult != internal::ReaderImplementation::ReadResult::Success) { // Read the first frame.
 				return error();
 			}
 			if (!_implementation->renderFrame(frame()->original, frame()->alpha, QSize())) {
@@ -329,13 +336,18 @@ public:
 			_width = frame()->original.width();
 			_height = frame()->original.height();
 			_durationMs = _implementation->durationMs();
+			_hasAudio = _implementation->hasAudio();
 			return ProcessResult::Started;
 		}
 		return ProcessResult::Wait;
 	}

 	ProcessResult process(uint64 ms) { // -1 - do nothing, 0 - update, 1 - reinit
-		if (_state == State::Error) return ProcessResult::Error;
+		if (_state == State::Error) {
+			return ProcessResult::Error;
+		} else if (_state == State::Finished) {
+			return ProcessResult::Finished;
+		}

 		if (!_request.valid()) {
 			return start(ms);
@@ -348,7 +360,12 @@ public:
 	}

 	ProcessResult finishProcess(uint64 ms) {
-		if (!_implementation->readFramesTill(ms - _animationStarted)) {
+		auto readResult = _implementation->readFramesTill(ms - _animationStarted);
+		if (readResult == internal::ReaderImplementation::ReadResult::Eof) {
+			stop();
+			_state = State::Finished;
+			return ProcessResult::Finished;
+		} else if (readResult == internal::ReaderImplementation::ReadResult::Error) {
 			return error();
 		}
 		_nextFramePositionMs = _implementation->frameRealTime();
@@ -384,7 +401,7 @@ public:
 			}
 		}

-		_implementation = std_::make_unique<internal::FFMpegReaderImplementation>(_location, &_data, _playId);
+		_implementation = std_::make_unique<internal::FFMpegReaderImplementation>(_location.get(), &_data, _playId);
 		// _implementation = new QtGifReaderImplementation(_location, &_data);

 		auto implementationMode = [this]() {
@@ -414,15 +431,13 @@ public:
 			if (_accessed) {
 				_location->accessDisable();
 			}
-			delete _location;
-			_location = 0;
+			_location = nullptr;
 		}
 		_accessed = false;
 	}

 	~ReaderPrivate() {
 		stop();
-		deleteAndMark(_location);
 		_data.clear();
 	}

@@ -433,7 +448,7 @@ private:
 	uint64 _playId;

 	QByteArray _data;
-	FileLocation *_location;
+	std_::unique_ptr<FileLocation> _location;
 	bool _accessed = false;

 	QBuffer _buffer;
@@ -458,6 +473,7 @@ private:
 	int _width = 0;
 	int _height = 0;

+	bool _hasAudio = false;
 	int64 _durationMs = 0;
 	uint64 _animationStarted = 0;
 	uint64 _nextFrameWhen = 0;
@@ -547,6 +563,12 @@ bool Manager::handleProcessResult(ReaderPrivate *reader, ProcessResult result, u
 			if (i != _readerPointers.cend()) _readerPointers.erase(i);
 		}
 		return false;
+	} else if (result == ProcessResult::Finished) {
+		if (it != _readerPointers.cend()) {
+			it.key()->finished();
+			emit callback(it.key(), it.key()->threadIndex(), NotificationReinit);
+		}
+		return false;
 	}
 	if (it == _readerPointers.cend()) {
 		return false;
@@ -555,6 +577,7 @@ bool Manager::handleProcessResult(ReaderPrivate *reader, ProcessResult result, u
 	if (result == ProcessResult::Started) {
 		_loadLevel.fetchAndAddRelaxed(reader->_width * reader->_height - AverageGifSize);
 		it.key()->_durationMs = reader->_durationMs;
+		it.key()->_hasAudio = reader->_hasAudio;
 	}
 	// See if we need to pause GIF because it is not displayed right now.
 	if (!reader->_paused && reader->_mode == Reader::Mode::Gif && result == ProcessResult::Repaint) {
@@ -719,7 +742,9 @@ MTPDocumentAttribute readAttributes(const QString &fname, const QByteArray &data
 	auto reader = std_::make_unique<internal::FFMpegReaderImplementation>(&localloc, &localdata, playId);
 	if (reader->start(internal::ReaderImplementation::Mode::OnlyGifv)) {
 		bool hasAlpha = false;
-		if (reader->readFramesTill(-1) && reader->renderFrame(cover, hasAlpha, QSize())) {
+		auto readResult = reader->readFramesTill(-1);
+		auto readFrame = (readResult == internal::ReaderImplementation::ReadResult::Success);
+		if (readFrame && reader->renderFrame(cover, hasAlpha, QSize())) {
 			if (cover.width() > 0 && cover.height() > 0 && cover.width() < cover.height() * 10 && cover.height() < cover.width() * 10) {
 				if (hasAlpha) {
 					QImage cacheForResize;
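With the reader changes above, end-of-stream travels upward: ReadResult::Eof becomes ProcessResult::Finished in ReaderPrivate::finishProcess(), the Manager then calls Reader::finished() and emits a reinit notification, and MediaView reacts to the Finished state. A condensed, illustrative mapping (plain enums echoing the diff; ClipState and onReadResult() are invented helpers, not tdesktop code):

```cpp
// Condensed illustration of how Eof is mapped to a Finished state.
// The enum values echo the diff; ClipState and onReadResult() are invented helpers.
enum class ReadResult { Success, Error, Eof };
enum class ProcessResult { Error, Started, Finished, Repaint };
enum class State { Reading, Error, Finished };

struct ClipState {
	State state = State::Reading;

	ProcessResult onReadResult(ReadResult r) {
		if (r == ReadResult::Eof) {
			state = State::Finished;        // Reader::finished() in the diff
			return ProcessResult::Finished; // Manager then emits a reinit notification
		} else if (r == ReadResult::Error) {
			state = State::Error;
			return ProcessResult::Error;
		}
		return ProcessResult::Repaint; // stand-in for the normal per-frame handling
	}
};
```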
@@ -28,6 +28,7 @@ namespace Clip {
 enum class State {
 	Reading,
 	Error,
+	Finished,
 };

 struct FrameRequest {
@@ -107,6 +108,7 @@ public:

 	void stop();
 	void error();
+	void finished();

 	Mode mode() const {
 		return _mode;
@@ -165,6 +167,7 @@ private:
 enum class ProcessResult {
 	Error,
 	Started,
+	Finished,
 	Paused,
 	Repaint,
 	CopyFrame,
@@ -71,7 +71,7 @@ void Controller::showAnimated() {

 void Controller::hideAnimated() {
 	startFading([this]() {
-		_fadeAnimation->fadeOut(st::mvShowDuration);
+		_fadeAnimation->fadeOut(st::mvHideDuration);
 	});
 }

@@ -35,7 +35,8 @@ Playback::Playback(QWidget *parent) : TWidget(parent)
 void Playback::updateState(const AudioPlaybackState &playbackState) {
 	qint64 position = 0, duration = playbackState.duration;

-	if (!(playbackState.state & AudioPlayerStoppedMask) && playbackState.state != AudioPlayerFinishing) {
+	_playing = !(playbackState.state & AudioPlayerStoppedMask);
+	if (_playing && playbackState.state != AudioPlayerFinishing) {
 		position = playbackState.position;
 	} else if (playbackState.state == AudioPlayerStoppedAtEnd) {
 		position = playbackState.duration;
@@ -50,7 +51,7 @@ void Playback::updateState(const AudioPlaybackState &playbackState) {
 		progress = duration ? snap(float64(position) / duration, 0., 1.) : 0.;
 	}
 	if (duration != _duration || position != _position) {
-		if (duration && _duration) {
+		if (duration && _duration && playbackState.state != AudioPlayerStopped) {
 			a_progress.start(progress);
 			_a_progress.start();
 		} else {
@@ -65,6 +65,7 @@ private:
 	float64 _downProgress = 0.;

 	float64 _fadeOpacity = 1.;
+	bool _playing = false;

 };

@@ -349,7 +349,7 @@ void MediaView::updateControls() {
 		_dateNav = myrtlrect(st::mvTextLeft, height() - st::mvTextTop, st::mvFont->width(_dateText), st::mvFont->height);
 	}
 	updateHeader();
-	if (_photo || (_history && (_overview == OverviewPhotos || _overview == OverviewChatPhotos || _overview == OverviewFiles))) {
+	if (_photo || (_history && (_overview == OverviewPhotos || _overview == OverviewChatPhotos || _overview == OverviewFiles || _overview == OverviewVideos))) {
 		_leftNavVisible = (_index > 0) || (_index == 0 && (
 			(!_msgmigrated && _history && _history->overview[_overview].size() < _history->overviewCount(_overview)) ||
 			(_msgmigrated && _migrated && _migrated->overview[_overview].size() < _migrated->overviewCount(_overview)) ||
@@ -554,7 +554,7 @@ void MediaView::close() {
 }

 void MediaView::activateControls() {
-	if (!_menu && !_mousePressed) {
+	if (!_menu && !_mousePressed && (!_clipController || !_clipController->geometry().contains(_lastMouseMovePos))) {
 		_controlsHideTimer.start(int(st::mvWaitHide));
 	}
 	if (_controlsState == ControlsHiding || _controlsState == ControlsHidden) {
@@ -564,19 +564,19 @@ void MediaView::activateControls() {
 		if (!_a_state.animating()) _a_state.start();
 	}
 	if (_clipController) {
-		_clipController->showAnimated();
+		// _clipController->showAnimated();
 	}
 }

 void MediaView::onHideControls(bool force) {
-	if (!force && (!_dropdown.isHidden() || _menu || _mousePressed)) return;
+	if (!force && (!_dropdown.isHidden() || _menu || _mousePressed || (_clipController && _clipController->geometry().contains(_lastMouseMovePos)))) return;
 	if (_controlsState == ControlsHiding || _controlsState == ControlsHidden) return;
 	_controlsState = ControlsHiding;
 	_controlsAnimStarted = getms();
 	a_cOpacity.start(0);
 	if (!_a_state.animating()) _a_state.start();
 	if (_clipController) {
-		_clipController->hideAnimated();
+		// _clipController->hideAnimated();
 	}
 }

@@ -706,12 +706,16 @@ void MediaView::clipCallback(Media::Clip::Notification notification) {
 		if (auto item = App::histItemById(_msgmigrated ? 0 : _channel, _msgid)) {
 			if (_gif->state() == State::Error) {
 				_current = QPixmap();
-			}
-			_videoIsSilent = _doc->isVideo() && !_gif->hasAudio();
-			if (_videoIsSilent) {
+			} else if (_gif->state() == State::Finished) {
+				_videoPositionMs = _videoDurationMs;
+				updateSilentVideoPlaybackState();
+			} else {
+				_videoIsSilent = _doc->isVideo() && !_gif->hasAudio();
 				_videoDurationMs = _gif->getDurationMs();
 				_videoPositionMs = _gif->getPositionMs();
-				updateSilentVideoPlaybackState();
+				if (_videoIsSilent) {
+					updateSilentVideoPlaybackState();
+				}
 			}
 			displayDocument(_doc, item);
 		} else {
@@ -721,8 +725,8 @@ void MediaView::clipCallback(Media::Clip::Notification notification) {

 	case NotificationRepaint: {
 		if (!_gif->currentDisplayed()) {
-			_videoPositionMs = _gif->getPositionMs();
 			if (_videoIsSilent) {
+				_videoPositionMs = _gif->getPositionMs();
 				updateSilentVideoPlaybackState();
 			}
 			update(_x, _y, _w, _h);
@@ -1016,7 +1020,7 @@ void MediaView::showDocument(DocumentData *doc, HistoryItem *context) {
 	_canForward = _msgid > 0;
 	_canDelete = context ? context->canDelete() : false;
 	if (_history) {
-		_overview = OverviewFiles;
+		_overview = doc->isVideo() ? OverviewVideos : OverviewFiles;
 		findCurrent();
 	}
 	displayDocument(doc, context);
@@ -1287,7 +1291,29 @@ void MediaView::setClipControllerGeometry() {
 }

 void MediaView::onVideoPlay() {
+	if (auto item = App::histItemById(_msgmigrated ? 0 : _channel, _msgid)) {
+		if (_gif->state() == Media::Clip::State::Error) {
+			displayDocument(_doc, item);
+		} else if (_gif->state() == Media::Clip::State::Finished) {
+			_current = _gif->current(_gif->width(), _gif->height(), _gif->width(), _gif->height(), getms());
+			_gif = std_::make_unique<Media::Clip::Reader>(_doc->location(), _doc->data(), func(this, &MediaView::clipCallback), Media::Clip::Reader::Mode::Video);

+			// Correct values will be set when gif gets inited.
+			_videoIsSilent = false;
+			_videoPositionMs = 0;
+
+			AudioPlaybackState state;
+			state.state = AudioPlayerStopped;
+			state.position = _videoPositionMs;
+			state.duration = _videoDurationMs;
+			state.frequency = _videoFrequencyMs;
+			updateVideoPlaybackState(state);
+		} else {
+			//
+		}
+	} else {
+		stopGif();
+	}
 }

 void MediaView::onVideoPause() {
@@ -1329,6 +1355,9 @@ void MediaView::onVideoPlayProgress(const AudioMsgId &audioId) {
 void MediaView::updateVideoPlaybackState(const AudioPlaybackState &state) {
 	if (state.frequency) {
 		_clipController->updatePlayback(state);
+	} else { // Audio has stopped already.
+		_videoIsSilent = true;
+		updateSilentVideoPlaybackState();
 	}
 }

@@ -1660,6 +1689,8 @@ void MediaView::keyPressEvent(QKeyEvent *e) {
 	} else if (e->key() == Qt::Key_Enter || e->key() == Qt::Key_Return || e->key() == Qt::Key_Space) {
 		if (_doc && !_doc->loading() && !fileShown()) {
 			onDocClick();
+		} else if (_doc->isVideo()) {
+			onVideoPlay();
 		}
 	} else if (e->key() == Qt::Key_Left) {
 		moveToNext(-1);
@@ -1777,7 +1808,7 @@ bool MediaView::moveToNext(int32 delta) {
 		}
 		return false;
 	}
-	if ((_history && _overview != OverviewPhotos && _overview != OverviewChatPhotos && _overview != OverviewFiles) || (_overview == OverviewCount && !_user)) {
+	if ((_history && _overview != OverviewPhotos && _overview != OverviewChatPhotos && _overview != OverviewFiles && _overview != OverviewVideos) || (_overview == OverviewCount && !_user)) {
 		return false;
 	}
 	if (_msgmigrated && !_history->overviewLoaded(_overview)) {
@@ -1807,6 +1838,7 @@ bool MediaView::moveToNext(int32 delta) {
 		switch (media->type()) {
 		case MediaTypePhoto: displayPhoto(static_cast<HistoryPhoto*>(item->getMedia())->photo(), item); preloadData(delta); break;
 		case MediaTypeFile:
+		case MediaTypeVideo:
 		case MediaTypeGif:
 		case MediaTypeSticker: displayDocument(media->getDocument(), item); preloadData(delta); break;
 		}
@@ -1870,6 +1902,7 @@ void MediaView::preloadData(int32 delta) {
 			switch (media->type()) {
 			case MediaTypePhoto: static_cast<HistoryPhoto*>(media)->photo()->forget(); break;
 			case MediaTypeFile:
+			case MediaTypeVideo:
 			case MediaTypeGif:
 			case MediaTypeSticker: media->getDocument()->forget(); break;
 			}
@@ -1895,6 +1928,7 @@ void MediaView::preloadData(int32 delta) {
 			switch (media->type()) {
 			case MediaTypePhoto: static_cast<HistoryPhoto*>(media)->photo()->download(); break;
 			case MediaTypeFile:
+			case MediaTypeVideo:
 			case MediaTypeGif: {
 				DocumentData *doc = media->getDocument();
 				doc->thumb->load();