Video playback in MediaView: seeking fixed, fullscreen mode improved.

Shortcut commands now return bool, so the caller knows whether the command was executed.
John Preston 2016-07-14 14:20:46 +03:00
parent cb0c99acc8
commit 505e5a69a6
15 changed files with 367 additions and 170 deletions

View File

@ -3616,24 +3616,33 @@ void HistoryWidget::notify_clipStopperHidden(ClipStopperType type) {
if (_list) _list->update();
}
void HistoryWidget::cmd_search() {
if (!inFocusChain() || !_peer) return;
bool HistoryWidget::cmd_search() {
if (!inFocusChain() || !_peer) return false;
App::main()->searchInPeer(_peer);
return true;
}
void HistoryWidget::cmd_next_chat() {
bool HistoryWidget::cmd_next_chat() {
PeerData *p = 0;
MsgId m = 0;
App::main()->peerAfter(_peer, qMax(_showAtMsgId, 0), p, m);
if (p) Ui::showPeerHistory(p, m);
if (p) {
Ui::showPeerHistory(p, m);
return true;
}
return false;
}
void HistoryWidget::cmd_previous_chat() {
bool HistoryWidget::cmd_previous_chat() {
PeerData *p = 0;
MsgId m = 0;
App::main()->peerBefore(_peer, qMax(_showAtMsgId, 0), p, m);
if (p) Ui::showPeerHistory(p, m);
if (p) {
Ui::showPeerHistory(p, m);
return true;
}
return false;
}
void HistoryWidget::stickersGot(const MTPmessages_AllStickers &stickers) {

View File

@ -726,9 +726,9 @@ public:
void notify_clipStopperHidden(ClipStopperType type);
void notify_handlePendingHistoryUpdate();
void cmd_search();
void cmd_next_chat();
void cmd_previous_chat();
bool cmd_search();
bool cmd_next_chat();
bool cmd_previous_chat();
~HistoryWidget();

View File

@ -515,16 +515,19 @@ void MainWidget::notify_handlePendingHistoryUpdate() {
_history->notify_handlePendingHistoryUpdate();
}
void MainWidget::cmd_search() {
_history->cmd_search();
bool MainWidget::cmd_search() {
if (Ui::isLayerShown() || Ui::isMediaViewShown()) return false;
return _history->cmd_search();
}
void MainWidget::cmd_next_chat() {
_history->cmd_next_chat();
bool MainWidget::cmd_next_chat() {
if (Ui::isLayerShown() || Ui::isMediaViewShown()) return false;
return _history->cmd_next_chat();
}
void MainWidget::cmd_previous_chat() {
_history->cmd_previous_chat();
bool MainWidget::cmd_previous_chat() {
if (Ui::isLayerShown() || Ui::isMediaViewShown()) return false;
return _history->cmd_previous_chat();
}
void MainWidget::noHider(HistoryHider *destroyed) {
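
For orientation, a minimal sketch of the pattern these HistoryWidget/MainWidget hunks introduce, using invented Overlays/ChatWidget/MainWindow names rather than the real Telegram Desktop classes: every cmd_* handler reports whether it actually did something, and the top-level wrapper refuses the command while a layer or the media viewer covers the chat, so a shortcut that lands nowhere is visibly unhandled.

#include <iostream>

// Hypothetical stand-ins for Ui::isLayerShown() / Ui::isMediaViewShown().
struct Overlays {
    bool layerShown = false;
    bool mediaViewShown = false;
};

struct ChatWidget {
    bool hasPeer = false;
    bool cmdSearch() { // like HistoryWidget::cmd_search(): false if there is nothing to search in
        if (!hasPeer) return false;
        std::cout << "search opened\n";
        return true;
    }
};

struct MainWindow {
    Overlays overlays;
    ChatWidget chat;
    bool cmdSearch() { // like MainWidget::cmd_search(): refuse while an overlay is shown
        if (overlays.layerShown || overlays.mediaViewShown) return false;
        return chat.cmdSearch();
    }
};

int main() {
    MainWindow w;
    w.chat.hasPeer = true;
    std::cout << (w.cmdSearch() ? "handled\n" : "ignored\n"); // handled
    w.overlays.mediaViewShown = true;
    std::cout << (w.cmdSearch() ? "handled\n" : "ignored\n"); // ignored
}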

View File

@ -421,9 +421,9 @@ public:
void notify_historyMuteUpdated(History *history);
void notify_handlePendingHistoryUpdate();
void cmd_search();
void cmd_next_chat();
void cmd_previous_chat();
bool cmd_search();
bool cmd_next_chat();
bool cmd_previous_chat();
~MainWidget();

View File

@ -489,11 +489,24 @@ void AudioPlayer::initFromVideo(uint64 videoPlayId, std_::unique_ptr<VideoSoundD
{
QMutexLocker lock(&playerMutex);
// Pause current song.
auto currentSong = dataForType(AudioMsgId::Type::Song);
float64 suppressGain = suppressSongGain * Global::SongVolume();
switch (currentSong->playbackState.state) {
case AudioPlayerStarting:
case AudioPlayerResuming:
case AudioPlayerPlaying:
currentSong->playbackState.state = AudioPlayerPausing;
updateCurrentStarted(AudioMsgId::Type::Song);
break;
case AudioPlayerFinishing: currentSong->playbackState.state = AudioPlayerPausing; break;
}
auto type = AudioMsgId::Type::Video;
auto current = dataForType(type);
t_assert(current != nullptr);
fadedStop(AudioMsgId::Type::Song);
if (current->audio) {
fadedStop(type);
stopped = current->audio;
@ -625,8 +638,8 @@ void AudioPlayer::feedFromVideo(VideoSoundPart &&part) {
_loader->feedFromVideo(std_::move(part));
}
int64 AudioPlayer::getVideoCorrectedTime(uint64 playId, uint64 systemMs) {
int64 result = systemMs;
int64 AudioPlayer::getVideoCorrectedTime(uint64 playId, int64 frameMs, uint64 systemMs) {
int64 result = frameMs;
QMutexLocker videoLock(&_lastVideoMutex);
if (_lastVideoPlayId == playId && _lastVideoPlaybackWhen > 0) {
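
Two things change in audio.cpp: initFromVideo() now pauses whatever song is currently playing before handing the output to the video's audio track, and getVideoCorrectedTime() uses the frame's own timestamp (frameMs) as its baseline instead of the system clock. The pausing switch reduces to a small state transition; the sketch below uses a simplified enum in place of the AudioPlayer states and, unlike the real code, does not distinguish the actively playing cases (which also call updateCurrentStarted()).

#include <cassert>

enum class State { Stopped, Starting, Resuming, Playing, Finishing, Pausing, Paused };

// Mirrors the switch added to AudioPlayer::initFromVideo(): any state that is audibly
// producing song output is moved to Pausing so the video sound can take over cleanly.
State pauseSongForVideo(State song) {
    switch (song) {
    case State::Starting:
    case State::Resuming:
    case State::Playing:
    case State::Finishing:
        return State::Pausing;
    default:
        return song; // already stopped or paused: nothing to do
    }
}

int main() {
    assert(pauseSongForVideo(State::Playing) == State::Pausing);
    assert(pauseSongForVideo(State::Paused) == State::Paused); // unchanged
}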

View File

@ -69,7 +69,7 @@ public:
// Video player audio stream interface.
void initFromVideo(uint64 videoPlayId, std_::unique_ptr<VideoSoundData> &&data, int64 position);
void feedFromVideo(VideoSoundPart &&part);
int64 getVideoCorrectedTime(uint64 playId, uint64 systemMs);
int64 getVideoCorrectedTime(uint64 playId, int64 frameMs, uint64 systemMs);
void videoSoundProgress(const AudioMsgId &audio);
AudioPlaybackState currentVideoState(uint64 videoPlayId);
void stopFromVideo(uint64 videoPlayId);

View File

@ -44,7 +44,7 @@ ReaderImplementation::ReadResult FFMpegReaderImplementation::readNextFrame() {
while (true) {
while (_packetQueue.isEmpty()) {
auto packetResult = readPacket();
auto packetResult = readAndProcessPacket();
if (packetResult == PacketResult::Error) {
return ReadResult::Error;
} else if (packetResult == PacketResult::EndOfFile) {
@ -135,24 +135,24 @@ ReaderImplementation::ReadResult FFMpegReaderImplementation::readNextFrame() {
return ReadResult::Error;
}
ReaderImplementation::ReadResult FFMpegReaderImplementation::readFramesTill(int64 ms) {
ReaderImplementation::ReadResult FFMpegReaderImplementation::readFramesTill(int64 frameMs, uint64 systemMs) {
if (_audioStreamId < 0) { // just keep up
if (_frameRead && _frameTime > ms) {
if (_frameRead && _frameTime > frameMs) {
return ReadResult::Success;
}
auto readResult = readNextFrame();
if (readResult != ReadResult::Success || _frameTime > ms) {
if (readResult != ReadResult::Success || _frameTime > frameMs) {
return readResult;
}
readResult = readNextFrame();
if (_frameTime <= ms) {
_frameTime = ms + 5; // keep up
if (_frameTime <= frameMs) {
_frameTime = frameMs + 5; // keep up
}
return readResult;
}
// sync by audio stream
auto correctMs = audioPlayer()->getVideoCorrectedTime(_playId, ms);
auto correctMs = (frameMs >= 0) ? audioPlayer()->getVideoCorrectedTime(_playId, frameMs, systemMs) : frameMs;
if (!_frameRead) {
auto readResult = readNextFrame();
@ -166,7 +166,9 @@ ReaderImplementation::ReadResult FFMpegReaderImplementation::readFramesTill(int6
return readResult;
}
}
_frameTimeCorrection = ms - correctMs;
if (frameMs >= 0) {
_frameTimeCorrection = frameMs - correctMs;
}
return ReadResult::Success;
}
@ -235,7 +237,7 @@ bool FFMpegReaderImplementation::renderFrame(QImage &to, bool &hasAlpha, const Q
// Read some future packets for audio stream.
if (_audioStreamId >= 0) {
while (_frameMs + 5000 > _lastReadPacketMs) {
auto packetResult = readPacket();
auto packetResult = readAndProcessPacket();
if (packetResult != PacketResult::Ok) {
break;
}
@ -246,7 +248,7 @@ bool FFMpegReaderImplementation::renderFrame(QImage &to, bool &hasAlpha, const Q
return true;
}
bool FFMpegReaderImplementation::start(Mode mode, int64 positionMs) {
bool FFMpegReaderImplementation::start(Mode mode, int64 &positionMs) {
_mode = mode;
initDevice();
@ -300,7 +302,7 @@ bool FFMpegReaderImplementation::start(Mode mode, int64 positionMs) {
if (_codecContext->codec_id != AV_CODEC_ID_H264) {
return false;
}
} else if (_mode == Mode::Silent || !audioPlayer()) {
} else if (_mode == Mode::Silent || !audioPlayer() || !_playId) {
_audioStreamId = -1;
}
av_opt_set_int(_codecContext, "refcounted_frames", 1, 0);
@ -337,20 +339,30 @@ bool FFMpegReaderImplementation::start(Mode mode, int64 positionMs) {
}
}
if (positionMs) {
if (positionMs > 0) {
int64 ts = (positionMs * _fmtContext->streams[_streamId]->time_base.den) / (1000LL * _fmtContext->streams[_streamId]->time_base.num);
if (av_seek_frame(_fmtContext, _streamId, ts, AVSEEK_FLAG_ANY) < 0) {
if (av_seek_frame(_fmtContext, _streamId, ts, 0) < 0) {
positionMs = 0;
if (av_seek_frame(_fmtContext, _streamId, ts, 0) < 0) {
if (av_seek_frame(_fmtContext, _streamId, ts, AVSEEK_FLAG_BACKWARD) < 0) {
return false;
}
}
}
AVPacket packet;
auto readResult = readPacket(&packet);
if (readResult == PacketResult::Ok && positionMs > 0) {
positionMs = countPacketMs(&packet);
}
if (_audioStreamId >= 0) {
int64 position = (positionMs * soundData->frequency) / 1000LL;
audioPlayer()->initFromVideo(_playId, std_::move(soundData), position);
}
if (readResult == PacketResult::Ok) {
processPacket(&packet);
}
return true;
}
@ -384,14 +396,13 @@ FFMpegReaderImplementation::~FFMpegReaderImplementation() {
av_frame_free(&_frame);
}
FFMpegReaderImplementation::PacketResult FFMpegReaderImplementation::readPacket() {
AVPacket packet;
av_init_packet(&packet);
packet.data = nullptr;
packet.size = 0;
FFMpegReaderImplementation::PacketResult FFMpegReaderImplementation::readPacket(AVPacket *packet) {
av_init_packet(packet);
packet->data = nullptr;
packet->size = 0;
int res = 0;
if ((res = av_read_frame(_fmtContext, &packet)) < 0) {
if ((res = av_read_frame(_fmtContext, packet)) < 0) {
if (res == AVERROR_EOF) {
if (_audioStreamId >= 0) {
// queue terminating packet to audio player
@ -406,27 +417,42 @@ FFMpegReaderImplementation::PacketResult FFMpegReaderImplementation::readPacket(
LOG(("Gif Error: Unable to av_read_frame() %1, error %2, %3").arg(logData()).arg(res).arg(av_make_error_string(err, sizeof(err), res)));
return PacketResult::Error;
}
return PacketResult::Ok;
}
bool videoPacket = (packet.stream_index == _streamId);
bool audioPacket = (_audioStreamId >= 0 && packet.stream_index == _audioStreamId);
void FFMpegReaderImplementation::processPacket(AVPacket *packet) {
bool videoPacket = (packet->stream_index == _streamId);
bool audioPacket = (_audioStreamId >= 0 && packet->stream_index == _audioStreamId);
if (audioPacket || videoPacket) {
int64 packetPts = (packet.pts == AV_NOPTS_VALUE) ? packet.dts : packet.pts;
int64 packetMs = (packetPts * 1000LL * _fmtContext->streams[packet.stream_index]->time_base.num) / _fmtContext->streams[packet.stream_index]->time_base.den;
_lastReadPacketMs = packetMs;
_lastReadPacketMs = countPacketMs(packet);
if (videoPacket) {
_packetQueue.enqueue(packet);
_packetQueue.enqueue(*packet);
} else if (audioPacket) {
// queue packet to audio player
VideoSoundPart part;
part.packet = &packet;
part.packet = packet;
part.videoPlayId = _playId;
audioPlayer()->feedFromVideo(std_::move(part));
}
} else {
av_packet_unref(&packet);
av_packet_unref(packet);
}
return PacketResult::Ok;
}
int64 FFMpegReaderImplementation::countPacketMs(AVPacket *packet) const {
int64 packetPts = (packet->pts == AV_NOPTS_VALUE) ? packet->dts : packet->pts;
int64 packetMs = (packetPts * 1000LL * _fmtContext->streams[packet->stream_index]->time_base.num) / _fmtContext->streams[packet->stream_index]->time_base.den;
return packetMs;
}
FFMpegReaderImplementation::PacketResult FFMpegReaderImplementation::readAndProcessPacket() {
AVPacket packet;
auto result = readPacket(&packet);
if (result == PacketResult::Ok) {
processPacket(&packet);
}
return result;
}
void FFMpegReaderImplementation::startPacket() {
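
The seek change in start() is the core of the fix: seek with flag 0 first and fall back to AVSEEK_FLAG_BACKWARD (instead of AVSEEK_FLAG_ANY, which may land on a non-key frame), then read one packet to find out where the demuxer actually landed and report that back through the now by-reference positionMs, so the audio stream and the controller start from the same point. A condensed, self-contained sketch of that sequence (error handling and the audio hand-off trimmed; fmt and streamId are assumed to come from an already opened AVFormatContext):

extern "C" {
#include <libavformat/avformat.h>
}
#include <cstdint>

// Sketch of the corrected seek in FFMpegReaderImplementation::start().
bool seekAndCorrect(AVFormatContext *fmt, int streamId, int64_t &positionMs, AVPacket &firstPacket) {
    if (positionMs > 0) {
        const AVRational tb = fmt->streams[streamId]->time_base;
        const int64_t ts = (positionMs * tb.den) / (1000LL * tb.num);
        if (av_seek_frame(fmt, streamId, ts, 0) < 0
            && av_seek_frame(fmt, streamId, ts, AVSEEK_FLAG_BACKWARD) < 0) {
            return false; // could not seek at all
        }
    }
    av_init_packet(&firstPacket);
    firstPacket.data = nullptr;
    firstPacket.size = 0;
    if (av_read_frame(fmt, &firstPacket) >= 0 && positionMs > 0) {
        // countPacketMs(): pts (or dts) converted through the stream time base.
        const AVRational tb = fmt->streams[firstPacket.stream_index]->time_base;
        const int64_t pts = (firstPacket.pts == AV_NOPTS_VALUE) ? firstPacket.dts : firstPacket.pts;
        positionMs = (pts * 1000LL * tb.num) / tb.den; // where the seek really landed
    }
    return true;
    // The caller initializes the audio stream from the corrected positionMs and only then
    // processes firstPacket, matching the readPacket()/processPacket() split introduced above.
}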

View File

@ -29,6 +29,8 @@ extern "C" {
#include "media/media_clip_implementation.h"
struct VideoSoundData;
namespace Media {
namespace Clip {
namespace internal {
@ -37,7 +39,7 @@ class FFMpegReaderImplementation : public ReaderImplementation {
public:
FFMpegReaderImplementation(FileLocation *location, QByteArray *data, uint64 playId);
ReadResult readFramesTill(int64 ms) override;
ReadResult readFramesTill(int64 frameMs, uint64 systemMs) override;
int64 frameRealTime() const override;
uint64 framePresentationTime() const override;
@ -51,7 +53,7 @@ public:
void pauseAudio() override;
void resumeAudio() override;
bool start(Mode mode, int64 positionMs) override;
bool start(Mode mode, int64 &positionMs) override;
QString logData() const;
@ -65,7 +67,11 @@ private:
EndOfFile,
Error,
};
PacketResult readPacket();
PacketResult readPacket(AVPacket *packet);
void processPacket(AVPacket *packet);
int64 countPacketMs(AVPacket *packet) const;
PacketResult readAndProcessPacket();
void startPacket();
void finishPacket();
void clearPacketQueue();

View File

@ -43,8 +43,8 @@ public:
Error,
Eof,
};
// Read frames till current frame will have presentation time > ms.
virtual ReadResult readFramesTill(int64 ms) = 0;
// Read frames till current frame will have presentation time > frameMs, systemMs = getms().
virtual ReadResult readFramesTill(int64 frameMs, uint64 systemMs) = 0;
// Get current frame real and presentation time.
virtual int64 frameRealTime() const = 0;
@ -58,7 +58,7 @@ public:
virtual void pauseAudio() = 0;
virtual void resumeAudio() = 0;
virtual bool start(Mode mode, int64 positionMs) = 0;
virtual bool start(Mode mode, int64 &positionMs) = 0;
virtual ~ReaderImplementation() {
}
int64 dataSize() const {

View File

@ -28,17 +28,17 @@ namespace internal {
QtGifReaderImplementation::QtGifReaderImplementation(FileLocation *location, QByteArray *data) : ReaderImplementation(location, data) {
}
ReaderImplementation::ReadResult QtGifReaderImplementation::readFramesTill(int64 ms) {
if (!_frame.isNull() && _frameTime > ms) {
ReaderImplementation::ReadResult QtGifReaderImplementation::readFramesTill(int64 frameMs, uint64 systemMs) {
if (!_frame.isNull() && _frameTime > frameMs) {
return ReadResult::Success;
}
auto readResult = readNextFrame();
if (readResult != ReadResult::Success || _frameTime > ms) {
if (readResult != ReadResult::Success || _frameTime > frameMs) {
return readResult;
}
readResult = readNextFrame();
if (_frameTime <= ms) {
_frameTime = ms + 5; // keep up
if (_frameTime <= frameMs) {
_frameTime = frameMs + 5; // keep up
}
return readResult;
}
@ -99,7 +99,7 @@ int64 QtGifReaderImplementation::durationMs() const {
return 0; // not supported
}
bool QtGifReaderImplementation::start(Mode mode, int64 positionMs) {
bool QtGifReaderImplementation::start(Mode mode, int64 &positionMs) {
if (mode == Mode::OnlyGifv) return false;
_mode = mode;
return jumpToStart();

View File

@ -31,7 +31,7 @@ public:
QtGifReaderImplementation(FileLocation *location, QByteArray *data);
ReadResult readFramesTill(int64 ms) override;
ReadResult readFramesTill(int64 frameMs, uint64 systemMs) override;
int64 frameRealTime() const override;
uint64 framePresentationTime() const override;
@ -47,7 +47,7 @@ public:
void resumeAudio() override {
}
bool start(Mode mode, int64 positionMs) override;
bool start(Mode mode, int64 &positionMs) override;
~QtGifReaderImplementation();

View File

@ -341,12 +341,37 @@ public:
}
ProcessResult start(uint64 ms) {
if (!_implementation && !init(_seekPositionMs)) {
if (!_implementation && !init()) {
return error();
}
if (frame() && frame()->original.isNull()) {
auto readResult = _implementation->readFramesTill(-1);
if (readResult != internal::ReaderImplementation::ReadResult::Success) { // Read the first frame.
auto readResult = _implementation->readFramesTill(-1, ms);
if (readResult == internal::ReaderImplementation::ReadResult::Eof && _seekPositionMs > 0) {
// If seek was done to the end: try to read the first frame,
// get the frame size and return a black frame with that size.
auto firstFramePlayId = 0LL;
auto firstFramePositionMs = 0LL;
auto reader = std_::make_unique<internal::FFMpegReaderImplementation>(_location.get(), &_data, firstFramePlayId);
if (reader->start(internal::ReaderImplementation::Mode::Normal, firstFramePositionMs)) {
auto firstFrameReadResult = reader->readFramesTill(-1, ms);
if (firstFrameReadResult == internal::ReaderImplementation::ReadResult::Success) {
if (reader->renderFrame(frame()->original, frame()->alpha, QSize())) {
frame()->original.fill(QColor(0, 0, 0));
frame()->positionMs = _seekPositionMs;
_width = frame()->original.width();
_height = frame()->original.height();
_durationMs = _implementation->durationMs();
_hasAudio = _implementation->hasAudio();
return ProcessResult::Started;
}
}
}
return error();
} else if (readResult != internal::ReaderImplementation::ReadResult::Success) { // Read the first frame.
return error();
}
if (!_implementation->renderFrame(frame()->original, frame()->alpha, QSize())) {
@ -387,7 +412,8 @@ public:
}
ProcessResult finishProcess(uint64 ms) {
auto readResult = _implementation->readFramesTill(_skippedMs + ms - _animationStarted);
auto frameMs = _seekPositionMs + ms - _animationStarted;
auto readResult = _implementation->readFramesTill(frameMs, ms);
if (readResult == internal::ReaderImplementation::ReadResult::Eof) {
stop();
_state = State::Finished;
@ -397,8 +423,8 @@ public:
}
_nextFramePositionMs = _implementation->frameRealTime();
_nextFrameWhen = _animationStarted + _implementation->framePresentationTime();
if (static_cast<int64>(_nextFrameWhen) > _skippedMs) {
_nextFrameWhen -= _skippedMs;
if (static_cast<int64>(_nextFrameWhen) > _seekPositionMs) {
_nextFrameWhen -= _seekPositionMs;
} else {
_nextFrameWhen = 1;
}
@ -422,7 +448,7 @@ public:
return true;
}
bool init(int64 positionMs) {
bool init() {
if (_data.isEmpty() && QFileInfo(_location->name()).size() <= AnimationInMemory) {
QFile f(_location->name());
if (f.open(QIODevice::ReadOnly)) {
@ -443,8 +469,7 @@ public:
}
return ImplementationMode::Normal;
};
_skippedMs = positionMs;
return _implementation->start(implementationMode(), positionMs);
return _implementation->start(implementationMode(), _seekPositionMs);
}
void startedAt(uint64 ms) {
@ -530,7 +555,6 @@ private:
uint64 _animationStarted = 0;
uint64 _nextFrameWhen = 0;
int64 _nextFramePositionMs = 0;
int64 _skippedMs = 0;
bool _autoPausedGif = false;
bool _started = false;
@ -816,10 +840,11 @@ MTPDocumentAttribute readAttributes(const QString &fname, const QByteArray &data
QByteArray localdata(data);
auto playId = 0ULL;
auto seekPositionMs = 0LL;
auto reader = std_::make_unique<internal::FFMpegReaderImplementation>(&localloc, &localdata, playId);
if (reader->start(internal::ReaderImplementation::Mode::OnlyGifv, 0)) {
if (reader->start(internal::ReaderImplementation::Mode::OnlyGifv, seekPositionMs)) {
bool hasAlpha = false;
auto readResult = reader->readFramesTill(-1);
auto readResult = reader->readFramesTill(-1, getms());
auto readFrame = (readResult == internal::ReaderImplementation::ReadResult::Success);
if (readFrame && reader->renderFrame(cover, hasAlpha, QSize())) {
if (cover.width() > 0 && cover.height() > 0 && cover.width() < cover.height() * 10 && cover.height() < cover.width() * 10) {
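
The reader-side half of the seek fix replaces _skippedMs with _seekPositionMs and threads it through finishProcess(): wall-clock time is translated into media time by adding the seek position, and a frame's display deadline is translated back by subtracting it. A small worked sketch of that bookkeeping (SeekClock is an invented name, fields simplified):

#include <cstdint>
#include <cassert>

// Sketch of the offset arithmetic in ReaderPrivate::finishProcess() and the
// _nextFrameWhen update above: _seekPositionMs shifts between system time and media time.
struct SeekClock {
    int64_t seekPositionMs;   // _seekPositionMs: where playback starts inside the media
    int64_t animationStarted; // _animationStarted: system time when playback started

    // frameMs passed to readFramesTill(): which frame we should be showing right now.
    int64_t mediaTimeFor(int64_t systemMs) const {
        return seekPositionMs + systemMs - animationStarted;
    }
    // _nextFrameWhen: the system time at which a frame with this presentation time is due.
    int64_t displayDeadlineFor(int64_t framePresentationMs) const {
        const int64_t when = animationStarted + framePresentationMs - seekPositionMs;
        return (when > 0) ? when : 1;
    }
};

int main() {
    SeekClock clock{30000, 10000};                    // sought to 0:30, started at system 10s
    assert(clock.mediaTimeFor(10250) == 30250);       // 250ms later we need the 0:30.250 frame
    assert(clock.displayDeadlineFor(30400) == 10400); // the 0:30.400 frame is due at system 10.4s
}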

View File

@ -236,7 +236,8 @@ bool MediaView::gifShown() const {
void MediaView::stopGif() {
_gif = nullptr;
_videoPaused = _videoIsSilent = false;
_videoPaused = _videoStopped = _videoIsSilent = false;
_fullScreenVideo = false;
_clipController.destroy();
Sandbox::removeEventFilter(this);
if (audioPlayer()) {
@ -530,6 +531,7 @@ void MediaView::clearData() {
_user = nullptr;
_photo = _additionalChatPhoto = nullptr;
_doc = nullptr;
_fullScreenVideo = false;
_saveMsgText.clear();
_caption.clear();
}
@ -559,30 +561,42 @@ void MediaView::close() {
}
void MediaView::activateControls() {
if (!_menu && !_mousePressed && (!_clipController || !_clipController->geometry().contains(_lastMouseMovePos))) {
if (!_menu && !_mousePressed) {
_controlsHideTimer.start(int(st::mvWaitHide));
}
if (_fullScreenVideo) {
if (_clipController) {
_clipController->showAnimated();
}
}
if (_controlsState == ControlsHiding || _controlsState == ControlsHidden) {
_controlsState = ControlsShowing;
_controlsAnimStarted = getms();
a_cOpacity.start(1);
if (!_a_state.animating()) _a_state.start();
}
if (_clipController) {
// _clipController->showAnimated();
}
}
void MediaView::onHideControls(bool force) {
if (!force && (!_dropdown.isHidden() || _menu || _mousePressed || (_clipController && _clipController->geometry().contains(_lastMouseMovePos)))) return;
if (!force) {
if (!_dropdown.isHidden()
|| _menu
|| _mousePressed
|| (_fullScreenVideo && _clipController && _clipController->geometry().contains(_lastMouseMovePos))) {
return;
}
}
if (_fullScreenVideo) {
if (_clipController) {
_clipController->hideAnimated();
}
}
if (_controlsState == ControlsHiding || _controlsState == ControlsHidden) return;
_controlsState = ControlsHiding;
_controlsAnimStarted = getms();
a_cOpacity.start(0);
if (!_a_state.animating()) _a_state.start();
if (_clipController) {
// _clipController->hideAnimated();
}
}
void MediaView::onDropdownHiding() {
@ -713,6 +727,7 @@ void MediaView::clipCallback(Media::Clip::Notification notification) {
_current = QPixmap();
} else if (_gif->state() == State::Finished) {
_videoPositionMs = _videoDurationMs;
_videoStopped = true;
updateSilentVideoPlaybackState();
} else {
_videoIsSilent = _doc->isVideo() && !_gif->hasAudio();
@ -725,6 +740,8 @@ void MediaView::clipCallback(Media::Clip::Notification notification) {
displayDocument(_doc, item);
} else {
stopGif();
updateControls();
update();
}
} break;
@ -1039,6 +1056,7 @@ void MediaView::showDocument(DocumentData *doc, HistoryItem *context) {
void MediaView::displayPhoto(PhotoData *photo, HistoryItem *item) {
stopGif();
_doc = nullptr;
_fullScreenVideo = false;
_photo = photo;
_radial.stop();
@ -1092,6 +1110,7 @@ void MediaView::displayPhoto(PhotoData *photo, HistoryItem *item) {
void MediaView::displayDocument(DocumentData *doc, HistoryItem *item) { // empty messages shown as docs: doc can be NULL
if (!doc || (!doc->isAnimation() && !doc->isVideo()) || doc != _doc || (item && (item->id != _msgid || (item->history() != (_msgmigrated ? _migrated : _history))))) {
_fullScreenVideo = false;
stopGif();
}
_doc = doc;
@ -1201,7 +1220,7 @@ void MediaView::displayDocument(DocumentData *doc, HistoryItem *item) { // empty
} else {
_zoomToScreen = 0;
}
if ((_w > width()) || (_h > height())) {
if ((_w > width()) || (_h > height()) || _fullScreenVideo) {
_zoom = ZoomToScreenLevel;
if (_zoomToScreen >= 0) {
_w = qRound(_w * (_zoomToScreen + 1));
@ -1259,7 +1278,7 @@ void MediaView::createClipReader() {
_gif = std_::make_unique<Media::Clip::Reader>(_doc->location(), _doc->data(), func(this, &MediaView::clipCallback), mode);
// Correct values will be set when gif gets inited.
_videoPaused = _videoIsSilent = false;
_videoPaused = _videoIsSilent = _videoStopped = false;
_videoPositionMs = 0ULL;
_videoDurationMs = _doc->duration() * 1000ULL;
@ -1279,8 +1298,8 @@ void MediaView::createClipController() {
connect(_clipController, SIGNAL(seekProgress(int64)), this, SLOT(onVideoSeekProgress(int64)));
connect(_clipController, SIGNAL(seekFinished(int64)), this, SLOT(onVideoSeekFinished(int64)));
connect(_clipController, SIGNAL(volumeChanged(float64)), this, SLOT(onVideoVolumeChanged(float64)));
connect(_clipController, SIGNAL(toFullScreenPressed()), this, SLOT(onVideoToFullScreen()));
connect(_clipController, SIGNAL(fromFullScreenPressed()), this, SLOT(onVideoFromFullScreen()));
connect(_clipController, SIGNAL(toFullScreenPressed()), this, SLOT(onVideoToggleFullScreen()));
connect(_clipController, SIGNAL(fromFullScreenPressed()), this, SLOT(onVideoToggleFullScreen()));
Sandbox::removeEventFilter(this);
Sandbox::installEventFilter(this);
@ -1317,6 +1336,8 @@ void MediaView::onVideoPauseResume() {
}
} else {
stopGif();
updateControls();
update();
}
}
@ -1329,7 +1350,7 @@ void MediaView::restartVideoAtSeekPosition(int64 positionMs) {
_gif = std_::make_unique<Media::Clip::Reader>(_doc->location(), _doc->data(), func(this, &MediaView::clipCallback), Media::Clip::Reader::Mode::Video, positionMs);
// Correct values will be set when gif gets inited.
_videoPaused = _videoIsSilent = false;
_videoPaused = _videoIsSilent = _videoStopped = false;
_videoPositionMs = positionMs;
AudioPlaybackState state;
@ -1341,7 +1362,7 @@ void MediaView::restartVideoAtSeekPosition(int64 positionMs) {
}
void MediaView::onVideoSeekProgress(int64 positionMs) {
if (!_videoPaused) {
if (!_videoPaused && !_videoStopped) {
onVideoPauseResume();
}
}
@ -1355,12 +1376,20 @@ void MediaView::onVideoVolumeChanged(float64 volume) {
emit audioPlayer()->videoVolumeChanged();
}
void MediaView::onVideoToFullScreen() {
void MediaView::onVideoToggleFullScreen() {
if (!_clipController) return;
}
void MediaView::onVideoFromFullScreen() {
_fullScreenVideo = !_fullScreenVideo;
if (_fullScreenVideo) {
_fullScreenZoomCache = _zoom;
setZoomLevel(ZoomToScreenLevel);
} else {
setZoomLevel(_fullScreenZoomCache);
}
_clipController->setInFullScreen(_fullScreenVideo);
updateControls();
update();
}
void MediaView::onVideoPlayProgress(const AudioMsgId &audioId) {
@ -1377,6 +1406,9 @@ void MediaView::onVideoPlayProgress(const AudioMsgId &audioId) {
void MediaView::updateVideoPlaybackState(const AudioPlaybackState &state, bool reset) {
if (state.frequency) {
if (state.state & AudioPlayerStoppedMask) {
_videoStopped = true;
}
_clipController->updatePlayback(state, reset);
} else { // Audio has stopped already.
_videoIsSilent = true;
@ -1415,11 +1447,17 @@ void MediaView::paintEvent(QPaintEvent *e) {
// main bg
QPainter::CompositionMode m = p.compositionMode();
p.setCompositionMode(QPainter::CompositionMode_Source);
p.setOpacity(st::mvBgOpacity);
for (int i = 0, l = region.rectCount(); i < l; ++i) {
p.fillRect(rs.at(i), st::mvBgColor->b);
if (_fullScreenVideo) {
for (int i = 0, l = region.rectCount(); i < l; ++i) {
p.fillRect(rs.at(i), st::black);
}
} else {
p.setOpacity(st::mvBgOpacity);
for (int i = 0, l = region.rectCount(); i < l; ++i) {
p.fillRect(rs.at(i), st::mvBgColor->b);
}
p.setCompositionMode(m);
}
p.setCompositionMode(m);
// photo
if (_photo) {
@ -1578,7 +1616,7 @@ void MediaView::paintEvent(QPaintEvent *e) {
}
}
float64 co = a_cOpacity.current();
float64 co = _fullScreenVideo ? 0. : a_cOpacity.current();
if (co > 0) {
// left nav bar
if (_leftNav.intersects(r) && _leftNavVisible) {
@ -1703,6 +1741,22 @@ void MediaView::paintEvent(QPaintEvent *e) {
}
void MediaView::keyPressEvent(QKeyEvent *e) {
if (_clipController) {
auto toggle1 = (e->key() == Qt::Key_F && e->modifiers().testFlag(Qt::ControlModifier));
auto toggle2 = (e->key() == Qt::Key_Enter || e->key() == Qt::Key_Return) && (e->modifiers().testFlag(Qt::AltModifier) || e->modifiers().testFlag(Qt::ControlModifier));
if (toggle1 || toggle2) {
onVideoToggleFullScreen();
return;
}
if (_fullScreenVideo) {
if (e->key() == Qt::Key_Escape) {
onVideoToggleFullScreen();
} else if (e->key() == Qt::Key_Enter || e->key() == Qt::Key_Return || e->key() == Qt::Key_Space) {
onVideoPauseResume();
}
return;
}
}
if (!_menu && e->key() == Qt::Key_Escape) {
close();
} else if (e == QKeySequence::Save || e == QKeySequence::SaveAs) {
@ -1774,36 +1828,40 @@ void MediaView::keyPressEvent(QKeyEvent *e) {
++newZoom;
}
}
if (_zoom != newZoom) {
float64 nx, ny, z = (_zoom == ZoomToScreenLevel) ? _zoomToScreen : _zoom;
_w = gifShown() ? _gif->width() : (_current.width() / cIntRetinaFactor());
_h = gifShown() ? _gif->height() : (_current.height() / cIntRetinaFactor());
if (z >= 0) {
nx = (_x - width() / 2.) / (z + 1);
ny = (_y - height() / 2.) / (z + 1);
} else {
nx = (_x - width() / 2.) * (-z + 1);
ny = (_y - height() / 2.) * (-z + 1);
}
_zoom = newZoom;
z = (_zoom == ZoomToScreenLevel) ? _zoomToScreen : _zoom;
if (z > 0) {
_w = qRound(_w * (z + 1));
_h = qRound(_h * (z + 1));
_x = qRound(nx * (z + 1) + width() / 2.);
_y = qRound(ny * (z + 1) + height() / 2.);
} else {
_w = qRound(_w / (-z + 1));
_h = qRound(_h / (-z + 1));
_x = qRound(nx / (-z + 1) + width() / 2.);
_y = qRound(ny / (-z + 1) + height() / 2.);
}
snapXY();
update();
}
setZoomLevel(newZoom);
}
}
void MediaView::setZoomLevel(int newZoom) {
if (_zoom == newZoom) return;
float64 nx, ny, z = (_zoom == ZoomToScreenLevel) ? _zoomToScreen : _zoom;
_w = gifShown() ? convertScale(_gif->width()) : (convertScale(_current.width()) / cIntRetinaFactor());
_h = gifShown() ? convertScale(_gif->height()) : (convertScale(_current.height()) / cIntRetinaFactor());
if (z >= 0) {
nx = (_x - width() / 2.) / (z + 1);
ny = (_y - height() / 2.) / (z + 1);
} else {
nx = (_x - width() / 2.) * (-z + 1);
ny = (_y - height() / 2.) * (-z + 1);
}
_zoom = newZoom;
z = (_zoom == ZoomToScreenLevel) ? _zoomToScreen : _zoom;
if (z > 0) {
_w = qRound(_w * (z + 1));
_h = qRound(_h * (z + 1));
_x = qRound(nx * (z + 1) + width() / 2.);
_y = qRound(ny * (z + 1) + height() / 2.);
} else {
_w = qRound(_w / (-z + 1));
_h = qRound(_h / (-z + 1));
_x = qRound(nx / (-z + 1) + width() / 2.);
_y = qRound(ny / (-z + 1) + height() / 2.);
}
snapXY();
update();
}
bool MediaView::moveToNext(int32 delta) {
if (_index < 0) {
if (delta == -1 && _photo == _additionalChatPhoto) {
@ -2008,6 +2066,8 @@ void MediaView::mousePressEvent(QMouseEvent *e) {
_down = OverMore;
} else if (_over == OverClose) {
_down = OverClose;
} else if (_over == OverVideo) {
_down = OverVideo;
} else if (!_saveMsg.contains(e->pos()) || !_saveMsgStarted) {
_pressed = true;
_dragging = 0;
@ -2021,6 +2081,18 @@ void MediaView::mousePressEvent(QMouseEvent *e) {
activateControls();
}
void MediaView::mouseDoubleClickEvent(QMouseEvent *e) {
updateOver(e->pos());
if (_over == OverVideo) {
onVideoToggleFullScreen();
onVideoPauseResume();
} else {
e->ignore();
return TWidget::mouseDoubleClickEvent(e);
}
}
void MediaView::snapXY() {
int32 xmin = width() - _w, xmax = 0;
int32 ymin = height() - _h, ymax = 0;
@ -2137,7 +2209,9 @@ void MediaView::updateOver(QPoint pos) {
if (_pressed || _dragging) return;
if (_leftNavVisible && _leftNav.contains(pos)) {
if (_fullScreenVideo) {
updateOverState(OverVideo);
} else if (_leftNavVisible && _leftNav.contains(pos)) {
updateOverState(OverLeftNav);
} else if (_rightNavVisible && _rightNav.contains(pos)) {
updateOverState(OverRightNav);
@ -2155,6 +2229,8 @@ void MediaView::updateOver(QPoint pos) {
updateOverState(OverMore);
} else if (_closeNav.contains(pos)) {
updateOverState(OverClose);
} else if (_doc && _doc->isVideo() && _gif && QRect(_x, _y, _w, _h).contains(pos)) {
updateOverState(OverVideo);
} else if (_over != OverNone) {
updateOverState(OverNone);
}
@ -2185,6 +2261,8 @@ void MediaView::mouseReleaseEvent(QMouseEvent *e) {
QTimer::singleShot(0, this, SLOT(onDropdown()));
} else if (_over == OverClose && _down == OverClose) {
close();
} else if (_over == OverVideo && _down == OverVideo) {
onVideoPauseResume();
} else if (_pressed) {
if (_dragging) {
if (_dragging > 0) {
@ -2300,7 +2378,7 @@ bool MediaView::event(QEvent *e) {
}
}
}
return QWidget::event(e);
return TWidget::event(e);
}
bool MediaView::eventFilter(QObject *obj, QEvent *e) {
@ -2328,7 +2406,7 @@ void MediaView::hide() {
_controlsHideTimer.stop();
_controlsState = ControlsShown;
a_cOpacity = anim::fvalue(1, 1);
QWidget::hide();
TWidget::hide();
stopGif();
_radial.stop();
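
onVideoToggleFullScreen() above relies on the newly extracted setZoomLevel(): entering fullscreen caches the current zoom in _fullScreenZoomCache and jumps to ZoomToScreenLevel, leaving restores the cached level (ZoomToScreenLevel substitutes the computed _zoomToScreen before the math runs). A zoom level maps to a scale of (z + 1) for z >= 0 and 1 / (-z + 1) for z < 0, and re-zooming keeps the image point under the view centre fixed by unscaling the old offset and rescaling it. A one-dimensional sketch of that arithmetic (the real code also rescales _w/_h and rounds):

#include <cassert>
#include <cmath>

// Scale factor for a MediaView zoom level: 0 -> 1x, 1 -> 2x, -1 -> 1/2x, ...
double scaleFor(int level) {
    return (level >= 0) ? double(level + 1) : 1. / double(-level + 1);
}

// x is the image's left edge in view coordinates, viewCenter is width() / 2.
// The offset from the centre is divided by the old scale and re-multiplied by the new
// one, so whatever sat at the centre of the view stays there across the zoom change.
double rezoom(double x, double viewCenter, int oldLevel, int newLevel) {
    const double unscaled = (x - viewCenter) / scaleFor(oldLevel);
    return unscaled * scaleFor(newLevel) + viewCenter;
}

int main() {
    // Going from 1x to 2x doubles the distance of the image edge from the view centre.
    assert(std::abs(rezoom(100., 500., 0, 1) - (-300.)) < 1e-9);
    // Zooming back restores the original position.
    assert(std::abs(rezoom(-300., 500., 1, 0) - 100.) < 1e-9);
}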

View File

@ -108,6 +108,7 @@ protected:
void keyPressEvent(QKeyEvent *e) override;
void mousePressEvent(QMouseEvent *e) override;
void mouseDoubleClickEvent(QMouseEvent *e) override;
void mouseMoveEvent(QMouseEvent *e) override;
void mouseReleaseEvent(QMouseEvent *e) override;
void contextMenuEvent(QContextMenuEvent *e) override;
@ -121,8 +122,7 @@ private slots:
void onVideoSeekProgress(int64 positionMs);
void onVideoSeekFinished(int64 positionMs);
void onVideoVolumeChanged(float64 volume);
void onVideoToFullScreen();
void onVideoFromFullScreen();
void onVideoToggleFullScreen();
void onVideoPlayProgress(const AudioMsgId &audioId);
private:
@ -131,6 +131,8 @@ private:
void findCurrent();
void loadBack();
void setZoomLevel(int newZoom);
void updateVideoPlaybackState(const AudioPlaybackState &state, bool reset = false);
void updateSilentVideoPlaybackState();
void restartVideoAtSeekPosition(int64 positionMs);
@ -185,6 +187,8 @@ private:
ChildWidget<Media::Clip::Controller> _clipController = { nullptr };
DocumentData *_autoplayVideoDocument = nullptr;
bool _fullScreenVideo = false;
int _fullScreenZoomCache = 0;
Text _caption;
QRect _captionRect;
@ -206,6 +210,7 @@ private:
// Video without audio stream playback information.
bool _videoIsSilent = false;
bool _videoPaused = false;
bool _videoStopped = false;
int64 _videoPositionMs = 0;
int64 _videoDurationMs = 0;
int32 _videoFrequencyMs = 1000; // 1000 ms per second.
@ -263,6 +268,7 @@ private:
OverSave,
OverMore,
OverIcon,
OverVideo,
};
OverState _over = OverNone;
OverState _down = OverNone;

View File

@ -28,19 +28,22 @@ Copyright (c) 2014-2016 John Preston, https://desktop.telegram.org
namespace ShortcutCommands {
typedef void(*Handler)();
typedef bool(*Handler)();
void lock_telegram() {
bool lock_telegram() {
if (auto w = App::wnd()) {
if (App::passcoded()) {
w->passcodeWidget()->onSubmit();
return true;
} else if (cHasPasscode()) {
w->setupPasscode(true);
return true;
}
}
return false;
}
void minimize_telegram() {
bool minimize_telegram() {
if (auto w = App::wnd()) {
if (cWorkMode() == dbiwmTrayOnly) {
w->minimizeToTray();
@ -48,18 +51,21 @@ void minimize_telegram() {
w->setWindowState(Qt::WindowMinimized);
}
}
return true;
}
void close_telegram() {
bool close_telegram() {
if (!Ui::hideWindowNoQuit()) {
if (auto w = App::wnd()) {
w->close();
}
}
return true;
}
void quit_telegram() {
bool quit_telegram() {
App::quit();
return true;
}
//void start_stop_recording() {
@ -70,58 +76,85 @@ void quit_telegram() {
//}
void media_play() {
if (MainWidget *m = App::main()) {
m->player()->playPressed();
bool media_play() {
if (auto m = App::main()) {
if (!m->player()->isHidden()) {
m->player()->playPressed();
return true;
}
}
return false;
}
void media_pause() {
if (MainWidget *m = App::main()) {
m->player()->pausePressed();
bool media_pause() {
if (auto m = App::main()) {
if (!m->player()->isHidden()) {
m->player()->pausePressed();
return true;
}
}
return false;
}
void media_playpause() {
if (MainWidget *m = App::main()) {
m->player()->playPausePressed();
bool media_playpause() {
if (auto m = App::main()) {
if (!m->player()->isHidden()) {
m->player()->playPausePressed();
return true;
}
}
return false;
}
void media_stop() {
if (MainWidget *m = App::main()) {
m->player()->stopPressed();
bool media_stop() {
if (auto m = App::main()) {
if (!m->player()->isHidden()) {
m->player()->stopPressed();
return true;
}
}
return false;
}
void media_previous() {
if (MainWidget *m = App::main()) {
m->player()->prevPressed();
bool media_previous() {
if (auto m = App::main()) {
if (!m->player()->isHidden()) {
m->player()->prevPressed();
return true;
}
}
return false;
}
void media_next() {
if (MainWidget *m = App::main()) {
m->player()->nextPressed();
bool media_next() {
if (auto m = App::main()) {
if (!m->player()->isHidden()) {
m->player()->nextPressed();
return true;
}
}
return false;
}
void search() {
if (MainWidget *m = App::main()) {
m->cmd_search();
bool search() {
if (auto m = App::main()) {
return m->cmd_search();
}
return false;
}
void previous_chat() {
if (MainWidget *m = App::main()) {
m->cmd_previous_chat();
bool previous_chat() {
if (auto m = App::main()) {
return m->cmd_previous_chat();
}
return false;
}
void next_chat() {
if (MainWidget *m = App::main()) {
m->cmd_next_chat();
bool next_chat() {
if (auto m = App::main()) {
return m->cmd_next_chat();
}
return false;
}
// other commands here
@ -501,8 +534,7 @@ bool launch(int shortcutId) {
if (it == DataPtr->handlers.cend()) {
return false;
}
(*it.value())();
return true;
return (*it.value())();
}
bool launch(const QString &command) {
@ -512,8 +544,7 @@ bool launch(const QString &command) {
if (it == DataPtr->commands.cend()) {
return false;
}
(*it.value())();
return true;
return (*it.value())();
}
void enableMediaShortcuts() {
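
The net effect of the Handler typedef change is easiest to see end to end: a command that finds nothing to act on (hidden player, no main widget, overlay shown) returns false, and launch() forwards that result instead of reporting success merely because a handler was registered. A minimal sketch with standard containers in place of the Qt map used by shortcuts.cpp:

#include <map>
#include <string>
#include <iostream>

// Handlers now return bool: "did the command actually execute?"
using Handler = bool (*)();

bool media_play() {
    const bool playerVisible = false; // stand-in for !m->player()->isHidden()
    if (!playerVisible) return false; // nothing to act on: report "not executed"
    std::cout << "play\n";
    return true;
}

std::map<std::string, Handler> commands = {{"media_play", media_play}};

// Mirrors launch(): an unknown command is false, a known one returns the handler's result.
bool launch(const std::string &command) {
    auto it = commands.find(command);
    if (it == commands.end()) return false;
    return it->second();
}

int main() {
    std::cout << (launch("media_play") ? "executed\n" : "ignored\n"); // ignored: player hidden
}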