Fix video-to-audio sync.

This commit is contained in:
John Preston 2019-02-25 14:17:25 +04:00
parent d37b65e624
commit ccd04b98b9
8 changed files with 26 additions and 54 deletions

View File

@ -28,7 +28,7 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "mainwindow.h"
#include "core/application.h"
// #TODO streaming
// #TODO streaming ui
#include "media/streaming/media_streaming_player.h"
#include "media/streaming/media_streaming_loader_mtproto.h"

View File

@ -181,7 +181,7 @@ rpl::producer<crl::time> AudioTrack::playPosition() {
const auto type = AudioMsgId::Type::Video;
const auto state = Media::Player::mixer()->currentState(type);
if (state.id != _audioId) {
// #TODO streaming muted by other
// #TODO streaming later muted by other
return;
} else switch (state.state) {
case State::Stopped:

View File

@ -10,8 +10,6 @@ https://github.com/telegramdesktop/tdesktop/blob/master/LEGAL
#include "media/streaming/media_streaming_loader.h"
#include "media/streaming/media_streaming_file_delegate.h"
#include "ui/toast/toast.h" // #TODO streaming
namespace Media {
namespace Streaming {
@ -266,9 +264,10 @@ void File::Context::readNextPacket() {
handleEndOfFile();
}
}
void File::Context::handleEndOfFile() {
const auto more = _delegate->fileProcessPacket(Packet());
// #TODO streaming looping
// #TODO streaming later looping
_readTillEnd = true;
}
@ -314,7 +313,8 @@ File::Context::~Context() {
}
bool File::Context::finished() const {
return unroll() || _readTillEnd; // #TODO streaming looping
// #TODO streaming later looping
return unroll() || _readTillEnd;
}
File::File(
@ -332,10 +332,6 @@ void File::start(not_null<FileDelegate*> delegate, crl::time position) {
while (!context->finished()) {
context->readNextPacket();
}
crl::on_main(context, [] { AssertIsDebug();
Ui::Toast::Show("Finished loading.");
});
});
}

View File

@ -96,7 +96,7 @@ void LoaderMtproto::changeCdnParams(
const QByteArray &encryptionKey,
const QByteArray &encryptionIV,
const QVector<MTPFileHash> &hashes) {
// #TODO streaming
// #TODO streaming cdn
}
void LoaderMtproto::requestFailed(int offset, const RPCError &error) {
@ -106,7 +106,7 @@ void LoaderMtproto::requestFailed(int offset, const RPCError &error) {
return;
}
const auto callback = [=](const Data::UpdatedFileReferences &updated) {
// #TODO streaming
// #TODO streaming file_reference
};
_api->refreshFileReference(_origin, crl::guard(this, callback));
}

View File

@ -98,7 +98,6 @@ void Player::checkNextFrame() {
if (now < _nextFrameTime) {
_renderFrameTimer.callOnce(_nextFrameTime - now);
} else {
_renderFrameTimer.cancel();
renderFrame(now);
}
}
@ -269,20 +268,12 @@ bool Player::fileProcessPacket(Packet &&packet) {
}
} else if (_audio && _audio->streamIndex() == native.stream_index) {
const auto time = PacketPosition(packet, _audio->streamTimeBase());
//LOG(("[%2] AUDIO PACKET FOR %1ms"
// ).arg(time
// ).arg(crl::now() % 10000, 4, 10, QChar('0')));
crl::on_main(&_sessionGuard, [=] {
audioReceivedTill(time);
});
_audio->process(std::move(packet));
} else if (_video && _video->streamIndex() == native.stream_index) {
const auto time = PacketPosition(packet, _video->streamTimeBase());
//LOG(("[%2] VIDEO PACKET FOR %1ms"
// ).arg(time
// ).arg(crl::now() % 10000, 4, 10, QChar('0')));
crl::on_main(&_sessionGuard, [=] {
videoReceivedTill(time);
});

View File

@ -343,26 +343,6 @@ QImage ConvertFrame(
return QImage();
}
}
//if (stream.rotation == 180) {
// storage = std::move(storage).mirrored(true, true);
//} else if (stream.rotation != 0) {
// auto transform = QTransform();
// transform.rotate(stream.rotation);
// storage = storage.transformed(transform);
//}
// Read some future packets for audio stream.
//if (_audioStreamId >= 0) {
// while (_frameMs + 5000 > _lastReadAudioMs
// && _frameMs + 15000 > _lastReadVideoMs) {
// auto packetResult = readAndProcessPacket();
// if (packetResult != PacketResult::Ok) {
// break;
// }
// }
//}
// #TODO streaming
ClearFrameMemory(stream.frame.get());
return storage;
}

View File

@ -76,7 +76,8 @@ private:
crl::time _resumedTime = kTimeUnknown;
mutable TimePoint _syncTimePoint;
mutable crl::time _previousFramePosition = kTimeUnknown;
rpl::variable<crl::time> _nextFrameDisplayTime = kTimeUnknown;
crl::time _nextFrameDisplayTime = kTimeUnknown;
rpl::event_stream<crl::time> _nextFrameTimeUpdates;
rpl::event_stream<> _waitingForData;
bool _queued = false;
@ -110,7 +111,10 @@ VideoTrackObject::VideoTrackObject(
rpl::producer<crl::time> VideoTrackObject::displayFrameAt() const {
return interrupted()
? rpl::complete<crl::time>()
: _nextFrameDisplayTime.value();
: (_nextFrameDisplayTime == kTimeUnknown)
? _nextFrameTimeUpdates.events()
: _nextFrameTimeUpdates.events_starting_with_copy(
_nextFrameDisplayTime);
}
rpl::producer<> VideoTrackObject::waitingForData() const {
@ -164,7 +168,7 @@ bool VideoTrackObject::readFrame(not_null<Frame*> frame) {
if (const auto error = ReadNextFrame(_stream)) {
if (error.code() == AVERROR_EOF) {
interrupt();
_nextFrameDisplayTime.reset(kTimeUnknown);
_nextFrameTimeUpdates = rpl::event_stream<crl::time>();
} else if (error.code() != AVERROR(EAGAIN) || _noMoreData) {
interrupt();
_error();
@ -184,9 +188,9 @@ bool VideoTrackObject::readFrame(not_null<Frame*> frame) {
QSize(),
std::move(frame->original));
frame->position = position;
frame->displayPosition = position; // #TODO streaming adjust / sync
frame->displayed = kTimeUnknown;
// #TODO streaming later prepare frame
//frame->request
//frame->prepared
@ -201,8 +205,12 @@ void VideoTrackObject::presentFrameIfNeeded() {
const auto presented = _shared->presentFrame(time.trackTime);
if (presented.displayPosition != kTimeUnknown) {
const auto trackLeft = presented.displayPosition - time.trackTime;
// We don't use rpl::variable, because we want an event each time
// we assign a new value, even if the value really didn't change.
_nextFrameDisplayTime = time.worldTime
+ crl::time(std::round(trackLeft / _options.speed));
_nextFrameTimeUpdates.fire_copy(_nextFrameDisplayTime);
}
queueReadFrames(presented.nextCheckDelay);
}
@ -300,7 +308,6 @@ bool VideoTrackObject::processFirstFrame() {
return false;
}
_shared->init(std::move(frame), _syncTimePoint.trackTime);
_nextFrameDisplayTime.reset(_syncTimePoint.trackTime);
callReady();
if (!_stream.queue.empty()) {
queueReadFrames();
@ -383,7 +390,6 @@ void VideoTrack::Shared::init(QImage &&cover, crl::time position) {
_frames[0].original = std::move(cover);
_frames[0].position = position;
_frames[0].displayPosition = position;
// Usually main thread sets displayed time before _counter increment.
// But in this case we update _counter, so we set a fake displayed time.
@ -407,7 +413,7 @@ not_null<VideoTrack::Frame*> VideoTrack::Shared::getFrame(int index) {
}
bool VideoTrack::Shared::IsPrepared(not_null<Frame*> frame) {
return (frame->displayPosition != kTimeUnknown)
return (frame->position != kTimeUnknown)
&& (frame->displayed == kTimeUnknown)
&& !frame->original.isNull();
}
@ -417,7 +423,7 @@ bool VideoTrack::Shared::IsStale(
crl::time trackTime) {
Expects(IsPrepared(frame));
return (frame->displayPosition < trackTime);
return (frame->position < trackTime);
}
auto VideoTrack::Shared::prepareState(crl::time trackTime) -> PrepareState {
@ -433,7 +439,7 @@ auto VideoTrack::Shared::prepareState(crl::time trackTime) -> PrepareState {
} else if (!IsPrepared(next)) {
return next;
} else {
return PrepareNextCheck(frame->displayPosition - trackTime + 1);
return PrepareNextCheck(frame->position - trackTime + 1);
}
};
const auto finishPrepare = [&](int index) {
@ -459,7 +465,7 @@ auto VideoTrack::Shared::presentFrame(crl::time trackTime) -> PresentFrame {
const auto present = [&](int counter, int index) -> PresentFrame {
const auto frame = getFrame(index);
Assert(IsPrepared(frame));
const auto position = frame->displayPosition;
const auto position = frame->position;
// Release this frame to the main thread for rendering.
_counter.store(
@ -475,7 +481,7 @@ auto VideoTrack::Shared::presentFrame(crl::time trackTime) -> PresentFrame {
|| IsStale(frame, trackTime)) {
return { kTimeUnknown, crl::time(0) };
}
return { kTimeUnknown, (trackTime - frame->displayPosition + 1) };
return { kTimeUnknown, (trackTime - frame->position + 1) };
};
switch (counter()) {
@ -596,7 +602,7 @@ QImage VideoTrack::frame(const FrameRequest &request) const {
if (request.resize.isEmpty()) {
return frame->original;
} else if (frame->prepared.isNull() || frame->request != request) {
// #TODO streaming prepare frame
// #TODO streaming later prepare frame
//frame->request = request;
//frame->prepared = PrepareFrame(
// frame->original,

View File

@ -59,7 +59,6 @@ private:
struct Frame {
QImage original;
crl::time position = kTimeUnknown;
crl::time displayPosition = kTimeUnknown;
crl::time displayed = kTimeUnknown;
FrameRequest request;