1
0
Fork 0
mirror of https://git.jami.net/savoirfairelinux/jami-client-qt.git synced 2025-08-26 01:33:56 +02:00

avmodel: use synchronous callbacks for DecodingStarted/Stopped

This is an older issue that has resurfaced where mobile device rotation
at the beginning of a call causes a frame-copy to a stale buffer because
the DecodingStarted event is handled asynchronously.

Noticed on Windows, but I believe any non-dbus build should exhibit it.

So we make all the connections blocking and adjust some parameters.
This commit also removes the DecodingStarted handler in CallModel which
was causing the client's target video frame to be reallocated for each
account present.

Change-Id: I23ac4e0bd4b446e7a532f0d362f7ecd209d3c790
GitLab: #536
This commit is contained in:
Andreas Traczyk 2022-09-26 21:01:45 -04:00 committed by Adrien Béraud
parent 7b7d12fb22
commit 15e133301e
11 changed files with 79 additions and 86 deletions

View file

@ -298,8 +298,9 @@ AvAdapter::onAudioDeviceEvent()
} }
void void
AvAdapter::onRendererStarted(const QString& id) AvAdapter::onRendererStarted(const QString& id, const QSize& size)
{ {
Q_UNUSED(size)
auto callId = lrcInstance_->getCurrentCallId(); auto callId = lrcInstance_->getCurrentCallId();
auto callModel = lrcInstance_->getCurrentCallModel(); auto callModel = lrcInstance_->getCurrentCallModel();
auto renderDevice = callModel->getCurrentRenderedDevice(callId); auto renderDevice = callModel->getCurrentRenderedDevice(callId);

View file

@ -108,7 +108,7 @@ protected:
private Q_SLOTS: private Q_SLOTS:
void onAudioDeviceEvent(); void onAudioDeviceEvent();
void onRendererStarted(const QString& id); void onRendererStarted(const QString& id, const QSize& size);
private: private:
// Get screens arrangement rect relative to primary screen. // Get screens arrangement rect relative to primary screen.

View file

@ -64,9 +64,17 @@ VideoProvider::registerSink(const QString& id, QVideoSink* obj)
if (it == framesObjects_.end()) { if (it == framesObjects_.end()) {
auto fo = std::make_unique<FrameObject>(); auto fo = std::make_unique<FrameObject>();
fo->subscribers.insert(obj); fo->subscribers.insert(obj);
framesObjects_.emplace(id, std::move(fo)); qDebug() << "Creating new FrameObject for id:" << id;
auto emplaced = framesObjects_.emplace(id, std::move(fo));
if (!emplaced.second) {
qWarning() << "Couldn't create FrameObject for id:" << id;
return; return;
} }
it = emplaced.first;
}
qDebug().noquote() << QString("Adding sink: 0x%1 to subscribers for id: %2")
.arg((quintptr) obj, QT_POINTER_SIZE * 2, 16, QChar('0'))
.arg(id);
it->second->subscribers.insert(obj); it->second->subscribers.insert(obj);
} }
@ -78,7 +86,11 @@ VideoProvider::unregisterSink(QVideoSink* obj)
auto& subs = frameObjIt.second->subscribers; auto& subs = frameObjIt.second->subscribers;
auto it = subs.constFind(obj); auto it = subs.constFind(obj);
if (it != subs.cend()) { if (it != subs.cend()) {
qDebug().noquote() << QString("Removing sink: 0x%1 from subscribers for id: %2")
.arg((quintptr) obj, QT_POINTER_SIZE * 2, 16, QChar('0'))
.arg(frameObjIt.first);
subs.erase(it); subs.erase(it);
return;
} }
} }
} }
@ -121,14 +133,10 @@ VideoProvider::captureVideoFrame(const QString& id)
} }
void void
VideoProvider::onRendererStarted(const QString& id) VideoProvider::onRendererStarted(const QString& id, const QSize& size)
{ {
auto size = avModel_.getRendererSize(id); static const auto pixelFormat = avModel_.useDirectRenderer()
// This slot is queued, the renderer may have been destroyed. ? QVideoFrameFormat::Format_RGBA8888
if (size.width() == 0 || size.height() == 0 || activeRenderers_[id] == size) {
return;
}
auto pixelFormat = avModel_.useDirectRenderer() ? QVideoFrameFormat::Format_RGBA8888
: QVideoFrameFormat::Format_BGRA8888; : QVideoFrameFormat::Format_BGRA8888;
auto frameFormat = QVideoFrameFormat(size, pixelFormat); auto frameFormat = QVideoFrameFormat(size, pixelFormat);
{ {

View file

@ -48,7 +48,7 @@ public:
Q_INVOKABLE QString captureVideoFrame(const QString& id); Q_INVOKABLE QString captureVideoFrame(const QString& id);
private Q_SLOTS: private Q_SLOTS:
void onRendererStarted(const QString& id); void onRendererStarted(const QString& id, const QSize& size);
void onFrameBufferRequested(const QString& id, AVFrame* avframe); void onFrameBufferRequested(const QString& id, AVFrame* avframe);
void onFrameUpdated(const QString& id); void onFrameUpdated(const QString& id);
void onRendererStopped(const QString& id); void onRendererStopped(const QString& id);

View file

@ -281,8 +281,9 @@ Q_SIGNALS:
/** /**
* Emitted when a renderer is started * Emitted when a renderer is started
* @param id of the renderer * @param id of the renderer
* @param size of the renderer
*/ */
void rendererStarted(const QString& id); void rendererStarted(const QString& id, const QSize& size);
/** /**
* Emitted when a renderer is stopped * Emitted when a renderer is stopped
* @param id of the renderer * @param id of the renderer

View file

@ -694,11 +694,13 @@ AVModelPimpl::AVModelPimpl(AVModel& linked, const CallbacksHandler& callbacksHan
connect(&callbacksHandler, connect(&callbacksHandler,
&CallbacksHandler::decodingStarted, &CallbacksHandler::decodingStarted,
this, this,
&AVModelPimpl::onDecodingStarted); &AVModelPimpl::onDecodingStarted,
Qt::DirectConnection);
connect(&callbacksHandler, connect(&callbacksHandler,
&CallbacksHandler::decodingStopped, &CallbacksHandler::decodingStopped,
this, this,
&AVModelPimpl::onDecodingStopped); &AVModelPimpl::onDecodingStopped,
Qt::DirectConnection);
auto startedPreview = false; auto startedPreview = false;
auto restartRenderers = [&](const QStringList& callList) { auto restartRenderers = [&](const QStringList& callList) {
@ -750,8 +752,6 @@ AVModelPimpl::getRecordingPath() const
void void
AVModelPimpl::onDecodingStarted(const QString& id, const QString& shmPath, int width, int height) AVModelPimpl::onDecodingStarted(const QString& id, const QString& shmPath, int width, int height)
{ {
if (id != "" && id != "local" && !id.contains("://")) // Else managed by callmodel
return;
addRenderer(id, QSize(width, height), shmPath); addRenderer(id, QSize(width, height), shmPath);
} }
@ -823,15 +823,14 @@ createRenderer(const QString& id, const QSize& res, const QString& shmPath = {})
void void
AVModelPimpl::addRenderer(const QString& id, const QSize& res, const QString& shmPath) AVModelPimpl::addRenderer(const QString& id, const QSize& res, const QString& shmPath)
{
{ {
auto connectRenderer = [this](Renderer* renderer, const QString& id) { auto connectRenderer = [this](Renderer* renderer, const QString& id) {
connect( connect(
renderer, renderer,
&Renderer::started, &Renderer::started,
this, this,
[this, id] { Q_EMIT linked_.rendererStarted(id); }, [this, id](const QSize& size) { Q_EMIT linked_.rendererStarted(id, size); },
Qt::QueuedConnection); Qt::DirectConnection);
connect( connect(
renderer, renderer,
&Renderer::frameBufferRequested, &Renderer::frameBufferRequested,
@ -859,7 +858,6 @@ AVModelPimpl::addRenderer(const QString& id, const QSize& res, const QString& sh
connectRenderer(r.get(), id); connectRenderer(r.get(), id);
r->startRendering(); r->startRendering();
} }
}
void void
AVModelPimpl::removeRenderer(const QString& id) AVModelPimpl::removeRenderer(const QString& id)

View file

@ -275,13 +275,13 @@ CallbacksHandler::CallbacksHandler(const Lrc& parent)
&VideoManagerInterface::decodingStarted, &VideoManagerInterface::decodingStarted,
this, this,
&CallbacksHandler::decodingStarted, &CallbacksHandler::decodingStarted,
Qt::QueuedConnection); Qt::DirectConnection);
connect(&VideoManager::instance(), connect(&VideoManager::instance(),
&VideoManagerInterface::decodingStopped, &VideoManagerInterface::decodingStopped,
this, this,
&CallbacksHandler::decodingStopped, &CallbacksHandler::decodingStopped,
Qt::QueuedConnection); Qt::DirectConnection);
connect(&VideoManager::instance(), connect(&VideoManager::instance(),
&VideoManagerInterface::deviceEvent, &VideoManagerInterface::deviceEvent,

View file

@ -253,14 +253,6 @@ public Q_SLOTS:
* @param state the new state * @param state the new state
*/ */
void remoteRecordingChanged(const QString& callId, const QString& peerNumber, bool state); void remoteRecordingChanged(const QString& callId, const QString& peerNumber, bool state);
/**
* Listen from CallbacksHandler when a renderer starts
* @param id
* @param shmPath
* @param width
* @param height
*/
void onDecodingStarted(const QString& id, const QString& shmPath, int width, int height);
}; };
CallModel::CallModel(const account::Info& owner, CallModel::CallModel(const account::Info& owner,
@ -578,11 +570,14 @@ CallModel::removeMedia(const QString& callId,
return; return;
} else if (!hasVideo) { } else if (!hasVideo) {
// To receive the remote video, we need a muted camera // To receive the remote video, we need a muted camera
proposedList.push_back( proposedList.push_back(MapStringString {
MapStringString {{MediaAttributeKey::MEDIA_TYPE, MediaAttributeValue::VIDEO}, {MediaAttributeKey::MEDIA_TYPE, MediaAttributeValue::VIDEO},
{MediaAttributeKey::ENABLED, TRUE_STR}, {MediaAttributeKey::ENABLED, TRUE_STR},
{MediaAttributeKey::MUTED, TRUE_STR}, {MediaAttributeKey::MUTED, TRUE_STR},
{MediaAttributeKey::SOURCE, pimpl_->lrc.getAVModel().getCurrentVideoCaptureDevice()}, // not needed to set the source. Daemon should be able to check it {MediaAttributeKey::SOURCE,
pimpl_->lrc.getAVModel()
.getCurrentVideoCaptureDevice()}, // not needed to set the source. Daemon should be
// able to check it
{MediaAttributeKey::LABEL, label.isEmpty() ? "video_0" : label}}); {MediaAttributeKey::LABEL, label.isEmpty() ? "video_0" : label}});
} }
@ -945,10 +940,6 @@ CallModelPimpl::CallModelPimpl(const CallModel& linked,
&CallbacksHandler::remoteRecordingChanged, &CallbacksHandler::remoteRecordingChanged,
this, this,
&CallModelPimpl::remoteRecordingChanged); &CallModelPimpl::remoteRecordingChanged);
connect(&callbacksHandler,
&CallbacksHandler::decodingStarted,
this,
&CallModelPimpl::onDecodingStarted);
#ifndef ENABLE_LIBWRAP #ifndef ENABLE_LIBWRAP
// Only necessary with dbus since the daemon runs separately // Only necessary with dbus since the daemon runs separately
@ -1644,12 +1635,6 @@ CallModelPimpl::remoteRecordingChanged(const QString& callId, const QString& pee
Q_EMIT linked.remoteRecordingChanged(callId, it->second->peerRec, state); Q_EMIT linked.remoteRecordingChanged(callId, it->second->peerRec, state);
} }
void
CallModelPimpl::onDecodingStarted(const QString& id, const QString& shmPath, int width, int height)
{
lrc.getAVModel().addRenderer(id, QSize(width, height), shmPath);
}
} // namespace lrc } // namespace lrc
#include "api/moc_callmodel.cpp" #include "api/moc_callmodel.cpp"

View file

@ -104,7 +104,7 @@ DirectRenderer::~DirectRenderer() {}
void void
DirectRenderer::startRendering() DirectRenderer::startRendering()
{ {
Q_EMIT started(); Q_EMIT started(size());
} }
void void

View file

@ -61,7 +61,7 @@ public Q_SLOTS:
Q_SIGNALS: Q_SIGNALS:
void frameUpdated(); void frameUpdated();
void stopped(); void stopped();
void started(); void started(const QSize& size);
void frameBufferRequested(AVFrame* avFrame); void frameBufferRequested(AVFrame* avFrame);
private: private:

View file

@ -328,7 +328,7 @@ ShmRenderer::startRendering()
pimpl_->timer->start(); pimpl_->timer->start();
Q_EMIT started(); Q_EMIT started(size());
} }
// Done on destroy instead // Done on destroy instead