Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
qgstvideorenderersink.cpp
// Copyright (C) 2016 Jolla Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include <common/qgstvideorenderersink_p.h>

#include <QtMultimedia/qvideoframe.h>
#include <QtMultimedia/qvideosink.h>
#include <QtMultimedia/private/qvideoframe_p.h>
#include <QtGui/rhi/qrhi.h>
#include <QtGui/qguiapplication.h>
#include <QtCore/qcoreapplication.h>
#include <QtCore/qdebug.h>
#include <QtCore/qloggingcategory.h>
#include <QtCore/private/qfactoryloader_p.h>
#include <QtCore/private/quniquehandle_p.h>

#include <common/qgst_debug_p.h>
#include <common/qgstreamermetadata_p.h>
#include <common/qgstreamervideosink_p.h>
#include <common/qgstutils_p.h>
#include <common/qgstvideobuffer_p.h>

#include <gst/video/video.h>
#include <gst/video/gstvideometa.h>


#if QT_CONFIG(gstreamer_gl)
#include <gst/gl/gl.h>
#endif // #if QT_CONFIG(gstreamer_gl)

// DMA support
#if QT_CONFIG(gstreamer_gl_egl) && QT_CONFIG(linux_dmabuf)
# include <gst/allocators/gstdmabuf.h>
#endif

// NOLINTBEGIN(readability-convert-member-functions-to-static)

Q_STATIC_LOGGING_CATEGORY(qLcGstVideoRenderer, "qt.multimedia.gstvideorenderer");

QT_BEGIN_NAMESPACE

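// Overview: QGstVideoRendererSink is the GObject/GStreamer side of the sink
// (registered via get_type()/class_init() below); its virtual functions forward
// to a per-instance QGstVideoRenderer. QGstVideoRenderer turns GstBuffers into
// QVideoFrames and pushes them to the QGstreamerRelayVideoSink, while
// QGstVideoRendererSinkElement is the thin C++ handle the rest of the backend
// holds to control the element.
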
QGstVideoRenderer::QGstVideoRenderer(QGstreamerRelayVideoSink *sink)
    : m_sink(sink), m_surfaceCaps(createSurfaceCaps(sink))
{
    QObject::connect(
            sink, &QGstreamerRelayVideoSink::aboutToBeDestroyed, this,
            [this] {
                QMutexLocker locker(&m_sinkMutex);
                m_sink = nullptr;
            },
            Qt::DirectConnection);
}

QGstVideoRenderer::~QGstVideoRenderer() = default;

QGstCaps QGstVideoRenderer::createSurfaceCaps([[maybe_unused]] QGstreamerRelayVideoSink *sink)
{
    QGstCaps caps = QGstCaps::create();

    // All the formats that both we and gstreamer support
    auto formats = QList<QVideoFrameFormat::PixelFormat>()
            << QVideoFrameFormat::Format_YUV420P
            << QVideoFrameFormat::Format_YUV422P
            << QVideoFrameFormat::Format_YV12
            << QVideoFrameFormat::Format_UYVY
            << QVideoFrameFormat::Format_YUYV
            << QVideoFrameFormat::Format_NV12
            << QVideoFrameFormat::Format_NV21
            << QVideoFrameFormat::Format_AYUV
            << QVideoFrameFormat::Format_P010
            << QVideoFrameFormat::Format_XRGB8888
            << QVideoFrameFormat::Format_XBGR8888
            << QVideoFrameFormat::Format_RGBX8888
            << QVideoFrameFormat::Format_BGRX8888
            << QVideoFrameFormat::Format_ARGB8888
            << QVideoFrameFormat::Format_ABGR8888
            << QVideoFrameFormat::Format_RGBA8888
            << QVideoFrameFormat::Format_BGRA8888
            << QVideoFrameFormat::Format_Y8
            << QVideoFrameFormat::Format_Y16
            ;
#if QT_CONFIG(gstreamer_gl)
    QRhi *rhi = sink->rhi();
    if (rhi && rhi->backend() == QRhi::OpenGLES2) {
        caps.addPixelFormats(formats, GST_CAPS_FEATURE_MEMORY_GL_MEMORY);
# if QT_CONFIG(gstreamer_gl_egl) && QT_CONFIG(linux_dmabuf)
        if (sink->eglDisplay() && sink->eglImageTargetTexture2D()) {
            caps.addPixelFormats(formats, GST_CAPS_FEATURE_MEMORY_DMABUF);
        }
# endif
    }
#endif
    caps.addPixelFormats(formats);
    return caps;
}

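// Frames arrive on the GStreamer streaming thread via render(), which enqueues
// them and posts renderFramesEvent (a custom QEvent type declared with this
// class) to the renderer. customEvent() therefore runs on the thread the
// renderer lives on, and is where queued buffers become QVideoFrames.
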
void QGstVideoRenderer::customEvent(QEvent *event)
{
QT_WARNING_PUSH
QT_WARNING_DISABLE_GCC("-Wswitch") // case value not in enumerated type ‘QEvent::Type’

    switch (event->type()) {
    case renderFramesEvent: {
        // LATER: we currently show every frame. however it may be reasonable to drop frames
        // here if the queue contains more than one frame
        while (std::optional<RenderBufferState> nextState = m_bufferQueue.dequeue())
            handleNewBuffer(std::move(*nextState));
        return;
    }
    case stopEvent: {
        m_currentPipelineFrame = {};
        updateCurrentVideoFrame(m_currentVideoFrame);
        return;
    }

    default:
        return;
    }
QT_WARNING_POP
}


void QGstVideoRenderer::handleNewBuffer(RenderBufferState state)
{
    auto videoBuffer = std::make_unique<QGstVideoBuffer>(state.buffer, state.videoInfo, m_sink,
                                                         state.format, state.memoryFormat);
    QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(videoBuffer), state.format);
    QGstUtils::setFrameTimeStampsFromBuffer(&frame, state.buffer.get());
    m_currentPipelineFrame = std::move(frame);

    if (!m_isActive) {
        qCDebug(qLcGstVideoRenderer) << " showing empty video frame";
        updateCurrentVideoFrame({});
        return;
    }

    updateCurrentVideoFrame(m_currentPipelineFrame);
}

const QGstCaps &QGstVideoRenderer::caps()
{
    return m_surfaceCaps;
}

bool QGstVideoRenderer::start(const QGstCaps &caps)
{
    qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::start" << caps;

    auto optionalFormatAndVideoInfo = caps.formatAndVideoInfo();
    if (optionalFormatAndVideoInfo) {
        std::tie(m_format, m_videoInfo) = std::move(*optionalFormatAndVideoInfo);
    } else {
        m_format = {};
        m_videoInfo = {};
    }
    m_capsMemoryFormat = caps.memoryFormat();

    // NOTE: m_format will not be fully populated until GST_EVENT_TAG is processed

    return true;
}

void QGstVideoRenderer::stop()
{
    qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::stop";

    m_bufferQueue.clear();
    QCoreApplication::postEvent(this, new QEvent(stopEvent));
}

void QGstVideoRenderer::unlock()
{
    qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::unlock";
}

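// proposeAllocation() answers upstream GST_QUERY_ALLOCATION queries. The
// proposed minimum buffer count can be tuned at run time; a sketch (the value
// is latched on first use, so it must be set before the sink is first used):
//
//     qputenv("QT_GSTREAMER_PROPOSE_ALLOCATION_MIN_BUFFERS", "6");
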
bool QGstVideoRenderer::proposeAllocation(GstQuery *query)
{
    if (!query || GST_QUERY_TYPE(query) != GST_QUERY_ALLOCATION)
        return false;

    GstCaps *queryCaps = nullptr;
    gboolean needPool = false;
    gst_query_parse_allocation(query, &queryCaps, &needPool);
    GstVideoInfo info;
    int size = 0;
    if (queryCaps && gst_video_info_from_caps(&info, queryCaps)) {
        size = info.size;
    } else {
        qWarning(qLcGstVideoRenderer) << "QGstVideoRenderer::proposeAllocation failed to "
                                         "get size from query caps";
        return true;
    }

    constexpr int defaultMinBuffers = 3;
    static const int env
        = qEnvironmentVariableIntValue("QT_GSTREAMER_PROPOSE_ALLOCATION_MIN_BUFFERS");
    static const int minBuffers = env ? env : defaultMinBuffers;
    qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::proposeAllocation: "
                                    "needPool:" << needPool
                                 << "size:" << size
                                 << "minBuffers:" << minBuffers;

    // This call is needed to avoid enabling of copy threshold by v4l2 decoders, which can result
    // in a mix of dmabuf and system memory buffers. The query sender should use its own buffer
    // pool, and will only take the size value and our suggested minimum buffers into account.
    // The driver can force a higher minimum if minBuffers is set too low, making 3 sufficient.
    gst_query_add_allocation_pool(query, nullptr, size, minBuffers, 0);

    return true;
}

GstFlowReturn QGstVideoRenderer::render(GstBuffer *buffer)
{
    qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::render";

    if (m_flushing) {
        qCDebug(qLcGstVideoRenderer)
                << " buffer received while flushing the sink ... discarding buffer";
        return GST_FLOW_FLUSHING;
    }

    GstVideoCropMeta *meta = gst_buffer_get_video_crop_meta(buffer);
    if (meta) {
        QRect vp(meta->x, meta->y, meta->width, meta->height);
        if (m_format.viewport() != vp) {
            qCDebug(qLcGstVideoRenderer)
                    << Q_FUNC_INFO << " Update viewport on Metadata: [" << meta->height << "x"
                    << meta->width << " | " << meta->x << "x" << meta->y << "]";
            // Update viewport if data is not the same
            m_format.setViewport(vp);
        }
    }

    // Some gst elements, like v4l2h264dec, can provide Direct Memory Access buffers (DMA-BUF)
    // without specifying it in their caps. So we check the memory format manually:
    QGstCaps::MemoryFormat bufferMemoryFormat = [&] {
        if (m_capsMemoryFormat != QGstCaps::CpuMemory)
            return m_capsMemoryFormat;

        [[maybe_unused]] GstMemory *mem = gst_buffer_peek_memory(buffer, 0);
#if QT_CONFIG(gstreamer_gl_egl) && QT_CONFIG(linux_dmabuf)
        if (gst_is_dmabuf_memory(mem))
            return QGstCaps::DMABuf;
#endif
#if QT_CONFIG(gstreamer_gl)
        if (gst_is_gl_memory(mem))
            return QGstCaps::GLTexture;
#endif
        return QGstCaps::CpuMemory;
    }();

    qCDebug(qLcGstVideoRenderer) << "m_capsMemoryFormat" << m_capsMemoryFormat
                                 << "bufferMemoryFormat" << bufferMemoryFormat;

    QVideoFrameFormat bufferVideoFrameFormat = m_format;

    static const bool isEglfsQPA = QGuiApplication::platformName() == QLatin1String("eglfs");
    if (m_sink && m_sink->eglDisplay() && isEglfsQPA) {
        // EGL seems to do implicit YUV->RGB conversion for UYVY and YUYV (YUY2), so we change the
        // pixel format to Format_RGBA8888 to select an appropriate shader.
        const bool setFormat_RGBA8888 =
                (bufferMemoryFormat == QGstCaps::DMABuf
                 && (m_format.pixelFormat() == QVideoFrameFormat::Format_UYVY
                     || m_format.pixelFormat() == QVideoFrameFormat::Format_YUYV));
        if (setFormat_RGBA8888) {
            // TODO: Replace with new private setter of pixel format.
            qCDebug(qLcGstVideoRenderer) << "Setting pixel format to Format_RGBA8888";
            bufferVideoFrameFormat = QVideoFrameFormat(m_format.frameSize(),
                                                       QVideoFrameFormat::Format_RGBA8888);
            bufferVideoFrameFormat.setStreamFrameRate(m_format.streamFrameRate());
            bufferVideoFrameFormat.setColorRange(m_format.colorRange());
            bufferVideoFrameFormat.setColorTransfer(m_format.colorTransfer());
            bufferVideoFrameFormat.setColorSpace(m_format.colorSpace());
        }
    }

    RenderBufferState state{
        QGstBufferHandle{ buffer, QGstBufferHandle::NeedsRef },
        bufferVideoFrameFormat,
        m_videoInfo,
        bufferMemoryFormat,
    };

    qCDebug(qLcGstVideoRenderer) << " sending video frame";

    qsizetype sizeOfQueue = m_bufferQueue.enqueue(std::move(state));
    if (sizeOfQueue == 1)
        // we only need to wake up if we don't have a pending frame
        QCoreApplication::postEvent(this, new QEvent(renderFramesEvent));

    return GST_FLOW_OK;
}

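// query() answers GST_QUERY_CONTEXT for "gst.gl.GLDisplay" and
// "gst.gl.local_context" with the GstContext objects owned by the relay sink,
// so upstream GL elements end up sharing the GL context Qt renders with.
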
bool QGstVideoRenderer::query(GstQuery *query)
{
#if QT_CONFIG(gstreamer_gl)
    if (GST_QUERY_TYPE(query) == GST_QUERY_CONTEXT) {
        const gchar *type = nullptr;
        gst_query_parse_context_type(query, &type);

        QLatin1StringView typeStr(type);
        if (typeStr != QLatin1StringView("gst.gl.GLDisplay")
            && typeStr != QLatin1StringView("gst.gl.local_context")) {
            return false;
        }

        QMutexLocker locker(&m_sinkMutex);
        if (!m_sink)
            return false;

        auto *gstGlContext = typeStr == QLatin1StringView("gst.gl.GLDisplay")
                ? m_sink->gstGlDisplayContext() : m_sink->gstGlLocalContext();
        if (!gstGlContext)
            return false;

        gst_query_set_context(query, gstGlContext);

        return true;
    }
#else
    Q_UNUSED(query);
#endif
    return false;
}

void QGstVideoRenderer::gstEvent(GstEvent *event)
{
    qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::gstEvent:" << event;

    switch (GST_EVENT_TYPE(event)) {
    case GST_EVENT_TAG:
        return gstEventHandleTag(event);
    case GST_EVENT_EOS:
        return gstEventHandleEOS(event);
    case GST_EVENT_FLUSH_START:
        return gstEventHandleFlushStart(event);
    case GST_EVENT_FLUSH_STOP:
        return gstEventHandleFlushStop(event);

    default:
        return;
    }
}

void QGstVideoRenderer::setActive(bool isActive)
{
    if (isActive == m_isActive)
        return;

    m_isActive = isActive;
    if (isActive)
        updateCurrentVideoFrame(m_currentPipelineFrame);
    else
        updateCurrentVideoFrame({});
}

void QGstVideoRenderer::updateCurrentVideoFrame(QVideoFrame frame)
{
    m_currentVideoFrame = std::move(frame);
    if (m_sink)
        m_sink->setVideoFrame(m_currentVideoFrame);
}

void QGstVideoRenderer::gstEventHandleTag(GstEvent *event)
{
    GstTagList *taglist = nullptr;
    gst_event_parse_tag(event, &taglist);
    if (!taglist)
        return;

    qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::gstEventHandleTag:" << taglist;

    QGString value;
    if (!gst_tag_list_get_string(taglist, GST_TAG_IMAGE_ORIENTATION, &value))
        return;

    RotationResult parsed = parseRotationTag(value.get());

    m_format.setMirrored(parsed.flip);
    m_format.setRotation(parsed.rotation);
}

void QGstVideoRenderer::gstEventHandleEOS(GstEvent *)
{
    stop();
}

void QGstVideoRenderer::gstEventHandleFlushStart(GstEvent *)
{
    // "data is to be discarded"
    m_flushing = true;
    m_bufferQueue.clear();
}

void QGstVideoRenderer::gstEventHandleFlushStop(GstEvent *)
{
    // "data is allowed again"
    m_flushing = false;
}

static GstVideoSinkClass *gvrs_sink_parent_class;
static thread_local QGstreamerRelayVideoSink *gvrs_current_sink;

#define VO_SINK(s) QGstVideoRendererSink *sink(reinterpret_cast<QGstVideoRendererSink *>(s))

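// createSink() and instance_init() hand the relay sink over through the
// thread_local gvrs_current_sink pointer: setSink() stores it, g_object_new()
// triggers instance_init(), and instance_init() consumes it to create the
// per-instance QGstVideoRenderer on the relay sink's thread.
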
QGstVideoRendererSinkElement QGstVideoRendererSink::createSink(QGstreamerRelayVideoSink *sink)
{
    setSink(sink);
    QGstVideoRendererSink *gstSink = reinterpret_cast<QGstVideoRendererSink *>(
            g_object_new(QGstVideoRendererSink::get_type(), nullptr));

    return QGstVideoRendererSinkElement{
        gstSink,
        QGstElement::NeedsRef,
    };
}

void QGstVideoRendererSink::setSink(QGstreamerRelayVideoSink *sink)
{
    gvrs_current_sink = sink;
}

GType QGstVideoRendererSink::get_type()
{
    static const GTypeInfo info =
    {
        sizeof(QGstVideoRendererSinkClass),     // class_size
        base_init,                              // base_init
        nullptr,                                // base_finalize
        class_init,                             // class_init
        nullptr,                                // class_finalize
        nullptr,                                // class_data
        sizeof(QGstVideoRendererSink),          // instance_size
        0,                                      // n_preallocs
        instance_init,                          // instance_init
        nullptr                                 // value_table
    };

    static const GType type = g_type_register_static(GST_TYPE_VIDEO_SINK, "QGstVideoRendererSink",
                                                      &info, GTypeFlags(0));

    return type;
}

void QGstVideoRendererSink::class_init(gpointer g_class, gpointer class_data)
{
    Q_UNUSED(class_data);

    gvrs_sink_parent_class = reinterpret_cast<GstVideoSinkClass *>(g_type_class_peek_parent(g_class));

    GstVideoSinkClass *video_sink_class = reinterpret_cast<GstVideoSinkClass *>(g_class);
    video_sink_class->show_frame = QGstVideoRendererSink::show_frame;

    GstBaseSinkClass *base_sink_class = reinterpret_cast<GstBaseSinkClass *>(g_class);
    base_sink_class->get_caps = QGstVideoRendererSink::get_caps;
    base_sink_class->set_caps = QGstVideoRendererSink::set_caps;
    base_sink_class->propose_allocation = QGstVideoRendererSink::propose_allocation;
    base_sink_class->stop = QGstVideoRendererSink::stop;
    base_sink_class->unlock = QGstVideoRendererSink::unlock;
    base_sink_class->query = QGstVideoRendererSink::query;
    base_sink_class->event = QGstVideoRendererSink::event;

    GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class);
    element_class->change_state = QGstVideoRendererSink::change_state;
    gst_element_class_set_metadata(element_class,
                                   "Qt built-in video renderer sink",
                                   "Sink/Video",
                                   "Qt default built-in video renderer sink",
                                   "The Qt Company");

    GObjectClass *object_class = reinterpret_cast<GObjectClass *>(g_class);
    object_class->finalize = QGstVideoRendererSink::finalize;
}

void QGstVideoRendererSink::base_init(gpointer g_class)
{
    static GstStaticPadTemplate sink_pad_template = GST_STATIC_PAD_TEMPLATE(
            "sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS(
                    "video/x-raw, "
                    "framerate = (fraction) [ 0, MAX ], "
                    "width = (int) [ 1, MAX ], "
                    "height = (int) [ 1, MAX ]"));

    gst_element_class_add_pad_template(
            GST_ELEMENT_CLASS(g_class), gst_static_pad_template_get(&sink_pad_template));
}

void QGstVideoRendererSink::instance_init(GTypeInstance *instance, gpointer g_class)
{
    Q_UNUSED(g_class);
    VO_SINK(instance);

    Q_ASSERT(gvrs_current_sink);

    sink->renderer = new QGstVideoRenderer(gvrs_current_sink);
    sink->renderer->moveToThread(gvrs_current_sink->thread());
    gvrs_current_sink = nullptr;
}

void QGstVideoRendererSink::finalize(GObject *object)
{
    VO_SINK(object);

    delete sink->renderer;

    // Chain up
    G_OBJECT_CLASS(gvrs_sink_parent_class)->finalize(object);
}

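// The functions below are the GstBaseSink/GstVideoSink virtual function
// implementations registered in class_init(). Apart from change_state(), which
// only chains up to the parent class and logs the transition, each one recovers
// the C++ renderer via VO_SINK and forwards the call to it.
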
GstStateChangeReturn QGstVideoRendererSink::change_state(
        GstElement *element, GstStateChange transition)
{
    GstStateChangeReturn ret =
            GST_ELEMENT_CLASS(gvrs_sink_parent_class)->change_state(element, transition);
    qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::change_state:" << transition << ret;
    return ret;
}

GstCaps *QGstVideoRendererSink::get_caps(GstBaseSink *base, GstCaps *filter)
{
    VO_SINK(base);

    QGstCaps caps = sink->renderer->caps();
    if (filter)
        caps = QGstCaps(gst_caps_intersect(caps.caps(), filter), QGstCaps::HasRef);

    return caps.release();
}

gboolean QGstVideoRendererSink::set_caps(GstBaseSink *base, GstCaps *gcaps)
{
    VO_SINK(base);
    auto caps = QGstCaps(gcaps, QGstCaps::NeedsRef);

    qCDebug(qLcGstVideoRenderer) << "set_caps:" << caps;

    if (!caps) {
        sink->renderer->stop();
        return TRUE;
    }

    return sink->renderer->start(caps);
}

gboolean QGstVideoRendererSink::propose_allocation(GstBaseSink *base, GstQuery *query)
{
    VO_SINK(base);
    return sink->renderer->proposeAllocation(query);
}

gboolean QGstVideoRendererSink::stop(GstBaseSink *base)
{
    VO_SINK(base);
    sink->renderer->stop();
    return TRUE;
}

gboolean QGstVideoRendererSink::unlock(GstBaseSink *base)
{
    VO_SINK(base);
    sink->renderer->unlock();
    return TRUE;
}

GstFlowReturn QGstVideoRendererSink::show_frame(GstVideoSink *base, GstBuffer *buffer)
{
    VO_SINK(base);
    return sink->renderer->render(buffer);
}

gboolean QGstVideoRendererSink::query(GstBaseSink *base, GstQuery *query)
{
    VO_SINK(base);
    if (sink->renderer->query(query))
        return TRUE;

    return GST_BASE_SINK_CLASS(gvrs_sink_parent_class)->query(base, query);
}

gboolean QGstVideoRendererSink::event(GstBaseSink *base, GstEvent *event)
{
    VO_SINK(base);
    sink->renderer->gstEvent(event);
    return GST_BASE_SINK_CLASS(gvrs_sink_parent_class)->event(base, event);
}

void QGstVideoRendererSinkElement::setActive(bool isActive)
{
    qGstVideoRendererSink()->renderer->setActive(isActive);
}

QGstVideoRendererSink *QGstVideoRendererSinkElement::qGstVideoRendererSink() const
{
    return reinterpret_cast<QGstVideoRendererSink *>(element());
}

QT_END_NAMESPACE
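
Typical use from the rest of the GStreamer backend looks roughly like the sketch below; the relaySink variable and the pipeline wiring are illustrative assumptions, not part of this file.

    // Sketch: create the sink for a QGstreamerRelayVideoSink and control frame delivery.
    QGstVideoRendererSinkElement sinkElement = QGstVideoRendererSink::createSink(relaySink);
    // ... add sinkElement to the pipeline and link it to the video branch ...
    sinkElement.setActive(true);  // forward decoded frames to the relay sink
    sinkElement.setActive(false); // stop delivery; the sink publishes an empty frame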