Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
qgstvideorenderersink.cpp
Go to the documentation of this file.
1// Copyright (C) 2016 Jolla Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
5
// NOTE(review): the own-header include (original line 4) was elided by the
// doc extractor; restored per Qt convention (own header first).
#include "qgstvideorenderersink_p.h"

#include <QtMultimedia/qvideoframe.h>
#include <QtMultimedia/qvideosink.h>
#include <QtMultimedia/private/qvideoframe_p.h>
#include <QtGui/rhi/qrhi.h>
#include <QtGui/qguiapplication.h>
#include <QtGui/qopenglcontext.h>
#include <QtCore/qcoreapplication.h>
#include <QtCore/qdebug.h>
#include <QtCore/qloggingcategory.h>
#include <QtCore/private/qfactoryloader_p.h>
#include <QtCore/private/quniquehandle_p.h>

#include <common/qgst_debug_p.h>
#include <common/qgstreamermetadata_p.h>
#include <common/qgstreamervideosink_p.h>
#include <common/qgstutils_p.h>
#include <common/qgstvideobuffer_p.h>

#include <gst/video/video.h>
#include <gst/video/gstvideometa.h>
26
27
28#if QT_CONFIG(gstreamer_gl)
29#include <gst/gl/gl.h>
30#endif // #if QT_CONFIG(gstreamer_gl)
31
32// DMA support
33#if QT_CONFIG(gstreamer_gl_egl) && QT_CONFIG(linux_dmabuf)
34# include <gst/allocators/gstdmabuf.h>
35#endif
36
// NOLINTBEGIN(readability-convert-member-functions-to-static)

// Logging category used by all qCDebug/qCWarning statements in this file.
Q_STATIC_LOGGING_CATEGORY(qLcGstVideoRenderer, "qt.multimedia.gstvideorenderer");

QT_BEGIN_NAMESPACE
42
// Constructs the renderer for the given relay sink and precomputes the caps
// advertised to GStreamer (m_surfaceCaps).
QGstVideoRenderer::QGstVideoRenderer(QGstreamerRelayVideoSink *sink)
    : m_sink(sink), m_surfaceCaps(createSurfaceCaps(sink))
{
    // Clear m_sink synchronously when the sink announces its destruction.
    // Qt::DirectConnection plus m_sinkMutex ensure that code which reads
    // m_sink under the same mutex (see query()) never observes a dangling
    // pointer, even if destruction happens on another thread.
    QObject::connect(
            sink, &QGstreamerRelayVideoSink::aboutToBeDestroyed, this,
            [this] {
                QMutexLocker locker(&m_sinkMutex);
                m_sink = nullptr;
            },
            Qt::DirectConnection);
}
54
56
57QGstCaps QGstVideoRenderer::createSurfaceCaps([[maybe_unused]] QGstreamerRelayVideoSink *sink)
58{
60
61 // All the formats that both we and gstreamer support
62 auto formats = QList<QVideoFrameFormat::PixelFormat>()
63 << QVideoFrameFormat::Format_YUV420P
64 << QVideoFrameFormat::Format_YUV422P
65 << QVideoFrameFormat::Format_YV12
66 << QVideoFrameFormat::Format_UYVY
67 << QVideoFrameFormat::Format_YUYV
68 << QVideoFrameFormat::Format_NV12
69 << QVideoFrameFormat::Format_NV21
70 << QVideoFrameFormat::Format_AYUV
71 << QVideoFrameFormat::Format_P010
72 << QVideoFrameFormat::Format_XRGB8888
73 << QVideoFrameFormat::Format_XBGR8888
74 << QVideoFrameFormat::Format_RGBX8888
75 << QVideoFrameFormat::Format_BGRX8888
76 << QVideoFrameFormat::Format_ARGB8888
77 << QVideoFrameFormat::Format_ABGR8888
78 << QVideoFrameFormat::Format_RGBA8888
79 << QVideoFrameFormat::Format_BGRA8888
80 << QVideoFrameFormat::Format_Y8
81 << QVideoFrameFormat::Format_Y16
82 ;
83 caps.addPixelFormats(formats);
84#if QT_CONFIG(gstreamer_gl)
85 QRhi *rhi = sink->rhi();
86 if (rhi && rhi->backend() == QRhi::OpenGLES2) {
87 caps.addPixelFormats(formats, GST_CAPS_FEATURE_MEMORY_GL_MEMORY);
88# if QT_CONFIG(gstreamer_gl_egl) && QT_CONFIG(linux_dmabuf)
89 if (sink->eglDisplay() && sink->eglImageTargetTexture2D()) {
90 caps.addPixelFormats(formats, GST_CAPS_FEATURE_MEMORY_DMABUF);
91 }
92# endif
93 }
94#endif
95 return caps;
96}
97
98void QGstVideoRenderer::customEvent(QEvent *event)
99{
100QT_WARNING_PUSH
101QT_WARNING_DISABLE_GCC("-Wswitch") // case value not in enumerated type ‘QEvent::Type’
102
103 switch (event->type()) {
104 case renderFramesEvent: {
105 // LATER: we currently show every frame. however it may be reasonable to drop frames
106 // here if the queue contains more than one frame
107 while (std::optional<RenderBufferState> nextState = m_bufferQueue.dequeue())
108 handleNewBuffer(std::move(*nextState));
109 return;
110 }
111 case stopEvent: {
112 m_currentPipelineFrame = {};
113 updateCurrentVideoFrame(m_currentVideoFrame);
114 return;
115 }
116
117 default:
118 return;
119 }
120QT_WARNING_POP
121}
122
123
124void QGstVideoRenderer::handleNewBuffer(RenderBufferState state)
125{
126 auto videoBuffer =
127 std::make_unique<QGstVideoBuffer>(state.buffer, state.videoInfo, state.format);
128 QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(videoBuffer), state.format);
129 QGstUtils::setFrameTimeStampsFromBuffer(&frame, state.buffer.get());
130 m_currentPipelineFrame = std::move(frame);
131
132 if (!m_isActive) {
133 qCDebug(qLcGstVideoRenderer) << " showing empty video frame";
134 updateCurrentVideoFrame({});
135 return;
136 }
137
138 updateCurrentVideoFrame(m_currentPipelineFrame);
139}
140
142{
143 return m_surfaceCaps;
144}
145
147{
148 qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::start" << caps;
149
150 auto optionalVideoInfo = caps.videoInfo();
151 if (optionalVideoInfo) {
152 m_videoInfo = std::move(*optionalVideoInfo);
153 m_format = qVideoFrameFormatFromGstVideoInfo(m_videoInfo);
154 } else {
155 m_format = {};
156 m_videoInfo = {};
157 }
158 m_capsMemoryFormat = caps.memoryFormat();
159
160 // NOTE: m_format will not be fully populated until GST_EVENT_TAG is processed
161
162 return true;
163}
164
166{
167 qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::stop";
168
169 m_bufferQueue.clear();
170 QCoreApplication::postEvent(this, new QEvent(stopEvent));
171}
172
174{
175 qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::unlock";
176}
177
179{
180 if (!query || GST_QUERY_TYPE(query) != GST_QUERY_ALLOCATION)
181 return false;
182
183 GstCaps *queryCaps = nullptr;
184 gboolean needPool = false;
185 gst_query_parse_allocation(query, &queryCaps, &needPool);
186 GstVideoInfo info;
187 int size = 0;
188 if (queryCaps && gst_video_info_from_caps(&info, queryCaps)) {
189 size = info.size;
190 } else {
191 qWarning(qLcGstVideoRenderer) << "QGstVideoRenderer::proposeAllocation failed to "
192 "get size from query caps";
193 return true;
194 }
195
196 constexpr int defaultMinBuffers = 3;
197 static const int env
198 = qEnvironmentVariableIntValue("QT_GSTREAMER_PROPOSE_ALLOCATION_MIN_BUFFERS");
199 static const int minBuffers = env ? env : defaultMinBuffers;
200 qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::proposeAllocation: "
201 "needPool:" << needPool
202 << "size:" << size
203 << "minBuffers:" << minBuffers;
204
205 // This call is needed to avoid enabling of copy threshold by v4l2 decoders, which can result
206 // in a mix of dmabuf and system memory buffers. The query sender should use its own buffer
207 // pool, and will only take the size value and our suggested minimum buffers into account.
208 // The driver can force a higher minimum if minBuffers is set too low, making 3 sufficient.
209 gst_query_add_allocation_pool(query, nullptr, size, minBuffers, 0);
210
211 // Advertise GstVideoMeta support, often needed for DMA buffer negotiation.
212 gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, nullptr);
213
214 return true;
215}
216
218{
219 qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::render";
220
221 if (m_flushing) {
222 qCDebug(qLcGstVideoRenderer)
223 << " buffer received while flushing the sink ... discarding buffer";
224 return GST_FLOW_FLUSHING;
225 }
226
227 GstVideoCropMeta *meta = gst_buffer_get_video_crop_meta(buffer);
228 if (meta) {
229 QRect vp(meta->x, meta->y, meta->width, meta->height);
230 if (m_format.viewport() != vp) {
231 qCDebug(qLcGstVideoRenderer)
232 << Q_FUNC_INFO << " Update viewport on Metadata: [" << meta->height << "x"
233 << meta->width << " | " << meta->x << "x" << meta->y << "]";
234 // Update viewport if data is not the same
235 m_format.setViewport(vp);
236 }
237 }
238
239 // Some gst elements, like v4l2h264dec, can provide Direct Memory Access buffers (DMA-BUF)
240 // without specifying it in their caps. So we check the memory format manually:
241 QGstCaps::MemoryFormat bufferMemoryFormat = [&] {
242 if (m_capsMemoryFormat != QGstCaps::CpuMemory)
243 return m_capsMemoryFormat;
244
245 return qMemoryFormatFromGstBuffer(buffer);
246 }();
247
248 qCDebug(qLcGstVideoRenderer) << "m_capsMemoryFormat" << m_capsMemoryFormat
249 << "bufferMemoryFormat" << bufferMemoryFormat;
250
251 QVideoFrameFormat bufferVideoFrameFormat = m_format;
252
253 // When rendering DMAbuf frames with OpenGLES, EGL seems to do implicit YUV->RGB conversion for
254 // UYVY and YUYV (YUY2), so we need to change the pixel format to Format_RGBA8888 to select the
255 // correct shader.
256 const bool setFormat_RGBA8888 = [&] {
257#if QT_CONFIG(gstreamer_gl_egl) && QT_CONFIG(linux_dmabuf)
258 if ((m_format.pixelFormat() == QVideoFrameFormat::Format_UYVY
259 || m_format.pixelFormat() == QVideoFrameFormat::Format_YUYV)
260 && bufferMemoryFormat == QGstCaps::DMABuf
261 && m_sink && m_sink->eglDisplay() && m_sink->eglImageTargetTexture2D()) {
262
263 QRhi *rhi = m_sink->rhi();
264 if (!rhi || rhi->backend() != QRhi::OpenGLES2)
265 return false;
266
267 auto *nativeHandles = static_cast<const QRhiGles2NativeHandles *>(rhi->nativeHandles());
268 QOpenGLContext *glContext = nativeHandles ? nativeHandles->context : nullptr;
269 return glContext && glContext->isOpenGLES();
270 }
271#endif
272 return false;
273 }();
274 if (setFormat_RGBA8888) {
275 // TODO: Replace with new private setter of pixel format.
276 qCDebug(qLcGstVideoRenderer) << "Setting pixel format to Format_RGBA8888";
277 bufferVideoFrameFormat = QVideoFrameFormat(m_format.frameSize(),
278 QVideoFrameFormat::Format_RGBA8888);
279 bufferVideoFrameFormat.setStreamFrameRate(m_format.streamFrameRate());
280 bufferVideoFrameFormat.setColorRange(m_format.colorRange());
281 bufferVideoFrameFormat.setColorTransfer(m_format.colorTransfer());
282 bufferVideoFrameFormat.setColorSpace(m_format.colorSpace());
283 }
284
285 RenderBufferState state{
286 QGstBufferHandle{ buffer, QGstBufferHandle::NeedsRef },
287 bufferVideoFrameFormat,
288 m_videoInfo,
289 bufferMemoryFormat,
290 };
291
292 qCDebug(qLcGstVideoRenderer) << " sending video frame";
293
294 qsizetype sizeOfQueue = m_bufferQueue.enqueue(std::move(state));
295 if (sizeOfQueue == 1)
296 // we only need to wake up, if we don't have a pending frame
297 QCoreApplication::postEvent(this, new QEvent(renderFramesEvent));
298
299 return GST_FLOW_OK;
300}
301
// Answers GST_QUERY_CONTEXT queries for the GL display / local GL context so
// that upstream GL elements can share our rendering context. Returns true
// only when the query was answered.
bool QGstVideoRenderer::query(GstQuery *query)
{
#if QT_CONFIG(gstreamer_gl)
    if (GST_QUERY_TYPE(query) == GST_QUERY_CONTEXT) {
        const gchar *type = nullptr;
        gst_query_parse_context_type(query, &type);

        QLatin1StringView typeStr(type);
        if (typeStr != QLatin1StringView("gst.gl.GLDisplay")
            && typeStr != QLatin1StringView("gst.gl.local_context")) {
            return false;
        }

        // m_sink may be cleared concurrently via the aboutToBeDestroyed
        // connection (constructor), so access it only under m_sinkMutex.
        QMutexLocker locker(&m_sinkMutex);
        if (!m_sink)
            return false;

        auto *gstGlContext = typeStr == QLatin1StringView("gst.gl.GLDisplay")
                ? m_sink->gstGlDisplayContext() : m_sink->gstGlLocalContext();
        if (!gstGlContext)
            return false;

        gst_query_set_context(query, gstGlContext);

        return true;
    }
#else
    Q_UNUSED(query);
#endif
    return false;
}
333
334void QGstVideoRenderer::gstEvent(GstEvent *event)
335{
336 qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::gstEvent:" << event;
337
338 switch (GST_EVENT_TYPE(event)) {
339 case GST_EVENT_TAG:
340 return gstEventHandleTag(event);
341 case GST_EVENT_EOS:
342 return gstEventHandleEOS(event);
343 case GST_EVENT_FLUSH_START:
344 return gstEventHandleFlushStart(event);
345 case GST_EVENT_FLUSH_STOP:
346 return gstEventHandleFlushStop(event);
347
348 default:
349 return;
350 }
351}
352
353void QGstVideoRenderer::setActive(bool isActive)
354{
355 if (isActive == m_isActive)
356 return;
357
358 m_isActive = isActive;
359 if (isActive)
360 updateCurrentVideoFrame(m_currentPipelineFrame);
361 else
362 updateCurrentVideoFrame({});
363}
364
365void QGstVideoRenderer::updateCurrentVideoFrame(QVideoFrame frame)
366{
367 m_currentVideoFrame = std::move(frame);
368 if (m_sink)
369 m_sink->setVideoFrame(m_currentVideoFrame);
370}
371
372void QGstVideoRenderer::gstEventHandleTag(GstEvent *event)
373{
374 GstTagList *taglist = nullptr;
375 gst_event_parse_tag(event, &taglist);
376 if (!taglist)
377 return;
378
379 qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::gstEventHandleTag:" << taglist;
380
381 QGString value;
382 if (!gst_tag_list_get_string(taglist, GST_TAG_IMAGE_ORIENTATION, &value))
383 return;
384
385 RotationResult parsed = parseRotationTag(value.get());
386
387 m_format.setMirrored(parsed.flip);
388 m_format.setRotation(parsed.rotation);
389}
390
// End-of-stream: treat like a teardown — clears the buffer queue and posts a
// stopEvent (see stop()).
void QGstVideoRenderer::gstEventHandleEOS(GstEvent *)
{
    stop();
}
395
// Flush started: mark the flushing state so render() rejects new buffers with
// GST_FLOW_FLUSHING, and drop everything already queued.
void QGstVideoRenderer::gstEventHandleFlushStart(GstEvent *)
{
    // "data is to be discarded"
    m_flushing = true;
    m_bufferQueue.clear();
}
402
// Flush finished: buffers may flow into render() again.
void QGstVideoRenderer::gstEventHandleFlushStop(GstEvent *)
{
    // "data is allowed again"
    m_flushing = false;
}
408
411
412#define VO_SINK(s) QGstVideoRendererSink *sink(reinterpret_cast<QGstVideoRendererSink *>(s))
413
415{
416 setSink(sink);
417 QGstVideoRendererSink *gstSink = reinterpret_cast<QGstVideoRendererSink *>(
418 g_object_new(QGstVideoRendererSink::get_type(), nullptr));
419
420 return QGstVideoRendererSinkElement{
421 gstSink,
422 QGstElement::NeedsRef,
423 };
424}
425
427{
428 gvrs_current_sink = sink;
429}
430
// Registers (once, via thread-safe static initialization) and returns the
// GType for QGstVideoRendererSink, derived from GST_TYPE_VIDEO_SINK.
GType QGstVideoRendererSink::get_type()
{
    static const GTypeInfo info =
    {
        sizeof(QGstVideoRendererSinkClass),                    // class_size
        base_init,                                             // base_init
        nullptr,                                               // base_finalize
        class_init,                                            // class_init
        nullptr,                                               // class_finalize
        nullptr,                                               // class_data
        sizeof(QGstVideoRendererSink),                         // instance_size
        0,                                                     // n_preallocs
        instance_init,                                         // instance_init
        nullptr                                                // value_table
    };

    // Function-local static: registration happens exactly once.
    static const GType type = g_type_register_static(GST_TYPE_VIDEO_SINK, "QGstVideoRendererSink",
                                                     &info, GTypeFlags(0));

    return type;
}
452
453void QGstVideoRendererSink::class_init(gpointer g_class, gpointer class_data)
454{
455 Q_UNUSED(class_data);
456
457 gvrs_sink_parent_class = reinterpret_cast<GstVideoSinkClass *>(g_type_class_peek_parent(g_class));
458
459 GstVideoSinkClass *video_sink_class = reinterpret_cast<GstVideoSinkClass *>(g_class);
460 video_sink_class->show_frame = QGstVideoRendererSink::show_frame;
461
462 GstBaseSinkClass *base_sink_class = reinterpret_cast<GstBaseSinkClass *>(g_class);
463 base_sink_class->get_caps = QGstVideoRendererSink::get_caps;
464 base_sink_class->set_caps = QGstVideoRendererSink::set_caps;
465 base_sink_class->propose_allocation = QGstVideoRendererSink::propose_allocation;
466 base_sink_class->stop = QGstVideoRendererSink::stop;
467 base_sink_class->unlock = QGstVideoRendererSink::unlock;
468 base_sink_class->query = QGstVideoRendererSink::query;
469 base_sink_class->event = QGstVideoRendererSink::event;
470
471 GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class);
472 element_class->change_state = QGstVideoRendererSink::change_state;
473 gst_element_class_set_metadata(element_class,
474 "Qt built-in video renderer sink",
475 "Sink/Video",
476 "Qt default built-in video renderer sink",
477 "The Qt Company");
478
479 GObjectClass *object_class = reinterpret_cast<GObjectClass *>(g_class);
480 object_class->finalize = QGstVideoRendererSink::finalize;
481}
482
// GObject base initializer: installs the always-present "sink" pad template
// accepting any raw video size/framerate; the concrete caps come from
// get_caps() / QGstVideoRenderer::caps() at negotiation time.
void QGstVideoRendererSink::base_init(gpointer g_class)
{
    static GstStaticPadTemplate sink_pad_template = GST_STATIC_PAD_TEMPLATE(
            "sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS(
                    "video/x-raw, "
                    "framerate = (fraction) [ 0, MAX ], "
                    "width = (int) [ 1, MAX ], "
                    "height = (int) [ 1, MAX ]"));

    gst_element_class_add_pad_template(
            GST_ELEMENT_CLASS(g_class), gst_static_pad_template_get(&sink_pad_template));
}
495
// GObject instance initializer: creates the renderer for the sink stashed by
// createSink()/setSink() and moves it to that sink's thread.
void QGstVideoRendererSink::instance_init(GTypeInstance *instance, gpointer g_class)
{
    Q_UNUSED(g_class);
    VO_SINK(instance);

    // createSink() must have published the target sink before g_object_new()
    // reaches this initializer.
    Q_ASSERT(gvrs_current_sink);

    sink->renderer = new QGstVideoRenderer(gvrs_current_sink);
    // Event delivery (customEvent) must happen on the sink's thread, not on
    // the GStreamer streaming thread.
    sink->renderer->moveToThread(gvrs_current_sink->thread());
    gvrs_current_sink = nullptr;
}
507
// GObject finalizer: destroys the renderer created in instance_init() and
// chains up to the parent class finalizer.
void QGstVideoRendererSink::finalize(GObject *object)
{
    VO_SINK(object);

    delete sink->renderer;

    // Chain up
    G_OBJECT_CLASS(gvrs_sink_parent_class)->finalize(object);
}
517
// GstElement state-change hook: delegates entirely to the parent class and
// only logs the transition and its result.
GstStateChangeReturn QGstVideoRendererSink::change_state(
        GstElement *element, GstStateChange transition)
{
    GstStateChangeReturn ret =
            GST_ELEMENT_CLASS(gvrs_sink_parent_class)->change_state(element, transition);
    qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::change_state:" << transition << ret;
    return ret;
}
526
527GstCaps *QGstVideoRendererSink::get_caps(GstBaseSink *base, GstCaps *filter)
528{
529 VO_SINK(base);
530
531 QGstCaps caps = sink->renderer->caps();
532 if (filter)
533 caps = QGstCaps(gst_caps_intersect(caps.caps(), filter), QGstCaps::HasRef);
534
535 return caps.release();
536}
537
538gboolean QGstVideoRendererSink::set_caps(GstBaseSink *base, GstCaps *gcaps)
539{
540 VO_SINK(base);
541 auto caps = QGstCaps(gcaps, QGstCaps::NeedsRef);
542
543 qCDebug(qLcGstVideoRenderer) << "set_caps:" << caps;
544
545 if (!caps) {
546 sink->renderer->stop();
547 return TRUE;
548 }
549
550 return sink->renderer->start(caps);
551}
552
// GstBaseSink propose_allocation hook: forwards to the renderer.
gboolean QGstVideoRendererSink::propose_allocation(GstBaseSink *base, GstQuery *query)
{
    VO_SINK(base);
    return sink->renderer->proposeAllocation(query);
}
558
// GstBaseSink stop hook: forwards to the renderer; always reports success.
gboolean QGstVideoRendererSink::stop(GstBaseSink *base)
{
    VO_SINK(base);
    sink->renderer->stop();
    return TRUE;
}
565
// GstBaseSink unlock hook: forwards to the renderer; always reports success.
gboolean QGstVideoRendererSink::unlock(GstBaseSink *base)
{
    VO_SINK(base);
    sink->renderer->unlock();
    return TRUE;
}
572
// GstVideoSink show_frame hook (streaming thread): forwards each buffer to
// QGstVideoRenderer::render().
GstFlowReturn QGstVideoRendererSink::show_frame(GstVideoSink *base, GstBuffer *buffer)
{
    VO_SINK(base);
    return sink->renderer->render(buffer);
}
578
// GstBaseSink query hook: lets the renderer answer first (GL context
// queries); unanswered queries chain up to the parent class.
gboolean QGstVideoRendererSink::query(GstBaseSink *base, GstQuery *query)
{
    VO_SINK(base);
    if (sink->renderer->query(query))
        return TRUE;

    return GST_BASE_SINK_CLASS(gvrs_sink_parent_class)->query(base, query);
}
587
// GstBaseSink event hook: gives the renderer a look at every event (tags,
// EOS, flush) and then chains up so default processing still happens.
gboolean QGstVideoRendererSink::event(GstBaseSink *base, GstEvent * event)
{
    VO_SINK(base);
    sink->renderer->gstEvent(event);
    return GST_BASE_SINK_CLASS(gvrs_sink_parent_class)->event(base, event);
}
594
603
605{
606 qGstVideoRendererSink()->renderer->setActive(isActive);
607}
608
610{
611 return reinterpret_cast<QGstVideoRendererSink *>(element());
612}
613
614QT_END_NAMESPACE
MemoryFormat memoryFormat() const
Definition qgst.cpp:606
static QGstCaps create()
Definition qgst.cpp:633
MemoryFormat
Definition qgst_p.h:394
@ CpuMemory
Definition qgst_p.h:394
QGstVideoRendererSinkElement(QGstVideoRendererSink *, RefMode)
QGstVideoRendererSink * qGstVideoRendererSink() const
static QGstVideoRendererSinkElement createSink(QGstreamerRelayVideoSink *surface)
GstFlowReturn render(GstBuffer *)
~QGstVideoRenderer() override
void customEvent(QEvent *) override
This event handler can be reimplemented in a subclass to receive custom events.
bool start(const QGstCaps &)
QGstVideoRenderer(QGstreamerRelayVideoSink *)
bool proposeAllocation(GstQuery *)
void setFrameTimeStampsFromBuffer(QVideoFrame *frame, GstBuffer *buffer)
#define VO_SINK(s)
static GstVideoSinkClass * gvrs_sink_parent_class
static QGstreamerRelayVideoSink * gvrs_current_sink
QT_BEGIN_NAMESPACE Q_STATIC_LOGGING_CATEGORY(lcSynthesizedIterableAccess, "qt.iterable.synthesized", QtWarningMsg)