#include <QtMultimedia/qvideoframe.h>
#include <QtMultimedia/qvideosink.h>
#include <QtMultimedia/private/qvideoframe_p.h>
#include <QtGui/rhi/qrhi.h>
#include <QtCore/qcoreapplication.h>
#include <QtCore/qdebug.h>
#include <QtCore/qloggingcategory.h>
#include <QtCore/private/qfactoryloader_p.h>
#include <QtCore/private/quniquehandle_p.h>

#include <common/qgst_debug_p.h>
#include <common/qgstreamermetadata_p.h>
#include <common/qgstreamervideosink_p.h>
#include <common/qgstutils_p.h>
#include <common/qgstvideobuffer_p.h>

#include <gst/video/video.h>
#include <gst/video/gstvideometa.h>

#if QT_CONFIG(gstreamer_gl)

#if QT_CONFIG(gstreamer_gl_egl) && QT_CONFIG(linux_dmabuf)
#  include <gst/allocators/gstdmabuf.h>
QGstVideoRenderer::QGstVideoRenderer(QGstreamerVideoSink *sink)
    : m_sink(sink), m_surfaceCaps(createSurfaceCaps(sink))
{
    QObject::connect(
            sink, &QGstreamerVideoSink::aboutToBeDestroyed, this,
            [this] {
                QMutexLocker locker(&m_sinkMutex);
                m_sink = nullptr;
            },
            Qt::DirectConnection);
}
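
    // All the pixel formats supported by both Qt and GStreamer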
    auto formats = QList<QVideoFrameFormat::PixelFormat>()
                   << QVideoFrameFormat::Format_YUV420P
                   << QVideoFrameFormat::Format_YUV422P
                   << QVideoFrameFormat::Format_YV12
                   << QVideoFrameFormat::Format_UYVY
                   << QVideoFrameFormat::Format_YUYV
                   << QVideoFrameFormat::Format_NV12
                   << QVideoFrameFormat::Format_NV21
                   << QVideoFrameFormat::Format_AYUV
                   << QVideoFrameFormat::Format_P010
                   << QVideoFrameFormat::Format_XRGB8888
                   << QVideoFrameFormat::Format_XBGR8888
                   << QVideoFrameFormat::Format_RGBX8888
                   << QVideoFrameFormat::Format_BGRX8888
                   << QVideoFrameFormat::Format_ARGB8888
                   << QVideoFrameFormat::Format_ABGR8888
                   << QVideoFrameFormat::Format_RGBA8888
                   << QVideoFrameFormat::Format_BGRA8888
                   << QVideoFrameFormat::Format_Y8
                   << QVideoFrameFormat::Format_Y16;
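
    // With an OpenGL ES based RHI the same formats are additionally advertised as
    // GLMemory (and, where EGL dmabuf import is available, DMABuf) caps features.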
#if QT_CONFIG(gstreamer_gl)
    QRhi *rhi = sink->rhi();
    if (rhi && rhi->backend() == QRhi::OpenGLES2) {
        caps.addPixelFormats(formats, GST_CAPS_FEATURE_MEMORY_GL_MEMORY);
#  if QT_CONFIG(gstreamer_gl_egl) && QT_CONFIG(linux_dmabuf)
        if (sink->eglDisplay() && sink->eglImageTargetTexture2D()) {
            caps.addPixelFormats(formats, GST_CAPS_FEATURE_MEMORY_DMABUF);
        }
#  endif
    }
#endif
    caps.addPixelFormats(formats);
QT_WARNING_DISABLE_GCC("-Wswitch") // the custom event values are not members of QEvent::Type

    switch (event->type()) {
    case renderFramesEvent: {
        // drain the queue of pending buffers and turn them into QVideoFrames
        while (std::optional<RenderBufferState> nextState = m_bufferQueue.dequeue())
            handleNewBuffer(std::move(*nextState));
        return;
    }
    case stopEvent:
        m_currentPipelineFrame = {};
        updateCurrentVideoFrame(m_currentVideoFrame);
    auto videoBuffer = std::make_unique<QGstVideoBuffer>(state.buffer, state.videoInfo, m_sink,
                                                         state.format, state.memoryFormat);
    QVideoFrame frame = QVideoFramePrivate::createFrame(std::move(videoBuffer), state.format);

    m_currentPipelineFrame = std::move(frame);

    if (!m_isActive) {
        qCDebug(qLcGstVideoRenderer) << " showing empty video frame";
        updateCurrentVideoFrame({});
        return;
    }

    updateCurrentVideoFrame(m_currentPipelineFrame);
    return m_surfaceCaps;

    qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::start" << caps;

    auto optionalFormatAndVideoInfo = caps.formatAndVideoInfo();
    if (optionalFormatAndVideoInfo) {
        std::tie(m_format, m_videoInfo) = std::move(*optionalFormatAndVideoInfo);
    qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::stop";

    m_bufferQueue.clear();
    QCoreApplication::postEvent(this, new QEvent(stopEvent));

    qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::unlock";
    if (!query || GST_QUERY_TYPE(query) != GST_QUERY_ALLOCATION)

    GstCaps *queryCaps = nullptr;
    gboolean needPool = false;
    gst_query_parse_allocation(query, &queryCaps, &needPool);

    if (queryCaps && gst_video_info_from_caps(&info, queryCaps)) {

        qCWarning(qLcGstVideoRenderer) << "QGstVideoRenderer::proposeAllocation failed to "
                                          "get size from query caps";
    constexpr int defaultMinBuffers = 3;
    static const int env =
            qEnvironmentVariableIntValue("QT_GSTREAMER_PROPOSE_ALLOCATION_MIN_BUFFERS");
    static const int minBuffers = env ? env : defaultMinBuffers;

    qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::proposeAllocation: needPool:" << needPool
                                 << "minBuffers:" << minBuffers;

    gst_query_add_allocation_pool(query, nullptr, size, minBuffers, 0);
    qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::render";

        qCDebug(qLcGstVideoRenderer)
                << " buffer received while flushing the sink ... discarding buffer";
        return GST_FLOW_FLUSHING;
    GstVideoCropMeta *meta = gst_buffer_get_video_crop_meta(buffer);
    if (meta) {
        QRect vp(meta->x, meta->y, meta->width, meta->height);
        if (m_format.viewport() != vp) {
            qCDebug(qLcGstVideoRenderer)
                    << Q_FUNC_INFO << " Update viewport on Metadata: [" << meta->height << "x"
                    << meta->width << " | " << meta->x << "x" << meta->y << "]";

            m_format.setViewport(vp);
        return m_capsMemoryFormat;

    [[maybe_unused]] GstMemory *mem = gst_buffer_peek_memory(buffer, 0);
#if QT_CONFIG(gstreamer_gl_egl) && QT_CONFIG(linux_dmabuf)
    if (gst_is_dmabuf_memory(mem))
        return QGstCaps::DMABuf;
#endif
#if QT_CONFIG(gstreamer_gl)
    if (gst_is_gl_memory(mem))
        return QGstCaps::GLTexture;
#endif

    qCDebug(qLcGstVideoRenderer) << "m_capsMemoryFormat" << m_capsMemoryFormat
                                 << "bufferMemoryFormat" << bufferMemoryFormat;
    QVideoFrameFormat bufferVideoFrameFormat = m_format;

    if (m_sink->eglDisplay()) {

        const bool setFormat_RGBA8888 =
                (bufferMemoryFormat == QGstCaps::DMABuf
                 && (m_format.pixelFormat() == QVideoFrameFormat::Format_UYVY
                     || m_format.pixelFormat() == QVideoFrameFormat::Format_YUYV));

        if (setFormat_RGBA8888) {
            qCDebug(qLcGstVideoRenderer) << "Setting pixel format to Format_RGBA8888";
            bufferVideoFrameFormat = QVideoFrameFormat(m_format.frameSize(),
                                                       QVideoFrameFormat::Format_RGBA8888);
            bufferVideoFrameFormat.setStreamFrameRate(m_format.streamFrameRate());
            bufferVideoFrameFormat.setColorRange(m_format.colorRange());
            bufferVideoFrameFormat.setColorTransfer(m_format.colorTransfer());
            bufferVideoFrameFormat.setColorSpace(m_format.colorSpace());
    RenderBufferState state{
        QGstBufferHandle{ buffer, QGstBufferHandle::NeedsRef },
        bufferVideoFrameFormat,
        m_videoInfo,
        bufferMemoryFormat,
    };

    qCDebug(qLcGstVideoRenderer) << " sending video frame";
    qsizetype sizeOfQueue = m_bufferQueue.enqueue(std::move(state));
    if (sizeOfQueue == 1)
        QCoreApplication::postEvent(this, new QEvent(renderFramesEvent));
#if QT_CONFIG(gstreamer_gl)
    if (GST_QUERY_TYPE(query) == GST_QUERY_CONTEXT) {
        const gchar *type = nullptr;
        gst_query_parse_context_type(query, &type);

        QLatin1StringView typeStr(type);
        if (typeStr != QLatin1StringView("gst.gl.GLDisplay")
            && typeStr != QLatin1StringView("gst.gl.local_context")) {

        QMutexLocker locker(&m_sinkMutex);

        auto *gstGlContext = typeStr == QLatin1StringView("gst.gl.GLDisplay")
                ? m_sink->gstGlDisplayContext()
                : m_sink->gstGlLocalContext();

        gst_query_set_context(query, gstGlContext);
    qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::gstEvent:" << event;

    switch (GST_EVENT_TYPE(event)) {
    case GST_EVENT_TAG:
        return gstEventHandleTag(event);
    case GST_EVENT_EOS:
        return gstEventHandleEOS(event);
    case GST_EVENT_FLUSH_START:
        return gstEventHandleFlushStart(event);
    case GST_EVENT_FLUSH_STOP:
        return gstEventHandleFlushStop(event);
    if (isActive == m_isActive)
        return;

    m_isActive = isActive;
    if (isActive)
        updateCurrentVideoFrame(m_currentPipelineFrame);
    else
        updateCurrentVideoFrame({});

    m_currentVideoFrame = std::move(frame);

    m_sink->setVideoFrame(m_currentVideoFrame);
    GstTagList *taglist = nullptr;
    gst_event_parse_tag(event, &taglist);

    qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::gstEventHandleTag:" << taglist;

    if (!gst_tag_list_get_string(taglist, GST_TAG_IMAGE_ORIENTATION, &value))

    m_format.setMirrored(parsed.flip);
    m_format.setRotation(parsed.rotation);
    m_bufferQueue.clear();
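
// The GstVideoSink glue below forwards each GStreamer callback to the
// QGstVideoRenderer instance owned by the sink (see class_init further down).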
#define VO_SINK(s) QGstVideoRendererSink *sink(reinterpret_cast<QGstVideoRendererSink *>(s))
            g_object_new(QGstVideoRendererSink::get_type(), nullptr));

    return QGstVideoRendererSinkElement{
        QGstElement::NeedsRef,
    static const GTypeInfo info =

    static const GType type = g_type_register_static(GST_TYPE_VIDEO_SINK, "QGstVideoRendererSink",
                                                      &info, GTypeFlags(0));
    Q_UNUSED(class_data);
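    // Wire the GObject, GstBaseSink and GstVideoSink virtuals to the static callbacks.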
    GstVideoSinkClass *video_sink_class = reinterpret_cast<GstVideoSinkClass *>(g_class);
    video_sink_class->show_frame = QGstVideoRendererSink::show_frame;

    GstBaseSinkClass *base_sink_class = reinterpret_cast<GstBaseSinkClass *>(g_class);
    base_sink_class->get_caps = QGstVideoRendererSink::get_caps;
    base_sink_class->set_caps = QGstVideoRendererSink::set_caps;
    base_sink_class->propose_allocation = QGstVideoRendererSink::propose_allocation;
    base_sink_class->stop = QGstVideoRendererSink::stop;
    base_sink_class->unlock = QGstVideoRendererSink::unlock;
    base_sink_class->query = QGstVideoRendererSink::query;
    base_sink_class->event = QGstVideoRendererSink::event;

    GstElementClass *element_class = reinterpret_cast<GstElementClass *>(g_class);
    element_class->change_state = QGstVideoRendererSink::change_state;
    gst_element_class_set_metadata(element_class,
                                   "Qt built-in video renderer sink",
                                   "Qt default built-in video renderer sink",

    GObjectClass *object_class = reinterpret_cast<GObjectClass *>(g_class);
    object_class->finalize = QGstVideoRendererSink::finalize;
    static GstStaticPadTemplate sink_pad_template = GST_STATIC_PAD_TEMPLATE(
            "sink", GST_PAD_SINK, GST_PAD_ALWAYS,
            GST_STATIC_CAPS("video/x-raw, "
                            "framerate = (fraction) [ 0, MAX ], "
                            "width = (int) [ 1, MAX ], "
                            "height = (int) [ 1, MAX ]"));

    gst_element_class_add_pad_template(
            GST_ELEMENT_CLASS(g_class), gst_static_pad_template_get(&sink_pad_template));
    delete sink->renderer;

    G_OBJECT_CLASS(gvrs_sink_parent_class)->finalize(object);

GstStateChangeReturn QGstVideoRendererSink::change_state(GstElement *element,
                                                         GstStateChange transition)
{
    GstStateChangeReturn ret =
            GST_ELEMENT_CLASS(gvrs_sink_parent_class)->change_state(element, transition);
    qCDebug(qLcGstVideoRenderer) << "QGstVideoRenderer::change_state:" << transition << ret;
        caps = QGstCaps(gst_caps_intersect(caps.caps(), filter), QGstCaps::HasRef);

    return caps.release();
    auto caps = QGstCaps(gcaps, QGstCaps::NeedsRef);

    qCDebug(qLcGstVideoRenderer) << "set_caps:" << caps;
    return sink->renderer->proposeAllocation(query);

    return sink->renderer->render(buffer);

    if (sink->renderer->query(query))
        return TRUE;

    return GST_BASE_SINK_CLASS(gvrs_sink_parent_class)->query(base, query);

    sink->renderer->gstEvent(event);
    return GST_BASE_SINK_CLASS(gvrs_sink_parent_class)->event(base, event);

    return reinterpret_cast<QGstVideoRendererSink *>(element());