Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
qgstvideobuffer.cpp
Go to the documentation of this file.
1// Copyright (C) 2016 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
5#include <private/qvideotexturehelper_p.h>
6#include <qpa/qplatformnativeinterface.h>
7#include <qguiapplication.h>
8#include <QtCore/qapplicationstatic.h>
9#include <QtCore/qloggingcategory.h>
10
11#include <gst/video/video.h>
12#include <gst/video/video-frame.h>
13#include <gst/video/gstvideometa.h>
14#include <gst/pbutils/gstpluginsbaseversion.h>
15
16#include <common/qgstutils_p.h>
17
18#if QT_CONFIG(gstreamer_gl)
19# include <QtGui/rhi/qrhi.h>
20# include <QtGui/qopenglcontext.h>
21# include <QtGui/qopenglfunctions.h>
22# include <QtGui/qopengl.h>
23
24# include <gst/gl/gstglconfig.h>
25# include <gst/gl/gstglmemory.h>
26# include <gst/gl/gstglsyncmeta.h>
27
28# if QT_CONFIG(gstreamer_gl_egl)
29# include <EGL/egl.h>
30# include <EGL/eglext.h>
31# endif
32
33# if QT_CONFIG(gstreamer_gl_egl) && QT_CONFIG(linux_dmabuf)
34# include <gst/allocators/gstdmabuf.h>
35# endif
36#endif
37
39
40Q_STATIC_LOGGING_CATEGORY(qLcGstVideoBuffer, "qt.multimedia.gstreamer.videobuffer");
41
#if QT_CONFIG(gstreamer_gl) && QT_CONFIG(gstreamer_gl_egl) && QT_CONFIG(linux_dmabuf)
// Resolves the native EGLDisplay from the QPA platform integration, or
// nullptr when no QGuiApplication (and thus no platform plugin) exists yet.
Qt::HANDLE getEglDisplay() {
    using namespace Qt::StringLiterals;
    return qGuiApp
        ? qGuiApp->platformNativeInterface()->nativeResourceForIntegration("egldisplay"_ba)
        : nullptr;
}

// Cached once per QGuiApplication lifetime; re-created if the app restarts.
Q_APPLICATION_STATIC(Qt::HANDLE, s_eglDisplay, getEglDisplay());

// glEGLImageTargetTexture2DOES is an extension entry point; resolve it lazily
// through eglGetProcAddress and cache the result for the process lifetime.
Q_GLOBAL_STATIC(QFunctionPointer, g_eglImageTargetTexture2D,
                eglGetProcAddress("glEGLImageTargetTexture2DOES"));
#endif
55
// keep things building without drm_fourcc.h
// fourcc_code packs four ASCII characters into a little-endian uint32_t,
// mirroring the kernel's drm_fourcc.h definition byte for byte.
// FIX(review): the macro body was split over two lines without a `\`
// line continuation, which does not preprocess; restored the continuation.
#define fourcc_code(a, b, c, d) ((uint32_t)(a) | ((uint32_t)(b) << 8) | \
                                 ((uint32_t)(c) << 16) | ((uint32_t)(d) << 24))
#define DRM_FORMAT_RGBA8888 fourcc_code('R', 'A', '2', '4') /* [31:0] R:G:B:A 8:8:8:8 little endian */
#define DRM_FORMAT_BGRA8888 fourcc_code('B', 'A', '2', '4') /* [31:0] B:G:R:A 8:8:8:8 little endian */
#define DRM_FORMAT_RGB888 fourcc_code('R', 'G', '2', '4') /* [23:0] R:G:B little endian */
#define DRM_FORMAT_RG88 fourcc_code('R', 'G', '8', '8') /* [15:0] R:G 8:8 little endian */
#define DRM_FORMAT_ARGB8888 fourcc_code('A', 'R', '2', '4') /* [31:0] A:R:G:B 8:8:8:8 little endian */
#define DRM_FORMAT_ABGR8888 fourcc_code('A', 'B', '2', '4') /* [31:0] A:B:G:R 8:8:8:8 little endian */
#define DRM_FORMAT_BGR888 fourcc_code('B', 'G', '2', '4') /* [23:0] B:G:R little endian */
#define DRM_FORMAT_GR88 fourcc_code('G', 'R', '8', '8') /* [15:0] G:R 8:8 little endian */
#define DRM_FORMAT_R8 fourcc_code('R', '8', ' ', ' ') /* [7:0] R */
#define DRM_FORMAT_R16 fourcc_code('R', '1', '6', ' ') /* [15:0] R little endian */
#define DRM_FORMAT_RGB565 fourcc_code('R', 'G', '1', '6') /* [15:0] R:G:B 5:6:5 little endian */
#define DRM_FORMAT_RG1616 fourcc_code('R', 'G', '3', '2') /* [31:0] R:G 16:16 little endian */
#define DRM_FORMAT_GR1616 fourcc_code('G', 'R', '3', '2') /* [31:0] G:R 16:16 little endian */
#define DRM_FORMAT_BGRA1010102 fourcc_code('B', 'A', '3', '0') /* [31:0] B:G:R:A 10:10:10:2 little endian */
#define DRM_FORMAT_YUYV fourcc_code('Y', 'U', 'Y', 'V') /* [31:0] Cr0:Y1:Cb0:Y0 8:8:8:8 little endian */
#define DRM_FORMAT_UYVY fourcc_code('U', 'Y', 'V', 'Y') /* [31:0] Y1:Cr0:Y0:Cb0 8:8:8:8 little endian */
#define DRM_FORMAT_AYUV fourcc_code('A', 'Y', 'U', 'V') /* [31:0] A:Y:Cb:Cr 8:8:8:8 little endian */
#define DRM_FORMAT_NV12 fourcc_code('N', 'V', '1', '2') /* 2x2 subsampled Cr:Cb plane */
#define DRM_FORMAT_NV21 fourcc_code('N', 'V', '2', '1') /* 2x2 subsampled Cb:Cr plane */
#define DRM_FORMAT_P010 fourcc_code('P', '0', '1', '0') /* 2x2 subsampled Cr:Cb plane 10 bits per channel */
#define DRM_FORMAT_YUV411 fourcc_code('Y', 'U', '1', '1') /* 4x1 subsampled Cb (1) and Cr (2) planes */
#define DRM_FORMAT_YUV420 fourcc_code('Y', 'U', '1', '2') /* 2x2 subsampled Cb (1) and Cr (2) planes */
#define DRM_FORMAT_YVU420 fourcc_code('Y', 'V', '1', '2') /* 2x2 subsampled Cr (1) and Cb (2) planes */
#define DRM_FORMAT_YUV422 fourcc_code('Y', 'U', '1', '6') /* 2x1 subsampled Cb (1) and Cr (2) planes */
#define DRM_FORMAT_YUV444 fourcc_code('Y', 'U', '2', '4') /* non-subsampled Cb (1) and Cr (2) planes */
84
// Wraps a GstBuffer as a QAbstractVideoBuffer.
// NOTE(review): the member-initializer list (original lines 87-92) is not
// visible in this excerpt; m_buffer / m_videoInfo / m_frameFormat /
// m_memoryFormat are presumably initialized there — confirm against the
// full source before relying on this constructor in isolation.
QGstVideoBuffer::QGstVideoBuffer(QGstBufferHandle buffer, const QGstVideoInfo &videoInfo,
                                 const QVideoFrameFormat &frameFormat)
{
    // CPU-backed buffers expose no native handle; GL/DMABuf-backed buffers
    // can be turned into RHI textures via mapTextures().
    m_type = m_memoryFormat != QGstCaps::CpuMemory ? QVideoFrame::RhiTextureHandle
                                                   : QVideoFrame::NoHandle;
#if QT_CONFIG(gstreamer_gl) && QT_CONFIG(gstreamer_gl_egl) && QT_CONFIG(linux_dmabuf)
    // Cache the EGL display and the EGLImage-binding entry point up front;
    // the entry point is only meaningful when a display exists.
    m_eglDisplay = *s_eglDisplay();
    if (m_eglDisplay)
        m_eglImageTargetTexture2D = *g_eglImageTargetTexture2D();
#endif
}
102
QGstVideoBuffer::~QGstVideoBuffer()
{
    // Callers must unmap() before destruction; a still-mapped GstVideoFrame
    // or GstBuffer mapping would otherwise leak.
    Q_ASSERT(m_mode == QVideoFrame::NotMapped);
}
107
108QAbstractVideoBuffer::MapData QGstVideoBuffer::map(QVideoFrame::MapMode mode)
109{
110 const GstMapFlags flags = GstMapFlags(((mode & QVideoFrame::ReadOnly) ? GST_MAP_READ : 0)
111 | ((mode & QVideoFrame::WriteOnly) ? GST_MAP_WRITE : 0));
112
113 MapData mapData;
114 if (mode == QVideoFrame::NotMapped || m_mode != QVideoFrame::NotMapped)
115 return mapData;
116
117 const GstVideoInfo &gstVideoInfo = m_videoInfo.gstVideoInfo;
118 if (!gstVideoInfo.finfo || gstVideoInfo.finfo->n_planes == 0) { // Encoded
119 if (gst_buffer_map(m_buffer.get(), &m_frame.map[0], flags)) {
120 mapData.planeCount = 1;
121 mapData.bytesPerLine[0] = -1;
122 mapData.dataSize[0] = m_frame.map[0].size;
123 mapData.data[0] = static_cast<uchar *>(m_frame.map[0].data);
124
125 m_mode = mode;
126 }
127 } else if (gst_video_frame_map(&m_frame, &gstVideoInfo, m_buffer.get(), flags)) {
128 mapData.planeCount = GST_VIDEO_FRAME_N_PLANES(&m_frame);
129
130 for (guint i = 0; i < GST_VIDEO_FRAME_N_PLANES(&m_frame); ++i) {
131 mapData.bytesPerLine[i] = GST_VIDEO_FRAME_PLANE_STRIDE(&m_frame, i);
132 mapData.data[i] = static_cast<uchar *>(GST_VIDEO_FRAME_PLANE_DATA(&m_frame, i));
133 mapData.dataSize[i] = mapData.bytesPerLine[i]*GST_VIDEO_FRAME_COMP_HEIGHT(&m_frame, i);
134 }
135
136 m_mode = mode;
137 }
138 return mapData;
139}
140
141void QGstVideoBuffer::unmap()
142{
143 if (m_mode != QVideoFrame::NotMapped) {
144 if (!m_videoInfo.gstVideoInfo.finfo || m_videoInfo.gstVideoInfo.finfo->n_planes == 0)
145 gst_buffer_unmap(m_buffer.get(), &m_frame.map[0]);
146 else
147 gst_video_frame_unmap(&m_frame);
148 }
149 m_mode = QVideoFrame::NotMapped;
150}
151
// True when the underlying GstBuffer memory is a Linux DMA buffer.
bool QGstVideoBuffer::isDmaBuf() const
{
    return m_memoryFormat == QGstCaps::DMABuf;
}
156
157#if QT_CONFIG(gstreamer_gl_egl) && QT_CONFIG(linux_dmabuf)
158
// Maps a GstVideoFormat (and plane index) onto the DRM fourcc passed to
// eglCreateImage via EGL_LINUX_DRM_FOURCC_EXT.
//
// - singleEGLImage == true: the whole (possibly multi-planar) frame is
//   imported as one EGLImage, so a full-frame fourcc (e.g. DRM_FORMAT_NV12)
//   is returned and `plane` is ignored.
// - singleEGLImage == false: each plane is imported separately and the fourcc
//   describes that plane's layout (e.g. R8 luma, GR88 interleaved chroma).
//
// Returns -1 for formats that cannot be imported.
static int
fourccFromGstVideoFormat(const GstVideoFormat format, int plane, bool singleEGLImage)
{
    // DRM fourccs describe little-endian memory layout while GStreamer format
    // names describe logical component order, hence the endian-swapped aliases.
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
    const gint argb_fourcc = DRM_FORMAT_ARGB8888;
    const gint rgba_fourcc = DRM_FORMAT_ABGR8888;
    const gint rgb_fourcc = DRM_FORMAT_BGR888;
    const gint rg_fourcc = DRM_FORMAT_GR88;
#else
    const gint argb_fourcc = DRM_FORMAT_BGRA8888;
    const gint rgba_fourcc = DRM_FORMAT_RGBA8888;
    const gint rgb_fourcc = DRM_FORMAT_RGB888;
    const gint rg_fourcc = DRM_FORMAT_RG88;
#endif

    // NOTE: the [[fallthrough]] chains below are deliberate — when the
    // single-image fourcc does not apply, control falls into the shared
    // per-plane mapping of the following cases. Order matters.
    switch (format) {
    case GST_VIDEO_FORMAT_RGB16:
    case GST_VIDEO_FORMAT_BGR16:
        return DRM_FORMAT_RGB565;

    case GST_VIDEO_FORMAT_RGB:
    case GST_VIDEO_FORMAT_BGR:
        return rgb_fourcc;

    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_BGRA:
        return argb_fourcc;

    case GST_VIDEO_FORMAT_AYUV:
        if (singleEGLImage) return DRM_FORMAT_AYUV;
        [[fallthrough]]; // packed AYUV is plane-imported like 32-bit RGBA
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_ABGR:
    case GST_VIDEO_FORMAT_xBGR:
        return rgba_fourcc;

    case GST_VIDEO_FORMAT_GRAY8:
        return DRM_FORMAT_R8;

    case GST_VIDEO_FORMAT_YUY2:
        return DRM_FORMAT_YUYV;

    case GST_VIDEO_FORMAT_UYVY:
        return DRM_FORMAT_UYVY;

    case GST_VIDEO_FORMAT_GRAY16_LE:
    case GST_VIDEO_FORMAT_GRAY16_BE:
        if (singleEGLImage) return DRM_FORMAT_R16;
        return rg_fourcc; // 16-bit gray plane imported as two 8-bit channels

    case GST_VIDEO_FORMAT_NV12:
        if (singleEGLImage) return DRM_FORMAT_NV12;
        [[fallthrough]];
    case GST_VIDEO_FORMAT_NV21:
        if (singleEGLImage) return DRM_FORMAT_NV21;
        // Per-plane: plane 0 is 8-bit luma, plane 1 interleaved 8:8 chroma.
        return plane == 0 ? DRM_FORMAT_R8 : rg_fourcc;

    case GST_VIDEO_FORMAT_I420:
        if (singleEGLImage) return DRM_FORMAT_YUV420;
        [[fallthrough]];
    case GST_VIDEO_FORMAT_YV12:
        if (singleEGLImage) return DRM_FORMAT_YVU420;
        [[fallthrough]];
    case GST_VIDEO_FORMAT_Y41B:
        if (singleEGLImage) return DRM_FORMAT_YUV411;
        [[fallthrough]];
    case GST_VIDEO_FORMAT_Y42B:
        if (singleEGLImage) return DRM_FORMAT_YUV422;
        [[fallthrough]];
    case GST_VIDEO_FORMAT_Y444:
        if (singleEGLImage) return DRM_FORMAT_YUV444;
        // All fully-planar YUV variants use one 8-bit texture per plane.
        return DRM_FORMAT_R8;

    case GST_VIDEO_FORMAT_BGR10A2_LE:
        return DRM_FORMAT_BGRA1010102;

    case GST_VIDEO_FORMAT_P010_10LE:
    case GST_VIDEO_FORMAT_P010_10BE:
        if (singleEGLImage) return DRM_FORMAT_P010;
        return plane == 0 ? DRM_FORMAT_R16 : DRM_FORMAT_RG1616;

    default:
        return -1;
    }
}
247
248static void logGlAndEglErrors(const char *context)
249{
250 if (!qLcGstVideoBuffer().isDebugEnabled())
251 return;
252
253 const GLenum glError = glGetError();
254 const EGLint eglError = eglGetError();
255 if (glError == GL_NO_ERROR && eglError == EGL_SUCCESS)
256 return;
257
258 qCDebug(qLcGstVideoBuffer).nospace()
259 << context << ": GL error 0x" << Qt::hex << glError
260 << ", EGL error 0x" << eglError;
261}
262#endif
263
264#if QT_CONFIG(gstreamer_gl)
// Aggregate holding up to maxPlanes GL texture names for one video frame.
// `owned` records whether the consumer must glDeleteTextures the names:
// the DMABuf import path generates fresh textures (owned == true), while
// GL-memory mapping merely borrows GStreamer's textures (owned == false).
struct GlTextures
{
    uint count = 0;
    bool owned = false;
    std::array<guint32, QVideoTextureHelper::TextureDescription::maxPlanes> names{};
};
271
272class QGstQVideoFrameTextures : public QVideoFrameTextures
273{
274public:
275 QGstQVideoFrameTextures(QRhi *rhi,
276 QSize size,
277 QVideoFrameFormat::PixelFormat format,
278 GlTextures &textures,
279 QGstCaps::MemoryFormat memoryFormat)
280 : m_rhi(rhi)
281 , m_glTextures(textures)
282 {
283 QRhiTexture::Flags textureFlags = {};
284 if (QVideoTextureHelper::forceGlTextureExternalOesIsSet()
285 && m_rhi && rhi->backend() == QRhi::OpenGLES2)
286 textureFlags = {QRhiTexture::ExternalOES};
287
288 bool isDmaBuf = memoryFormat == QGstCaps::DMABuf;
289 auto fallbackPolicy = isDmaBuf
290 ? QVideoTextureHelper::TextureDescription::FallbackPolicy::Disable
291 : QVideoTextureHelper::TextureDescription::FallbackPolicy::Enable;
292
293 auto desc = QVideoTextureHelper::textureDescription(format);
294 for (uint i = 0; i < textures.count; ++i) {
295 // Pass nullptr to rhiPlaneSize to disable fallback in its call to rhiTextureFormat
296 QSize planeSize = desc->rhiPlaneSize(size, i, isDmaBuf ? nullptr : m_rhi);
297 QRhiTexture::Format format = desc->rhiTextureFormat(i, m_rhi, fallbackPolicy);
298 m_textures[i].reset(rhi->newTexture(format, planeSize, 1, textureFlags));
299 m_textures[i]->createFrom({textures.names[i], 0});
300 }
301 }
302
303 ~QGstQVideoFrameTextures() override
304 {
305 m_rhi->makeThreadLocalNativeContextCurrent();
306 auto ctx = QOpenGLContext::currentContext();
307 if (m_glTextures.owned && ctx)
308 ctx->functions()->glDeleteTextures(int(m_glTextures.count), m_glTextures.names.data());
309 }
310
311 QRhiTexture *texture(uint plane) const override
312 {
313 return plane < m_glTextures.count ? m_textures[plane].get() : nullptr;
314 }
315
316private:
317 QRhi *m_rhi = nullptr;
318 GlTextures m_glTextures;
319 std::unique_ptr<QRhiTexture> m_textures[QVideoTextureHelper::TextureDescription::maxPlanes];
320};
321
// Extracts the GL texture name of each plane from a GstBuffer backed by GL
// memory. A GL sync point is set and waited on so the producer's GL work has
// completed before the textures are consumed. The names remain owned by
// GStreamer (GlTextures::owned stays false). Returns empty GlTextures on
// failure.
static GlTextures mapFromGlTexture(const QGstBufferHandle &bufferHandle, GstVideoFrame &frame,
                                   GstVideoInfo &videoInfo)
{
    qCDebug(qLcGstVideoBuffer) << "Mapping textures from GL memory";

    GstBuffer *buffer = bufferHandle.get();
    auto *mem = GST_GL_BASE_MEMORY_CAST(gst_buffer_peek_memory(buffer, 0));
    if (!mem)
        return {};

    // GST_MAP_GL maps the planes as GL texture ids rather than system memory.
    if (!gst_video_frame_map(&frame, &videoInfo, buffer, GstMapFlags(GST_MAP_READ|GST_MAP_GL))) {
        qWarning() << "Could not map GL textures";
        return {};
    }

    // Use the buffer's own sync meta if present; otherwise attach a temporary
    // sync meta to a throwaway buffer just to issue the sync point.
    auto *sync_meta = gst_buffer_get_gl_sync_meta(buffer);
    GstBuffer *sync_buffer = nullptr;
    if (!sync_meta) {
        sync_buffer = gst_buffer_new();
        sync_meta = gst_buffer_add_gl_sync_meta(mem->context, sync_buffer);
    }
    gst_gl_sync_meta_set_sync_point (sync_meta, mem->context);
    gst_gl_sync_meta_wait (sync_meta, mem->context);
    if (sync_buffer)
        gst_buffer_unref(sync_buffer);

    GlTextures textures;
    textures.count = frame.info.finfo->n_planes;

    // With GST_MAP_GL, frame.data[i] points at the GL texture id of plane i.
    for (uint i = 0; i < textures.count; ++i)
        textures.names[i] = *(guint32 *)frame.data[i];

    gst_video_frame_unmap(&frame);

    return textures;
}
358
359# if QT_CONFIG(gstreamer_gl_egl) && QT_CONFIG(linux_dmabuf)
// Imports a DMABuf-backed GstBuffer into freshly generated GL textures via
// EGLImage (EGL_EXT_image_dma_buf_import). Depending on configuration either
// one EGLImage is created for the whole frame (external-OES / forced
// single-image mode) or one per plane. The returned GlTextures has
// owned == true, so the consuming wrapper must delete the GL names.
// Returns empty GlTextures on any unrecoverable failure.
static GlTextures mapFromDmaBuffer(QRhi *rhi, const QGstBufferHandle &bufferHandle,
                                   const QGstVideoInfo &videoInfo, Qt::HANDLE eglDisplay,
                                   QFunctionPointer eglImageTargetTexture2D)
{
    qCDebug(qLcGstVideoBuffer) << "Importing textures from DMA buffer";
    // Drains any pre-existing GL/EGL error state (only when logging is on).
    logGlAndEglErrors("mapFromDmaBuffer");

    GstBuffer *buffer = bufferHandle.get();

    Q_ASSERT(gst_is_dmabuf_memory(gst_buffer_peek_memory(buffer, 0)));
    Q_ASSERT(eglDisplay);
    Q_ASSERT(eglImageTargetTexture2D);
    Q_ASSERT(rhi);
    Q_ASSERT(rhi->backend() == QRhi::OpenGLES2);

    auto *nativeHandles = static_cast<const QRhiGles2NativeHandles *>(rhi->nativeHandles());
    auto glContext = nativeHandles->context;
    if (!glContext) {
        qCWarning(qLcGstVideoBuffer) << "no GL context";
        return {};
    }

    const GstVideoInfo &gstVideoInfo = videoInfo.gstVideoInfo;
    if (!gstVideoInfo.finfo) {
        qCWarning(qLcGstVideoBuffer) << "Missing valid GstVideoInfo for DMABuf GstBuffer";
        return {};
    }

    // Only linear DMABufs (modifier 0) are handled; tiled/compressed layouts
    // would require EGL_EXT_image_dma_buf_import_modifiers attributes.
    if (videoInfo.dmaDrmModifier && *videoInfo.dmaDrmModifier != 0) {
        qCWarning(qLcGstVideoBuffer) << "Unsupported non-linear DMABuf modifier:"
                                     << Qt::hex << *videoInfo.dmaDrmModifier;
        return {};
    }

    // Prefer the per-buffer GstVideoMeta (actual layout may differ from the
    // negotiated caps), falling back to the caps-derived video info.
    const GstVideoMeta *videoMeta = gst_buffer_get_video_meta(buffer);
    const GstVideoFormat videoInfoFormat = GST_VIDEO_INFO_FORMAT(&gstVideoInfo);
    GstVideoFormat format = videoMeta ? videoMeta->format : videoInfoFormat;
    if (format == GST_VIDEO_FORMAT_UNKNOWN)
        format = videoInfoFormat;

    const int nPlanes = videoMeta ? videoMeta->n_planes : GST_VIDEO_INFO_N_PLANES(&gstVideoInfo);
    const int nMemoryBlocks = gst_buffer_n_memory(buffer);
    static const bool externalOes = QVideoTextureHelper::forceGlTextureExternalOesIsSet();
    static const bool singleEGLImage =
            externalOes || qEnvironmentVariableIsSet("QT_GSTREAMER_FORCE_SINGLE_EGLIMAGE");

    qCDebug(qLcGstVideoBuffer) << "format:" << gst_video_format_to_string(format)
                               << "nPlanes:" << nPlanes
                               << "nMemoryBlocks:" << nMemoryBlocks
                               << "externalOes:" << externalOes
                               << "singleEGLImage:" << singleEGLImage;

    constexpr int maxPlanes = 4;
    // Planes either all share one memory block or map 1:1 onto blocks.
    Q_ASSERT(nPlanes >= 1
             && nPlanes <= maxPlanes
             && (nMemoryBlocks == 1 || nMemoryBlocks == nPlanes));

    const int nEGLImages = singleEGLImage ? 1 : nPlanes;
    std::array<EGLAttrib, maxPlanes> planeFourcc{};
    for (int plane = 0; plane < nEGLImages; ++plane) {
        const int fourcc = fourccFromGstVideoFormat(format, plane, singleEGLImage);
        if (fourcc < 0) {
            qCWarning(qLcGstVideoBuffer) << "Unsupported format for DMABuf:"
                                         << gst_video_format_to_string(format) << "plane:" << plane
                                         << "singleEGLImage" << singleEGLImage;
            return {};
        }
        planeFourcc[plane] = EGLAttrib(fourcc);
    }

    GlTextures textures = {};
    textures.owned = true; // freshly generated names: consumer deletes them
    textures.count = nEGLImages;

    QOpenGLFunctions functions(glContext);
    functions.glGenTextures(int(textures.count), textures.names.data());
    logGlAndEglErrors("glGenTextures");

    std::array<int, maxPlanes> fds{-1, -1, -1, -1};
    for (int i = 0; i < nMemoryBlocks && i < maxPlanes; ++i) {
        fds[i] = gst_dmabuf_memory_get_fd(gst_buffer_peek_memory(buffer, i));
    }

    // When planes share a single memory block, every plane uses fds[0].
    auto fdForPlane = [&](int plane) -> EGLAttrib {
        if (plane < 0 || plane >= maxPlanes || plane >= nMemoryBlocks)
            return fds[0];
        return (fds[plane] >= 0) ? fds[plane] : fds[0];
    };

    // Full-frame dimensions for single-image import, per-component otherwise.
    auto compWidth = [&](int plane) -> EGLAttrib {
        return singleEGLImage ? GST_VIDEO_INFO_WIDTH(&gstVideoInfo)
                              : GST_VIDEO_INFO_COMP_WIDTH(&gstVideoInfo, plane);
    };

    auto compHeight = [&](int plane) -> EGLAttrib {
        return singleEGLImage ? GST_VIDEO_INFO_HEIGHT(&gstVideoInfo)
                              : GST_VIDEO_INFO_COMP_HEIGHT(&gstVideoInfo, plane);
    };

    auto planeOffset = [&](int plane) -> EGLAttrib {
        // videoMeta/videoInfo offset can be incorrect when each plane has a separate memory block.
        if (nPlanes == nMemoryBlocks)
            return 0;
        if (videoMeta)
            return videoMeta->offset[plane];
        return GST_VIDEO_INFO_PLANE_OFFSET(&gstVideoInfo, plane);
    };

    auto planeStride = [&](int plane) -> EGLAttrib {
        if (videoMeta)
            return videoMeta->stride[plane];
        return GST_VIDEO_INFO_PLANE_STRIDE(&gstVideoInfo, plane);
    };

    for (int plane = 0; plane < nEGLImages; ++plane) {
        // Worst case: 3 header pairs + 4 plane triples (3 pairs each) + EGL_NONE = 31.
        constexpr int maxAttrCount = 31;
        std::array<EGLAttrib, maxAttrCount> attr;
        int i = 0;

        const int width = compWidth(plane);
        const int height = compHeight(plane);

        attr[i++] = EGL_WIDTH;
        attr[i++] = width;
        attr[i++] = EGL_HEIGHT;
        attr[i++] = height;
        attr[i++] = EGL_LINUX_DRM_FOURCC_EXT;
        attr[i++] = planeFourcc[plane];

        attr[i++] = EGL_DMA_BUF_PLANE0_FD_EXT;
        attr[i++] = fdForPlane(plane);
        attr[i++] = EGL_DMA_BUF_PLANE0_OFFSET_EXT;
        attr[i++] = planeOffset(plane);
        attr[i++] = EGL_DMA_BUF_PLANE0_PITCH_EXT;
        attr[i++] = planeStride(plane);

        // Single-image import describes all of the frame's planes in one
        // attribute list; per-plane import only ever fills PLANE0.
        if (singleEGLImage && nPlanes > 1) {
            attr[i++] = EGL_DMA_BUF_PLANE1_FD_EXT;
            attr[i++] = fdForPlane(1);
            attr[i++] = EGL_DMA_BUF_PLANE1_OFFSET_EXT;
            attr[i++] = planeOffset(1);
            attr[i++] = EGL_DMA_BUF_PLANE1_PITCH_EXT;
            attr[i++] = planeStride(1);
        }

        if (singleEGLImage && nPlanes > 2) {
            attr[i++] = EGL_DMA_BUF_PLANE2_FD_EXT;
            attr[i++] = fdForPlane(2);
            attr[i++] = EGL_DMA_BUF_PLANE2_OFFSET_EXT;
            attr[i++] = planeOffset(2);
            attr[i++] = EGL_DMA_BUF_PLANE2_PITCH_EXT;
            attr[i++] = planeStride(2);
        }

        if (singleEGLImage && nPlanes > 3) {
            attr[i++] = EGL_DMA_BUF_PLANE3_FD_EXT;
            attr[i++] = fdForPlane(3);
            attr[i++] = EGL_DMA_BUF_PLANE3_OFFSET_EXT;
            attr[i++] = planeOffset(3);
            attr[i++] = EGL_DMA_BUF_PLANE3_PITCH_EXT;
            attr[i++] = planeStride(3);
        }

        attr[i++] = EGL_NONE;
        Q_ASSERT(i <= maxAttrCount);

        EGLImage image = eglCreateImage(eglDisplay,
                                        EGL_NO_CONTEXT,
                                        EGL_LINUX_DMA_BUF_EXT,
                                        nullptr,
                                        attr.data());
        if (image == EGL_NO_IMAGE_KHR) {
            qCWarning(qLcGstVideoBuffer) << "could not create EGL image for plane" << plane
                                         << ", EGL error 0x" << Qt::hex << eglGetError();
            continue;
        }
        logGlAndEglErrors("eglCreateImage");

#ifdef GL_OES_EGL_image_external
        GLenum target = externalOes ? GL_TEXTURE_EXTERNAL_OES : GL_TEXTURE_2D;
#else
        GLenum target = GL_TEXTURE_2D;
#endif
        functions.glBindTexture(target, textures.names[plane]);

        // Attach the EGLImage as the bound texture's storage.
        auto EGLImageTargetTexture2D = (PFNGLEGLIMAGETARGETTEXTURE2DOESPROC)eglImageTargetTexture2D;
        EGLImageTargetTexture2D(target, image);
        logGlAndEglErrors("glEGLImageTargetTexture2DOES");

        // The texture retains the underlying storage; the EGLImage handle
        // itself is no longer needed.
        eglDestroyImage(eglDisplay, image);
    }

    return textures;
}
554#endif
555#endif
556
// Creates RHI texture wrappers for the current frame. GL-memory buffers are
// wrapped directly; DMABuf buffers are imported via EGLImage (OpenGL ES
// backend with a resolved EGL display only). Returns an empty pointer when
// the buffer is CPU-backed or the mapping/import fails.
QVideoFrameTexturesUPtr QGstVideoBuffer::mapTextures(QRhi &rhi, QVideoFrameTexturesUPtr& /*oldTextures*/)
{
#if QT_CONFIG(gstreamer_gl)
    GlTextures textures = {};
    if (m_memoryFormat == QGstCaps::GLTexture)
        textures = mapFromGlTexture(m_buffer, m_frame, m_videoInfo.gstVideoInfo);

# if QT_CONFIG(gstreamer_gl_egl) && QT_CONFIG(linux_dmabuf)
    // DMABuf import needs the EGL display and extension entry point cached in
    // the constructor, plus an OpenGL ES RHI backend.
    else if (m_memoryFormat == QGstCaps::DMABuf && m_eglDisplay && m_eglImageTargetTexture2D
             && rhi.backend() == QRhi::OpenGLES2)
        textures = mapFromDmaBuffer(&rhi, m_buffer, m_videoInfo, m_eglDisplay,
                                    m_eglImageTargetTexture2D);

# endif
    if (textures.count > 0)
        return std::make_unique<QGstQVideoFrameTextures>(
                &rhi, QSize{ m_videoInfo.gstVideoInfo.width, m_videoInfo.gstVideoInfo.height },
                m_frameFormat.pixelFormat(), textures, m_memoryFormat);
#endif
    return {};
}
578
579QT_END_NAMESPACE
@ DMABuf
Definition qgst_p.h:394
QGstVideoBuffer(QGstBufferHandle buffer, const QGstVideoInfo &videoInfo, const QVideoFrameFormat &frameFormat)
~QGstVideoBuffer() override
bool isDmaBuf() const override
QVideoFrameTexturesUPtr mapTextures(QRhi &, QVideoFrameTexturesUPtr &) override
void unmap() override
Releases the memory mapped by the map() function.
MapData map(QVideoFrame::MapMode mode) override
Maps the planes of a video buffer to memory.
Combined button and popup list for selecting options.
#define fourcc_code(a, b, c, d)