Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
qvideoframeconverter.cpp
Go to the documentation of this file.
1// Copyright (C) 2022 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3// Qt-Security score:critical reason:data-parser
4
11#include "qcachedvalue_p.h"
12
13#include <QtCore/qcoreapplication.h>
14#include <QtCore/qsize.h>
15#include <QtCore/qhash.h>
16#include <QtCore/qfile.h>
17#include <QtGui/qimage.h>
18#include <QtCore/qloggingcategory.h>
19
20#include <QtMultimedia/private/qmultimedia_ranges_p.h>
21#include <QtMultimedia/private/qvideotexturehelper_p.h>
22
23#include <rhi/qrhi.h>
24
25#ifdef Q_OS_DARWIN
26#include <QtCore/private/qcore_mac_p.h>
27#endif
28
30
namespace ranges = QtMultimediaPrivate::ranges;

Q_STATIC_LOGGING_CATEGORY(qLcVideoFrameConverter, "qt.multimedia.video.frameconverter")

// clang-format off
// Vertex data for a full-screen triangle-strip quad, four vertices per
// rotation. Each vertex is { x, y, u, v } — clip-space position followed by
// texture coordinate — matching the Float2 + Float2 vertex input layout
// (stride 4 * sizeof(float)) configured in updateTextures(). The draw call
// picks one 16-float section via a vertex-buffer offset of
// 16 * sizeof(float) * rotationIndex().
static constexpr float g_quad[] = {
    // Rotation 0 CW
    1.f, -1.f, 1.f, 1.f,
    1.f, 1.f, 1.f, 0.f,
    -1.f, -1.f, 0.f, 1.f,
    -1.f, 1.f, 0.f, 0.f,
    // Rotation 90 CW
    1.f, -1.f, 1.f, 0.f,
    1.f, 1.f, 0.f, 0.f,
    -1.f, -1.f, 1.f, 1.f,
    -1.f, 1.f, 0.f, 1.f,
    // Rotation 180 CW
    1.f, -1.f, 0.f, 0.f,
    1.f, 1.f, 0.f, 1.f,
    -1.f, -1.f, 1.f, 0.f,
    -1.f, 1.f, 1.f, 1.f,
    // Rotation 270 CW
    1.f, -1.f, 0.f, 1.f,
    1.f, 1.f, 1.f, 1.f,
    -1.f, -1.f, 0.f, 0.f,
    -1.f, 1.f, 1.f, 0.f,
};
// clang-format on
59
60static bool pixelFormatHasAlpha(QVideoFrameFormat::PixelFormat format)
61{
62 switch (format) {
63 case QVideoFrameFormat::Format_ARGB8888:
64 case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
65 case QVideoFrameFormat::Format_BGRA8888:
66 case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
67 case QVideoFrameFormat::Format_ABGR8888:
68 case QVideoFrameFormat::Format_RGBA8888:
69 case QVideoFrameFormat::Format_AYUV:
70 case QVideoFrameFormat::Format_AYUV_Premultiplied:
71 return true;
72 default:
73 return false;
74 }
75};
76
77static QShader ensureShader(const QString &name)
78{
79 static QCachedValueMap<QString, QShader> shaderCache;
80
81 return shaderCache.ensure(name, [&name]() {
82 QFile f(name);
83 return f.open(QIODevice::ReadOnly) ? QShader::fromSerialized(f.readAll()) : QShader();
84 });
85}
86
87static void rasterTransform(QImage &image, VideoTransformation transformation)
88{
89 QTransform t;
90 if (transformation.rotation != QtVideo::Rotation::None)
91 t.rotate(qreal(transformation.rotation));
92 if (transformation.mirroredHorizontallyAfterRotation)
93 t.scale(-1., 1);
94 if (!t.isIdentity())
95 image = image.transformed(t);
96}
97
98static void imageCleanupHandler(void *info)
99{
100 QByteArray *imageData = reinterpret_cast<QByteArray *>(info);
101 delete imageData;
102}
103
// Builds the shader resource bindings and the graphics pipeline used to draw
// \a frame into the offscreen render target.
//
// On success, \a shaderResourceBindings is populated (binding 0 = uniform
// buffer for vertex+fragment stages, bindings 1..nplanes = one sampled
// texture per video plane) and \a graphicsPipeline is (re)created against
// \a renderPass. Returns false, after logging, when any RHI object or shader
// fails to be created/loaded.
static bool updateTextures(QRhi *rhi,
                           std::unique_ptr<QRhiBuffer> &uniformBuffer,
                           std::unique_ptr<QRhiSampler> &textureSampler,
                           std::unique_ptr<QRhiShaderResourceBindings> &shaderResourceBindings,
                           std::unique_ptr<QRhiGraphicsPipeline> &graphicsPipeline,
                           std::unique_ptr<QRhiRenderPassDescriptor> &renderPass,
                           QVideoFrame &frame,
                           const QVideoFrameTexturesUPtr &videoFrameTextures)
{
    auto format = frame.surfaceFormat();
    auto pixelFormat = format.pixelFormat();

    auto textureDesc = QVideoTextureHelper::textureDescription(pixelFormat);

    // bindings[4] holds 1 uniform buffer + up to 3 plane textures;
    // assumes textureDescription() never reports nplanes > 3 — TODO confirm.
    QRhiShaderResourceBinding bindings[4];
    auto *b = bindings;
    *b++ = QRhiShaderResourceBinding::uniformBuffer(0, QRhiShaderResourceBinding::VertexStage | QRhiShaderResourceBinding::FragmentStage,
                                                    uniformBuffer.get());
    for (int i = 0; i < textureDesc->nplanes; ++i)
        *b++ = QRhiShaderResourceBinding::sampledTexture(i + 1, QRhiShaderResourceBinding::FragmentStage,
                                                         videoFrameTextures->texture(i), textureSampler.get());
    shaderResourceBindings->setBindings(bindings, b);
    if (!shaderResourceBindings->create()) {
        qCDebug(qLcVideoFrameConverter)
                << Q_FUNC_INFO << ": failed to create shader resource bindings";
        return false;
    }

    graphicsPipeline.reset(rhi->newGraphicsPipeline());
    graphicsPipeline->setTopology(QRhiGraphicsPipeline::TriangleStrip);

    // Vertex/fragment shader files are selected from the frame format (and
    // RHI backend for the fragment stage); loaded via the process-wide cache.
    QShader vs = ensureShader(QVideoTextureHelper::vertexShaderFileName(format));
    if (!vs.isValid())
        return false;

    QShader fs = ensureShader(QVideoTextureHelper::fragmentShaderFileName(format, rhi));
    if (!fs.isValid())
        return false;

    graphicsPipeline->setShaderStages({
        { QRhiShaderStage::Vertex, vs },
        { QRhiShaderStage::Fragment, fs }
    });

    // Matches g_quad: 4 floats per vertex — location 0 = position (x,y),
    // location 1 = texture coordinate (u,v).
    QRhiVertexInputLayout inputLayout;
    inputLayout.setBindings({
        { 4 * sizeof(float) }
    });
    inputLayout.setAttributes({
        { 0, 0, QRhiVertexInputAttribute::Float2, 0 },
        { 0, 1, QRhiVertexInputAttribute::Float2, 2 * sizeof(float) }
    });

    graphicsPipeline->setVertexInputLayout(inputLayout);
    graphicsPipeline->setShaderResourceBindings(shaderResourceBindings.get());
    graphicsPipeline->setRenderPassDescriptor(renderPass.get());
    if (!graphicsPipeline->create()) {
        qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": failed to create graphics pipeline";
        return false;
    }

    return true;
}
167
168static QImage convertJPEG(const QVideoFrame &frame, const VideoTransformation &transform)
169{
170 QVideoFrame varFrame = frame;
171 if (!varFrame.map(QVideoFrame::ReadOnly)) {
172 qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": frame mapping failed";
173 return {};
174 }
175
176 auto unmap = std::optional(QScopeGuard([&] {
177 varFrame.unmap();
178 }));
179
180 QSpan<uchar> jpegData{
181 varFrame.bits(0),
182 varFrame.mappedBytes(0),
183 };
184
185 constexpr std::array<uchar, 2> soiMarker{ uchar(0xff), uchar(0xd8) };
186 if (!ranges::equal(jpegData.first(2), soiMarker, std::equal_to<void>{})) {
187 qCDebug(qLcVideoFrameConverter)
188 << Q_FUNC_INFO << ": JPEG data does not start with SOI marker";
189 return QImage{};
190 }
191
192 constexpr std::array<uchar, 2> eoiMarker{ uchar(0xff), uchar(0xd9) };
193
194 // some JPEG cameras contain extra data after the JPEG marker. If so, we drop it to make
195 // libjpeg happy.
196 if (!ranges::equal(jpegData.last(2), eoiMarker, std::equal_to<void>{})) {
197 qCDebug(qLcVideoFrameConverter)
198 << Q_FUNC_INFO << ": JPEG data does not end with EOI marker";
199
200 auto eoi_it = std::find_end(jpegData.begin(), jpegData.end(), std::begin(eoiMarker),
201 std::end(eoiMarker));
202 if (eoi_it == jpegData.end()) {
203 qCWarning(qLcVideoFrameConverter)
204 << Q_FUNC_INFO << ": JPEG data does not contain EOI marker";
205 return QImage{};
206 };
207
208 const size_t newSize = std::distance(jpegData.begin(), eoi_it) + std::size(eoiMarker);
209 jpegData = jpegData.first(newSize);
210 }
211
212 QImage image = QImage::fromData(jpegData, "JPG");
213 unmap = std::nullopt; // Release unmap guard
214 rasterTransform(image, transform);
215 return image;
216}
217
218static QImage convertCPU(const QVideoFrame &frame, const VideoTransformation &transform)
219{
220 VideoFrameConvertFunc convert = qConverterForFormat(frame.pixelFormat());
221 if (!convert) {
222 qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": unsupported pixel format" << frame.pixelFormat();
223 return {};
224 } else {
225 QVideoFrame varFrame = frame;
226 if (!varFrame.map(QVideoFrame::ReadOnly)) {
227 qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": frame mapping failed";
228 return {};
229 }
230 auto format = pixelFormatHasAlpha(varFrame.pixelFormat()) ? QImage::Format_ARGB32_Premultiplied : QImage::Format_RGB32;
231 QImage image = QImage(varFrame.width(), varFrame.height(), format);
232 convert(varFrame, image.bits());
233 varFrame.unmap();
234 rasterTransform(image, transform);
235 return image;
236 }
237}
238
// Convenience overload: converts \a frame using the normalized surface
// transformation derived from its surface format. See the three-argument
// overload for the conversion itself.
QImage qImageFromVideoFrame(const QVideoFrame &frame, bool forceCpu)
{
    // by default, surface transformation is applied, as full transformation is used for presentation only
    return qImageFromVideoFrame(frame, qNormalizedSurfaceTransformation(frame.surfaceFormat()),
                                forceCpu);
}
245
246QImage qImageFromVideoFrame(const QVideoFrame &frame, const VideoTransformation &transformation,
247 bool forceCpu)
248{
249#ifdef Q_OS_DARWIN
250 QMacAutoReleasePool releasePool;
251#endif
252
253 std::unique_ptr<QRhiRenderPassDescriptor> renderPass;
254 std::unique_ptr<QRhiBuffer> vertexBuffer;
255 std::unique_ptr<QRhiBuffer> uniformBuffer;
256 std::unique_ptr<QRhiTexture> targetTexture;
257 std::unique_ptr<QRhiTextureRenderTarget> renderTarget;
258 std::unique_ptr<QRhiSampler> textureSampler;
259 std::unique_ptr<QRhiShaderResourceBindings> shaderResourceBindings;
260 std::unique_ptr<QRhiGraphicsPipeline> graphicsPipeline;
261
262 if (frame.size().isEmpty() || frame.pixelFormat() == QVideoFrameFormat::Format_Invalid)
263 return {};
264
265 if (frame.pixelFormat() == QVideoFrameFormat::Format_Jpeg)
266 return convertJPEG(frame, transformation);
267
268 if (forceCpu) // For test purposes
269 return convertCPU(frame, transformation);
270
271 QRhi *rhi = nullptr;
272
273 if (QHwVideoBuffer *buffer = QVideoFramePrivate::hwBuffer(frame))
274 rhi = buffer->associatedCurrentThreadRhi();
275
276 if (!rhi) {
277 // TODO: if a case with more the one available preferred backends appears,
278 // e.g. vulkan vs opengl, then we should:
279 // 1. Implement QHwVideoBuffer::preferredRhiBackend
280 // 2. Implement a map backend=>rhi inside qEnsureThreadLocalRhi
281 rhi = qEnsureThreadLocalRhi(/*buffer->preferredRhiBackend()*/);
282 }
283
284 if (!rhi || rhi->isRecordingFrame())
285 return convertCPU(frame, transformation);
286
287 Q_ASSERT(rhi->thread()->isCurrentThread());
288
289 // Do conversion using shaders
290
291 const QSize frameSize = qRotatedFrameSize(frame.size(), frame.surfaceFormat().rotation());
292
293 vertexBuffer.reset(rhi->newBuffer(QRhiBuffer::Immutable, QRhiBuffer::VertexBuffer, sizeof(g_quad)));
294 if (!vertexBuffer->create()) {
295 qCDebug(qLcVideoFrameConverter) << "Failed to create vertex buffer. Using CPU conversion.";
296 return convertCPU(frame, transformation);
297 }
298
299 uniformBuffer.reset(rhi->newBuffer(QRhiBuffer::Dynamic, QRhiBuffer::UniformBuffer, sizeof(QVideoTextureHelper::UniformData)));
300 if (!uniformBuffer->create()) {
301 qCDebug(qLcVideoFrameConverter) << "Failed to create uniform buffer. Using CPU conversion.";
302 return convertCPU(frame, transformation);
303 }
304
305 textureSampler.reset(rhi->newSampler(QRhiSampler::Linear, QRhiSampler::Linear, QRhiSampler::None,
306 QRhiSampler::ClampToEdge, QRhiSampler::ClampToEdge));
307 if (!textureSampler->create()) {
308 qCDebug(qLcVideoFrameConverter)
309 << "Failed to create texture sampler. Using CPU conversion.";
310 return convertCPU(frame, transformation);
311 }
312
313 shaderResourceBindings.reset(rhi->newShaderResourceBindings());
314
315 targetTexture.reset(rhi->newTexture(QRhiTexture::RGBA8, frameSize, 1, QRhiTexture::RenderTarget));
316 if (!targetTexture->create()) {
317 qCDebug(qLcVideoFrameConverter) << "Failed to create target texture. Using CPU conversion.";
318 return convertCPU(frame, transformation);
319 }
320
321 renderTarget.reset(rhi->newTextureRenderTarget({ { targetTexture.get() } }));
322 renderPass.reset(renderTarget->newCompatibleRenderPassDescriptor());
323 renderTarget->setRenderPassDescriptor(renderPass.get());
324 if (!renderTarget->create()) {
325 qCDebug(qLcVideoFrameConverter) << "Failed to create render target. Using CPU conversion.";
326 return convertCPU(frame, transformation);
327 }
328
329 QRhiCommandBuffer *cb = nullptr;
330 QRhi::FrameOpResult r = rhi->beginOffscreenFrame(&cb);
331 if (r != QRhi::FrameOpSuccess) {
332 qCDebug(qLcVideoFrameConverter) << "Failed to set up offscreen frame. Using CPU conversion.";
333 return convertCPU(frame, transformation);
334 }
335
336 QRhiResourceUpdateBatch *rub = rhi->nextResourceUpdateBatch();
337 Q_ASSERT(rub);
338
339 rub->uploadStaticBuffer(vertexBuffer.get(), g_quad);
340
341 QVideoFrame frameTmp = frame;
342 QVideoFrameTexturesUPtr texturesTmp;
343 auto videoFrameTextures = QVideoTextureHelper::createTextures(frameTmp, *rhi, *rub, texturesTmp);
344 if (!videoFrameTextures) {
345 qCDebug(qLcVideoFrameConverter) << "Failed obtain textures. Using CPU conversion.";
346 return convertCPU(frame, transformation);
347 }
348
349 if (!updateTextures(rhi, uniformBuffer, textureSampler, shaderResourceBindings,
350 graphicsPipeline, renderPass, frameTmp, videoFrameTextures)) {
351 qCDebug(qLcVideoFrameConverter) << "Failed to update textures. Using CPU conversion.";
352 return convertCPU(frame, transformation);
353 }
354
355 float xScale = transformation.mirroredHorizontallyAfterRotation ? -1.0 : 1.0;
356 float yScale = 1.f;
357
358 if (rhi->isYUpInFramebuffer())
359 yScale = -yScale;
360
361 QMatrix4x4 transform;
362 transform.scale(xScale, yScale);
363
364 QByteArray uniformData(sizeof(QVideoTextureHelper::UniformData), Qt::Uninitialized);
365 QVideoTextureHelper::updateUniformData(&uniformData, rhi, frame.surfaceFormat(), frame,
366 transform, 1.f);
367 rub->updateDynamicBuffer(uniformBuffer.get(), 0, uniformData.size(), uniformData.constData());
368
369 cb->beginPass(renderTarget.get(), Qt::black, { 1.0f, 0 }, rub);
370 cb->setGraphicsPipeline(graphicsPipeline.get());
371
372 cb->setViewport({ 0, 0, float(frameSize.width()), float(frameSize.height()) });
373 cb->setShaderResources(shaderResourceBindings.get());
374
375 const quint32 vertexOffset = quint32(sizeof(float)) * 16 * transformation.rotationIndex();
376 const QRhiCommandBuffer::VertexInput vbufBinding(vertexBuffer.get(), vertexOffset);
377 cb->setVertexInput(0, 1, &vbufBinding);
378 cb->draw(4);
379
380 QRhiReadbackDescription readDesc(targetTexture.get());
381 QRhiReadbackResult readResult;
382 bool readCompleted = false;
383
384 readResult.completed = [&readCompleted] { readCompleted = true; };
385
386 rub = rhi->nextResourceUpdateBatch();
387 rub->readBackTexture(readDesc, &readResult);
388
389 cb->endPass(rub);
390
391 rhi->endOffscreenFrame();
392
393 if (!readCompleted) {
394 qCDebug(qLcVideoFrameConverter) << "Failed to read back texture. Using CPU conversion.";
395 return convertCPU(frame, transformation);
396 }
397
398 QByteArray *imageData = new QByteArray(readResult.data);
399
400 return QImage(reinterpret_cast<const uchar *>(imageData->constData()),
401 readResult.pixelSize.width(), readResult.pixelSize.height(),
402 QImage::Format_RGBA8888_Premultiplied, imageCleanupHandler, imageData);
403}
404
405QImage videoFramePlaneAsImage(QVideoFrame &frame, int plane, QImage::Format targetFormat,
406 QSize targetSize)
407{
408 if (plane >= frame.planeCount())
409 return {};
410
411 if (!frame.map(QVideoFrame::ReadOnly)) {
412 qWarning() << "Cannot map a video frame in ReadOnly mode!";
413 return {};
414 }
415
416 auto frameHandle = QVideoFramePrivate::handle(frame);
417
418 // With incrementing the reference counter, we share the mapped QVideoFrame
419 // with the target QImage. The function imageCleanupFunction is going to adopt
420 // the frameHandle by QVideoFrame and dereference it upon the destruction.
421 frameHandle->ref.ref();
422
423 auto imageCleanupFunction = [](void *data) {
424 QVideoFrame frame = reinterpret_cast<QVideoFramePrivate *>(data)->adoptThisByVideoFrame();
425 Q_ASSERT(frame.isMapped());
426 frame.unmap();
427 };
428
429 const auto bytesPerLine = frame.bytesPerLine(plane);
430 const auto height =
431 bytesPerLine ? qMin(targetSize.height(), frame.mappedBytes(plane) / bytesPerLine) : 0;
432
433 return QImage(reinterpret_cast<const uchar *>(frame.bits(plane)), targetSize.width(), height,
434 bytesPerLine, targetFormat, imageCleanupFunction, frameHandle);
435}
436
437QT_END_NAMESPACE
\inmodule QtGui
Definition qimage.h:38
\inmodule QtGuiPrivate \inheaderfile rhi/qrhi.h
Definition qrhi.h:783
\inmodule QtGuiPrivate \inheaderfile rhi/qrhi.h
Definition qrhi.h:441
\inmodule QtGuiPrivate \inheaderfile rhi/qrhi.h
Definition qrhi.h:323
\inmodule QtGui
Definition qshader.h:81
static QVideoFramePrivate * handle(QVideoFrame &frame)
Combined button and popup list for selecting options.
QT_BEGIN_NAMESPACE Q_STATIC_LOGGING_CATEGORY(lcSynthesizedIterableAccess, "qt.iterable.synthesized", QtWarningMsg)
static bool updateTextures(QRhi *rhi, std::unique_ptr< QRhiBuffer > &uniformBuffer, std::unique_ptr< QRhiSampler > &textureSampler, std::unique_ptr< QRhiShaderResourceBindings > &shaderResourceBindings, std::unique_ptr< QRhiGraphicsPipeline > &graphicsPipeline, std::unique_ptr< QRhiRenderPassDescriptor > &renderPass, QVideoFrame &frame, const QVideoFrameTexturesUPtr &videoFrameTextures)
static QImage convertJPEG(const QVideoFrame &frame, const VideoTransformation &transform)
static QT_BEGIN_NAMESPACE constexpr float g_quad[]
QImage qImageFromVideoFrame(const QVideoFrame &frame, const VideoTransformation &transformation, bool forceCpu)
QImage qImageFromVideoFrame(const QVideoFrame &frame, bool forceCpu)
static QShader ensureShader(const QString &name)
static bool pixelFormatHasAlpha(QVideoFrameFormat::PixelFormat format)
static QImage convertCPU(const QVideoFrame &frame, const VideoTransformation &transform)
static void rasterTransform(QImage &image, VideoTransformation transformation)
QImage videoFramePlaneAsImage(QVideoFrame &frame, int plane, QImage::Format targetFormat, QSize targetSize)
Maps the video frame and returns an image that shares ownership of the video frame, referencing the mapped data of the requested plane directly.
static void imageCleanupHandler(void *info)