Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
qvideoframeconverter.cpp
// Copyright (C) 2022 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
// Qt-Security score:critical reason:data-parser

#include "qcachedvalue_p.h"

#include <QtCore/qcoreapplication.h>
#include <QtCore/qsize.h>
#include <QtCore/qhash.h>
#include <QtCore/qfile.h>
#include <QtGui/qimage.h>
#include <QtCore/qloggingcategory.h>

#include <QtMultimedia/private/qmultimedia_ranges_p.h>
#include <QtMultimedia/private/qvideotexturehelper_p.h>

#include <rhi/qrhi.h>

#ifdef Q_OS_DARWIN
#include <QtCore/private/qcore_mac_p.h>
#endif

QT_BEGIN_NAMESPACE

Q_STATIC_LOGGING_CATEGORY(qLcVideoFrameConverter, "qt.multimedia.video.frameconverter")

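// g_quad packs one quad per supported rotation: four vertices of (x, y, u, v),
// i.e. 16 floats per rotation block. qImageFromVideoFrame() selects the block for
// the frame's rotation via the vertex-buffer offset (16 * sizeof(float) * rotationIndex()).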
33// clang-format off
34static constexpr float g_quad[] = {
35 // Rotation 0 CW
36 1.f, -1.f, 1.f, 1.f,
37 1.f, 1.f, 1.f, 0.f,
38 -1.f, -1.f, 0.f, 1.f,
39 -1.f, 1.f, 0.f, 0.f,
40 // Rotation 90 CW
41 1.f, -1.f, 1.f, 0.f,
42 1.f, 1.f, 0.f, 0.f,
43 -1.f, -1.f, 1.f, 1.f,
44 -1.f, 1.f, 0.f, 1.f,
45 // Rotation 180 CW
46 1.f, -1.f, 0.f, 0.f,
47 1.f, 1.f, 0.f, 1.f,
48 -1.f, -1.f, 1.f, 0.f,
49 -1.f, 1.f, 1.f, 1.f,
50 // Rotation 270 CW
51 1.f, -1.f, 0.f, 1.f,
52 1.f, 1.f, 1.f, 1.f,
53 -1.f, -1.f, 0.f, 0.f,
54 -1.f, 1.f, 1.f, 0.f,
55};
56// clang-format on

static bool pixelFormatHasAlpha(QVideoFrameFormat::PixelFormat format)
{
    switch (format) {
    case QVideoFrameFormat::Format_ARGB8888:
    case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
    case QVideoFrameFormat::Format_BGRA8888:
    case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
    case QVideoFrameFormat::Format_ABGR8888:
    case QVideoFrameFormat::Format_RGBA8888:
    case QVideoFrameFormat::Format_AYUV:
    case QVideoFrameFormat::Format_AYUV_Premultiplied:
        return true;
    default:
        return false;
    }
}

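// Loads a serialized QShader from the given file once and caches it by file name,
// so repeated conversions do not re-read and re-deserialize the same shader.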
static QShader ensureShader(const QString &name)
{
    static QCachedValueMap<QString, QShader> shaderCache;

    return shaderCache.ensure(name, [&name]() {
        QFile f(name);
        return f.open(QIODevice::ReadOnly) ? QShader::fromSerialized(f.readAll()) : QShader();
    });
}

static void rasterTransform(QImage &image, VideoTransformation transformation)
{
    QTransform t;
    if (transformation.rotation != QtVideo::Rotation::None)
        t.rotate(qreal(transformation.rotation));
    if (transformation.mirroredHorizontallyAfterRotation)
        t.scale(-1., 1);
    if (!t.isIdentity())
        image = image.transformed(t);
}

static void imageCleanupHandler(void *info)
{
    QByteArray *imageData = reinterpret_cast<QByteArray *>(info);
    delete imageData;
}

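// Rebuilds the GPU state that depends on the frame's format: binds the uniform
// buffer plus one sampled texture per plane, and (re)creates the graphics pipeline
// with the vertex and fragment shaders selected for the frame's surface format.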
static bool updateTextures(QRhi *rhi,
                           std::unique_ptr<QRhiBuffer> &uniformBuffer,
                           std::unique_ptr<QRhiSampler> &textureSampler,
                           std::unique_ptr<QRhiShaderResourceBindings> &shaderResourceBindings,
                           std::unique_ptr<QRhiGraphicsPipeline> &graphicsPipeline,
                           std::unique_ptr<QRhiRenderPassDescriptor> &renderPass,
                           QVideoFrame &frame,
                           const QVideoFrameTexturesUPtr &videoFrameTextures)
{
    auto format = frame.surfaceFormat();
    auto pixelFormat = format.pixelFormat();

    auto textureDesc = QVideoTextureHelper::textureDescription(pixelFormat);

    QRhiShaderResourceBinding bindings[4];
    auto *b = bindings;
    *b++ = QRhiShaderResourceBinding::uniformBuffer(0, QRhiShaderResourceBinding::VertexStage | QRhiShaderResourceBinding::FragmentStage,
                                                    uniformBuffer.get());
    for (int i = 0; i < textureDesc->nplanes; ++i)
        *b++ = QRhiShaderResourceBinding::sampledTexture(i + 1, QRhiShaderResourceBinding::FragmentStage,
                                                         videoFrameTextures->texture(i), textureSampler.get());
    shaderResourceBindings->setBindings(bindings, b);
    if (!shaderResourceBindings->create()) {
        qCDebug(qLcVideoFrameConverter)
                << Q_FUNC_INFO << ": failed to create shader resource bindings";
        return false;
    }

    graphicsPipeline.reset(rhi->newGraphicsPipeline());
    graphicsPipeline->setTopology(QRhiGraphicsPipeline::TriangleStrip);

    QShader vs = ensureShader(QVideoTextureHelper::vertexShaderFileName(format));
    if (!vs.isValid())
        return false;

    QShader fs = ensureShader(QVideoTextureHelper::fragmentShaderFileName(format, rhi));
    if (!fs.isValid())
        return false;

    graphicsPipeline->setShaderStages({
        { QRhiShaderStage::Vertex, vs },
        { QRhiShaderStage::Fragment, fs }
    });

    QRhiVertexInputLayout inputLayout;
    inputLayout.setBindings({
        { 4 * sizeof(float) }
    });
    inputLayout.setAttributes({
        { 0, 0, QRhiVertexInputAttribute::Float2, 0 },
        { 0, 1, QRhiVertexInputAttribute::Float2, 2 * sizeof(float) }
    });

    graphicsPipeline->setVertexInputLayout(inputLayout);
    graphicsPipeline->setShaderResourceBindings(shaderResourceBindings.get());
    graphicsPipeline->setRenderPassDescriptor(renderPass.get());
    if (!graphicsPipeline->create()) {
        qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": failed to create graphics pipeline";
        return false;
    }

    return true;
}

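// Decodes a frame that carries raw JPEG data: maps the frame, validates the
// SOI marker, trims any trailing bytes past the EOI marker, decodes via
// QImage::fromData(), and finally applies the requested raster transform.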
static QImage convertJPEG(const QVideoFrame &frame, const VideoTransformation &transform)
{
    QVideoFrame varFrame = frame;
    if (!varFrame.map(QVideoFrame::ReadOnly)) {
        qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": frame mapping failed";
        return {};
    }

    auto unmap = std::optional(QScopeGuard([&] {
        varFrame.unmap();
    }));

    QSpan<uchar> jpegData{
        varFrame.bits(0),
        varFrame.mappedBytes(0),
    };

    constexpr std::array<uchar, 2> soiMarker{ uchar(0xff), uchar(0xd8) };
    if (!QtMultimediaPrivate::ranges::equal(jpegData.first(2), soiMarker, std::equal_to<void>{})) {
        qCDebug(qLcVideoFrameConverter)
                << Q_FUNC_INFO << ": JPEG data does not start with SOI marker";
        return QImage{};
    }

    constexpr std::array<uchar, 2> eoiMarker{ uchar(0xff), uchar(0xd9) };

    // Some JPEG cameras append extra data after the EOI marker. If so, we drop it
    // to keep libjpeg happy.
    if (!QtMultimediaPrivate::ranges::equal(jpegData.last(2), eoiMarker, std::equal_to<void>{})) {
        qCDebug(qLcVideoFrameConverter)
                << Q_FUNC_INFO << ": JPEG data does not end with EOI marker";

        auto eoi_it = std::find_end(jpegData.begin(), jpegData.end(), std::begin(eoiMarker),
                                    std::end(eoiMarker));
        if (eoi_it == jpegData.end()) {
            qCWarning(qLcVideoFrameConverter)
                    << Q_FUNC_INFO << ": JPEG data does not contain EOI marker";
            return QImage{};
        }

        const size_t newSize = std::distance(jpegData.begin(), eoi_it) + std::size(eoiMarker);
        jpegData = jpegData.first(newSize);
    }

    QImage image = QImage::fromData(jpegData, "JPG");
    unmap = std::nullopt; // Release the unmap guard: the frame data has already been decoded into the QImage.
    rasterTransform(image, transform);
    return image;
}

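// Software fallback: converts the mapped frame into an RGB(A) QImage using the
// per-pixel-format conversion routine returned by qConverterForFormat().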
static QImage convertCPU(const QVideoFrame &frame, const VideoTransformation &transform)
{
    VideoFrameConvertFunc convert = qConverterForFormat(frame.pixelFormat());
    if (!convert) {
        qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": unsupported pixel format" << frame.pixelFormat();
        return {};
    } else {
        QVideoFrame varFrame = frame;
        if (!varFrame.map(QVideoFrame::ReadOnly)) {
            qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": frame mapping failed";
            return {};
        }
        auto format = pixelFormatHasAlpha(varFrame.pixelFormat()) ? QImage::Format_ARGB32_Premultiplied : QImage::Format_RGB32;
        QImage image = QImage(varFrame.width(), varFrame.height(), format);
        convert(varFrame, image.bits());
        varFrame.unmap();
        rasterTransform(image, transform);
        return image;
    }
}

QImage qImageFromVideoFrame(const QVideoFrame &frame, bool forceCpu)
{
    // By default only the surface transformation is applied; the full transformation
    // is used for presentation only.
    return qImageFromVideoFrame(frame, qNormalizedSurfaceTransformation(frame.surfaceFormat()),
                                forceCpu);
}

QImage qImageFromVideoFrame(const QVideoFrame &frame, const VideoTransformation &transformation,
                            bool forceCpu)
{
#ifdef Q_OS_DARWIN
    QMacAutoReleasePool releasePool;
#endif

    std::unique_ptr<QRhiRenderPassDescriptor> renderPass;
    std::unique_ptr<QRhiBuffer> vertexBuffer;
    std::unique_ptr<QRhiBuffer> uniformBuffer;
    std::unique_ptr<QRhiTexture> targetTexture;
    std::unique_ptr<QRhiTextureRenderTarget> renderTarget;
    std::unique_ptr<QRhiSampler> textureSampler;
    std::unique_ptr<QRhiShaderResourceBindings> shaderResourceBindings;
    std::unique_ptr<QRhiGraphicsPipeline> graphicsPipeline;

    if (frame.size().isEmpty() || frame.pixelFormat() == QVideoFrameFormat::Format_Invalid)
        return {};

    if (frame.pixelFormat() == QVideoFrameFormat::Format_Jpeg)
        return convertJPEG(frame, transformation);

    if (forceCpu) // For test purposes
        return convertCPU(frame, transformation);

    QRhi *rhi = nullptr;

    if (QHwVideoBuffer *buffer = QVideoFramePrivate::hwBuffer(frame))
        rhi = buffer->rhi();

    if (!rhi || !rhi->thread()->isCurrentThread())
        rhi = qEnsureThreadLocalRhi(rhi);

    if (!rhi || rhi->isRecordingFrame())
        return convertCPU(frame, transformation);

    // Do conversion using shaders
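    // The GPU path renders the frame as a single textured quad into an offscreen
    // RGBA8 target and reads the result back; if any RHI resource fails to be
    // created, we fall back to convertCPU().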

    const QSize frameSize = qRotatedFrameSize(frame.size(), frame.surfaceFormat().rotation());

    vertexBuffer.reset(rhi->newBuffer(QRhiBuffer::Immutable, QRhiBuffer::VertexBuffer, sizeof(g_quad)));
    if (!vertexBuffer->create()) {
        qCDebug(qLcVideoFrameConverter) << "Failed to create vertex buffer. Using CPU conversion.";
        return convertCPU(frame, transformation);
    }

    uniformBuffer.reset(rhi->newBuffer(QRhiBuffer::Dynamic, QRhiBuffer::UniformBuffer, sizeof(QVideoTextureHelper::UniformData)));
    if (!uniformBuffer->create()) {
        qCDebug(qLcVideoFrameConverter) << "Failed to create uniform buffer. Using CPU conversion.";
        return convertCPU(frame, transformation);
    }

    textureSampler.reset(rhi->newSampler(QRhiSampler::Linear, QRhiSampler::Linear, QRhiSampler::None,
                                         QRhiSampler::ClampToEdge, QRhiSampler::ClampToEdge));
    if (!textureSampler->create()) {
        qCDebug(qLcVideoFrameConverter)
                << "Failed to create texture sampler. Using CPU conversion.";
        return convertCPU(frame, transformation);
    }

    shaderResourceBindings.reset(rhi->newShaderResourceBindings());

    targetTexture.reset(rhi->newTexture(QRhiTexture::RGBA8, frameSize, 1, QRhiTexture::RenderTarget));
    if (!targetTexture->create()) {
        qCDebug(qLcVideoFrameConverter) << "Failed to create target texture. Using CPU conversion.";
        return convertCPU(frame, transformation);
    }

    renderTarget.reset(rhi->newTextureRenderTarget({ { targetTexture.get() } }));
    renderPass.reset(renderTarget->newCompatibleRenderPassDescriptor());
    renderTarget->setRenderPassDescriptor(renderPass.get());
    if (!renderTarget->create()) {
        qCDebug(qLcVideoFrameConverter) << "Failed to create render target. Using CPU conversion.";
        return convertCPU(frame, transformation);
    }

    QRhiCommandBuffer *cb = nullptr;
    QRhi::FrameOpResult r = rhi->beginOffscreenFrame(&cb);
    if (r != QRhi::FrameOpSuccess) {
        qCDebug(qLcVideoFrameConverter) << "Failed to set up offscreen frame. Using CPU conversion.";
        return convertCPU(frame, transformation);
    }

    QRhiResourceUpdateBatch *rub = rhi->nextResourceUpdateBatch();
    Q_ASSERT(rub);

    rub->uploadStaticBuffer(vertexBuffer.get(), g_quad);

    QVideoFrame frameTmp = frame;
    QVideoFrameTexturesUPtr texturesTmp;
    auto videoFrameTextures = QVideoTextureHelper::createTextures(frameTmp, *rhi, *rub, texturesTmp);
    if (!videoFrameTextures) {
        qCDebug(qLcVideoFrameConverter) << "Failed to obtain textures. Using CPU conversion.";
        return convertCPU(frame, transformation);
    }

    if (!updateTextures(rhi, uniformBuffer, textureSampler, shaderResourceBindings,
                        graphicsPipeline, renderPass, frameTmp, videoFrameTextures)) {
        qCDebug(qLcVideoFrameConverter) << "Failed to update textures. Using CPU conversion.";
        return convertCPU(frame, transformation);
    }

    float xScale = transformation.mirroredHorizontallyAfterRotation ? -1.0 : 1.0;
    float yScale = 1.f;

    if (rhi->isYUpInFramebuffer())
        yScale = -yScale;

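    // Horizontal mirroring and the RHI's framebuffer orientation are folded into
    // the vertex transform that goes into the uniform data.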
    QMatrix4x4 transform;
    transform.scale(xScale, yScale);

    QByteArray uniformData(sizeof(QVideoTextureHelper::UniformData), Qt::Uninitialized);
    QVideoTextureHelper::updateUniformData(&uniformData, rhi, frame.surfaceFormat(), frame,
                                           transform, 1.f);
    rub->updateDynamicBuffer(uniformBuffer.get(), 0, uniformData.size(), uniformData.constData());

    cb->beginPass(renderTarget.get(), Qt::black, { 1.0f, 0 }, rub);
    cb->setGraphicsPipeline(graphicsPipeline.get());

    cb->setViewport({ 0, 0, float(frameSize.width()), float(frameSize.height()) });
    cb->setShaderResources(shaderResourceBindings.get());

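    // Select the 16-float vertex block in g_quad that matches the frame's rotation.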
    const quint32 vertexOffset = quint32(sizeof(float)) * 16 * transformation.rotationIndex();
    const QRhiCommandBuffer::VertexInput vbufBinding(vertexBuffer.get(), vertexOffset);
    cb->setVertexInput(0, 1, &vbufBinding);
    cb->draw(4);

    QRhiReadbackDescription readDesc(targetTexture.get());
    QRhiReadbackResult readResult;
    bool readCompleted = false;

    readResult.completed = [&readCompleted] { readCompleted = true; };

    rub = rhi->nextResourceUpdateBatch();
    rub->readBackTexture(readDesc, &readResult);

    cb->endPass(rub);

    rhi->endOffscreenFrame();

    if (!readCompleted) {
        qCDebug(qLcVideoFrameConverter) << "Failed to read back texture. Using CPU conversion.";
        return convertCPU(frame, transformation);
    }

    QByteArray *imageData = new QByteArray(readResult.data);

    return QImage(reinterpret_cast<const uchar *>(imageData->constData()),
                  readResult.pixelSize.width(), readResult.pixelSize.height(),
                  QImage::Format_RGBA8888_Premultiplied, imageCleanupHandler, imageData);
}

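// Wraps a single plane of the mapped frame in a QImage without copying. The QImage
// shares ownership of the mapped frame: the frame stays mapped until the image is
// destroyed and its cleanup function runs.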
QImage videoFramePlaneAsImage(QVideoFrame &frame, int plane, QImage::Format targetFormat,
                              QSize targetSize)
{
    if (plane >= frame.planeCount())
        return {};

    if (!frame.map(QVideoFrame::ReadOnly)) {
        qWarning() << "Cannot map a video frame in ReadOnly mode!";
        return {};
    }

    auto frameHandle = QVideoFramePrivate::handle(frame);

    // By incrementing the reference counter, we share the mapped QVideoFrame with
    // the target QImage. imageCleanupFunction adopts the frameHandle back into a
    // QVideoFrame and dereferences it upon destruction.
    frameHandle->ref.ref();

    auto imageCleanupFunction = [](void *data) {
        QVideoFrame frame = reinterpret_cast<QVideoFramePrivate *>(data)->adoptThisByVideoFrame();
        Q_ASSERT(frame.isMapped());
        frame.unmap();
    };

    const auto bytesPerLine = frame.bytesPerLine(plane);
    const auto height =
            bytesPerLine ? qMin(targetSize.height(), frame.mappedBytes(plane) / bytesPerLine) : 0;

    return QImage(reinterpret_cast<const uchar *>(frame.bits(plane)), targetSize.width(), height,
                  bytesPerLine, targetFormat, imageCleanupFunction, frameHandle);
}

QT_END_NAMESPACE
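For contributors experimenting with this converter in isolation, the sketch below shows how the helpers above are typically driven. It assumes the declarations come from the private header qvideoframeconverter_p.h (private API, subject to change without notice) and that the caller already has a valid, mappable QVideoFrame.

#include <QtMultimedia/private/qvideoframeconverter_p.h>
#include <QtMultimedia/qvideoframe.h>
#include <QtGui/qimage.h>

QImage snapshotFrame(const QVideoFrame &frame)
{
    // Uses the frame's surface transformation by default; pass forceCpu = true
    // to bypass the RHI path (as the tests do) and exercise convertCPU() directly.
    return qImageFromVideoFrame(frame, /*forceCpu=*/false);
}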