Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
qvideoframeconverter.cpp
Go to the documentation of this file.
1// Copyright (C) 2022 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3// Qt-Security score:critical reason:data-parser
4
11#include "qcachedvalue_p.h"
12
13#include <QtCore/qcoreapplication.h>
14#include <QtCore/qsize.h>
15#include <QtCore/qhash.h>
16#include <QtCore/qfile.h>
17#include <QtGui/qimage.h>
18#include <QtCore/qloggingcategory.h>
19
20#include <QtMultimedia/private/qmultimedia_ranges_p.h>
21#include <QtMultimedia/private/qvideotexturehelper_p.h>
22
23#include <rhi/qrhi.h>
24
25#ifdef Q_OS_DARWIN
26#include <QtCore/private/qcore_mac_p.h>
27#endif
28
30
31Q_STATIC_LOGGING_CATEGORY(qLcVideoFrameConverter, "qt.multimedia.video.frameconverter")
32
// clang-format off
// Full-screen quad for a TriangleStrip draw, four vertices per 90-degree
// rotation. Each vertex is 4 floats: clip-space x, y followed by texture
// coordinates u, v. The draw call selects one of the four sets via a byte
// offset of 16 floats * rotationIndex() into this vertex buffer.
static constexpr float g_quad[] = {
    // Rotation 0 CW
    1.f, -1.f, 1.f, 1.f,
    1.f, 1.f, 1.f, 0.f,
    -1.f, -1.f, 0.f, 1.f,
    -1.f, 1.f, 0.f, 0.f,
    // Rotation 90 CW
    1.f, -1.f, 1.f, 0.f,
    1.f, 1.f, 0.f, 0.f,
    -1.f, -1.f, 1.f, 1.f,
    -1.f, 1.f, 0.f, 1.f,
    // Rotation 180 CW
    1.f, -1.f, 0.f, 0.f,
    1.f, 1.f, 0.f, 1.f,
    -1.f, -1.f, 1.f, 0.f,
    -1.f, 1.f, 1.f, 1.f,
    // Rotation 270 CW
    1.f, -1.f, 0.f, 1.f,
    1.f, 1.f, 1.f, 1.f,
    -1.f, -1.f, 0.f, 0.f,
    -1.f, 1.f, 1.f, 0.f,
};
// clang-format on
57
58static bool pixelFormatHasAlpha(QVideoFrameFormat::PixelFormat format)
59{
60 switch (format) {
61 case QVideoFrameFormat::Format_ARGB8888:
62 case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
63 case QVideoFrameFormat::Format_BGRA8888:
64 case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
65 case QVideoFrameFormat::Format_ABGR8888:
66 case QVideoFrameFormat::Format_RGBA8888:
67 case QVideoFrameFormat::Format_AYUV:
68 case QVideoFrameFormat::Format_AYUV_Premultiplied:
69 return true;
70 default:
71 return false;
72 }
73};
74
75static QShader ensureShader(const QString &name)
76{
77 static QCachedValueMap<QString, QShader> shaderCache;
78
79 return shaderCache.ensure(name, [&name]() {
80 QFile f(name);
81 return f.open(QIODevice::ReadOnly) ? QShader::fromSerialized(f.readAll()) : QShader();
82 });
83}
84
85static void rasterTransform(QImage &image, VideoTransformation transformation)
86{
87 QTransform t;
88 if (transformation.rotation != QtVideo::Rotation::None)
89 t.rotate(qreal(transformation.rotation));
90 if (transformation.mirroredHorizontallyAfterRotation)
91 t.scale(-1., 1);
92 if (!t.isIdentity())
93 image = image.transformed(t);
94}
95
96static void imageCleanupHandler(void *info)
97{
98 QByteArray *imageData = reinterpret_cast<QByteArray *>(info);
99 delete imageData;
100}
101
// Prepares the RHI resources needed to render \a frame with shaders:
// populates the shader-resource-binding set (uniform buffer at binding 0,
// one sampled texture per plane at bindings 1..n) and (re)creates the
// graphics pipeline with the vertex/fragment shaders matching the frame's
// pixel format. Returns false if either shader fails to load, leaving the
// half-built pipeline to be discarded by the caller.
static bool updateTextures(QRhi *rhi,
                           std::unique_ptr<QRhiBuffer> &uniformBuffer,
                           std::unique_ptr<QRhiSampler> &textureSampler,
                           std::unique_ptr<QRhiShaderResourceBindings> &shaderResourceBindings,
                           std::unique_ptr<QRhiGraphicsPipeline> &graphicsPipeline,
                           std::unique_ptr<QRhiRenderPassDescriptor> &renderPass,
                           QVideoFrame &frame,
                           const QVideoFrameTexturesUPtr &videoFrameTextures)
{
    auto format = frame.surfaceFormat();
    auto pixelFormat = format.pixelFormat();

    auto textureDesc = QVideoTextureHelper::textureDescription(pixelFormat);

    // Max 4 bindings: 1 uniform buffer + up to 3 planes (nplanes <= 3).
    QRhiShaderResourceBinding bindings[4];
    auto *b = bindings;
    // Binding 0: uniform data, visible to both vertex and fragment stages.
    *b++ = QRhiShaderResourceBinding::uniformBuffer(0, QRhiShaderResourceBinding::VertexStage | QRhiShaderResourceBinding::FragmentStage,
                                                    uniformBuffer.get());
    // Bindings 1..nplanes: one sampled texture per plane of the pixel format.
    for (int i = 0; i < textureDesc->nplanes; ++i)
        *b++ = QRhiShaderResourceBinding::sampledTexture(i + 1, QRhiShaderResourceBinding::FragmentStage,
                                                         videoFrameTextures->texture(i), textureSampler.get());
    shaderResourceBindings->setBindings(bindings, b);
    shaderResourceBindings->create();

    graphicsPipeline.reset(rhi->newGraphicsPipeline());
    graphicsPipeline->setTopology(QRhiGraphicsPipeline::TriangleStrip);

    QShader vs = ensureShader(QVideoTextureHelper::vertexShaderFileName(format));
    if (!vs.isValid())
        return false;

    QShader fs = ensureShader(QVideoTextureHelper::fragmentShaderFileName(format, rhi));
    if (!fs.isValid())
        return false;

    graphicsPipeline->setShaderStages({
        { QRhiShaderStage::Vertex, vs },
        { QRhiShaderStage::Fragment, fs }
    });

    // Vertex layout matches g_quad: 4 floats per vertex — position (x, y)
    // at location 0, texture coordinates (u, v) at location 1.
    QRhiVertexInputLayout inputLayout;
    inputLayout.setBindings({
        { 4 * sizeof(float) }
    });
    inputLayout.setAttributes({
        { 0, 0, QRhiVertexInputAttribute::Float2, 0 },
        { 0, 1, QRhiVertexInputAttribute::Float2, 2 * sizeof(float) }
    });

    graphicsPipeline->setVertexInputLayout(inputLayout);
    graphicsPipeline->setShaderResourceBindings(shaderResourceBindings.get());
    graphicsPipeline->setRenderPassDescriptor(renderPass.get());
    graphicsPipeline->create();

    return true;
}
158
159static QImage convertJPEG(const QVideoFrame &frame, const VideoTransformation &transform)
160{
161 QVideoFrame varFrame = frame;
162 if (!varFrame.map(QVideoFrame::ReadOnly)) {
163 qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": frame mapping failed";
164 return {};
165 }
166
167 auto unmap = std::optional(QScopeGuard([&] {
168 varFrame.unmap();
169 }));
170
171 QSpan<uchar> jpegData{
172 varFrame.bits(0),
173 varFrame.mappedBytes(0),
174 };
175
176 constexpr std::array<uchar, 2> soiMarker{ uchar(0xff), uchar(0xd8) };
177 if (!QtMultimediaPrivate::ranges::equal(jpegData.first(2), soiMarker, std::equal_to<void>{})) {
178 qCDebug(qLcVideoFrameConverter)
179 << Q_FUNC_INFO << ": JPEG data does not start with SOI marker";
180 return QImage{};
181 }
182
183 constexpr std::array<uchar, 2> eoiMarker{ uchar(0xff), uchar(0xd9) };
184
185 // some JPEG cameras contain extra data after the JPEG marker. If so, we drop it to make
186 // libjpeg happy.
187 if (!QtMultimediaPrivate::ranges::equal(jpegData.last(2), eoiMarker, std::equal_to<void>{})) {
188 qCDebug(qLcVideoFrameConverter)
189 << Q_FUNC_INFO << ": JPEG data does not end with EOI marker";
190
191 auto eoi_it = std::find_end(jpegData.begin(), jpegData.end(), std::begin(eoiMarker),
192 std::end(eoiMarker));
193 if (eoi_it == jpegData.end()) {
194 qCWarning(qLcVideoFrameConverter)
195 << Q_FUNC_INFO << ": JPEG data does not contain EOI marker";
196 return QImage{};
197 };
198
199 const size_t newSize = std::distance(jpegData.begin(), eoi_it) + std::size(eoiMarker);
200 jpegData = jpegData.first(newSize);
201 }
202
203 QImage image = QImage::fromData(jpegData, "JPG");
204 unmap = std::nullopt; // Release unmap guard
205 rasterTransform(image, transform);
206 return image;
207}
208
209static QImage convertCPU(const QVideoFrame &frame, const VideoTransformation &transform)
210{
211 VideoFrameConvertFunc convert = qConverterForFormat(frame.pixelFormat());
212 if (!convert) {
213 qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": unsupported pixel format" << frame.pixelFormat();
214 return {};
215 } else {
216 QVideoFrame varFrame = frame;
217 if (!varFrame.map(QVideoFrame::ReadOnly)) {
218 qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": frame mapping failed";
219 return {};
220 }
221 auto format = pixelFormatHasAlpha(varFrame.pixelFormat()) ? QImage::Format_ARGB32_Premultiplied : QImage::Format_RGB32;
222 QImage image = QImage(varFrame.width(), varFrame.height(), format);
223 convert(varFrame, image.bits());
224 varFrame.unmap();
225 rasterTransform(image, transform);
226 return image;
227 }
228}
229
// Convenience overload: converts \a frame to a QImage using the frame's own
// normalized surface transformation (rotation/mirroring stored in the
// surface format). The full presentation transformation is intentionally
// not applied here — it is used for presentation only.
QImage qImageFromVideoFrame(const QVideoFrame &frame, bool forceCpu)
{
    // by default, surface transformation is applied, as full transformation is used for presentation only
    return qImageFromVideoFrame(frame, qNormalizedSurfaceTransformation(frame.surfaceFormat()),
                                forceCpu);
}
236
// Converts \a frame to a QImage, applying \a transformation. Prefers a
// GPU path (render the frame's textures through an offscreen RHI pass and
// read the result back); falls back to convertCPU() whenever any stage of
// the GPU setup fails, when no RHI is available, or when \a forceCpu is set.
// JPEG frames are always decoded on the CPU via convertJPEG().
QImage qImageFromVideoFrame(const QVideoFrame &frame, const VideoTransformation &transformation,
                            bool forceCpu)
{
#ifdef Q_OS_DARWIN
    QMacAutoReleasePool releasePool;
#endif

    // Declared up front so they are destroyed in reverse order of creation
    // after the offscreen frame has completed.
    std::unique_ptr<QRhiRenderPassDescriptor> renderPass;
    std::unique_ptr<QRhiBuffer> vertexBuffer;
    std::unique_ptr<QRhiBuffer> uniformBuffer;
    std::unique_ptr<QRhiTexture> targetTexture;
    std::unique_ptr<QRhiTextureRenderTarget> renderTarget;
    std::unique_ptr<QRhiSampler> textureSampler;
    std::unique_ptr<QRhiShaderResourceBindings> shaderResourceBindings;
    std::unique_ptr<QRhiGraphicsPipeline> graphicsPipeline;

    if (frame.size().isEmpty() || frame.pixelFormat() == QVideoFrameFormat::Format_Invalid)
        return {};

    if (frame.pixelFormat() == QVideoFrameFormat::Format_Jpeg)
        return convertJPEG(frame, transformation);

    if (forceCpu) // For test purposes
        return convertCPU(frame, transformation);

    QRhi *rhi = nullptr;

    // Prefer the RHI the frame's hardware buffer was created on.
    if (QHwVideoBuffer *buffer = QVideoFramePrivate::hwBuffer(frame))
        rhi = buffer->rhi();

    // If the frame's RHI belongs to another thread (or there is none), fall
    // back to a thread-local RHI instance.
    if (!rhi || !rhi->thread()->isCurrentThread())
        rhi = qEnsureThreadLocalRhi(rhi);

    if (!rhi || rhi->isRecordingFrame())
        return convertCPU(frame, transformation);

    // Do conversion using shaders

    // Output size accounts for the surface rotation baked into the frame.
    const QSize frameSize = qRotatedFrameSize(frame.size(), frame.surfaceFormat().rotation());

    vertexBuffer.reset(rhi->newBuffer(QRhiBuffer::Immutable, QRhiBuffer::VertexBuffer, sizeof(g_quad)));
    vertexBuffer->create();

    uniformBuffer.reset(rhi->newBuffer(QRhiBuffer::Dynamic, QRhiBuffer::UniformBuffer, sizeof(QVideoTextureHelper::UniformData)));
    uniformBuffer->create();

    textureSampler.reset(rhi->newSampler(QRhiSampler::Linear, QRhiSampler::Linear, QRhiSampler::None,
                                         QRhiSampler::ClampToEdge, QRhiSampler::ClampToEdge));
    textureSampler->create();

    shaderResourceBindings.reset(rhi->newShaderResourceBindings());

    targetTexture.reset(rhi->newTexture(QRhiTexture::RGBA8, frameSize, 1, QRhiTexture::RenderTarget));
    if (!targetTexture->create()) {
        qCDebug(qLcVideoFrameConverter) << "Failed to create target texture. Using CPU conversion.";
        return convertCPU(frame, transformation);
    }

    renderTarget.reset(rhi->newTextureRenderTarget({ { targetTexture.get() } }));
    renderPass.reset(renderTarget->newCompatibleRenderPassDescriptor());
    renderTarget->setRenderPassDescriptor(renderPass.get());
    renderTarget->create();

    QRhiCommandBuffer *cb = nullptr;
    QRhi::FrameOpResult r = rhi->beginOffscreenFrame(&cb);
    if (r != QRhi::FrameOpSuccess) {
        qCDebug(qLcVideoFrameConverter) << "Failed to set up offscreen frame. Using CPU conversion.";
        return convertCPU(frame, transformation);
    }

    QRhiResourceUpdateBatch *rub = rhi->nextResourceUpdateBatch();
    Q_ASSERT(rub);

    rub->uploadStaticBuffer(vertexBuffer.get(), g_quad);

    QVideoFrame frameTmp = frame;
    QVideoFrameTexturesUPtr texturesTmp;
    auto videoFrameTextures = QVideoTextureHelper::createTextures(frameTmp, *rhi, *rub, texturesTmp);
    if (!videoFrameTextures) {
        qCDebug(qLcVideoFrameConverter) << "Failed obtain textures. Using CPU conversion.";
        return convertCPU(frame, transformation);
    }

    if (!updateTextures(rhi, uniformBuffer, textureSampler, shaderResourceBindings,
                        graphicsPipeline, renderPass, frameTmp, videoFrameTextures)) {
        qCDebug(qLcVideoFrameConverter) << "Failed to update textures. Using CPU conversion.";
        return convertCPU(frame, transformation);
    }

    // Mirroring is done by flipping the x axis in the vertex transform; the
    // y axis is flipped when the backend's framebuffer origin is bottom-left.
    float xScale = transformation.mirroredHorizontallyAfterRotation ? -1.0 : 1.0;
    float yScale = 1.f;

    if (rhi->isYUpInFramebuffer())
        yScale = -yScale;

    QMatrix4x4 transform;
    transform.scale(xScale, yScale);

    QByteArray uniformData(sizeof(QVideoTextureHelper::UniformData), Qt::Uninitialized);
    QVideoTextureHelper::updateUniformData(&uniformData, rhi, frame.surfaceFormat(), frame,
                                           transform, 1.f);
    rub->updateDynamicBuffer(uniformBuffer.get(), 0, uniformData.size(), uniformData.constData());

    cb->beginPass(renderTarget.get(), Qt::black, { 1.0f, 0 }, rub);
    cb->setGraphicsPipeline(graphicsPipeline.get());

    cb->setViewport({ 0, 0, float(frameSize.width()), float(frameSize.height()) });
    cb->setShaderResources(shaderResourceBindings.get());

    // Select the quad variant matching the requested rotation: g_quad holds
    // 16 floats (4 vertices) per 90-degree step.
    const quint32 vertexOffset = quint32(sizeof(float)) * 16 * transformation.rotationIndex();
    const QRhiCommandBuffer::VertexInput vbufBinding(vertexBuffer.get(), vertexOffset);
    cb->setVertexInput(0, 1, &vbufBinding);
    cb->draw(4);

    QRhiReadbackDescription readDesc(targetTexture.get());
    QRhiReadbackResult readResult;
    bool readCompleted = false;

    // The readback completes synchronously within endOffscreenFrame().
    readResult.completed = [&readCompleted] { readCompleted = true; };

    rub = rhi->nextResourceUpdateBatch();
    rub->readBackTexture(readDesc, &readResult);

    cb->endPass(rub);

    rhi->endOffscreenFrame();

    if (!readCompleted) {
        qCDebug(qLcVideoFrameConverter) << "Failed to read back texture. Using CPU conversion.";
        return convertCPU(frame, transformation);
    }

    // Hand ownership of the pixel data to the QImage; imageCleanupHandler
    // deletes the QByteArray once the image is destroyed.
    QByteArray *imageData = new QByteArray(readResult.data);

    return QImage(reinterpret_cast<const uchar *>(imageData->constData()),
                  readResult.pixelSize.width(), readResult.pixelSize.height(),
                  QImage::Format_RGBA8888_Premultiplied, imageCleanupHandler, imageData);
}
375
// Maps \a frame read-only and wraps the given \a plane's bytes in a QImage
// of \a targetFormat / \a targetSize WITHOUT copying the pixel data. The
// returned image shares ownership of the mapped frame: the frame's private
// refcount is bumped here and released (after unmapping) by the image's
// cleanup function. Returns a null image when \a plane is out of range or
// mapping fails.
QImage videoFramePlaneAsImage(QVideoFrame &frame, int plane, QImage::Format targetFormat,
                              QSize targetSize)
{
    if (plane >= frame.planeCount())
        return {};

    if (!frame.map(QVideoFrame::ReadOnly)) {
        qWarning() << "Cannot map a video frame in ReadOnly mode!";
        return {};
    }

    auto frameHandle = QVideoFramePrivate::handle(frame);

    // With incrementing the reference counter, we share the mapped QVideoFrame
    // with the target QImage. The function imageCleanupFunction is going to adopt
    // the frameHandle by QVideoFrame and dereference it upon the destruction.
    frameHandle->ref.ref();

    auto imageCleanupFunction = [](void *data) {
        // Re-wrap the private handle in a QVideoFrame; its destructor drops
        // the reference taken above once we have unmapped.
        QVideoFrame frame = reinterpret_cast<QVideoFramePrivate *>(data)->adoptThisByVideoFrame();
        Q_ASSERT(frame.isMapped());
        frame.unmap();
    };

    // Clamp the image height to what the mapped plane actually contains, so
    // the QImage never reads past the mapped bytes.
    const auto bytesPerLine = frame.bytesPerLine(plane);
    const auto height =
            bytesPerLine ? qMin(targetSize.height(), frame.mappedBytes(plane) / bytesPerLine) : 0;

    return QImage(reinterpret_cast<const uchar *>(frame.bits(plane)), targetSize.width(), height,
                  bytesPerLine, targetFormat, imageCleanupFunction, frameHandle);
}
407
408QT_END_NAMESPACE
\inmodule QtGui
Definition qimage.h:37
\inmodule QtGuiPrivate \inheaderfile rhi/qrhi.h
Definition qrhi.h:782
\inmodule QtGuiPrivate \inheaderfile rhi/qrhi.h
Definition qrhi.h:440
\inmodule QtGuiPrivate \inheaderfile rhi/qrhi.h
Definition qrhi.h:322
\inmodule QtGui
Definition qshader.h:81
static QVideoFramePrivate * handle(QVideoFrame &frame)
static bool updateTextures(QRhi *rhi, std::unique_ptr< QRhiBuffer > &uniformBuffer, std::unique_ptr< QRhiSampler > &textureSampler, std::unique_ptr< QRhiShaderResourceBindings > &shaderResourceBindings, std::unique_ptr< QRhiGraphicsPipeline > &graphicsPipeline, std::unique_ptr< QRhiRenderPassDescriptor > &renderPass, QVideoFrame &frame, const QVideoFrameTexturesUPtr &videoFrameTextures)
static QImage convertJPEG(const QVideoFrame &frame, const VideoTransformation &transform)
QImage qImageFromVideoFrame(const QVideoFrame &frame, const VideoTransformation &transformation, bool forceCpu)
QImage qImageFromVideoFrame(const QVideoFrame &frame, bool forceCpu)
static QShader ensureShader(const QString &name)
static bool pixelFormatHasAlpha(QVideoFrameFormat::PixelFormat format)
static QImage convertCPU(const QVideoFrame &frame, const VideoTransformation &transform)
static void rasterTransform(QImage &image, VideoTransformation transformation)
QImage videoFramePlaneAsImage(QVideoFrame &frame, int plane, QImage::Format targetFormat, QSize targetSize)
Maps the video frame and returns an image having a shared ownership for the video frame and referenci...
static void imageCleanupHandler(void *info)