Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
qvideoframeconverter.cpp
// Copyright (C) 2022 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include "qcachedvalue_p.h"

#include <QtCore/qcoreapplication.h>
#include <QtCore/qsize.h>
#include <QtCore/qhash.h>
#include <QtCore/qfile.h>
#include <QtGui/qimage.h>
#include <QtCore/qloggingcategory.h>

#include <private/qvideotexturehelper_p.h>

#include <rhi/qrhi.h>

#ifdef Q_OS_DARWIN
#include <QtCore/private/qcore_mac_p.h>
#endif

QT_BEGIN_NAMESPACE

Q_STATIC_LOGGING_CATEGORY(qLcVideoFrameConverter, "qt.multimedia.video.frameconverter")

// TODO: investigate if we should use thread_local instead, QTBUG-133565
static const float g_quad[] = {
    // Rotation 0 CW
    1.f, -1.f, 1.f, 1.f,
    1.f, 1.f, 1.f, 0.f,
    -1.f, -1.f, 0.f, 1.f,
    -1.f, 1.f, 0.f, 0.f,
    // Rotation 90 CW
    1.f, -1.f, 1.f, 0.f,
    1.f, 1.f, 0.f, 0.f,
    -1.f, -1.f, 1.f, 1.f,
    -1.f, 1.f, 0.f, 1.f,
    // Rotation 180 CW
    1.f, -1.f, 0.f, 0.f,
    1.f, 1.f, 0.f, 1.f,
    -1.f, -1.f, 1.f, 0.f,
    -1.f, 1.f, 1.f, 1.f,
    // Rotation 270 CW
    1.f, -1.f, 0.f, 1.f,
    1.f, 1.f, 1.f, 1.f,
    -1.f, -1.f, 0.f, 0.f,
    -1.f, 1.f, 1.f, 0.f,
};
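
// Layout note (editorial): each rotation block above is 4 vertices of
// (x, y, u, v), i.e. 16 floats per rotation. The GPU path below selects a
// block with a byte offset of rotationIndex() * 16 * sizeof(float) when it
// binds the vertex buffer, so the triangle-strip quad is drawn with texture
// coordinates already arranged for the requested rotation.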

static bool pixelFormatHasAlpha(QVideoFrameFormat::PixelFormat format)
{
    switch (format) {
    case QVideoFrameFormat::Format_ARGB8888:
    case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
    case QVideoFrameFormat::Format_BGRA8888:
    case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
    case QVideoFrameFormat::Format_ABGR8888:
    case QVideoFrameFormat::Format_RGBA8888:
    case QVideoFrameFormat::Format_AYUV:
    case QVideoFrameFormat::Format_AYUV_Premultiplied:
        return true;
    default:
        return false;
    }
}

static QShader ensureShader(const QString &name)
{
    static QCachedValueMap<QString, QShader> shaderCache;

    return shaderCache.ensure(name, [&name]() {
        QFile f(name);
        return f.open(QIODevice::ReadOnly) ? QShader::fromSerialized(f.readAll()) : QShader();
    });
}
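
// Usage sketch (editorial, not part of this file): shaders are loaded from
// pre-compiled .qsb files and cached by file name, so repeated conversions
// reuse the deserialized QShader instead of re-reading the resource. The
// resource path below is purely illustrative:
//
//     QShader vs = ensureShader(QStringLiteral(":/shaders/vertex.vert.qsb"));
//     if (!vs.isValid())
//         /* fall back to CPU conversion */;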

static void rasterTransform(QImage &image, VideoTransformation transformation)
{
    QTransform t;
    if (transformation.rotation != QtVideo::Rotation::None)
        t.rotate(qreal(transformation.rotation));
    if (transformation.mirroredHorizontallyAfterRotation)
        t.scale(-1., 1);
    if (!t.isIdentity())
        image = image.transformed(t);
}
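
// Behavior sketch (editorial): a frame tagged with Rotation::Clockwise90 and
// mirroredHorizontallyAfterRotation == true ends up transformed by
//
//     QTransform t;
//     t.rotate(90.0);
//     t.scale(-1., 1);
//     image = image.transformed(t);
//
// i.e. the mirror is expressed as a negative horizontal scale on top of the
// rotation, and QImage::transformed() allocates the rotated/flipped copy.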

static void imageCleanupHandler(void *info)
{
    QByteArray *imageData = reinterpret_cast<QByteArray *>(info);
    delete imageData;
}

static bool updateTextures(QRhi *rhi,
                           std::unique_ptr<QRhiBuffer> &uniformBuffer,
                           std::unique_ptr<QRhiSampler> &textureSampler,
                           std::unique_ptr<QRhiShaderResourceBindings> &shaderResourceBindings,
                           std::unique_ptr<QRhiGraphicsPipeline> &graphicsPipeline,
                           std::unique_ptr<QRhiRenderPassDescriptor> &renderPass,
                           QVideoFrame &frame,
                           const QVideoFrameTexturesUPtr &videoFrameTextures)
{
    auto format = frame.surfaceFormat();
    auto pixelFormat = format.pixelFormat();

    auto textureDesc = QVideoTextureHelper::textureDescription(pixelFormat);

    QRhiShaderResourceBinding bindings[4];
    auto *b = bindings;
    *b++ = QRhiShaderResourceBinding::uniformBuffer(0, QRhiShaderResourceBinding::VertexStage | QRhiShaderResourceBinding::FragmentStage,
                                                    uniformBuffer.get());
    for (int i = 0; i < textureDesc->nplanes; ++i)
        *b++ = QRhiShaderResourceBinding::sampledTexture(i + 1, QRhiShaderResourceBinding::FragmentStage,
                                                         videoFrameTextures->texture(i), textureSampler.get());
    shaderResourceBindings->setBindings(bindings, b);
    shaderResourceBindings->create();

    graphicsPipeline.reset(rhi->newGraphicsPipeline());
    graphicsPipeline->setTopology(QRhiGraphicsPipeline::TriangleStrip);

    QShader vs = ensureShader(QVideoTextureHelper::vertexShaderFileName(format));
    if (!vs.isValid())
        return false;

    QShader fs = ensureShader(QVideoTextureHelper::fragmentShaderFileName(format, rhi));
    if (!fs.isValid())
        return false;

    graphicsPipeline->setShaderStages({
        { QRhiShaderStage::Vertex, vs },
        { QRhiShaderStage::Fragment, fs }
    });

    QRhiVertexInputLayout inputLayout;
    inputLayout.setBindings({
        { 4 * sizeof(float) }
    });
    inputLayout.setAttributes({
        { 0, 0, QRhiVertexInputAttribute::Float2, 0 },
        { 0, 1, QRhiVertexInputAttribute::Float2, 2 * sizeof(float) }
    });

    graphicsPipeline->setVertexInputLayout(inputLayout);
    graphicsPipeline->setShaderResourceBindings(shaderResourceBindings.get());
    graphicsPipeline->setRenderPassDescriptor(renderPass.get());
    graphicsPipeline->create();

    return true;
}
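
// Binding layout note (editorial): the resulting shader resource set is
// binding 0 = UniformData (visible to both stages) and bindings 1..nplanes =
// one sampled texture per plane of the frame's pixel format, matching the
// pre-built video shaders selected above.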

static QImage convertJPEG(const QVideoFrame &frame, const VideoTransformation &transform)
{
    QVideoFrame varFrame = frame;
    if (!varFrame.map(QVideoFrame::ReadOnly)) {
        qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": frame mapping failed";
        return {};
    }
    QImage image;
    image.loadFromData(varFrame.bits(0), varFrame.mappedBytes(0), "JPG");
    varFrame.unmap();
    rasterTransform(image, transform);
    return image;
}

static QImage convertCPU(const QVideoFrame &frame, const VideoTransformation &transform)
{
    VideoFrameConvertFunc convert = qConverterForFormat(frame.pixelFormat());
    if (!convert) {
        qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": unsupported pixel format" << frame.pixelFormat();
        return {};
    } else {
        QVideoFrame varFrame = frame;
        if (!varFrame.map(QVideoFrame::ReadOnly)) {
            qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO << ": frame mapping failed";
            return {};
        }
        auto format = pixelFormatHasAlpha(varFrame.pixelFormat()) ? QImage::Format_ARGB32_Premultiplied : QImage::Format_RGB32;
        QImage image = QImage(varFrame.width(), varFrame.height(), format);
        convert(varFrame, image.bits());
        varFrame.unmap();
        rasterTransform(image, transform);
        return image;
    }
}

QImage qImageFromVideoFrame(const QVideoFrame &frame, bool forceCpu)
{
    // By default, only the surface transformation is applied; the full
    // transformation is used for presentation only.
    return qImageFromVideoFrame(frame, qNormalizedSurfaceTransformation(frame.surfaceFormat()),
                                forceCpu);
}
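
// Usage sketch (editorial, not part of this file): callers normally hand a
// frame straight to this overload and let it pick the GPU or CPU path; the
// sink variable below is hypothetical.
//
//     QVideoFrame frame = sink->videoFrame();
//     QImage image = qImageFromVideoFrame(frame, /*forceCpu=*/false);
//     if (!image.isNull())
//         image.save(QStringLiteral("frame.png"));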

QImage qImageFromVideoFrame(const QVideoFrame &frame, const VideoTransformation &transformation,
                            bool forceCpu)
{
#ifdef Q_OS_DARWIN
    QMacAutoReleasePool releasePool;
#endif

    std::unique_ptr<QRhiRenderPassDescriptor> renderPass;
    std::unique_ptr<QRhiBuffer> vertexBuffer;
    std::unique_ptr<QRhiBuffer> uniformBuffer;
    std::unique_ptr<QRhiTexture> targetTexture;
    std::unique_ptr<QRhiTextureRenderTarget> renderTarget;
    std::unique_ptr<QRhiSampler> textureSampler;
    std::unique_ptr<QRhiShaderResourceBindings> shaderResourceBindings;
    std::unique_ptr<QRhiGraphicsPipeline> graphicsPipeline;

    if (frame.size().isEmpty() || frame.pixelFormat() == QVideoFrameFormat::Format_Invalid)
        return {};

    if (frame.pixelFormat() == QVideoFrameFormat::Format_Jpeg)
        return convertJPEG(frame, transformation);

    if (forceCpu) // For test purposes
        return convertCPU(frame, transformation);

    QRhi *rhi = nullptr;

    if (QHwVideoBuffer *buffer = QVideoFramePrivate::hwBuffer(frame))
        rhi = buffer->rhi();

    if (!rhi || !rhi->thread()->isCurrentThread())
        rhi = ensureThreadLocalRhi(rhi);

    if (!rhi || rhi->isRecordingFrame())
        return convertCPU(frame, transformation);

    // Do conversion using shaders
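    // Editorial outline of the GPU path that follows: upload the unit quad
    // and per-plane textures for the frame, build a small pipeline with the
    // video shaders, render one quad into an RGBA8 offscreen target of the
    // rotated frame size, then read the target back into host memory and
    // wrap it in a QImage. Any failure along the way falls back to
    // convertCPU().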

    const QSize frameSize = qRotatedFrameSize(frame.size(), frame.surfaceFormat().rotation());

    vertexBuffer.reset(rhi->newBuffer(QRhiBuffer::Immutable, QRhiBuffer::VertexBuffer, sizeof(g_quad)));
    vertexBuffer->create();

    uniformBuffer.reset(rhi->newBuffer(QRhiBuffer::Dynamic, QRhiBuffer::UniformBuffer, sizeof(QVideoTextureHelper::UniformData)));
    uniformBuffer->create();

    textureSampler.reset(rhi->newSampler(QRhiSampler::Linear, QRhiSampler::Linear, QRhiSampler::None,
                                         QRhiSampler::ClampToEdge, QRhiSampler::ClampToEdge));
    textureSampler->create();

    shaderResourceBindings.reset(rhi->newShaderResourceBindings());

    targetTexture.reset(rhi->newTexture(QRhiTexture::RGBA8, frameSize, 1, QRhiTexture::RenderTarget));
    if (!targetTexture->create()) {
        qCDebug(qLcVideoFrameConverter) << "Failed to create target texture. Using CPU conversion.";
        return convertCPU(frame, transformation);
    }

    renderTarget.reset(rhi->newTextureRenderTarget({ { targetTexture.get() } }));
    renderPass.reset(renderTarget->newCompatibleRenderPassDescriptor());
    renderTarget->setRenderPassDescriptor(renderPass.get());
    renderTarget->create();

    QRhiCommandBuffer *cb = nullptr;
    QRhi::FrameOpResult r = rhi->beginOffscreenFrame(&cb);
    if (r != QRhi::FrameOpSuccess) {
        qCDebug(qLcVideoFrameConverter) << "Failed to set up offscreen frame. Using CPU conversion.";
        return convertCPU(frame, transformation);
    }

    QRhiResourceUpdateBatch *rub = rhi->nextResourceUpdateBatch();
    Q_ASSERT(rub);

    rub->uploadStaticBuffer(vertexBuffer.get(), g_quad);

    QVideoFrame frameTmp = frame;
    auto videoFrameTextures = QVideoTextureHelper::createTextures(frameTmp, *rhi, *rub, {});
    if (!videoFrameTextures) {
        qCDebug(qLcVideoFrameConverter) << "Failed to obtain textures. Using CPU conversion.";
        return convertCPU(frame, transformation);
    }

    if (!updateTextures(rhi, uniformBuffer, textureSampler, shaderResourceBindings,
                        graphicsPipeline, renderPass, frameTmp, videoFrameTextures)) {
        qCDebug(qLcVideoFrameConverter) << "Failed to update textures. Using CPU conversion.";
        return convertCPU(frame, transformation);
    }

    float xScale = transformation.mirroredHorizontallyAfterRotation ? -1.0 : 1.0;
    float yScale = 1.f;

    if (rhi->isYUpInFramebuffer())
        yScale = -yScale;

    QMatrix4x4 transform;
    transform.scale(xScale, yScale);

    QByteArray uniformData(sizeof(QVideoTextureHelper::UniformData), Qt::Uninitialized);
    QVideoTextureHelper::updateUniformData(&uniformData, rhi, frame.surfaceFormat(), frame,
                                           transform, 1.f);
    rub->updateDynamicBuffer(uniformBuffer.get(), 0, uniformData.size(), uniformData.constData());

    cb->beginPass(renderTarget.get(), Qt::black, { 1.0f, 0 }, rub);
    cb->setGraphicsPipeline(graphicsPipeline.get());

    cb->setViewport({ 0, 0, float(frameSize.width()), float(frameSize.height()) });
    cb->setShaderResources(shaderResourceBindings.get());

    const quint32 vertexOffset = quint32(sizeof(float)) * 16 * transformation.rotationIndex();
    const QRhiCommandBuffer::VertexInput vbufBinding(vertexBuffer.get(), vertexOffset);
    cb->setVertexInput(0, 1, &vbufBinding);
    cb->draw(4);

    QRhiReadbackDescription readDesc(targetTexture.get());
    QRhiReadbackResult readResult;
    bool readCompleted = false;

    readResult.completed = [&readCompleted] { readCompleted = true; };

    rub = rhi->nextResourceUpdateBatch();
    rub->readBackTexture(readDesc, &readResult);

    cb->endPass(rub);

    rhi->endOffscreenFrame();

    if (!readCompleted) {
        qCDebug(qLcVideoFrameConverter) << "Failed to read back texture. Using CPU conversion.";
        return convertCPU(frame, transformation);
    }

    QByteArray *imageData = new QByteArray(readResult.data);

    return QImage(reinterpret_cast<const uchar *>(imageData->constData()),
                  readResult.pixelSize.width(), readResult.pixelSize.height(),
                  QImage::Format_RGBA8888_Premultiplied, imageCleanupHandler, imageData);
}
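
// Usage sketch (editorial, not part of this file): the transformation overload
// lets a caller apply an explicit rotation/mirror instead of the frame's
// normalized surface transformation. The field values below are illustrative.
//
//     VideoTransformation t;
//     t.rotation = QtVideo::Rotation::Clockwise90;
//     t.mirroredHorizontallyAfterRotation = true;
//     QImage rotated = qImageFromVideoFrame(frame, t, /*forceCpu=*/false);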

QImage videoFramePlaneAsImage(QVideoFrame &frame, int plane, QImage::Format targetFormat,
                              QSize targetSize)
{
    if (plane >= frame.planeCount())
        return {};

    if (!frame.map(QVideoFrame::ReadOnly)) {
        qWarning() << "Cannot map a video frame in ReadOnly mode!";
        return {};
    }

    auto frameHandle = QVideoFramePrivate::handle(frame);

    // By incrementing the reference counter, we share the mapped QVideoFrame
    // with the target QImage. imageCleanupFunction adopts frameHandle back into
    // a QVideoFrame and dereferences it upon destruction.
    frameHandle->ref.ref();

    auto imageCleanupFunction = [](void *data) {
        QVideoFrame frame = reinterpret_cast<QVideoFramePrivate *>(data)->adoptThisByVideoFrame();
        Q_ASSERT(frame.isMapped());
        frame.unmap();
    };

    const auto bytesPerLine = frame.bytesPerLine(plane);
    const auto height =
            bytesPerLine ? qMin(targetSize.height(), frame.mappedBytes(plane) / bytesPerLine) : 0;

    return QImage(reinterpret_cast<const uchar *>(frame.bits(plane)), targetSize.width(), height,
                  bytesPerLine, targetFormat, imageCleanupFunction, frameHandle);
}
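
// Usage sketch (editorial, not part of this file): wrapping the luma plane of
// a mapped NV12 frame as a grayscale image without copying the pixel data.
// The format/size pairing is the caller's responsibility and the values below
// are illustrative.
//
//     QImage yPlane = videoFramePlaneAsImage(frame, /*plane=*/0,
//                                            QImage::Format_Grayscale8,
//                                            frame.size());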

QT_END_NAMESPACE