// Tail of updateTextures(): populates the shader resource bindings (uniform
// buffer at binding 0, one sampled texture per video plane at bindings 1..n)
// and then creates the graphics pipeline used to draw the textured video quad.
// NOTE(review): this chunk is a partial extraction — the function header, the
// declaration of the binding array ('bindings' / cursor 'b'), and several
// structural lines (braces, early returns) are not visible here; confirm
// against the full file before relying on control flow.
103 std::unique_ptr<QRhiBuffer> &uniformBuffer,
104 std::unique_ptr<QRhiSampler> &textureSampler,
105 std::unique_ptr<QRhiShaderResourceBindings> &shaderResourceBindings,
106 std::unique_ptr<QRhiGraphicsPipeline> &graphicsPipeline,
107 std::unique_ptr<QRhiRenderPassDescriptor> &renderPass,
109 const QVideoFrameTexturesUPtr &videoFrameTextures)
// The plane count for the frame's pixel format drives how many sampled
// textures get bound below.
111 auto format = frame.surfaceFormat();
112 auto pixelFormat = format.pixelFormat();
114 auto textureDesc = QVideoTextureHelper::textureDescription(pixelFormat);
// Binding 0: uniform data shared by both vertex and fragment stages.
118 *b++ = QRhiShaderResourceBinding::uniformBuffer(0, QRhiShaderResourceBinding::VertexStage | QRhiShaderResourceBinding::FragmentStage,
119 uniformBuffer.get());
// Bindings 1..nplanes: one sampled texture per video plane, all sharing the
// same sampler (fragment stage only).
120 for (
int i = 0; i < textureDesc->nplanes; ++i)
121 *b++ = QRhiShaderResourceBinding::sampledTexture(i + 1, QRhiShaderResourceBinding::FragmentStage,
122 videoFrameTextures->texture(i), textureSampler.get());
123 shaderResourceBindings->setBindings(bindings, b);
124 if (!shaderResourceBindings->create()) {
125 qCDebug(qLcVideoFrameConverter)
126 << Q_FUNC_INFO <<
": failed to create shader resource bindings";
// Build the pipeline: a triangle strip quad with format-specific shaders
// (the fragment shader is selected per pixel format and per RHI backend).
130 graphicsPipeline.reset(rhi->newGraphicsPipeline());
131 graphicsPipeline->setTopology(QRhiGraphicsPipeline::TriangleStrip);
133 QShader vs = ensureShader(QVideoTextureHelper::vertexShaderFileName(format));
137 QShader fs = ensureShader(QVideoTextureHelper::fragmentShaderFileName(format, rhi));
141 graphicsPipeline->setShaderStages({
142 { QRhiShaderStage::Vertex, vs },
143 { QRhiShaderStage::Fragment, fs }
// Vertex layout: interleaved 2D position + 2D texture coordinate, i.e. a
// stride of 4 floats with the texcoord attribute at offset 2 * sizeof(float).
147 inputLayout.setBindings({
148 { 4 *
sizeof(
float) }
150 inputLayout.setAttributes({
151 { 0, 0, QRhiVertexInputAttribute::Float2, 0 },
152 { 0, 1, QRhiVertexInputAttribute::Float2, 2 *
sizeof(
float) }
155 graphicsPipeline->setVertexInputLayout(inputLayout);
156 graphicsPipeline->setShaderResourceBindings(shaderResourceBindings.get());
157 graphicsPipeline->setRenderPassDescriptor(renderPass.get());
158 if (!graphicsPipeline->create()) {
159 qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO <<
": failed to create graphics pipeline";
// Interior of convertJPEG(): maps the frame read-only, validates that the
// mapped bytes look like a JPEG stream (SOI at the start, EOI near the end),
// trims any bytes after the last EOI marker, and decodes via QImage::fromData.
// NOTE(review): partial extraction — the function signature and the early
// returns / closing braces of the error branches are not visible here.
168 QVideoFrame varFrame = frame;
// A non-const copy is needed because map() mutates the frame's mapping state.
169 if (!varFrame.map(QVideoFrame::ReadOnly)) {
170 qCDebug(qLcVideoFrameConverter) << Q_FUNC_INFO <<
": frame mapping failed";
// RAII unmap: the scope guard is held in an optional so it can be disarmed
// (reset to nullopt) explicitly once decoding is done — see the last lines.
174 auto unmap =
std::optional(QScopeGuard([&] {
// View over plane 0 of the mapped data; JPEG frames carry the whole
// compressed stream in a single plane.
178 QSpan<uchar> jpegData{
180 varFrame.mappedBytes(0),
// SOI (start-of-image) marker 0xFFD8 must open the stream.
183 constexpr std::array<uchar, 2> soiMarker{ uchar(0xff), uchar(0xd8) };
184 if (!QtMultimediaPrivate::ranges::equal(jpegData.first(2), soiMarker, std::equal_to<
void>{})) {
185 qCDebug(qLcVideoFrameConverter)
186 << Q_FUNC_INFO <<
": JPEG data does not start with SOI marker";
// EOI (end-of-image) marker 0xFFD9. The last-two-bytes check only logs; the
// find_end below locates the actual final EOI so trailing padding after it
// can be cut off before decoding.
190 constexpr std::array<uchar, 2> eoiMarker{ uchar(0xff), uchar(0xd9) };
194 if (!QtMultimediaPrivate::ranges::equal(jpegData.last(2), eoiMarker, std::equal_to<
void>{})) {
195 qCDebug(qLcVideoFrameConverter)
196 << Q_FUNC_INFO <<
": JPEG data does not end with EOI marker";
198 auto eoi_it = std::find_end(jpegData.begin(), jpegData.end(), std::begin(eoiMarker),
199 std::end(eoiMarker));
200 if (eoi_it == jpegData.end()) {
201 qCWarning(qLcVideoFrameConverter)
202 << Q_FUNC_INFO <<
": JPEG data does not contain EOI marker";
// Shrink the span so it ends exactly after the last EOI marker.
206 const size_t newSize =
std::distance(jpegData.begin(), eoi_it) +
std::size(eoiMarker);
207 jpegData = jpegData.first(newSize);
210 QImage image = QImage::fromData(jpegData,
"JPG");
// Decoding copied the data; disarm the guard to unmap the frame eagerly
// (the QScopeGuard runs when the optional is cleared).
211 unmap =
std::nullopt;
// Interior of the main frame-to-QImage conversion routine: sets up QRhi
// resources, renders the frame to an offscreen RGBA8 target, and reads the
// pixels back. Every resource-creation failure falls back to convertCPU(),
// and JPEG frames are delegated to convertJPEG() up front.
// NOTE(review): partial extraction — the function signature, the declaration
// of 'rhi', 'yScale', 'readDesc', the draw call, and the tail of the final
// QImage return are not visible here.
248 QMacAutoReleasePool releasePool;
// All RHI resources are unique_ptr-owned, so any early return below releases
// them automatically (declared in reverse-ish dependency order).
251 std::unique_ptr<QRhiRenderPassDescriptor> renderPass;
252 std::unique_ptr<QRhiBuffer> vertexBuffer;
253 std::unique_ptr<QRhiBuffer> uniformBuffer;
254 std::unique_ptr<QRhiTexture> targetTexture;
255 std::unique_ptr<QRhiTextureRenderTarget> renderTarget;
256 std::unique_ptr<QRhiSampler> textureSampler;
257 std::unique_ptr<QRhiShaderResourceBindings> shaderResourceBindings;
258 std::unique_ptr<QRhiGraphicsPipeline> graphicsPipeline;
// Reject empty/invalid frames; JPEG frames are decoded directly instead of
// going through the GPU path.
260 if (frame.size().isEmpty() || frame.pixelFormat() == QVideoFrameFormat::Format_Invalid)
263 if (frame.pixelFormat() == QVideoFrameFormat::Format_Jpeg)
264 return convertJPEG(frame, transformation);
267 return convertCPU(frame, transformation);
// Prefer the RHI already associated with the frame's hardware buffer on this
// thread; otherwise fall back to a thread-local RHI.
271 if (QHwVideoBuffer *buffer = QVideoFramePrivate::hwBuffer(frame))
272 rhi = buffer->associatedCurrentThreadRhi();
279 rhi = qEnsureThreadLocalRhi();
// No usable RHI, or one already mid-frame, means the GPU path cannot be used.
282 if (!rhi || rhi->isRecordingFrame())
283 return convertCPU(frame, transformation);
285 Q_ASSERT(rhi->thread()->isCurrentThread());
// Output size accounts for the surface-format rotation (90/270 swaps w/h).
289 const QSize frameSize = qRotatedFrameSize(frame.size(), frame.surfaceFormat().rotation());
291 vertexBuffer.reset(rhi->newBuffer(QRhiBuffer::Immutable, QRhiBuffer::VertexBuffer,
sizeof(g_quad)));
292 if (!vertexBuffer->create()) {
293 qCDebug(qLcVideoFrameConverter) <<
"Failed to create vertex buffer. Using CPU conversion.";
294 return convertCPU(frame, transformation);
297 uniformBuffer.reset(rhi->newBuffer(QRhiBuffer::Dynamic, QRhiBuffer::UniformBuffer,
sizeof(QVideoTextureHelper::UniformData)));
298 if (!uniformBuffer->create()) {
299 qCDebug(qLcVideoFrameConverter) <<
"Failed to create uniform buffer. Using CPU conversion.";
300 return convertCPU(frame, transformation);
// Bilinear sampler, clamped at the edges — no mipmaps needed for a one-shot
// fullscreen-quad conversion.
303 textureSampler.reset(rhi->newSampler(QRhiSampler::Linear, QRhiSampler::Linear, QRhiSampler::None,
304 QRhiSampler::ClampToEdge, QRhiSampler::ClampToEdge));
305 if (!textureSampler->create()) {
306 qCDebug(qLcVideoFrameConverter)
307 <<
"Failed to create texture sampler. Using CPU conversion.";
308 return convertCPU(frame, transformation);
311 shaderResourceBindings.reset(rhi->newShaderResourceBindings());
// Offscreen RGBA8 render target at the rotated frame size.
313 targetTexture.reset(rhi->newTexture(QRhiTexture::RGBA8, frameSize, 1, QRhiTexture::RenderTarget));
314 if (!targetTexture->create()) {
315 qCDebug(qLcVideoFrameConverter) <<
"Failed to create target texture. Using CPU conversion.";
316 return convertCPU(frame, transformation);
319 renderTarget.reset(rhi->newTextureRenderTarget({ { targetTexture.get() } }));
320 renderPass.reset(renderTarget->newCompatibleRenderPassDescriptor());
321 renderTarget->setRenderPassDescriptor(renderPass.get());
322 if (!renderTarget->create()) {
323 qCDebug(qLcVideoFrameConverter) <<
"Failed to create render target. Using CPU conversion.";
324 return convertCPU(frame, transformation);
327 QRhiCommandBuffer *cb =
nullptr;
328 QRhi::FrameOpResult r = rhi->beginOffscreenFrame(&cb);
329 if (r != QRhi::FrameOpSuccess) {
330 qCDebug(qLcVideoFrameConverter) <<
"Failed to set up offscreen frame. Using CPU conversion.";
331 return convertCPU(frame, transformation);
334 QRhiResourceUpdateBatch *rub = rhi->nextResourceUpdateBatch();
// g_quad holds the quad vertex data (presumably per-rotation variants — the
// vertexOffset computation below indexes it by rotation; confirm at its
// definition, which is outside this chunk).
337 rub->uploadStaticBuffer(vertexBuffer.get(), g_quad);
// createTextures may need a mutable frame / may transfer texture ownership
// via texturesTmp, hence the local copies.
339 QVideoFrame frameTmp = frame;
340 QVideoFrameTexturesUPtr texturesTmp;
341 auto videoFrameTextures = QVideoTextureHelper::createTextures(frameTmp, *rhi, *rub, texturesTmp);
342 if (!videoFrameTextures) {
343 qCDebug(qLcVideoFrameConverter) <<
"Failed obtain textures. Using CPU conversion.";
344 return convertCPU(frame, transformation);
345 }
347 if (!updateTextures(rhi, uniformBuffer, textureSampler, shaderResourceBindings,
348 graphicsPipeline, renderPass, frameTmp, videoFrameTextures)) {
349 qCDebug(qLcVideoFrameConverter) <<
"Failed to update textures. Using CPU conversion.";
350 return convertCPU(frame, transformation);
// Mirroring is applied as a negative X scale; Y is flipped depending on the
// backend's framebuffer orientation (yScale is set in lines missing from this
// extraction).
353 float xScale = transformation.mirroredHorizontallyAfterRotation ? -1.0 : 1.0;
356 if (rhi->isYUpInFramebuffer())
359 QMatrix4x4 transform;
360 transform.scale(xScale, yScale);
// Uniform data is built uninitialized and fully overwritten by
// updateUniformData, then pushed to the dynamic uniform buffer.
362 QByteArray uniformData(
sizeof(QVideoTextureHelper::UniformData), Qt::Uninitialized);
363 QVideoTextureHelper::updateUniformData(&uniformData, rhi, frame.surfaceFormat(), frame,
365 rub->updateDynamicBuffer(uniformBuffer.get(), 0, uniformData.size(), uniformData.constData());
// Record the render pass: full-viewport quad draw with the pipeline and
// bindings prepared above. The vertex offset selects the quad variant for
// the frame's rotation (16 floats = 4 vertices * 4 floats each).
367 cb->beginPass(renderTarget.get(), Qt::black, { 1.0f, 0 }, rub);
368 cb->setGraphicsPipeline(graphicsPipeline.get());
370 cb->setViewport({ 0, 0,
float(frameSize.width()),
float(frameSize.height()) });
371 cb->setShaderResources(shaderResourceBindings.get());
373 const quint32 vertexOffset = quint32(
sizeof(
float)) * 16 * transformation.rotationIndex();
374 const QRhiCommandBuffer::VertexInput vbufBinding(vertexBuffer.get(), vertexOffset);
375 cb->setVertexInput(0, 1, &vbufBinding);
// Asynchronous readback: the completion callback flips readCompleted;
// endOffscreenFrame() is expected to drive the readback to completion
// before the flag is checked below.
379 QRhiReadbackResult readResult;
380 bool readCompleted =
false;
382 readResult.completed = [&readCompleted] { readCompleted =
true; };
384 rub = rhi->nextResourceUpdateBatch();
385 rub->readBackTexture(readDesc, &readResult);
389 rhi->endOffscreenFrame();
391 if (!readCompleted) {
392 qCDebug(qLcVideoFrameConverter) <<
"Failed to read back texture. Using CPU conversion.";
393 return convertCPU(frame, transformation);
// The readback bytes are copied to a heap QByteArray so the returned QImage
// can reference them beyond this scope; presumably a cleanup function (in the
// lines cut from this extraction) deletes it when the QImage is destroyed —
// TODO confirm against the full return statement.
396 QByteArray *imageData =
new QByteArray(readResult.data);
398 return QImage(
reinterpret_cast<
const uchar *>(imageData->constData()),
399 readResult.pixelSize.width(), readResult.pixelSize.height(),