Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
qvideotexturehelper.cpp
1// Copyright (C) 2021 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
5
10#include "private/qmultimediautils_p.h"
11#include "private/qaudio_alignment_support_p.h"
12
13#include <QtCore/qfile.h>
14#include <qpainter.h>
15#include <qloggingcategory.h>
16
17QT_BEGIN_NAMESPACE
18
19Q_STATIC_LOGGING_CATEGORY(qLcVideoTextureHelper, "qt.multimedia.video.texturehelper")
20
21namespace QVideoTextureHelper
22{
23
24bool forceGlTextureExternalOesIsSet() {
25 static const bool isSet =
26 qEnvironmentVariableIsSet("QT_MULTIMEDIA_FORCE_GL_TEXTURE_EXTERNAL_OES");
27 return isSet;
28}
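// Usage note (illustration, not part of the Qt sources): the override is read only once
// (static local), so the environment variable must be set before the first video frame is
// rendered, e.g.
//
//   qputenv("QT_MULTIMEDIA_FORCE_GL_TEXTURE_EXTERNAL_OES", "1");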
29
30static const TextureDescription descriptions[QVideoFrameFormat::NPixelFormats] = {
31 // Format_Invalid
32 { 0, 0,
33 [](int, int) { return 0; },
35 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
36 },
37 // Format_ARGB8888
38 { 1, 4,
39 [](int stride, int height) { return stride*height; },
41 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
42 },
43 // Format_ARGB8888_Premultiplied
44 { 1, 4,
45 [](int stride, int height) { return stride*height; },
47 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
48 },
49 // Format_XRGB8888
50 { 1, 4,
51 [](int stride, int height) { return stride*height; },
53 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
54 },
55 // Format_BGRA8888
56 { 1, 4,
57 [](int stride, int height) { return stride*height; },
59 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
60 },
61 // Format_BGRA8888_Premultiplied
62 { 1, 4,
63 [](int stride, int height) { return stride*height; },
65 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
66 },
67 // Format_BGRX8888
68 { 1, 4,
69 [](int stride, int height) { return stride*height; },
71 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
72 },
73 // Format_ABGR8888
74 { 1, 4,
75 [](int stride, int height) { return stride*height; },
77 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
78 },
79 // Format_XBGR8888
80 { 1, 4,
81 [](int stride, int height) { return stride*height; },
83 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
84 },
85 // Format_RGBA8888
86 { 1, 4,
87 [](int stride, int height) { return stride*height; },
89 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
90 },
91 // Format_RGBX8888
92 { 1, 4,
93 [](int stride, int height) { return stride*height; },
95 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
96 },
97 // Format_AYUV
98 { 1, 4,
99 [](int stride, int height) { return stride*height; },
101 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
102 },
103 // Format_AYUV_Premultiplied
104 { 1, 4,
105 [](int stride, int height) { return stride*height; },
107 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
108 },
109 // Format_YUV420P
110 { 3, 1,
111 [](int stride, int height) { return stride * (height + QtMultimediaPrivate::alignUp(height, 2) / 2); },
113 { { 1, 1 }, { 2, 2 }, { 2, 2 } }
114 },
115 // Format_YUV422P
116 { 3, 1,
117 [](int stride, int height) { return stride * height * 2; },
119 { { 1, 1 }, { 2, 1 }, { 2, 1 } }
120 },
121 // Format_YV12
122 { 3, 1,
123 [](int stride, int height) { return stride * (height + QtMultimediaPrivate::alignUp(height, 2) / 2); },
125 { { 1, 1 }, { 2, 2 }, { 2, 2 } }
126 },
127 // Format_UYVY
128 { 1, 2,
129 [](int stride, int height) { return stride*height; },
131 { { 2, 1 }, { 1, 1 }, { 1, 1 } }
132 },
133 // Format_YUYV
134 { 1, 2,
135 [](int stride, int height) { return stride*height; },
137 { { 2, 1 }, { 1, 1 }, { 1, 1 } }
138 },
139 // Format_NV12
140 { 2, 1,
141 [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
143 { { 1, 1 }, { 2, 2 }, { 1, 1 } }
144 },
145 // Format_NV21
146 { 2, 1,
147 [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
149 { { 1, 1 }, { 2, 2 }, { 1, 1 } }
150 },
151 // Format_IMC1
152 { 3, 1,
153 [](int stride, int height) {
154 // IMC1 requires that U and V components are aligned on a multiple of 16 lines
155 int h = (height + 15) & ~15;
156 h += 2*(((h/2) + 15) & ~15);
157 return stride * h;
158 },
160 { { 1, 1 }, { 2, 2 }, { 2, 2 } }
161 },
162 // Format_IMC2
163 { 2, 1,
164 [](int stride, int height) { return 2*stride*height; },
166 { { 1, 1 }, { 1, 2 }, { 1, 1 } }
167 },
168 // Format_IMC3
169 { 3, 1,
170 [](int stride, int height) {
171 // IMC3 requires that U and V components are aligned on a multiple of 16 lines
172 int h = (height + 15) & ~15;
173 h += 2*(((h/2) + 15) & ~15);
174 return stride * h;
175 },
177 { { 1, 1 }, { 2, 2 }, { 2, 2 } }
178 },
179 // Format_IMC4
180 { 2, 1,
181 [](int stride, int height) { return 2*stride*height; },
183 { { 1, 1 }, { 1, 2 }, { 1, 1 } }
184 },
185 // Format_Y8
186 { 1, 1,
187 [](int stride, int height) { return stride*height; },
189 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
190 },
191 // Format_Y16
192 { 1, 2,
193 [](int stride, int height) { return stride*height; },
195 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
196 },
197 // Format_P010
198 { 2, 2,
199 [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
201 { { 1, 1 }, { 2, 2 }, { 1, 1 } }
202 },
203 // Format_P016
204 { 2, 2,
205 [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
207 { { 1, 1 }, { 2, 2 }, { 1, 1 } }
208 },
209 // Format_SamplerExternalOES
210 {
211 1, 0,
212 [](int, int) { return 0; },
214 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
215 },
216 // Format_Jpeg
217 { 1, 4,
218 [](int stride, int height) { return stride*height; },
220 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
221 },
222 // Format_SamplerRect
223 {
224 1, 0,
225 [](int, int) { return 0; },
227 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
228 },
229 // Format_YUV420P10
230 { 3, 2,
231 [](int stride, int height) { return stride * (height + QtMultimediaPrivate::alignUp(height, 2) / 2); },
233 { { 1, 1 }, { 2, 2 }, { 2, 2 } }
234 },
235};
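// Worked example (illustration, not part of the Qt sources): reading the Format_YUV420P
// entry above for a 1920x1080 frame with a 1920-byte stride. The entry declares 3 planes
// with 2x2 subsampling for the two chroma planes ({ 2, 2 }), so:
//
//   plane 0 (Y):  1920 x 1080
//   plane 1 (U):   960 x  540
//   plane 2 (V):   960 x  540
//   size lambda:  1920 * (1080 + alignUp(1080, 2) / 2)
//               = 1920 * 1620 = 3110400 bytes (width * height * 1.5)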
236
237Q_GLOBAL_STATIC(QList<QRhiTexture::Format>, g_excludedRhiTextureFormats) // for tests only
238
239static bool isRhiTextureFormatSupported(const QRhi *rhi, QRhiTexture::Format format)
240{
241 if (g_excludedRhiTextureFormats->contains(format))
242 return false;
243 if (!rhi) // consider the format supported if no rhi is specified
244 return true;
245 return rhi->isTextureFormatSupported(format);
246}
247
281
282QRhiTexture::Format resolvedRhiTextureFormat(QRhiTexture::Format format, QRhi *rhi)
283{
284 if (isRhiTextureFormatSupported(rhi, format))
285 return format;
286
287 QRhiTexture::Format fallbackFormat;
288 switch (format) {
289 case QRhiTexture::R8:
290 fallbackFormat = resolvedRhiTextureFormat(QRhiTexture::RED_OR_ALPHA8, rhi);
291 break;
292 case QRhiTexture::RG8:
293 case QRhiTexture::RG16:
294 fallbackFormat = resolvedRhiTextureFormat(QRhiTexture::RGBA8, rhi);
295 break;
296 case QRhiTexture::R16:
297 fallbackFormat = resolvedRhiTextureFormat(QRhiTexture::RG8, rhi);
298 break;
299 default:
300 // End fallback chain here, and return UnknownFormat
301 return QRhiTexture::UnknownFormat;
302 }
303
304 if (fallbackFormat == QRhiTexture::UnknownFormat) {
305 // TODO: QTBUG-135911: In some cases the rhi claims that the format and all fallbacks are
306 // unsupported, yet the video plays fine when the preferred format is used
307 qCDebug(qLcVideoTextureHelper) << "Cannot determine any usable texture format, using preferred format" << format;
308 return format;
309 }
310
311 qCDebug(qLcVideoTextureHelper) << "Using fallback texture format" << fallbackFormat;
312 return fallbackFormat;
313}
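// Illustrative trace (not part of the Qt sources): requesting QRhiTexture::R16 (e.g. the
// 16-bit luma plane of P010/P016) on a backend without R16 support walks the chain above
// until a supported format is found:
//
//   resolvedRhiTextureFormat(R16, rhi)
//     -> R16 unsupported, try resolvedRhiTextureFormat(RG8, rhi)
//        -> RG8 supported, return RG8
//   => the luma plane is allocated as RG8 instead of R16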
314
315void setExcludedRhiTextureFormats(QList<QRhiTexture::Format> formats)
316{
317 g_excludedRhiTextureFormats->swap(formats);
318}
319
320const TextureDescription *textureDescription(QVideoFrameFormat::PixelFormat format)
321{
322 return descriptions + format;
323}
324
325QString vertexShaderFileName(const QVideoFrameFormat &format)
326{
327 auto fmt = format.pixelFormat();
328 Q_UNUSED(fmt);
329
330#if 1//def Q_OS_ANDROID
331 if (fmt == QVideoFrameFormat::Format_SamplerExternalOES)
332 return QStringLiteral(":/qt-project.org/multimedia/shaders/externalsampler.vert.qsb");
333#endif
334#if 1//def Q_OS_MACOS
335 if (fmt == QVideoFrameFormat::Format_SamplerRect)
336 return QStringLiteral(":/qt-project.org/multimedia/shaders/rectsampler.vert.qsb");
337#endif
338
339 return QStringLiteral(":/qt-project.org/multimedia/shaders/vertex.vert.qsb");
340}
341
342QString fragmentShaderFileName(const QVideoFrameFormat &format, QRhi *,
343 QRhiSwapChain::Format surfaceFormat)
344{
345 QString shaderFile;
346 switch (format.pixelFormat()) {
347 case QVideoFrameFormat::Format_Y8:
348 shaderFile = QStringLiteral("y");
349 break;
350 case QVideoFrameFormat::Format_Y16:
351 shaderFile = QStringLiteral("y16");
352 break;
353 case QVideoFrameFormat::Format_AYUV:
354 case QVideoFrameFormat::Format_AYUV_Premultiplied:
355 shaderFile = QStringLiteral("ayuv");
356 break;
357 case QVideoFrameFormat::Format_ARGB8888:
358 case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
359 case QVideoFrameFormat::Format_XRGB8888:
360 shaderFile = QStringLiteral("argb");
361 break;
362 case QVideoFrameFormat::Format_ABGR8888:
363 case QVideoFrameFormat::Format_XBGR8888:
364 shaderFile = QStringLiteral("abgr");
365 break;
366 case QVideoFrameFormat::Format_Jpeg: // Jpeg is decoded transparently into an ARGB texture
367 shaderFile = QStringLiteral("bgra");
368 break;
369 case QVideoFrameFormat::Format_RGBA8888:
370 case QVideoFrameFormat::Format_RGBX8888:
371 case QVideoFrameFormat::Format_BGRA8888:
372 case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
373 case QVideoFrameFormat::Format_BGRX8888:
374 shaderFile = QStringLiteral("rgba");
375 break;
376 case QVideoFrameFormat::Format_YUV420P:
377 case QVideoFrameFormat::Format_YUV422P:
378 case QVideoFrameFormat::Format_IMC3:
379 shaderFile = QStringLiteral("yuv_triplanar");
380 break;
381 case QVideoFrameFormat::Format_YUV420P10:
382 shaderFile = QStringLiteral("yuv_triplanar_p10");
383 break;
384 case QVideoFrameFormat::Format_YV12:
385 case QVideoFrameFormat::Format_IMC1:
386 shaderFile = QStringLiteral("yvu_triplanar");
387 break;
388 case QVideoFrameFormat::Format_IMC2:
389 shaderFile = QStringLiteral("imc2");
390 break;
391 case QVideoFrameFormat::Format_IMC4:
392 shaderFile = QStringLiteral("imc4");
393 break;
394 case QVideoFrameFormat::Format_UYVY:
395 shaderFile = QStringLiteral("uyvy");
396 break;
397 case QVideoFrameFormat::Format_YUYV:
398 shaderFile = QStringLiteral("yuyv");
399 break;
400 case QVideoFrameFormat::Format_P010:
401 case QVideoFrameFormat::Format_P016:
402 // P010/P016 have the same layout as NV12, just 16 instead of 8 bits per component
403 if (format.colorTransfer() == QVideoFrameFormat::ColorTransfer_ST2084) {
404 shaderFile = QStringLiteral("nv12_bt2020_pq");
405 break;
406 }
407 if (format.colorTransfer() == QVideoFrameFormat::ColorTransfer_STD_B67) {
408 shaderFile = QStringLiteral("nv12_bt2020_hlg");
409 break;
410 }
411 shaderFile = QStringLiteral("p016");
412 break;
413 case QVideoFrameFormat::Format_NV12:
414 shaderFile = QStringLiteral("nv12");
415 break;
416 case QVideoFrameFormat::Format_NV21:
417 shaderFile = QStringLiteral("nv21");
418 break;
419 case QVideoFrameFormat::Format_SamplerExternalOES:
420#if 1//def Q_OS_ANDROID
421 shaderFile = QStringLiteral("externalsampler");
422 break;
423#endif
424 case QVideoFrameFormat::Format_SamplerRect:
425#if 1//def Q_OS_MACOS
426 shaderFile = QStringLiteral("rectsampler_bgra");
427 break;
428#endif
429 // fallthrough
430 case QVideoFrameFormat::Format_Invalid:
431 default:
432 break;
433 }
434
435 if (shaderFile.isEmpty())
436 return QString();
437
438 shaderFile.prepend(u":/qt-project.org/multimedia/shaders/");
439
440 if (surfaceFormat == QRhiSwapChain::HDRExtendedSrgbLinear)
441 shaderFile.append(u"_linear");
442
443 shaderFile.append(u".frag.qsb");
444
445 Q_ASSERT_X(QFile::exists(shaderFile), Q_FUNC_INFO,
446 QStringLiteral("Shader file %1 does not exist").arg(shaderFile).toLatin1());
447 qCDebug(qLcVideoTextureHelper) << "fragmentShaderFileName returns" << shaderFile;
448 return shaderFile;
449}
450
451// Matrices are calculated from
452// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.601-7-201103-I!!PDF-E.pdf
453// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.709-6-201506-I!!PDF-E.pdf
454// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.2020-2-201510-I!!PDF-E.pdf
455//
456// For BT2020, we also need to convert the Rec2020 RGB colorspace to sRGB see
457// shaders/colorconvert.glsl for details.
458//
459// Doing the math gives the following (Y, U & V normalized to [0..1] range):
460//
461// Y = a*R + b*G + c*B
462// R = Y + e*V
463// G = Y - c*d/b*U - a*e/b*V
464// B = Y + d*U
465
466// BT2020:
467 // a = 0.2627, b = 0.6780, c = 0.0593
468// d = 1.8814
469// e = 1.4746
470//
471// BT709:
472// a = 0.2126, b = 0.7152, c = 0.0722
473// d = 1.8556
474// e = 1.5748
475//
476// BT601:
477 // a = 0.299, b = 0.587, c = 0.114
478// d = 1.42
479// e = 1.772
480//
481
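// Worked example (illustration, not part of the Qt sources): plugging the BT709 constants
// above into these formulas reproduces the full-range BT709 matrix used in colorMatrix()
// below. Full-range 8-bit chroma is centred at 128/255 ~ 0.502:
//
//   R coefficient of V:  e      =  1.5748
//   G coefficient of U: -c*d/b  = -0.0722 * 1.8556 / 0.7152 ~ -0.187324
//   G coefficient of V: -a*e/b  = -0.2126 * 1.5748 / 0.7152 ~ -0.468124
//   B coefficient of U:  d      =  1.8556
//
//   R offset: -1.5748 * 128/255               ~ -0.790488
//   G offset: (0.187324 + 0.468124) * 128/255 ~  0.329010
//   B offset: -1.8556 * 128/255               ~ -0.931439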
482// clang-format off
483static QMatrix4x4 colorMatrix(const QVideoFrameFormat &format)
484{
485 auto colorSpace = format.colorSpace();
486 if (colorSpace == QVideoFrameFormat::ColorSpace_Undefined) {
487 if (format.frameHeight() > 576)
488 // HD video, assume BT709
489 colorSpace = QVideoFrameFormat::ColorSpace_BT709;
490 else
491 // SD video, assume BT601
492 colorSpace = QVideoFrameFormat::ColorSpace_BT601;
493 }
494 switch (colorSpace) {
495 case QVideoFrameFormat::ColorSpace_AdobeRgb:
496 return {
497 1.0f, 0.000f, 1.402f, -0.701f,
498 1.0f, -0.344f, -0.714f, 0.529f,
499 1.0f, 1.772f, 0.000f, -0.886f,
500 0.0f, 0.000f, 0.000f, 1.000f
501 };
502 default:
503 case QVideoFrameFormat::ColorSpace_BT709:
504 if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
505 return {
506 1.0f, 0.0f, 1.5748f, -0.790488f,
507 1.0f, -0.187324f, -0.468124f, 0.329010f,
508 1.0f, 1.855600f, 0.0f, -0.931439f,
509 0.0f, 0.0f, 0.0f, 1.0f
510 };
511 return {
512 1.1644f, 0.0000f, 1.7927f, -0.9729f,
513 1.1644f, -0.2132f, -0.5329f, 0.3015f,
514 1.1644f, 2.1124f, 0.0000f, -1.1334f,
515 0.0000f, 0.0000f, 0.0000f, 1.0000f
516 };
517 case QVideoFrameFormat::ColorSpace_BT2020:
518 if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
519 return {
520 1.f, 0.0000f, 1.4746f, -0.7402f,
521 1.f, -0.1646f, -0.5714f, 0.3694f,
522 1.f, 1.8814f, 0.000f, -0.9445f,
523 0.0f, 0.0000f, 0.000f, 1.0000f
524 };
525 return {
526 1.1644f, 0.000f, 1.6787f, -0.9157f,
527 1.1644f, -0.1874f, -0.6504f, 0.3475f,
528 1.1644f, 2.1418f, 0.0000f, -1.1483f,
529 0.0000f, 0.0000f, 0.0000f, 1.0000f
530 };
531 case QVideoFrameFormat::ColorSpace_BT601:
532 // Corresponds to the primaries used by NTSC BT601. For PAL BT601, we use the BT709 conversion
533 // as those are very close.
534 if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
535 return {
536 1.f, 0.000f, 1.772f, -0.886f,
537 1.f, -0.1646f, -0.57135f, 0.36795f,
538 1.f, 1.42f, 0.000f, -0.71f,
539 0.0f, 0.000f, 0.000f, 1.0000f
540 };
541 return {
542 1.164f, 0.000f, 1.596f, -0.8708f,
543 1.164f, -0.392f, -0.813f, 0.5296f,
544 1.164f, 2.017f, 0.000f, -1.0810f,
545 0.000f, 0.000f, 0.000f, 1.0000f
546 };
547 }
548}
549// clang-format on
550
551// PQ transfer function, see also https://en.wikipedia.org/wiki/Perceptual_quantizer
552// or https://ieeexplore.ieee.org/document/7291452
553static float convertPQFromLinear(float sig)
554{
555 const float m1 = 1305.f/8192.f;
556 const float m2 = 2523.f/32.f;
557 const float c1 = 107.f/128.f;
558 const float c2 = 2413.f/128.f;
559 const float c3 = 2392.f/128.f;
560
561 const float SDR_LEVEL = 100.f;
562 sig *= SDR_LEVEL/10000.f;
563 float psig = powf(sig, m1);
564 float num = c1 + c2*psig;
565 float den = 1 + c3*psig;
566 return powf(num/den, m2);
567}
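// Reference values (illustration, not part of the Qt sources): with SDR_LEVEL = 100 the
// input is interpreted in multiples of SDR white, e.g.
//
//   convertPQFromLinear(1.f)   ~ 0.508   (SDR white, 100 nits)
//   convertPQFromLinear(10.f)  ~ 0.751   (1000 nits)
//   convertPQFromLinear(100.f) = 1.0     (PQ peak, 10000 nits)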
568
569float convertHLGFromLinear(float sig)
570{
571 const float a = 0.17883277f;
572 const float b = 0.28466892f; // = 1 - 4a
573 const float c = 0.55991073f; // = 0.5 - a ln(4a)
574
575 if (sig < 1.f/12.f)
576 return sqrtf(3.f*sig);
577 return a*logf(12.f*sig - b) + c;
578}
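// Consistency check (illustration, not part of the Qt sources): the two branches meet
// continuously at the breakpoint, and the curve reaches 1.0 at nominal peak:
//
//   convertHLGFromLinear(1.f/12.f) = sqrtf(0.25f) = 0.5
//   convertHLGFromLinear(1.f)      = a*logf(12.f - b) + c ~ 1.0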
579
580static float convertSDRFromLinear(float sig)
581{
582 return sig;
583}
584
585void updateUniformData(QByteArray *dst, QRhi *rhi, const QVideoFrameFormat &format,
586 const QVideoFrame &frame, const QMatrix4x4 &transform, float opacity,
587 float maxNits)
588{
589#ifndef Q_OS_ANDROID
590 Q_UNUSED(frame);
591#endif
592
593 QMatrix4x4 cmat;
594 switch (format.pixelFormat()) {
595 case QVideoFrameFormat::Format_Invalid:
596 return;
597
598 case QVideoFrameFormat::Format_ARGB8888:
599 case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
600 case QVideoFrameFormat::Format_XRGB8888:
601 case QVideoFrameFormat::Format_BGRA8888:
602 case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
603 case QVideoFrameFormat::Format_BGRX8888:
604 case QVideoFrameFormat::Format_ABGR8888:
605 case QVideoFrameFormat::Format_XBGR8888:
606 case QVideoFrameFormat::Format_RGBA8888:
607 case QVideoFrameFormat::Format_RGBX8888: {
608 if (format.colorRange() == QVideoFrameFormat::ColorRange_Video) {
609 constexpr float scale = 255.0f / 219.0f; // (255 - 0) / (235 - 16)
610 constexpr float offset = -16.0f / 219.0f; // -16 / (235 - 16)
611 // clang-format off
612 cmat = QMatrix4x4 {
613 scale, 0.f, 0.f, offset,
614 0.f, scale, 0.f, offset,
615 0.f, 0.f, scale, offset,
616 0.f, 0.f, 0.f, 1.f,
617 };
618 // clang-format on
619 }
620
621 break;
622 }
623
624 case QVideoFrameFormat::Format_Jpeg:
625 case QVideoFrameFormat::Format_Y8:
626 case QVideoFrameFormat::Format_Y16:
627 break;
628 case QVideoFrameFormat::Format_IMC1:
629 case QVideoFrameFormat::Format_IMC2:
630 case QVideoFrameFormat::Format_IMC3:
631 case QVideoFrameFormat::Format_IMC4:
632 case QVideoFrameFormat::Format_AYUV:
633 case QVideoFrameFormat::Format_AYUV_Premultiplied:
634 case QVideoFrameFormat::Format_YUV420P:
635 case QVideoFrameFormat::Format_YUV420P10:
636 case QVideoFrameFormat::Format_YUV422P:
637 case QVideoFrameFormat::Format_YV12:
638 case QVideoFrameFormat::Format_UYVY:
639 case QVideoFrameFormat::Format_YUYV:
640 case QVideoFrameFormat::Format_NV12:
641 case QVideoFrameFormat::Format_NV21:
642 case QVideoFrameFormat::Format_P010:
643 case QVideoFrameFormat::Format_P016:
644 cmat = colorMatrix(format);
645 break;
646 case QVideoFrameFormat::Format_SamplerExternalOES:
647 // get Android specific transform for the externalsampler texture
648 if (auto hwBuffer = QVideoFramePrivate::hwBuffer(frame))
649 cmat = hwBuffer->externalTextureMatrix();
650 break;
651 case QVideoFrameFormat::Format_SamplerRect:
652 {
653 // Similarly to SamplerExternalOES, the "color matrix" is used here to
654 // transform the texture coordinates. OpenGL texture rectangles expect
655 // non-normalized UVs, so apply a scale to have the fragment shader see
656 // UVs in range [width,height] instead of [0,1].
657 const QSize videoSize = frame.size();
658 cmat.scale(videoSize.width(), videoSize.height());
659 }
660 break;
661 }
662
663 // HDR with a PQ or HLG transfer function uses BT.2390-based tone mapping to cut off the HDR peaks.
664 // This requires passing the maximum luminance the tone mapper should clip to into the fragment
665 // shader. To reduce computations there, it is precomputed in PQ values here.
666 auto fromLinear = convertSDRFromLinear;
667 switch (format.colorTransfer()) {
668 case QVideoFrameFormat::ColorTransfer_ST2084:
669 fromLinear = convertPQFromLinear;
670 break;
671 case QVideoFrameFormat::ColorTransfer_STD_B67:
672 fromLinear = convertHLGFromLinear;
673 break;
674 default:
675 break;
676 }
677
678 if (dst->size() < qsizetype(sizeof(UniformData)))
679 dst->resize(sizeof(UniformData));
680
681 auto ud = reinterpret_cast<UniformData*>(dst->data());
682 memcpy(ud->transformMatrix, transform.constData(), sizeof(ud->transformMatrix));
683 memcpy(ud->colorMatrix, cmat.constData(), sizeof(ud->transformMatrix));
684 ud->opacity = opacity;
685 ud->width = float(format.frameWidth());
686 ud->masteringWhite = fromLinear(float(format.maxLuminance())/100.f);
687 ud->maxLum = fromLinear(float(maxNits)/100.f);
688 const TextureDescription* desc = textureDescription(format.pixelFormat());
689
690 const bool isDmaBuf = QVideoFramePrivate::hasDmaBuf(frame);
691 using FallbackPolicy = QVideoTextureHelper::TextureDescription::FallbackPolicy;
692 auto fallbackPolicy = isDmaBuf
693 ? FallbackPolicy::Disable
694 : FallbackPolicy::Enable;
695
696 // Prefer sampling the red component unless the format needs an 8-bit single-channel texture
697 // that the backend only exposes as alpha; this keeps the shaders compatible with 16-bit formats.
698
699 const bool useRedComponent =
700 !desc->hasTextureFormat(TextureDescription::Red_8)
701 || isRhiTextureFormatSupported(rhi, QRhiTexture::R8)
702 || rhi->isFeatureSupported(QRhi::RedOrAlpha8IsRed)
703 || isDmaBuf;
704 ud->redOrAlphaIndex = useRedComponent ? 0 : 3; // r:0 g:1 b:2 a:3
705
706 for (int plane = 0; plane < desc->nplanes; ++plane)
707 ud->planeFormats[plane] = desc->rhiTextureFormat(plane, rhi, fallbackPolicy);
708}
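// Usage sketch (illustration, not part of the Qt sources; 'rhi', 'frame' and 'mvp' are
// hypothetical caller-side variables). A render node fills its uniform buffer like this:
//
//   QByteArray uniformData;
//   QVideoTextureHelper::updateUniformData(&uniformData, rhi, frame.surfaceFormat(), frame,
//                                          mvp, /*opacity*/ 1.f, /*maxNits*/ 100.f);
//   // uniformData now holds a UniformData blob matching the layout expected by the
//   // shader returned from fragmentShaderFileName().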
709
715
716static UpdateTextureWithMapResult updateTextureWithMap(const QVideoFrame &frame, QRhi &rhi,
717 QRhiResourceUpdateBatch &rub, int plane,
718 std::unique_ptr<QRhiTexture> &tex)
719{
720 Q_ASSERT(frame.isMapped());
721
722 QVideoFrameFormat fmt = frame.surfaceFormat();
723 QVideoFrameFormat::PixelFormat pixelFormat = fmt.pixelFormat();
724 QSize size = fmt.frameSize();
725
726 const TextureDescription &texDesc = descriptions[pixelFormat];
727 QSize planeSize = texDesc.rhiPlaneSize(size, plane, &rhi);
728
729 bool needsRebuild = !tex || tex->pixelSize() != planeSize || tex->format() != texDesc.rhiTextureFormat(plane, &rhi);
730 if (!tex) {
731 tex.reset(rhi.newTexture(texDesc.rhiTextureFormat(plane, &rhi), planeSize, 1, {}));
732 if (!tex) {
733 qWarning("Failed to create new texture (size %dx%d)", planeSize.width(), planeSize.height());
735 }
736 }
737
738 if (needsRebuild) {
739 tex->setFormat(texDesc.rhiTextureFormat(plane, &rhi));
740 tex->setPixelSize(planeSize);
741 if (!tex->create()) {
742 qWarning("Failed to create texture (size %dx%d)", planeSize.width(), planeSize.height());
744 }
745 }
746
748
749    QRhiTextureSubresourceUploadDescription subresDesc;
750
751 if (pixelFormat == QVideoFrameFormat::Format_Jpeg) {
752 Q_ASSERT(plane == 0);
753
754 QImage image;
755
756 // Calling QVideoFrame::toImage is not accurate here; to be fixed.
757 // The frame transformation is handled later.
758 const QVideoFrameFormat surfaceFormat = frame.surfaceFormat();
759
760 const bool hasSurfaceTransform = surfaceFormat.isMirrored()
761 || surfaceFormat.scanLineDirection() == QVideoFrameFormat::BottomToTop
762 || surfaceFormat.rotation() != QtVideo::Rotation::None;
763
764 if (hasSurfaceTransform)
765 image = qImageFromVideoFrame(frame, VideoTransformation{});
766 else
767 image = frame.toImage(); // use the frame cache, no surface transforms applied
768
769 image.convertTo(QImage::Format_ARGB32);
770 subresDesc.setImage(image);
771
772 } else {
773 // Note: QByteArray::fromRawData creates a QByteArray view without copying the data
774 subresDesc.setData(QByteArray::fromRawData(
775 reinterpret_cast<const char *>(frame.bits(plane)), frame.mappedBytes(plane)));
776 subresDesc.setDataStride(frame.bytesPerLine(plane));
778 }
779
780 QRhiTextureUploadEntry entry(0, 0, subresDesc);
781 QRhiTextureUploadDescription desc({ entry });
782 rub.uploadTexture(tex.get(), desc);
783
784 return result;
785}
786
788createTextureFromHandle(QVideoFrameTexturesHandles &texturesSet, QRhi &rhi,
789 QVideoFrameFormat::PixelFormat pixelFormat, QSize size, int plane)
790{
791 const TextureDescription &texDesc = descriptions[pixelFormat];
792 QSize planeSize = texDesc.rhiPlaneSize(size, plane, &rhi);
793
794 QRhiTexture::Flags textureFlags = {};
795 if (pixelFormat == QVideoFrameFormat::Format_SamplerExternalOES) {
796#ifdef Q_OS_ANDROID
797 if (rhi.backend() == QRhi::OpenGLES2)
798 textureFlags |= QRhiTexture::ExternalOES;
799#endif
800 }
801 if (pixelFormat == QVideoFrameFormat::Format_SamplerRect) {
802#ifdef Q_OS_MACOS
803 if (rhi.backend() == QRhi::OpenGLES2)
804 textureFlags |= QRhiTexture::TextureRectangleGL;
805#endif
806 }
807
808 if (quint64 handle = texturesSet.textureHandle(rhi, plane); handle) {
809 std::unique_ptr<QRhiTexture> tex(rhi.newTexture(texDesc.rhiTextureFormat(plane, &rhi), planeSize, 1, textureFlags));
810 if (tex->createFrom({handle, 0}))
811 return tex;
812
813 qWarning("Failed to initialize QRhiTexture wrapper for native texture object %llu",handle);
814 }
815 return {};
816}
817
818template <typename TexturesType, typename... Args>
820createTexturesArray(QRhi &rhi, QVideoFrameTexturesHandles &texturesSet,
821 QVideoFrameFormat::PixelFormat pixelFormat, QSize size, Args &&...args)
822{
823 const TextureDescription &texDesc = descriptions[pixelFormat];
824 bool ok = true;
825 RhiTextureArray textures;
826 for (quint8 plane = 0; plane < texDesc.nplanes; ++plane) {
827 textures[plane] = QVideoTextureHelper::createTextureFromHandle(texturesSet, rhi,
828 pixelFormat, size, plane);
829 ok &= bool(textures[plane]);
830 }
831 if (ok)
832 return std::make_unique<TexturesType>(std::move(textures), std::forward<Args>(args)...);
833 else
834 return {};
835}
836
837QVideoFrameTexturesUPtr createTexturesFromHandles(QVideoFrameTexturesHandlesUPtr texturesSet,
838 QRhi &rhi,
839 QVideoFrameFormat::PixelFormat pixelFormat,
840 QSize size)
841{
842 if (!texturesSet)
843 return nullptr;
844
845 if (pixelFormat == QVideoFrameFormat::Format_Invalid)
846 return nullptr;
847
848 if (size.isEmpty())
849 return nullptr;
850
851 auto &texturesSetRef = *texturesSet;
852 return createTexturesArray<QVideoFrameTexturesFromHandlesSet>(rhi, texturesSetRef, pixelFormat,
853 size, std::move(texturesSet));
854}
855
856static QVideoFrameTexturesUPtr createTexturesFromMemory(QVideoFrame frame, QRhi &rhi,
857 QRhiResourceUpdateBatch &rub,
858 QVideoFrameTexturesUPtr &oldTextures)
859{
860 qCDebug(qLcVideoTextureHelper) << "createTexturesFromMemory, pixelFormat:" << frame.pixelFormat();
861 if (!frame.map(QVideoFrame::ReadOnly)) {
862 qWarning() << "Cannot map a video frame in ReadOnly mode!";
863 return {};
864 }
865
866 auto unmapFrameGuard = qScopeGuard([&frame] { frame.unmap(); });
867
868 const TextureDescription &texDesc = descriptions[frame.surfaceFormat().pixelFormat()];
869
870 const bool canReuseTextures(dynamic_cast<QVideoFrameTexturesFromMemory*>(oldTextures.get()));
871
872 std::unique_ptr<QVideoFrameTexturesFromMemory> textures(canReuseTextures ?
873 static_cast<QVideoFrameTexturesFromMemory *>(oldTextures.release()) :
875
876 RhiTextureArray& textureArray = textures->textureArray();
877 bool shouldKeepMapping = false;
878 for (quint8 plane = 0; plane < texDesc.nplanes; ++plane) {
879 const auto result = updateTextureWithMap(frame, rhi, rub, plane, textureArray[plane]);
881 return {};
882
884 shouldKeepMapping = true;
885 }
886
887 // as QVideoFrame::unmap does nothing with null frames, we just move the frame to the result
888 textures->setMappedFrame(shouldKeepMapping ? std::move(frame) : QVideoFrame());
889
890 return textures;
891}
892
893QVideoFrameTexturesUPtr createTextures(const QVideoFrame &frame, QRhi &rhi,
894 QRhiResourceUpdateBatch &rub,
895 QVideoFrameTexturesUPtr &oldTextures)
896{
897 if (!frame.isValid())
898 return {};
899
900 auto setSourceFrame = [&frame](QVideoFrameTexturesUPtr result) {
901 result->setSourceFrame(frame);
902 return result;
903 };
904
905 if (QHwVideoBuffer *hwBuffer = QVideoFramePrivate::hwBuffer(frame)) {
906 if (auto textures = hwBuffer->mapTextures(rhi, oldTextures))
907 return setSourceFrame(std::move(textures));
908
909 QVideoFrameFormat format = frame.surfaceFormat();
910 if (auto textures = createTexturesArray<QVideoFrameTexturesFromRhiTextureArray>(
911 rhi, *hwBuffer, format.pixelFormat(), format.frameSize()))
912 return setSourceFrame(std::move(textures));
913 }
914
915 if (auto textures = createTexturesFromMemory(frame, rhi, rub, oldTextures))
916 return setSourceFrame(std::move(textures));
917
918 return {};
919}
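// Usage sketch (illustration, not part of the Qt sources; 'rhi', 'frame' and 'm_textures'
// are hypothetical caller-side names). A sink keeps the textures of the previous frame and
// passes them back in so that the QRhiTextures can be reused:
//
//   QRhiResourceUpdateBatch *rub = rhi->nextResourceUpdateBatch();
//   m_textures = QVideoTextureHelper::createTextures(frame, *rhi, *rub, m_textures);
//   if (!m_textures)
//       qWarning() << "Could not create textures for" << frame.pixelFormat();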
920
922{
924 if (layout.text() == text && videoSize == frameSize)
925 return false;
926
928 QFont font;
929 // 0.045 - based on https://www.md-subs.com/saa-subtitle-font-size
930 qreal fontSize = frameSize.height() * 0.045;
932
934 if (text.isEmpty()) {
935 bounds = {};
936 return true;
937 }
943
945 int leading = metrics.leading();
946
948 qreal margin = videoSize.width()*.05;
949 qreal height = 0;
950 qreal textWidth = 0;
952 while (1) {
954 if (!line.isValid())
955 break;
956
958 height += leading;
960 height += line.height();
962 }
964
965 // put subtitles vertically in lower part of the video but not stuck to the bottom
966 int bottomMargin = videoSize.height() / 20;
969 textWidth += fontSize/4.;
970
972 return true;
973}
974
994
996{
997 auto size = bounds.size().toSize();
998 if (size.isEmpty())
999 return QImage();
1002 bgColor.setAlpha(128);
1003 img.fill(bgColor);
1004
1008 range.start = 0;
1009 range.length = layout.text().size();
1011 layout.draw(&painter, {}, { range });
1012 return img;
1013}
1014
1015}
1016
1017QT_END_NAMESPACE