Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
qvideotexturehelper.cpp
Go to the documentation of this file.
1// Copyright (C) 2021 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
5
10#include "private/qmultimediautils_p.h"
11
12#include <QtCore/qfile.h>
13#include <qpainter.h>
14#include <qloggingcategory.h>
15
17
18Q_STATIC_LOGGING_CATEGORY(qLcVideoTextureHelper, "qt.multimedia.video.texturehelper")
19
20namespace QVideoTextureHelper
21{
22
24 static const bool isSet =
25 qEnvironmentVariableIsSet("QT_MULTIMEDIA_FORCE_GL_TEXTURE_EXTERNAL_OES");
26 return isSet;
27}
28
30 // Format_Invalid
31 { 0, 0,
32 [](int, int) { return 0; },
34 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
35 },
36 // Format_ARGB8888
37 { 1, 4,
38 [](int stride, int height) { return stride*height; },
40 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
41 },
42 // Format_ARGB8888_Premultiplied
43 { 1, 4,
44 [](int stride, int height) { return stride*height; },
46 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
47 },
48 // Format_XRGB8888
49 { 1, 4,
50 [](int stride, int height) { return stride*height; },
52 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
53 },
54 // Format_BGRA8888
55 { 1, 4,
56 [](int stride, int height) { return stride*height; },
58 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
59 },
60 // Format_BGRA8888_Premultiplied
61 { 1, 4,
62 [](int stride, int height) { return stride*height; },
64 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
65 },
66 // Format_BGRX8888
67 { 1, 4,
68 [](int stride, int height) { return stride*height; },
70 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
71 },
72 // Format_ABGR8888
73 { 1, 4,
74 [](int stride, int height) { return stride*height; },
76 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
77 },
78 // Format_XBGR8888
79 { 1, 4,
80 [](int stride, int height) { return stride*height; },
82 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
83 },
84 // Format_RGBA8888
85 { 1, 4,
86 [](int stride, int height) { return stride*height; },
88 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
89 },
90 // Format_RGBX8888
91 { 1, 4,
92 [](int stride, int height) { return stride*height; },
94 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
95 },
96 // Format_AYUV
97 { 1, 4,
98 [](int stride, int height) { return stride*height; },
100 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
101 },
102 // Format_AYUV_Premultiplied
103 { 1, 4,
104 [](int stride, int height) { return stride*height; },
106 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
107 },
108 // Format_YUV420P
109 { 3, 1,
110 [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
112 { { 1, 1 }, { 2, 2 }, { 2, 2 } }
113 },
114 // Format_YUV422P
115 { 3, 1,
116 [](int stride, int height) { return stride * height * 2; },
118 { { 1, 1 }, { 2, 1 }, { 2, 1 } }
119 },
120 // Format_YV12
121 { 3, 1,
122 [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
124 { { 1, 1 }, { 2, 2 }, { 2, 2 } }
125 },
126 // Format_UYVY
127 { 1, 2,
128 [](int stride, int height) { return stride*height; },
130 { { 2, 1 }, { 1, 1 }, { 1, 1 } }
131 },
132 // Format_YUYV
133 { 1, 2,
134 [](int stride, int height) { return stride*height; },
136 { { 2, 1 }, { 1, 1 }, { 1, 1 } }
137 },
138 // Format_NV12
139 { 2, 1,
140 [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
142 { { 1, 1 }, { 2, 2 }, { 1, 1 } }
143 },
144 // Format_NV21
145 { 2, 1,
146 [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
148 { { 1, 1 }, { 2, 2 }, { 1, 1 } }
149 },
150 // Format_IMC1
151 { 3, 1,
152 [](int stride, int height) {
153 // IMC1 requires that U and V components are aligned on a multiple of 16 lines
154 int h = (height + 15) & ~15;
155 h += 2*(((h/2) + 15) & ~15);
156 return stride * h;
157 },
159 { { 1, 1 }, { 2, 2 }, { 2, 2 } }
160 },
161 // Format_IMC2
162 { 2, 1,
163 [](int stride, int height) { return 2*stride*height; },
165 { { 1, 1 }, { 1, 2 }, { 1, 1 } }
166 },
167 // Format_IMC3
168 { 3, 1,
169 [](int stride, int height) {
170 // IMC3 requires that U and V components are aligned on a multiple of 16 lines
171 int h = (height + 15) & ~15;
172 h += 2*(((h/2) + 15) & ~15);
173 return stride * h;
174 },
176 { { 1, 1 }, { 2, 2 }, { 2, 2 } }
177 },
178 // Format_IMC4
179 { 2, 1,
180 [](int stride, int height) { return 2*stride*height; },
182 { { 1, 1 }, { 1, 2 }, { 1, 1 } }
183 },
184 // Format_Y8
185 { 1, 1,
186 [](int stride, int height) { return stride*height; },
188 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
189 },
190 // Format_Y16
191 { 1, 2,
192 [](int stride, int height) { return stride*height; },
194 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
195 },
196 // Format_P010
197 { 2, 2,
198 [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
200 { { 1, 1 }, { 2, 2 }, { 1, 1 } }
201 },
202 // Format_P016
203 { 2, 2,
204 [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
206 { { 1, 1 }, { 2, 2 }, { 1, 1 } }
207 },
208 // Format_SamplerExternalOES
209 {
210 1, 0,
211 [](int, int) { return 0; },
213 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
214 },
215 // Format_Jpeg
216 { 1, 4,
217 [](int stride, int height) { return stride*height; },
219 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
220 },
221 // Format_SamplerRect
222 {
223 1, 0,
224 [](int, int) { return 0; },
226 { { 1, 1 }, { 1, 1 }, { 1, 1 } }
227 },
228 // Format_YUV420P10
229 { 3, 2,
230 [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
232 { { 1, 1 }, { 2, 2 }, { 2, 2 } }
233 },
234};
235
236Q_GLOBAL_STATIC(QList<QRhiTexture::Format>, g_excludedRhiTextureFormats) // for tests only
237
238static bool isRhiTextureFormatSupported(const QRhi *rhi, QRhiTexture::Format format)
239{
240 if (g_excludedRhiTextureFormats->contains(format))
241 return false;
242 if (!rhi) // consider the format is supported if no rhi specified
243 return true;
244 return rhi->isTextureFormatSupported(format);
245}
246
280
281QRhiTexture::Format resolvedRhiTextureFormat(QRhiTexture::Format format, QRhi *rhi)
282{
283 if (isRhiTextureFormatSupported(rhi, format))
284 return format;
285
286 QRhiTexture::Format fallbackFormat;
287 switch (format) {
288 case QRhiTexture::R8:
289 fallbackFormat = resolvedRhiTextureFormat(QRhiTexture::RED_OR_ALPHA8, rhi);
290 break;
291 case QRhiTexture::RG8:
292 case QRhiTexture::RG16:
293 fallbackFormat = resolvedRhiTextureFormat(QRhiTexture::RGBA8, rhi);
294 break;
295 case QRhiTexture::R16:
296 fallbackFormat = resolvedRhiTextureFormat(QRhiTexture::RG8, rhi);
297 break;
298 default:
299 // End fallback chain here, and return UnknownFormat
300 return QRhiTexture::UnknownFormat;
301 }
302
303 if (fallbackFormat == QRhiTexture::UnknownFormat) {
304 // TODO: QTBUG-135911: In some cases rhi claims format and fallbacks are all
305 // unsupported, but when using preferred format video plays fine
306 qCDebug(qLcVideoTextureHelper) << "Cannot determine any usable texture format, using preferred format" << format;
307 return format;
308 }
309
310 qCDebug(qLcVideoTextureHelper) << "Using fallback texture format" << fallbackFormat;
311 return fallbackFormat;
312}
313
314void setExcludedRhiTextureFormats(QList<QRhiTexture::Format> formats)
315{
316 g_excludedRhiTextureFormats->swap(formats);
317}
318
319const TextureDescription *textureDescription(QVideoFrameFormat::PixelFormat format)
320{
321 return descriptions + format;
322}
323
/*
    Returns the resource path of the vertex shader (.qsb) to use for
    \a format. Sampler-external and rectangle-sampler formats get dedicated
    vertex shaders; everything else shares the default one.
*/
QString vertexShaderFileName(const QVideoFrameFormat &format)
{
    auto fmt = format.pixelFormat();
    Q_UNUSED(fmt);

    // NOTE: "#if 1//def Q_OS_ANDROID" intentionally compiles this branch on
    // every platform — the platform guard is commented out.
#if 1//def Q_OS_ANDROID
    if (fmt == QVideoFrameFormat::Format_SamplerExternalOES)
        return QStringLiteral(":/qt-project.org/multimedia/shaders/externalsampler.vert.qsb");
#endif
    // Same always-enabled pattern as above (guard for macOS commented out).
#if 1//def Q_OS_MACOS
    if (fmt == QVideoFrameFormat::Format_SamplerRect)
        return QStringLiteral(":/qt-project.org/multimedia/shaders/rectsampler.vert.qsb");
#endif

    // Default vertex shader for all other pixel formats.
    return QStringLiteral(":/qt-project.org/multimedia/shaders/vertex.vert.qsb");
}
340
/*
    Returns the resource path of the fragment shader (.qsb) matching the pixel
    format of \a format (and, for P010/P016, its color transfer function).
    A "_linear" shader variant is selected for HDR extended-linear-sRGB
    swapchains. Returns an empty string for invalid/unknown formats.
*/
QString fragmentShaderFileName(const QVideoFrameFormat &format, QRhi *,
                               QRhiSwapChain::Format surfaceFormat)
{
    // Base name of the shader; resource prefix and suffix are appended below.
    QString shaderFile;
    switch (format.pixelFormat()) {
    case QVideoFrameFormat::Format_Y8:
        shaderFile = QStringLiteral("y");
        break;
    case QVideoFrameFormat::Format_Y16:
        shaderFile = QStringLiteral("y16");
        break;
    case QVideoFrameFormat::Format_AYUV:
    case QVideoFrameFormat::Format_AYUV_Premultiplied:
        shaderFile = QStringLiteral("ayuv");
        break;
    case QVideoFrameFormat::Format_ARGB8888:
    case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
    case QVideoFrameFormat::Format_XRGB8888:
        shaderFile = QStringLiteral("argb");
        break;
    case QVideoFrameFormat::Format_ABGR8888:
    case QVideoFrameFormat::Format_XBGR8888:
        shaderFile = QStringLiteral("abgr");
        break;
    case QVideoFrameFormat::Format_Jpeg: // Jpeg is decoded transparently into an ARGB texture
        shaderFile = QStringLiteral("bgra");
        break;
    case QVideoFrameFormat::Format_RGBA8888:
    case QVideoFrameFormat::Format_RGBX8888:
    case QVideoFrameFormat::Format_BGRA8888:
    case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
    case QVideoFrameFormat::Format_BGRX8888:
        shaderFile = QStringLiteral("rgba");
        break;
    case QVideoFrameFormat::Format_YUV420P:
    case QVideoFrameFormat::Format_YUV422P:
    case QVideoFrameFormat::Format_IMC3:
        shaderFile = QStringLiteral("yuv_triplanar");
        break;
    case QVideoFrameFormat::Format_YUV420P10:
        shaderFile = QStringLiteral("yuv_triplanar_p10");
        break;
    case QVideoFrameFormat::Format_YV12:
    case QVideoFrameFormat::Format_IMC1:
        shaderFile = QStringLiteral("yvu_triplanar");
        break;
    case QVideoFrameFormat::Format_IMC2:
        shaderFile = QStringLiteral("imc2");
        break;
    case QVideoFrameFormat::Format_IMC4:
        shaderFile = QStringLiteral("imc4");
        break;
    case QVideoFrameFormat::Format_UYVY:
        shaderFile = QStringLiteral("uyvy");
        break;
    case QVideoFrameFormat::Format_YUYV:
        shaderFile = QStringLiteral("yuyv");
        break;
    case QVideoFrameFormat::Format_P010:
    case QVideoFrameFormat::Format_P016:
        // P010/P016 have the same layout as NV12, just 16 instead of 8 bits per pixel
        if (format.colorTransfer() == QVideoFrameFormat::ColorTransfer_ST2084) {
            shaderFile = QStringLiteral("nv12_bt2020_pq");
            break;
        }
        if (format.colorTransfer() == QVideoFrameFormat::ColorTransfer_STD_B67) {
            shaderFile = QStringLiteral("nv12_bt2020_hlg");
            break;
        }
        shaderFile = QStringLiteral("p016");
        break;
    case QVideoFrameFormat::Format_NV12:
        shaderFile = QStringLiteral("nv12");
        break;
    case QVideoFrameFormat::Format_NV21:
        shaderFile = QStringLiteral("nv21");
        break;
    case QVideoFrameFormat::Format_SamplerExternalOES:
        // NOTE: "#if 1//def Q_OS_ANDROID" — branch intentionally enabled on
        // all platforms. If the guard were restored, this case would fall
        // through to Format_SamplerRect below.
#if 1//def Q_OS_ANDROID
        shaderFile = QStringLiteral("externalsampler");
        break;
#endif
    case QVideoFrameFormat::Format_SamplerRect:
        // Same always-enabled pattern (macOS guard commented out).
#if 1//def Q_OS_MACOS
        shaderFile = QStringLiteral("rectsampler_bgra");
        break;
#endif
        // fallthrough
    case QVideoFrameFormat::Format_Invalid:
    default:
        break;
    }

    if (shaderFile.isEmpty())
        return QString();

    shaderFile.prepend(u":/qt-project.org/multimedia/shaders/");

    // HDR swapchains get shader variants that output linear extended sRGB.
    if (surfaceFormat == QRhiSwapChain::HDRExtendedSrgbLinear)
        shaderFile.append(u"_linear");

    shaderFile.append(u".frag.qsb");

    // The shader must have been compiled into the resource system at build time.
    Q_ASSERT_X(QFile::exists(shaderFile), Q_FUNC_INFO,
               QStringLiteral("Shader file %1 does not exist").arg(shaderFile).toLatin1());
    qCDebug(qLcVideoTextureHelper) << "fragmentShaderFileName returns" << shaderFile;
    return shaderFile;
}
449
450// Matrices are calculated from
451// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.601-7-201103-I!!PDF-E.pdf
452// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.709-6-201506-I!!PDF-E.pdf
453// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.2020-2-201510-I!!PDF-E.pdf
454//
455// For BT2020, we also need to convert the Rec2020 RGB colorspace to sRGB see
456// shaders/colorconvert.glsl for details.
457//
458// Doing the math gives the following (Y, U & V normalized to [0..1] range):
459//
460// Y = a*R + b*G + c*B
461// R = Y + e*V
462// G = Y - c*d/b*U - a*e/b*V
463// B = Y + d*U
464
465// BT2020:
466// a = .2627, b = 0.6780, c = 0.0593
467// d = 1.8814
468// e = 1.4746
469//
470// BT709:
471// a = 0.2126, b = 0.7152, c = 0.0722
472// d = 1.8556
473// e = 1.5748
474//
475// BT601:
 476// a = 0.299, b = 0.587, c = 0.114
477// d = 1.42
478// e = 1.772
479//
480
481// clang-format off
/*
    Returns the YUV -> RGB color-conversion matrix for \a format, selecting
    the coefficient set by color space and color range. The math behind the
    coefficients is documented in the comment block above this function.
*/
static QMatrix4x4 colorMatrix(const QVideoFrameFormat &format)
{
    auto colorSpace = format.colorSpace();
    // Streams often omit the color space; guess from the resolution.
    if (colorSpace == QVideoFrameFormat::ColorSpace_Undefined) {
        if (format.frameHeight() > 576)
            // HD video, assume BT709
            colorSpace = QVideoFrameFormat::ColorSpace_BT709;
        else
            // SD video, assume BT601
            colorSpace = QVideoFrameFormat::ColorSpace_BT601;
    }
    switch (colorSpace) {
    case QVideoFrameFormat::ColorSpace_AdobeRgb:
        return {
            1.0f, 0.000f, 1.402f, -0.701f,
            1.0f, -0.344f, -0.714f, 0.529f,
            1.0f, 1.772f, 0.000f, -0.886f,
            0.0f, 0.000f, 0.000f, 1.000f
        };
    default:
    case QVideoFrameFormat::ColorSpace_BT709:
        // Full range: no 16..235 expansion needed (Y multiplier is 1.0).
        if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
            return {
                1.0f, 0.0f, 1.5748f, -0.790488f,
                1.0f, -0.187324f, -0.468124f, 0.329010f,
                1.0f, 1.855600f, 0.0f, -0.931439f,
                0.0f, 0.0f, 0.0f, 1.0f
            };
        // Limited ("video") range: 1.1644 = 255/219 expands Y to full range.
        return {
            1.1644f, 0.0000f, 1.7927f, -0.9729f,
            1.1644f, -0.2132f, -0.5329f, 0.3015f,
            1.1644f, 2.1124f, 0.0000f, -1.1334f,
            0.0000f, 0.0000f, 0.0000f, 1.0000f
        };
    case QVideoFrameFormat::ColorSpace_BT2020:
        if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
            return {
                1.f, 0.0000f, 1.4746f, -0.7402f,
                1.f, -0.1646f, -0.5714f, 0.3694f,
                1.f, 1.8814f, 0.000f, -0.9445f,
                0.0f, 0.0000f, 0.000f, 1.0000f
            };
        return {
            1.1644f, 0.000f, 1.6787f, -0.9157f,
            1.1644f, -0.1874f, -0.6504f, 0.3475f,
            1.1644f, 2.1418f, 0.0000f, -1.1483f,
            0.0000f, 0.0000f, 0.0000f, 1.0000f
        };
    case QVideoFrameFormat::ColorSpace_BT601:
        // Corresponds to the primaries used by NTSC BT601. For PAL BT601, we use the BT709 conversion
        // as those are very close.
        // NOTE(review): the G-row coefficients below (-0.1646, -0.57135) equal
        // the BT2020 full-range values above rather than c*d/b and a*e/b
        // derived from the BT601 constants — verify against upstream intent.
        if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
            return {
                1.f, 0.000f, 1.772f, -0.886f,
                1.f, -0.1646f, -0.57135f, 0.36795f,
                1.f, 1.42f, 0.000f, -0.71f,
                0.0f, 0.000f, 0.000f, 1.0000f
            };
        return {
            1.164f, 0.000f, 1.596f, -0.8708f,
            1.164f, -0.392f, -0.813f, 0.5296f,
            1.164f, 2.017f, 0.000f, -1.0810f,
            0.000f, 0.000f, 0.000f, 1.0000f
        };
    }
}
548// clang-format on
549
550// PQ transfer function, see also https://en.wikipedia.org/wiki/Perceptual_quantizer
551// or https://ieeexplore.ieee.org/document/7291452
// Inverse PQ EOTF (SMPTE ST 2084): maps a linear signal, normalized so that
// 1.0 equals SDR white (100 nits), onto the non-linear [0..1] PQ scale that
// spans 0..10000 nits. See https://en.wikipedia.org/wiki/Perceptual_quantizer
static float convertPQFromLinear(float sig)
{
    // ST 2084 constants.
    constexpr float m1 = 1305.f/8192.f;
    constexpr float m2 = 2523.f/32.f;
    constexpr float c1 = 107.f/128.f;
    constexpr float c2 = 2413.f/128.f;
    constexpr float c3 = 2392.f/128.f;

    // Rescale so that sig == 1.0 corresponds to 100 nits on the 10000 nit scale.
    constexpr float SDR_LEVEL = 100.f;
    const float linear = sig * (SDR_LEVEL/10000.f);

    const float p = powf(linear, m1);
    return powf((c1 + c2*p) / (1 + c3*p), m2);
}
567
// Inverse HLG OETF (ARIB STD-B67 / ITU-R BT.2100): maps a linear scene-light
// signal in [0..1] to the non-linear HLG signal. Square-root segment below the
// knee at 1/12, logarithmic segment above it (continuous at 0.5).
float convertHLGFromLinear(float sig)
{
    constexpr float a = 0.17883277f;
    constexpr float b = 0.28466892f; // = 1 - 4a
    constexpr float c = 0.55991073f; // = 0.5 - a ln(4a)

    constexpr float knee = 1.f/12.f;
    return sig < knee ? sqrtf(3.f*sig)
                      : a*logf(12.f*sig - b) + c;
}
578
// Identity "transfer function" used for SDR content: the signal is already in
// the linear working range, so no conversion is applied. Kept as a function so
// it is interchangeable with convertPQFromLinear/convertHLGFromLinear.
static float convertSDRFromLinear(float sig)
{
    return sig;
}
583
584void updateUniformData(QByteArray *dst, QRhi *rhi, const QVideoFrameFormat &format,
585 const QVideoFrame &frame, const QMatrix4x4 &transform, float opacity,
586 float maxNits)
587{
588#ifndef Q_OS_ANDROID
589 Q_UNUSED(frame);
590#endif
591
592 QMatrix4x4 cmat;
593 switch (format.pixelFormat()) {
594 case QVideoFrameFormat::Format_Invalid:
595 return;
596
597 case QVideoFrameFormat::Format_ARGB8888:
598 case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
599 case QVideoFrameFormat::Format_XRGB8888:
600 case QVideoFrameFormat::Format_BGRA8888:
601 case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
602 case QVideoFrameFormat::Format_BGRX8888:
603 case QVideoFrameFormat::Format_ABGR8888:
604 case QVideoFrameFormat::Format_XBGR8888:
605 case QVideoFrameFormat::Format_RGBA8888:
606 case QVideoFrameFormat::Format_RGBX8888: {
607 if (format.colorRange() == QVideoFrameFormat::ColorRange_Video) {
608 constexpr float scale = 255.0f / 219.0f; // (255 - 0) / (235 - 16)
609 constexpr float offset = -16.0f / 219.0f; // -16 / (235 - 16)
610 // clang-format off
611 cmat = QMatrix4x4 {
612 scale, 0.f, 0.f, offset,
613 0.f, scale, 0.f, offset,
614 0.f, 0.f, scale, offset,
615 0.f, 0.f, 0.f, 1.f,
616 };
617 // clang-format on
618 }
619
620 break;
621 }
622
623 case QVideoFrameFormat::Format_Jpeg:
624 case QVideoFrameFormat::Format_Y8:
625 case QVideoFrameFormat::Format_Y16:
626 break;
627 case QVideoFrameFormat::Format_IMC1:
628 case QVideoFrameFormat::Format_IMC2:
629 case QVideoFrameFormat::Format_IMC3:
630 case QVideoFrameFormat::Format_IMC4:
631 case QVideoFrameFormat::Format_AYUV:
632 case QVideoFrameFormat::Format_AYUV_Premultiplied:
633 case QVideoFrameFormat::Format_YUV420P:
634 case QVideoFrameFormat::Format_YUV420P10:
635 case QVideoFrameFormat::Format_YUV422P:
636 case QVideoFrameFormat::Format_YV12:
637 case QVideoFrameFormat::Format_UYVY:
638 case QVideoFrameFormat::Format_YUYV:
639 case QVideoFrameFormat::Format_NV12:
640 case QVideoFrameFormat::Format_NV21:
641 case QVideoFrameFormat::Format_P010:
642 case QVideoFrameFormat::Format_P016:
643 cmat = colorMatrix(format);
644 break;
645 case QVideoFrameFormat::Format_SamplerExternalOES:
646 // get Android specific transform for the externalsampler texture
647 if (auto hwBuffer = QVideoFramePrivate::hwBuffer(frame))
648 cmat = hwBuffer->externalTextureMatrix();
649 break;
650 case QVideoFrameFormat::Format_SamplerRect:
651 {
652 // Similarly to SamplerExternalOES, the "color matrix" is used here to
653 // transform the texture coordinates. OpenGL texture rectangles expect
654 // non-normalized UVs, so apply a scale to have the fragment shader see
655 // UVs in range [width,height] instead of [0,1].
656 const QSize videoSize = frame.size();
657 cmat.scale(videoSize.width(), videoSize.height());
658 }
659 break;
660 }
661
662 // HDR with a PQ or HLG transfer function uses a BT2390 based tone mapping to cut off the HDR peaks
663 // This requires that we pass the max luminance the tonemapper should clip to over to the fragment
664 // shader. To reduce computations there, it's precomputed in PQ values here.
665 auto fromLinear = convertSDRFromLinear;
666 switch (format.colorTransfer()) {
667 case QVideoFrameFormat::ColorTransfer_ST2084:
668 fromLinear = convertPQFromLinear;
669 break;
670 case QVideoFrameFormat::ColorTransfer_STD_B67:
671 fromLinear = convertHLGFromLinear;
672 break;
673 default:
674 break;
675 }
676
677 if (dst->size() < qsizetype(sizeof(UniformData)))
678 dst->resize(sizeof(UniformData));
679
680 auto ud = reinterpret_cast<UniformData*>(dst->data());
681 memcpy(ud->transformMatrix, transform.constData(), sizeof(ud->transformMatrix));
682 memcpy(ud->colorMatrix, cmat.constData(), sizeof(ud->transformMatrix));
683 ud->opacity = opacity;
684 ud->width = float(format.frameWidth());
685 ud->masteringWhite = fromLinear(float(format.maxLuminance())/100.f);
686 ud->maxLum = fromLinear(float(maxNits)/100.f);
687 const TextureDescription* desc = textureDescription(format.pixelFormat());
688
689 const bool isDmaBuf = QVideoFramePrivate::hasDmaBuf(frame);
690 using FallbackPolicy = QVideoTextureHelper::TextureDescription::FallbackPolicy;
691 auto fallbackPolicy = isDmaBuf
692 ? FallbackPolicy::Disable
693 : FallbackPolicy::Enable;
694
695 // Let's consider using the red component if Red_8 is not used,
696 // it's useful for compatibility the shaders with 16bit formats.
697
698 const bool useRedComponent =
699 !desc->hasTextureFormat(TextureDescription::Red_8)
700 || isRhiTextureFormatSupported(rhi, QRhiTexture::R8)
701 || rhi->isFeatureSupported(QRhi::RedOrAlpha8IsRed)
702 || isDmaBuf;
703 ud->redOrAlphaIndex = useRedComponent ? 0 : 3; // r:0 g:1 b:2 a:3
704
705 for (int plane = 0; plane < desc->nplanes; ++plane)
706 ud->planeFormats[plane] = desc->rhiTextureFormat(plane, rhi, fallbackPolicy);
707}
708
714
/*
    Uploads plane \a plane of the already-mapped \a frame into \a tex,
    (re)creating the texture when its size or format no longer matches the
    frame. The upload is queued on \a rub and performed when that batch is
    submitted.

    NOTE(review): this excerpt has several lines elided (error returns, the
    subresource-description declaration and the initial result value) —
    comments below mark the spots; verify against the full source.
*/
static UpdateTextureWithMapResult updateTextureWithMap(const QVideoFrame &frame, QRhi &rhi,
                                                       QRhiResourceUpdateBatch &rub, int plane,
                                                       std::unique_ptr<QRhiTexture> &tex)
{
    // Caller must have mapped the frame for reading already.
    Q_ASSERT(frame.isMapped());

    QVideoFrameFormat fmt = frame.surfaceFormat();
    QVideoFrameFormat::PixelFormat pixelFormat = fmt.pixelFormat();
    QSize size = fmt.frameSize();

    // Per-plane geometry depends on the format's chroma subsampling.
    const TextureDescription &texDesc = descriptions[pixelFormat];
    QSize planeSize = texDesc.rhiPlaneSize(size, plane, &rhi);

    // Rebuild when there is no texture yet or its properties changed.
    bool needsRebuild = !tex || tex->pixelSize() != planeSize || tex->format() != texDesc.rhiTextureFormat(plane, &rhi);
    if (!tex) {
        tex.reset(rhi.newTexture(texDesc.rhiTextureFormat(plane, &rhi), planeSize, 1, {}));
        if (!tex) {
            qWarning("Failed to create new texture (size %dx%d)", planeSize.width(), planeSize.height());
            // NOTE(review): an early error-return appears to be elided here.
        }
    }

    if (needsRebuild) {
        tex->setFormat(texDesc.rhiTextureFormat(plane, &rhi));
        tex->setPixelSize(planeSize);
        if (!tex->create()) {
            qWarning("Failed to create texture (size %dx%d)", planeSize.width(), planeSize.height());
            // NOTE(review): an early error-return appears to be elided here.
        }
    }

    // NOTE(review): the declarations of subresDesc and result appear to be
    // elided from this excerpt.

    if (pixelFormat == QVideoFrameFormat::Format_Jpeg) {
        // Jpeg frames are decoded into a full ARGB image on the CPU first.
        Q_ASSERT(plane == 0);

        QImage image;

        // calling QVideoFrame::toImage is not accurate. To be fixed.
        // frame transformation will be considered later
        const QVideoFrameFormat surfaceFormat = frame.surfaceFormat();

        const bool hasSurfaceTransform = surfaceFormat.isMirrored()
                || surfaceFormat.scanLineDirection() == QVideoFrameFormat::BottomToTop
                || surfaceFormat.rotation() != QtVideo::Rotation::None;

        if (hasSurfaceTransform)
            image = qImageFromVideoFrame(frame, VideoTransformation{});
        else
            image = frame.toImage(); // use the frame cache, no surface transforms applied

        image.convertTo(QImage::Format_ARGB32);
        subresDesc.setImage(image);

    } else {
        // Note, QByteArray::fromRawData creates QByteArray as a view without data copying
        subresDesc.setData(QByteArray::fromRawData(
                reinterpret_cast<const char *>(frame.bits(plane)), frame.mappedBytes(plane)));
        subresDesc.setDataStride(frame.bytesPerLine(plane));
    }

    // Queue the upload; it executes when the resource update batch is submitted.
    QRhiTextureUploadEntry entry(0, 0, subresDesc);
    QRhiTextureUploadDescription desc({ entry });
    rub.uploadTexture(tex.get(), desc);

    return result;
}
785
/*
    Wraps the native texture handle for \a plane from \a texturesSet into a
    QRhiTexture, applying platform-specific sampler flags where needed.
    Returns an empty pointer when no handle is available or wrapping fails.
    (Per the doxygen index, the elided return type line is
    static std::unique_ptr<QRhiTexture>.)
*/
createTextureFromHandle(QVideoFrameTexturesHandles &texturesSet, QRhi &rhi,
                        QVideoFrameFormat::PixelFormat pixelFormat, QSize size, int plane)
{
    const TextureDescription &texDesc = descriptions[pixelFormat];
    QSize planeSize = texDesc.rhiPlaneSize(size, plane, &rhi);

    // Non-2D sampler targets need extra flags, but only on their platform
    // and only for the GLES backend.
    QRhiTexture::Flags textureFlags = {};
    if (pixelFormat == QVideoFrameFormat::Format_SamplerExternalOES) {
#ifdef Q_OS_ANDROID
        if (rhi.backend() == QRhi::OpenGLES2)
            textureFlags |= QRhiTexture::ExternalOES;
#endif
    }
    if (pixelFormat == QVideoFrameFormat::Format_SamplerRect) {
#ifdef Q_OS_MACOS
        if (rhi.backend() == QRhi::OpenGLES2)
            textureFlags |= QRhiTexture::TextureRectangleGL;
#endif
    }

    // Only wrap when the backend actually provides a handle for this plane.
    if (quint64 handle = texturesSet.textureHandle(rhi, plane); handle) {
        std::unique_ptr<QRhiTexture> tex(rhi.newTexture(texDesc.rhiTextureFormat(plane, &rhi), planeSize, 1, textureFlags));
        if (tex->createFrom({handle, 0}))
            return tex;

        qWarning("Failed to initialize QRhiTexture wrapper for native texture object %llu",handle);
    }
    return {};
}
816
// Builds one RHI texture per plane from the native handles in texturesSet and,
// if every plane succeeds, constructs a TexturesType from the resulting array,
// forwarding any extra constructor arguments. Returns an empty pointer on any
// per-plane failure.
template <typename TexturesType, typename... Args>
// NOTE(review): the return-type line is elided in this excerpt; per the
// doxygen index it is QVideoFrameTexturesUPtr.
createTexturesArray(QRhi &rhi, QVideoFrameTexturesHandles &texturesSet,
                    QVideoFrameFormat::PixelFormat pixelFormat, QSize size, Args &&...args)
{
    const TextureDescription &texDesc = descriptions[pixelFormat];
    bool ok = true;
    RhiTextureArray textures;
    for (quint8 plane = 0; plane < texDesc.nplanes; ++plane) {
        textures[plane] = QVideoTextureHelper::createTextureFromHandle(texturesSet, rhi,
                                                                       pixelFormat, size, plane);
        // All planes must wrap successfully for the set to be usable.
        ok &= bool(textures[plane]);
    }
    if (ok)
        return std::make_unique<TexturesType>(std::move(textures), std::forward<Args>(args)...);
    else
        return {};
}
835
836QVideoFrameTexturesUPtr createTexturesFromHandles(QVideoFrameTexturesHandlesUPtr texturesSet,
837 QRhi &rhi,
838 QVideoFrameFormat::PixelFormat pixelFormat,
839 QSize size)
840{
841 if (!texturesSet)
842 return nullptr;
843
844 if (pixelFormat == QVideoFrameFormat::Format_Invalid)
845 return nullptr;
846
847 if (size.isEmpty())
848 return nullptr;
849
850 auto &texturesSetRef = *texturesSet;
851 return createTexturesArray<QVideoFrameTexturesFromHandlesSet>(rhi, texturesSetRef, pixelFormat,
852 size, std::move(texturesSet));
853}
854
/*
    Creates (or reuses) CPU-upload based textures for \a frame: maps the frame
    read-only, uploads each plane via updateTextureWithMap() on \a rub, and
    keeps the mapped frame alive inside the returned object when required.

    NOTE(review): a few lines are elided in this excerpt (the allocation branch
    of the ternary and two per-plane conditions) — comments mark the spots;
    verify against the full source.
*/
static QVideoFrameTexturesUPtr createTexturesFromMemory(QVideoFrame frame, QRhi &rhi,
                                                        QRhiResourceUpdateBatch &rub,
                                                        QVideoFrameTexturesUPtr &oldTextures)
{
    qCDebug(qLcVideoTextureHelper) << "createTexturesFromMemory, pixelFormat:" << frame.pixelFormat();
    if (!frame.map(QVideoFrame::ReadOnly)) {
        qWarning() << "Cannot map a video frame in ReadOnly mode!";
        return {};
    }

    // Unmap on every exit path; a no-op if the frame was moved into the result
    // (unmap does nothing on a null frame, see the comment near the end).
    auto unmapFrameGuard = qScopeGuard([&frame] { frame.unmap(); });

    const TextureDescription &texDesc = descriptions[frame.surfaceFormat().pixelFormat()];

    // Reuse the previous texture set when it is the matching memory-backed type.
    const bool canReuseTextures(dynamic_cast<QVideoFrameTexturesFromMemory*>(oldTextures.get()));

    std::unique_ptr<QVideoFrameTexturesFromMemory> textures(canReuseTextures ?
            static_cast<QVideoFrameTexturesFromMemory *>(oldTextures.release()) :
            // NOTE(review): the allocation branch of this ternary is elided here.

    RhiTextureArray& textureArray = textures->textureArray();
    bool shouldKeepMapping = false;
    for (quint8 plane = 0; plane < texDesc.nplanes; ++plane) {
        const auto result = updateTextureWithMap(frame, rhi, rub, plane, textureArray[plane]);
        // NOTE(review): the failure check guarding this early return is elided.
            return {};

        // NOTE(review): the condition guarding this flag update is elided.
            shouldKeepMapping = true;
    }

    // as QVideoFrame::unmap does nothing with null frames, we just move the frame to the result
    textures->setMappedFrame(shouldKeepMapping ? std::move(frame) : QVideoFrame());

    return textures;
}
891
892QVideoFrameTexturesUPtr createTextures(const QVideoFrame &frame, QRhi &rhi,
893 QRhiResourceUpdateBatch &rub,
894 QVideoFrameTexturesUPtr &oldTextures)
895{
896 if (!frame.isValid())
897 return {};
898
899 auto setSourceFrame = [&frame](QVideoFrameTexturesUPtr result) {
900 result->setSourceFrame(frame);
901 return result;
902 };
903
904 if (QHwVideoBuffer *hwBuffer = QVideoFramePrivate::hwBuffer(frame)) {
905 if (auto textures = hwBuffer->mapTextures(rhi, oldTextures))
906 return setSourceFrame(std::move(textures));
907
908 QVideoFrameFormat format = frame.surfaceFormat();
909 if (auto textures = createTexturesArray<QVideoFrameTexturesFromRhiTextureArray>(
910 rhi, *hwBuffer, format.pixelFormat(), format.frameSize()))
911 return setSourceFrame(std::move(textures));
912 }
913
914 if (auto textures = createTexturesFromMemory(frame, rhi, rub, oldTextures))
915 return setSourceFrame(std::move(textures));
916
917 return {};
918}
919
921{
923 if (layout.text() == text && videoSize == frameSize)
924 return false;
925
927 QFont font;
928 // 0.045 - based on this https://www.md-subs.com/saa-subtitle-font-size
929 qreal fontSize = frameSize.height() * 0.045;
931
933 if (text.isEmpty()) {
934 bounds = {};
935 return true;
936 }
942
944 int leading = metrics.leading();
945
947 qreal margin = videoSize.width()*.05;
948 qreal height = 0;
949 qreal textWidth = 0;
951 while (1) {
953 if (!line.isValid())
954 break;
955
957 height += leading;
959 height += line.height();
961 }
963
964 // put subtitles vertically in lower part of the video but not stuck to the bottom
965 int bottomMargin = videoSize.height() / 20;
968 textWidth += fontSize/4.;
969
971 return true;
972}
973
993
995{
996 auto size = bounds.size().toSize();
997 if (size.isEmpty())
998 return QImage();
1001 bgColor.setAlpha(128);
1002 img.fill(bgColor);
1003
1007 range.start = 0;
1008 range.length = layout.text().size();
1010 layout.draw(&painter, {}, { range });
1011 return img;
1012}
1013
1014}
1015
1016QT_END_NAMESPACE
\inmodule QtGui
Definition qimage.h:37
\inmodule QtGuiPrivate \inheaderfile rhi/qrhi.h
Definition qrhi.h:661
\inmodule QtGuiPrivate \inheaderfile rhi/qrhi.h
Definition qrhi.h:721
\inmodule QtGuiPrivate \inheaderfile rhi/qrhi.h
Definition qrhi.h:698
The QVideoFrameFormat class specifies the stream format of a video presentation surface.
static bool hasDmaBuf(const QVideoFrame &frame)
float convertHLGFromLinear(float sig)
static QMatrix4x4 colorMatrix(const QVideoFrameFormat &format)
static float convertPQFromLinear(float sig)
static float convertSDRFromLinear(float sig)
Q_MULTIMEDIA_EXPORT void setExcludedRhiTextureFormats(QList< QRhiTexture::Format > formats)
static bool isRhiTextureFormatSupported(const QRhi *rhi, QRhiTexture::Format format)
Q_MULTIMEDIA_EXPORT bool forceGlTextureExternalOesIsSet()
static std::unique_ptr< QRhiTexture > createTextureFromHandle(QVideoFrameTexturesHandles &texturesSet, QRhi &rhi, QVideoFrameFormat::PixelFormat pixelFormat, QSize size, int plane)
static const TextureDescription descriptions[QVideoFrameFormat::NPixelFormats]
static QVideoFrameTexturesUPtr createTexturesFromMemory(QVideoFrame frame, QRhi &rhi, QRhiResourceUpdateBatch &rub, QVideoFrameTexturesUPtr &oldTextures)
static UpdateTextureWithMapResult updateTextureWithMap(const QVideoFrame &frame, QRhi &rhi, QRhiResourceUpdateBatch &rub, int plane, std::unique_ptr< QRhiTexture > &tex)
static QVideoFrameTexturesUPtr createTexturesArray(QRhi &rhi, QVideoFrameTexturesHandles &texturesSet, QVideoFrameFormat::PixelFormat pixelFormat, QSize size, Args &&...args)
Q_MULTIMEDIA_EXPORT void updateUniformData(QByteArray *dst, QRhi *rhi, const QVideoFrameFormat &format, const QVideoFrame &frame, const QMatrix4x4 &transform, float opacity, float maxNits=100)
#define qCDebug(category,...)
#define Q_STATIC_LOGGING_CATEGORY(name,...)