Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
qvideotexturehelper.cpp
Go to the documentation of this file.
1// Copyright (C) 2021 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
5
10#include "private/qmultimediautils_p.h"
11#include "private/qaudio_alignment_support_p.h"
12
13#include <QtCore/qfile.h>
14#include <qpainter.h>
15#include <qloggingcategory.h>
16
18
19Q_STATIC_LOGGING_CATEGORY(qLcVideoTextureHelper, "qt.multimedia.video.texturehelper")
20
21namespace QVideoTextureHelper
22{
23
25 static const bool isSet =
26 qEnvironmentVariableIsSet("QT_MULTIMEDIA_FORCE_GL_TEXTURE_EXTERNAL_OES");
27 return isSet;
28}
29
    // Per-pixel-format texture layout table, indexed by QVideoFrameFormat::PixelFormat.
    // Each visible entry lists: plane count, bytes per texel unit, a lambda computing
    // the total mapped buffer size from (stride, height), and per-plane subsampling
    // factors { horizontal, vertical }.
    // NOTE(review): the array declaration line (per this file's own index:
    // `static const TextureDescription descriptions[QVideoFrameFormat::NPixelFormats]`)
    // and each entry's per-plane QRhiTexture::Format initializer appear to have been
    // lost in extraction — verify against the upstream file before relying on this.
    // Format_Invalid
    { 0, 0,
      [](int, int) { return 0; },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_ARGB8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_ARGB8888_Premultiplied
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_XRGB8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_BGRA8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_BGRA8888_Premultiplied
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_BGRX8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_ABGR8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_XBGR8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_RGBA8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_RGBX8888
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_AYUV
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_AYUV_Premultiplied
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_YUV420P
    // Three planes; chroma planes are quarter size. Odd heights are rounded up
    // before halving for the chroma plane height.
    { 3, 1,
      [](int stride, int height) { return stride * (height + QtMultimediaPrivate::alignUp(height, 2) / 2); },
      { { 1, 1 }, { 2, 2 }, { 2, 2 } }
    },
    // Format_YUV422P
    { 3, 1,
      [](int stride, int height) { return stride * height * 2; },
      { { 1, 1 }, { 2, 1 }, { 2, 1 } }
    },
    // Format_YV12
    { 3, 1,
      [](int stride, int height) { return stride * (height + QtMultimediaPrivate::alignUp(height, 2) / 2); },
      { { 1, 1 }, { 2, 2 }, { 2, 2 } }
    },
    // Format_UYVY
    { 1, 2,
      [](int stride, int height) { return stride*height; },
      { { 2, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_YUYV
    { 1, 2,
      [](int stride, int height) { return stride*height; },
      { { 2, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_NV12
    // Y plane plus interleaved UV half-height plane; total rounded up to an even
    // number of lines.
    { 2, 1,
      [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      { { 1, 1 }, { 2, 2 }, { 1, 1 } }
    },
    // Format_NV21
    { 2, 1,
      [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      { { 1, 1 }, { 2, 2 }, { 1, 1 } }
    },
    // Format_IMC1
    { 3, 1,
      [](int stride, int height) {
          // IMC1 requires that U and V components are aligned on a multiple of 16 lines
          int h = (height + 15) & ~15;
          h += 2*(((h/2) + 15) & ~15);
          return stride * h;
      },
      { { 1, 1 }, { 2, 2 }, { 2, 2 } }
    },
    // Format_IMC2
    { 2, 1,
      [](int stride, int height) { return 2*stride*height; },
      { { 1, 1 }, { 1, 2 }, { 1, 1 } }
    },
    // Format_IMC3
    { 3, 1,
      [](int stride, int height) {
          // IMC3 requires that U and V components are aligned on a multiple of 16 lines
          int h = (height + 15) & ~15;
          h += 2*(((h/2) + 15) & ~15);
          return stride * h;
      },
      { { 1, 1 }, { 2, 2 }, { 2, 2 } }
    },
    // Format_IMC4
    { 2, 1,
      [](int stride, int height) { return 2*stride*height; },
      { { 1, 1 }, { 1, 2 }, { 1, 1 } }
    },
    // Format_Y8
    { 1, 1,
      [](int stride, int height) { return stride*height; },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_Y16
    { 1, 2,
      [](int stride, int height) { return stride*height; },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_P010
    { 2, 2,
      [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      { { 1, 1 }, { 2, 2 }, { 1, 1 } }
    },
    // Format_P016
    { 2, 2,
      [](int stride, int height) { return stride * ((height * 3 / 2 + 1) & ~1); },
      { { 1, 1 }, { 2, 2 }, { 1, 1 } }
    },
    // Format_SamplerExternalOES
    // Opaque platform texture: no CPU-mappable bytes, hence size 0.
    {
        1, 0,
        [](int, int) { return 0; },
        { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_Jpeg
    { 1, 4,
      [](int stride, int height) { return stride*height; },
      { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_SamplerRect
    // Opaque platform texture: no CPU-mappable bytes, hence size 0.
    {
        1, 0,
        [](int, int) { return 0; },
        { { 1, 1 }, { 1, 1 }, { 1, 1 } }
    },
    // Format_YUV420P10
    { 3, 2,
      [](int stride, int height) { return stride * (height + QtMultimediaPrivate::alignUp(height, 2) / 2); },
      { { 1, 1 }, { 2, 2 }, { 2, 2 } }
    },
};
236
237Q_GLOBAL_STATIC(QList<QRhiTexture::Format>, g_excludedRhiTextureFormats) // for tests only
238
239static bool isRhiTextureFormatSupported(const QRhi *rhi, QRhiTexture::Format format)
240{
241 if (g_excludedRhiTextureFormats->contains(format))
242 return false;
243 if (!rhi) // consider the format is supported if no rhi specified
244 return true;
245 return rhi->isTextureFormatSupported(format);
246}
247
281
282QRhiTexture::Format resolvedRhiTextureFormat(QRhiTexture::Format format, QRhi *rhi)
283{
284 if (isRhiTextureFormatSupported(rhi, format))
285 return format;
286
287 QRhiTexture::Format fallbackFormat;
288 switch (format) {
289 case QRhiTexture::R8:
290 fallbackFormat = resolvedRhiTextureFormat(QRhiTexture::RED_OR_ALPHA8, rhi);
291 break;
292 case QRhiTexture::RG8:
293 case QRhiTexture::RG16:
294 fallbackFormat = resolvedRhiTextureFormat(QRhiTexture::RGBA8, rhi);
295 break;
296 case QRhiTexture::R16:
297 fallbackFormat = resolvedRhiTextureFormat(QRhiTexture::RG8, rhi);
298 break;
299 default:
300 // End fallback chain here, and return UnknownFormat
301 return QRhiTexture::UnknownFormat;
302 }
303
304 if (fallbackFormat == QRhiTexture::UnknownFormat) {
305 // TODO: QTBUG-135911: In some cases rhi claims format and fallbacks are all
306 // unsupported, but when using preferred format video plays fine
307 qCDebug(qLcVideoTextureHelper) << "Cannot determine any usable texture format, using preferred format" << format;
308 return format;
309 }
310
311 qCDebug(qLcVideoTextureHelper) << "Using fallback texture format" << fallbackFormat;
312 return fallbackFormat;
313}
314
315void setExcludedRhiTextureFormats(QList<QRhiTexture::Format> formats)
316{
317 g_excludedRhiTextureFormats->swap(formats);
318}
319
320const TextureDescription *textureDescription(QVideoFrameFormat::PixelFormat format)
321{
322 return descriptions + format;
323}
324
325QString vertexShaderFileName(const QVideoFrameFormat &format)
326{
327 auto fmt = format.pixelFormat();
328 Q_UNUSED(fmt);
329
330#if 1//def Q_OS_ANDROID
331 if (fmt == QVideoFrameFormat::Format_SamplerExternalOES)
332 return QStringLiteral(":/qt-project.org/multimedia/shaders/externalsampler.vert.qsb");
333#endif
334#if 1//def Q_OS_MACOS
335 if (fmt == QVideoFrameFormat::Format_SamplerRect)
336 return QStringLiteral(":/qt-project.org/multimedia/shaders/rectsampler.vert.qsb");
337#endif
338
339 return QStringLiteral(":/qt-project.org/multimedia/shaders/vertex.vert.qsb");
340}
341
// Maps a pixel format (and, for P010/P016, the color transfer) onto the
// fragment shader resource used to sample and color-convert it. Returns an
// empty string for Format_Invalid / unhandled formats. A "_linear" variant
// is chosen when rendering to an HDR extended-sRGB-linear swapchain.
QString fragmentShaderFileName(const QVideoFrameFormat &format, QRhi *,
                               QRhiSwapChain::Format surfaceFormat)
{
    QString shaderFile;
    switch (format.pixelFormat()) {
    case QVideoFrameFormat::Format_Y8:
        shaderFile = QStringLiteral("y");
        break;
    case QVideoFrameFormat::Format_Y16:
        shaderFile = QStringLiteral("y16");
        break;
    case QVideoFrameFormat::Format_AYUV:
    case QVideoFrameFormat::Format_AYUV_Premultiplied:
        shaderFile = QStringLiteral("ayuv");
        break;
    case QVideoFrameFormat::Format_ARGB8888:
    case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
    case QVideoFrameFormat::Format_XRGB8888:
        shaderFile = QStringLiteral("argb");
        break;
    case QVideoFrameFormat::Format_ABGR8888:
    case QVideoFrameFormat::Format_XBGR8888:
        shaderFile = QStringLiteral("abgr");
        break;
    case QVideoFrameFormat::Format_Jpeg: // Jpeg is decoded transparently into an ARGB texture
        shaderFile = QStringLiteral("bgra");
        break;
    case QVideoFrameFormat::Format_RGBA8888:
    case QVideoFrameFormat::Format_RGBX8888:
    case QVideoFrameFormat::Format_BGRA8888:
    case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
    case QVideoFrameFormat::Format_BGRX8888:
        shaderFile = QStringLiteral("rgba");
        break;
    case QVideoFrameFormat::Format_YUV420P:
    case QVideoFrameFormat::Format_YUV422P:
    case QVideoFrameFormat::Format_IMC3:
        shaderFile = QStringLiteral("yuv_triplanar");
        break;
    case QVideoFrameFormat::Format_YUV420P10:
        shaderFile = QStringLiteral("yuv_triplanar_p10");
        break;
    case QVideoFrameFormat::Format_YV12:
    case QVideoFrameFormat::Format_IMC1:
        // Same plane layout as YUV420P but with U and V swapped.
        shaderFile = QStringLiteral("yvu_triplanar");
        break;
    case QVideoFrameFormat::Format_IMC2:
        shaderFile = QStringLiteral("imc2");
        break;
    case QVideoFrameFormat::Format_IMC4:
        shaderFile = QStringLiteral("imc4");
        break;
    case QVideoFrameFormat::Format_UYVY:
        shaderFile = QStringLiteral("uyvy");
        break;
    case QVideoFrameFormat::Format_YUYV:
        shaderFile = QStringLiteral("yuyv");
        break;
    case QVideoFrameFormat::Format_P010:
    case QVideoFrameFormat::Format_P016:
        // P010/P016 have the same layout as NV12, just 16 instead of 8 bits per pixel
        if (format.colorTransfer() == QVideoFrameFormat::ColorTransfer_ST2084) {
            shaderFile = QStringLiteral("nv12_bt2020_pq");
            break;
        }
        if (format.colorTransfer() == QVideoFrameFormat::ColorTransfer_STD_B67) {
            shaderFile = QStringLiteral("nv12_bt2020_hlg");
            break;
        }
        shaderFile = QStringLiteral("p016");
        break;
    case QVideoFrameFormat::Format_NV12:
        shaderFile = QStringLiteral("nv12");
        break;
    case QVideoFrameFormat::Format_NV21:
        shaderFile = QStringLiteral("nv21");
        break;
    // The next two cases fall through to the default (empty shader name) only
    // when their #if block is compiled out; with "#if 1" both are always active.
    case QVideoFrameFormat::Format_SamplerExternalOES:
#if 1//def Q_OS_ANDROID
        shaderFile = QStringLiteral("externalsampler");
        break;
#endif
    case QVideoFrameFormat::Format_SamplerRect:
#if 1//def Q_OS_MACOS
        shaderFile = QStringLiteral("rectsampler_bgra");
        break;
#endif
    // fallthrough
    case QVideoFrameFormat::Format_Invalid:
    default:
        break;
    }

    if (shaderFile.isEmpty())
        return QString();

    shaderFile.prepend(u":/qt-project.org/multimedia/shaders/");

    // HDR linear swapchains use shader variants that skip the sRGB encode.
    if (surfaceFormat == QRhiSwapChain::HDRExtendedSrgbLinear)
        shaderFile.append(u"_linear");

    shaderFile.append(u".frag.qsb");

    Q_ASSERT_X(
            QFile::exists(shaderFile), Q_FUNC_INFO,
            QStringLiteral("Shader file %1 does not exist").arg(shaderFile).toLatin1().constData());
    qCDebug(qLcVideoTextureHelper) << "fragmentShaderFileName returns" << shaderFile;
    return shaderFile;
}
451
452// Matrices are calculated from
453// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.601-7-201103-I!!PDF-E.pdf
454// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.709-6-201506-I!!PDF-E.pdf
455// https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.2020-2-201510-I!!PDF-E.pdf
456//
457// For BT2020, we also need to convert the Rec2020 RGB colorspace to sRGB see
458// shaders/colorconvert.glsl for details.
459//
460// Doing the math gives the following (Y, U & V normalized to [0..1] range):
461//
462// Y = a*R + b*G + c*B
463// R = Y + e*V
464// G = Y - c*d/b*U - a*e/b*V
465// B = Y + d*U
466
467// BT2020:
468// a = .2627, b = 0.6780, c = 0.0593
469// d = 1.8814
470// e = 1.4746
471//
472// BT709:
473// a = 0.2126, b = 0.7152, c = 0.0722
474// d = 1.8556
475// e = 1.5748
476//
477// BT601:
// a = 0.299, b = 0.587, c = 0.114
479// d = 1.42
480// e = 1.772
481//
482
483// clang-format off
// Returns the YUV -> RGB conversion matrix (including the black/white level
// offsets in the last column) for the frame's color space and color range.
// When the color space is undefined it is guessed from the frame height:
// more than 576 lines is assumed to be HD (BT709), otherwise SD (BT601).
static QMatrix4x4 colorMatrix(const QVideoFrameFormat &format)
{
    auto colorSpace = format.colorSpace();
    if (colorSpace == QVideoFrameFormat::ColorSpace_Undefined) {
        if (format.frameHeight() > 576)
            // HD video, assume BT709
            colorSpace = QVideoFrameFormat::ColorSpace_BT709;
        else
            // SD video, assume BT601
            colorSpace = QVideoFrameFormat::ColorSpace_BT601;
    }
    switch (colorSpace) {
    case QVideoFrameFormat::ColorSpace_AdobeRgb:
        // Full-range matrix; NOTE(review): uses BT601-style chroma scales
        // (1.402 / 1.772) — confirm this is intentional for AdobeRGB content.
        return {
            1.0f,  0.000f,  1.402f, -0.701f,
            1.0f, -0.344f, -0.714f,  0.529f,
            1.0f,  1.772f,  0.000f, -0.886f,
            0.0f,  0.000f,  0.000f,  1.000f
        };
    // Unknown color spaces intentionally fall through to BT709.
    default:
    case QVideoFrameFormat::ColorSpace_BT709:
        if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
            return {
                1.0f,  0.0f,       1.5748f,   -0.790488f,
                1.0f, -0.187324f, -0.468124f,  0.329010f,
                1.0f,  1.855600f,  0.0f,      -0.931439f,
                0.0f,  0.0f,       0.0f,       1.0f
            };
        // Limited (video) range: 1.1644 = 255/219 expands Y from [16..235].
        return {
            1.1644f,  0.0000f,  1.7927f, -0.9729f,
            1.1644f, -0.2132f, -0.5329f,  0.3015f,
            1.1644f,  2.1124f,  0.0000f, -1.1334f,
            0.0000f,  0.0000f,  0.0000f,  1.0000f
        };
    case QVideoFrameFormat::ColorSpace_BT2020:
        if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
            return {
                1.f,  0.0000f,  1.4746f, -0.7402f,
                1.f, -0.1646f, -0.5714f,  0.3694f,
                1.f,  1.8814f,  0.000f,  -0.9445f,
                0.0f, 0.0000f,  0.000f,   1.0000f
            };
        return {
            1.1644f,  0.000f,   1.6787f, -0.9157f,
            1.1644f, -0.1874f, -0.6504f,  0.3475f,
            1.1644f,  2.1418f,  0.0000f, -1.1483f,
            0.0000f,  0.0000f,  0.0000f,  1.0000f
        };
    case QVideoFrameFormat::ColorSpace_BT601:
        // Corresponds to the primaries used by NTSC BT601. For PAL BT601, we use the BT709 conversion
        // as those are very close.
        if (format.colorRange() == QVideoFrameFormat::ColorRange_Full)
            return {
                1.f,  0.000f,   1.772f,   -0.886f,
                1.f, -0.1646f, -0.57135f,  0.36795f,
                1.f,  1.42f,    0.000f,   -0.71f,
                0.0f, 0.000f,   0.000f,    1.0000f
            };
        return {
            1.164f,  0.000f,  1.596f, -0.8708f,
            1.164f, -0.392f, -0.813f,  0.5296f,
            1.164f,  2.017f,  0.000f, -1.0810f,
            0.000f,  0.000f,  0.000f,  1.0000f
        };
    }
}
550// clang-format on
551
552// PQ transfer function, see also https://en.wikipedia.org/wiki/Perceptual_quantizer
553// or https://ieeexplore.ieee.org/document/7291452
static float convertPQFromLinear(float sig)
{
    // SMPTE ST 2084 constants, all exact rationals from the spec.
    constexpr float m1 = 1305.f/8192.f;
    constexpr float m2 = 2523.f/32.f;
    constexpr float c1 = 107.f/128.f;
    constexpr float c2 = 2413.f/128.f;
    constexpr float c3 = 2392.f/128.f;

    // Map SDR reference white (100 nits) onto the 10000-nit PQ input range.
    constexpr float SDR_LEVEL = 100.f;
    sig *= SDR_LEVEL/10000.f;

    const float psig = powf(sig, m1);
    const float num = c1 + c2*psig;
    const float den = 1 + c3*psig;
    return powf(num/den, m2);
}
569
// HLG OETF (Rec. ITU-R BT.2100): square-root segment below 1/12,
// logarithmic segment above; the constants make the two join smoothly.
float convertHLGFromLinear(float sig)
{
    constexpr float a = 0.17883277f;
    constexpr float b = 0.28466892f; // = 1 - 4a
    constexpr float c = 0.55991073f; // = 0.5 - a ln(4a)

    const bool inSqrtSegment = sig < 1.f/12.f;
    return inSqrtSegment ? sqrtf(3.f*sig)
                         : a*logf(12.f*sig - b) + c;
}
580
// Identity transfer: SDR content needs no tone-mapping conversion.
static float convertSDRFromLinear(float sig)
{
    return sig; // pass-through
}
585
586void updateUniformData(QByteArray *dst, QRhi *rhi, const QVideoFrameFormat &format,
587 const QVideoFrame &frame, const QMatrix4x4 &transform, float opacity,
588 float maxNits)
589{
590#ifndef Q_OS_ANDROID
591 Q_UNUSED(frame);
592#endif
593
594 QMatrix4x4 cmat;
595 switch (format.pixelFormat()) {
596 case QVideoFrameFormat::Format_Invalid:
597 return;
598
599 case QVideoFrameFormat::Format_ARGB8888:
600 case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
601 case QVideoFrameFormat::Format_XRGB8888:
602 case QVideoFrameFormat::Format_BGRA8888:
603 case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
604 case QVideoFrameFormat::Format_BGRX8888:
605 case QVideoFrameFormat::Format_ABGR8888:
606 case QVideoFrameFormat::Format_XBGR8888:
607 case QVideoFrameFormat::Format_RGBA8888:
608 case QVideoFrameFormat::Format_RGBX8888: {
609 if (format.colorRange() == QVideoFrameFormat::ColorRange_Video) {
610 constexpr float scale = 255.0f / 219.0f; // (255 - 0) / (235 - 16)
611 constexpr float offset = -16.0f / 219.0f; // -16 / (235 - 16)
612 // clang-format off
613 cmat = QMatrix4x4 {
614 scale, 0.f, 0.f, offset,
615 0.f, scale, 0.f, offset,
616 0.f, 0.f, scale, offset,
617 0.f, 0.f, 0.f, 1.f,
618 };
619 // clang-format on
620 }
621
622 break;
623 }
624
625 case QVideoFrameFormat::Format_Jpeg:
626 case QVideoFrameFormat::Format_Y8:
627 case QVideoFrameFormat::Format_Y16:
628 break;
629 case QVideoFrameFormat::Format_IMC1:
630 case QVideoFrameFormat::Format_IMC2:
631 case QVideoFrameFormat::Format_IMC3:
632 case QVideoFrameFormat::Format_IMC4:
633 case QVideoFrameFormat::Format_AYUV:
634 case QVideoFrameFormat::Format_AYUV_Premultiplied:
635 case QVideoFrameFormat::Format_YUV420P:
636 case QVideoFrameFormat::Format_YUV420P10:
637 case QVideoFrameFormat::Format_YUV422P:
638 case QVideoFrameFormat::Format_YV12:
639 case QVideoFrameFormat::Format_UYVY:
640 case QVideoFrameFormat::Format_YUYV:
641 case QVideoFrameFormat::Format_NV12:
642 case QVideoFrameFormat::Format_NV21:
643 case QVideoFrameFormat::Format_P010:
644 case QVideoFrameFormat::Format_P016:
645 cmat = colorMatrix(format);
646 break;
647 case QVideoFrameFormat::Format_SamplerExternalOES:
648 // get Android specific transform for the externalsampler texture
649 if (auto hwBuffer = QVideoFramePrivate::hwBuffer(frame))
650 cmat = hwBuffer->externalTextureMatrix();
651 break;
652 case QVideoFrameFormat::Format_SamplerRect:
653 {
654 // Similarly to SamplerExternalOES, the "color matrix" is used here to
655 // transform the texture coordinates. OpenGL texture rectangles expect
656 // non-normalized UVs, so apply a scale to have the fragment shader see
657 // UVs in range [width,height] instead of [0,1].
658 const QSize videoSize = frame.size();
659 cmat.scale(videoSize.width(), videoSize.height());
660 }
661 break;
662 }
663
664 // HDR with a PQ or HLG transfer function uses a BT2390 based tone mapping to cut off the HDR peaks
665 // This requires that we pass the max luminance the tonemapper should clip to over to the fragment
666 // shader. To reduce computations there, it's precomputed in PQ values here.
667 auto fromLinear = convertSDRFromLinear;
668 switch (format.colorTransfer()) {
669 case QVideoFrameFormat::ColorTransfer_ST2084:
670 fromLinear = convertPQFromLinear;
671 break;
672 case QVideoFrameFormat::ColorTransfer_STD_B67:
673 fromLinear = convertHLGFromLinear;
674 break;
675 default:
676 break;
677 }
678
679 if (dst->size() < qsizetype(sizeof(UniformData)))
680 dst->resize(sizeof(UniformData));
681
682 auto ud = reinterpret_cast<UniformData*>(dst->data());
683 memcpy(ud->transformMatrix, transform.constData(), sizeof(ud->transformMatrix));
684 memcpy(ud->colorMatrix, cmat.constData(), sizeof(ud->transformMatrix));
685 ud->opacity = opacity;
686 ud->width = float(format.frameWidth());
687 ud->masteringWhite = fromLinear(float(format.maxLuminance())/100.f);
688 ud->maxLum = fromLinear(float(maxNits)/100.f);
689 const TextureDescription* desc = textureDescription(format.pixelFormat());
690
691 const bool isDmaBuf = QVideoFramePrivate::hasDmaBuf(frame);
692 using FallbackPolicy = QVideoTextureHelper::TextureDescription::FallbackPolicy;
693 auto fallbackPolicy = isDmaBuf
694 ? FallbackPolicy::Disable
695 : FallbackPolicy::Enable;
696
697 // Let's consider using the red component if Red_8 is not used,
698 // it's useful for compatibility the shaders with 16bit formats.
699
700 const bool useRedComponent =
701 !desc->hasTextureFormat(TextureDescription::Red_8)
702 || isRhiTextureFormatSupported(rhi, QRhiTexture::R8)
703 || rhi->isFeatureSupported(QRhi::RedOrAlpha8IsRed)
704 || isDmaBuf;
705 ud->redOrAlphaIndex = useRedComponent ? 0 : 3; // r:0 g:1 b:2 a:3
706
707 for (int plane = 0; plane < desc->nplanes; ++plane)
708 ud->planeFormats[plane] = desc->rhiTextureFormat(plane, rhi, fallbackPolicy);
709}
710
716
// Uploads one plane of a CPU-mapped QVideoFrame into `tex` through the
// resource update batch, (re)creating the texture when its size or format
// is stale. JPEG frames are decoded to a QImage first; raw frames are
// uploaded as a zero-copy QByteArray view over the mapped bits.
// NOTE(review): several statements appear to have been lost in extraction —
// the early-returns after each qWarning, and the declarations/assignments of
// `result` and `subresDesc` — verify against the upstream file.
static UpdateTextureWithMapResult updateTextureWithMap(const QVideoFrame &frame, QRhi &rhi,
                                                       QRhiResourceUpdateBatch &rub, int plane,
                                                       std::unique_ptr<QRhiTexture> &tex)
{
    Q_ASSERT(frame.isMapped());

    QVideoFrameFormat fmt = frame.surfaceFormat();
    QVideoFrameFormat::PixelFormat pixelFormat = fmt.pixelFormat();
    QSize size = fmt.frameSize();

    const TextureDescription &texDesc = descriptions[pixelFormat];
    QSize planeSize = texDesc.rhiPlaneSize(size, plane, &rhi);

    // Rebuild when the texture doesn't exist yet or its size/format changed.
    bool needsRebuild = !tex || tex->pixelSize() != planeSize || tex->format() != texDesc.rhiTextureFormat(plane, &rhi);
    if (!tex) {
        tex.reset(rhi.newTexture(texDesc.rhiTextureFormat(plane, &rhi), planeSize, 1, {}));
        if (!tex) {
            qWarning("Failed to create new texture (size %dx%d)", planeSize.width(), planeSize.height());
        }
    }

    if (needsRebuild) {
        tex->setFormat(texDesc.rhiTextureFormat(plane, &rhi));
        tex->setPixelSize(planeSize);
        if (!tex->create()) {
            qWarning("Failed to create texture (size %dx%d)", planeSize.width(), planeSize.height());
        }
    }

    if (pixelFormat == QVideoFrameFormat::Format_Jpeg) {
        // JPEG is single-plane: decoded into one ARGB texture.
        Q_ASSERT(plane == 0);

        QImage image;

        // calling QVideoFrame::toImage is not accurate. To be fixed.
        // frame transformation will be considered later
        const QVideoFrameFormat surfaceFormat = frame.surfaceFormat();

        // Mirroring/flip/rotation must be applied during decode; the cached
        // toImage() result carries no surface transforms.
        const bool hasSurfaceTransform = surfaceFormat.isMirrored()
                || surfaceFormat.scanLineDirection() == QVideoFrameFormat::BottomToTop
                || surfaceFormat.rotation() != QtVideo::Rotation::None;

        if (hasSurfaceTransform)
            image = qImageFromVideoFrame(frame, VideoTransformation{});
        else
            image = frame.toImage(); // use the frame cache, no surface transforms applied

        image.convertTo(QImage::Format_ARGB32);
        subresDesc.setImage(image);

    } else {
        // Note, QByteArray::fromRawData creates the QByteArray as a view without copying the data
        subresDesc.setData(QByteArray::fromRawData(
                reinterpret_cast<const char *>(frame.bits(plane)), frame.mappedBytes(plane)));
        subresDesc.setDataStride(frame.bytesPerLine(plane));
    }

    QRhiTextureUploadEntry entry(0, 0, subresDesc);
    QRhiTextureUploadDescription desc({ entry });
    rub.uploadTexture(tex.get(), desc);

    return result;
}
787
789createTextureFromHandle(QVideoFrameTexturesHandles &texturesSet, QRhi &rhi,
790 QVideoFrameFormat::PixelFormat pixelFormat, QSize size, int plane)
791{
792 const TextureDescription &texDesc = descriptions[pixelFormat];
793 QSize planeSize = texDesc.rhiPlaneSize(size, plane, &rhi);
794
795 QRhiTexture::Flags textureFlags = {};
796 if (pixelFormat == QVideoFrameFormat::Format_SamplerExternalOES) {
797#ifdef Q_OS_ANDROID
798 if (rhi.backend() == QRhi::OpenGLES2)
799 textureFlags |= QRhiTexture::ExternalOES;
800#endif
801 }
802 if (pixelFormat == QVideoFrameFormat::Format_SamplerRect) {
803#ifdef Q_OS_MACOS
804 if (rhi.backend() == QRhi::OpenGLES2)
805 textureFlags |= QRhiTexture::TextureRectangleGL;
806#endif
807 }
808
809 if (quint64 handle = texturesSet.textureHandle(rhi, plane); handle) {
810 std::unique_ptr<QRhiTexture> tex(rhi.newTexture(texDesc.rhiTextureFormat(plane, &rhi), planeSize, 1, textureFlags));
811 if (tex->createFrom({handle, 0}))
812 return tex;
813
814 qWarning("Failed to initialize QRhiTexture wrapper for native texture object %llu",handle);
815 }
816 return {};
817}
818
819template <typename TexturesType, typename... Args>
821createTexturesArray(QRhi &rhi, QVideoFrameTexturesHandles &texturesSet,
822 QVideoFrameFormat::PixelFormat pixelFormat, QSize size, Args &&...args)
823{
824 const TextureDescription &texDesc = descriptions[pixelFormat];
825 bool ok = true;
826 RhiTextureArray textures;
827 for (quint8 plane = 0; plane < texDesc.nplanes; ++plane) {
828 textures[plane] = QVideoTextureHelper::createTextureFromHandle(texturesSet, rhi,
829 pixelFormat, size, plane);
830 ok &= bool(textures[plane]);
831 }
832 if (ok)
833 return std::make_unique<TexturesType>(std::move(textures), std::forward<Args>(args)...);
834 else
835 return {};
836}
837
838QVideoFrameTexturesUPtr createTexturesFromHandles(QVideoFrameTexturesHandlesUPtr texturesSet,
839 QRhi &rhi,
840 QVideoFrameFormat::PixelFormat pixelFormat,
841 QSize size)
842{
843 if (!texturesSet)
844 return nullptr;
845
846 if (pixelFormat == QVideoFrameFormat::Format_Invalid)
847 return nullptr;
848
849 if (size.isEmpty())
850 return nullptr;
851
852 auto &texturesSetRef = *texturesSet;
853 return createTexturesArray<QVideoFrameTexturesFromHandlesSet>(rhi, texturesSetRef, pixelFormat,
854 size, std::move(texturesSet));
855}
856
// CPU upload path: maps `frame` read-only, uploads every plane via
// updateTextureWithMap, and keeps the frame mapped inside the returned
// object when any plane references the mapped bits directly.
// NOTE(review): lines appear to have been lost in extraction here — the
// fallback ('new') argument of the `textures` unique_ptr and the checks on
// `result` inside the plane loop — verify against the upstream file.
static QVideoFrameTexturesUPtr createTexturesFromMemory(QVideoFrame frame, QRhi &rhi,
                                                        QRhiResourceUpdateBatch &rub,
                                                        QVideoFrameTexturesUPtr &oldTextures)
{
    qCDebug(qLcVideoTextureHelper) << "createTexturesFromMemory, pixelFormat:" << frame.pixelFormat();
    if (!frame.map(QVideoFrame::ReadOnly)) {
        qWarning() << "Cannot map a video frame in ReadOnly mode!";
        return {};
    }

    // Unmap on every exit path; a moved-from (null) frame unmaps as a no-op.
    auto unmapFrameGuard = qScopeGuard([&frame] { frame.unmap(); });

    const TextureDescription &texDesc = descriptions[frame.surfaceFormat().pixelFormat()];

    // Reuse the previous texture set when it has the matching dynamic type.
    const bool canReuseTextures(dynamic_cast<QVideoFrameTexturesFromMemory*>(oldTextures.get()));

    std::unique_ptr<QVideoFrameTexturesFromMemory> textures(canReuseTextures ?
        static_cast<QVideoFrameTexturesFromMemory *>(oldTextures.release()) :

    RhiTextureArray& textureArray = textures->textureArray();
    bool shouldKeepMapping = false;
    for (quint8 plane = 0; plane < texDesc.nplanes; ++plane) {
        const auto result = updateTextureWithMap(frame, rhi, rub, plane, textureArray[plane]);
            return {};

            shouldKeepMapping = true;
    }

    // as QVideoFrame::unmap does nothing with null frames, we just move the frame to the result
    textures->setMappedFrame(shouldKeepMapping ? std::move(frame) : QVideoFrame());

    return textures;
}
893
894QVideoFrameTexturesUPtr createTextures(const QVideoFrame &frame, QRhi &rhi,
895 QRhiResourceUpdateBatch &rub,
896 QVideoFrameTexturesUPtr &oldTextures)
897{
898 if (!frame.isValid())
899 return {};
900
901 auto setSourceFrame = [&frame](QVideoFrameTexturesUPtr result) {
902 result->setSourceFrame(frame);
903 return result;
904 };
905
906 if (QHwVideoBuffer *hwBuffer = QVideoFramePrivate::hwBuffer(frame)) {
907 if (auto textures = hwBuffer->mapTextures(rhi, oldTextures))
908 return setSourceFrame(std::move(textures));
909
910 QVideoFrameFormat format = frame.surfaceFormat();
911 if (auto textures = createTexturesArray<QVideoFrameTexturesFromRhiTextureArray>(
912 rhi, *hwBuffer, format.pixelFormat(), format.frameSize()))
913 return setSourceFrame(std::move(textures));
914 }
915
916 if (auto textures = createTexturesFromMemory(frame, rhi, rub, oldTextures))
917 return setSourceFrame(std::move(textures));
918
919 return {};
920}
921
923{
925 if (layout.text() == text && videoSize == frameSize)
926 return false;
927
929 QFont font;
930 // 0.045 - based on this https://www.md-subs.com/saa-subtitle-font-size
931 qreal fontSize = frameSize.height() * 0.045;
933
935 if (text.isEmpty()) {
936 bounds = {};
937 return true;
938 }
944
946 int leading = metrics.leading();
947
949 qreal margin = videoSize.width()*.05;
950 qreal height = 0;
951 qreal textWidth = 0;
953 while (1) {
955 if (!line.isValid())
956 break;
957
959 height += leading;
961 height += line.height();
963 }
965
966 // put subtitles vertically in lower part of the video but not stuck to the bottom
967 int bottomMargin = videoSize.height() / 20;
970 textWidth += fontSize/4.;
971
973 return true;
974}
975
995
997{
998 auto size = bounds.size().toSize();
999 if (size.isEmpty())
1000 return QImage();
1003 bgColor.setAlpha(128);
1004 img.fill(bgColor);
1005
1009 range.start = 0;
1010 range.length = layout.text().size();
1012 layout.draw(&painter, {}, { range });
1013 return img;
1014}
1015
1016}
1017
1018QT_END_NAMESPACE
\inmodule QtGui
Definition qimage.h:37
\inmodule QtGuiPrivate \inheaderfile rhi/qrhi.h
Definition qrhi.h:662
\inmodule QtGuiPrivate \inheaderfile rhi/qrhi.h
Definition qrhi.h:722
\inmodule QtGuiPrivate \inheaderfile rhi/qrhi.h
Definition qrhi.h:699
The QVideoFrameFormat class specifies the stream format of a video presentation surface.
static bool hasDmaBuf(const QVideoFrame &frame)
Combined button and popup list for selecting options.
float convertHLGFromLinear(float sig)
static QMatrix4x4 colorMatrix(const QVideoFrameFormat &format)
static float convertPQFromLinear(float sig)
static float convertSDRFromLinear(float sig)
Q_MULTIMEDIA_EXPORT void setExcludedRhiTextureFormats(QList< QRhiTexture::Format > formats)
static bool isRhiTextureFormatSupported(const QRhi *rhi, QRhiTexture::Format format)
Q_MULTIMEDIA_EXPORT bool forceGlTextureExternalOesIsSet()
static std::unique_ptr< QRhiTexture > createTextureFromHandle(QVideoFrameTexturesHandles &texturesSet, QRhi &rhi, QVideoFrameFormat::PixelFormat pixelFormat, QSize size, int plane)
static const TextureDescription descriptions[QVideoFrameFormat::NPixelFormats]
static QVideoFrameTexturesUPtr createTexturesFromMemory(QVideoFrame frame, QRhi &rhi, QRhiResourceUpdateBatch &rub, QVideoFrameTexturesUPtr &oldTextures)
static UpdateTextureWithMapResult updateTextureWithMap(const QVideoFrame &frame, QRhi &rhi, QRhiResourceUpdateBatch &rub, int plane, std::unique_ptr< QRhiTexture > &tex)
static QVideoFrameTexturesUPtr createTexturesArray(QRhi &rhi, QVideoFrameTexturesHandles &texturesSet, QVideoFrameFormat::PixelFormat pixelFormat, QSize size, Args &&...args)
Q_MULTIMEDIA_EXPORT void updateUniformData(QByteArray *dst, QRhi *rhi, const QVideoFrameFormat &format, const QVideoFrame &frame, const QMatrix4x4 &transform, float opacity, float maxNits=100)
#define qCDebug(category,...)
#define Q_STATIC_LOGGING_CATEGORY(name,...)