Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
qvideoframeformat.cpp
1// Copyright (C) 2016 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
4#include "qvideoframeformat.h"
5
6#include <QtMultimedia/private/qvideotexturehelper_p.h>
7#include <QtMultimedia/private/qvideotransformation_p.h>
8#include <QtMultimedia/private/qmultimedia_enum_to_string_converter_p.h>
9
10#include <qdebug.h>
11#include <qlist.h>
12#include <qmetatype.h>
13#include <qvariant.h>
14#include <qmatrix4x4.h>
15
16static void initResource() {
17 Q_INIT_RESOURCE(qtmultimedia_shaders);
18}
19
20QT_BEGIN_NAMESPACE
21
22class QVideoFrameFormatPrivate : public QSharedData
23{
24public:
25 QVideoFrameFormatPrivate() = default;
26
27 QVideoFrameFormatPrivate(
28 const QSize &size,
29 QVideoFrameFormat::PixelFormat format)
30 : pixelFormat(format)
31 , frameSize(size)
32 , viewport(QPoint(0, 0), size)
33 {
34 }
35
36 bool operator ==(const QVideoFrameFormatPrivate &other) const
37 {
38 if (pixelFormat == other.pixelFormat && scanLineDirection == other.scanLineDirection
39 && frameSize == other.frameSize && viewport == other.viewport
40 && frameRatesEqual(frameRate, other.frameRate) && colorSpace == other.colorSpace
41 && transformation == other.transformation)
42 return true;
43
44 return false;
45 }
46
47 inline static bool frameRatesEqual(qreal r1, qreal r2)
48 {
49 return qAbs(r1 - r2) <= 0.00001 * qMin(qAbs(r1), qAbs(r2));
50 }
51
52 QVideoFrameFormat::PixelFormat pixelFormat = QVideoFrameFormat::Format_Invalid;
53 QVideoFrameFormat::Direction scanLineDirection = QVideoFrameFormat::TopToBottom;
54 QSize frameSize;
55 QVideoFrameFormat::ColorSpace colorSpace = QVideoFrameFormat::ColorSpace_Undefined;
56 QVideoFrameFormat::ColorTransfer colorTransfer = QVideoFrameFormat::ColorTransfer_Unknown;
57 QVideoFrameFormat::ColorRange colorRange = QVideoFrameFormat::ColorRange_Unknown;
58 QRect viewport;
59 float frameRate = 0.0;
60 float maxLuminance = -1.;
61 VideoTransformation transformation;
62};
63
64QT_DEFINE_QESDP_SPECIALIZATION_DTOR(QVideoFrameFormatPrivate)
65
66/*!
67 \class QVideoFrameFormat
68 \brief The QVideoFrameFormat class specifies the stream format of a video presentation
69 surface.
70 \inmodule QtMultimedia
71
72 \ingroup multimedia
73 \ingroup multimedia_video
74
75 A video sink presents a stream of video frames. QVideoFrameFormat describes the type of
76 the frames and determines how they should be presented.
77
78 The core properties of a video stream required to set up a video sink are the pixel format
79 given by pixelFormat(), and the frame dimensions given by frameSize().
80
81 The region of a frame that is actually displayed on a video surface is given by the viewport().
82 A stream may have a viewport smaller than the entire region of a frame, to allow for videos
83 smaller than the nearest optimal size of a video frame. For example, the width of a frame may be
84 extended so that the start of each scan line is eight-byte aligned.
85
86 Other common properties are the scanLineDirection(), streamFrameRate(), and colorSpace().
87*/
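A minimal usage sketch (illustrative, not part of this file): describing a 1080p NV12 stream with a slightly smaller viewport, then allocating a frame for it.

    #include <QVideoFrame>
    #include <QVideoFrameFormat>

    QVideoFrameFormat fmt(QSize(1920, 1080), QVideoFrameFormat::Format_NV12);
    fmt.setViewport(QRect(0, 0, 1912, 1080)); // display less than the full frame width
    fmt.setStreamFrameRate(30.0);
    QVideoFrame frame(fmt);                   // allocates planes matching the format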
88
89/*!
90 \enum QVideoFrameFormat::PixelFormat
91
92 Enumerates video data types.
93
94 \value Format_Invalid
95 The frame is invalid.
96
97 \value Format_ARGB8888
98 The frame is stored using an ARGB format with 8 bits per component.
99
100 \value Format_ARGB8888_Premultiplied
101 The frame is stored using a premultiplied ARGB format with 8 bits per component.
102
103 \value Format_XRGB8888
104 The frame is stored using a 32-bit per pixel RGB format (0xff, R, G, B).
105
106 \value Format_BGRA8888
107 The frame is stored using a 32-bit BGRA format (0xBBGGRRAA).
108
109 \value Format_BGRA8888_Premultiplied
110 The frame is stored using a premultiplied 32-bit BGRA format.
111
112 \value Format_ABGR8888
113 The frame is stored using a 32-bit ABGR format (0xAABBGGRR).
114
115 \value Format_XBGR8888
116 The frame is stored using a 32-bit BGR format (0xffBBGGRR).
117
118 \value Format_RGBA8888
119 The frame is stored in memory as the bytes R, G, B, A/X, with R at the lowest address and A/X at the highest address.
120
121 \value Format_BGRX8888
122 The frame is stored using a 32-bit BGRx format, [31:0] B:G:R:x 8:8:8:8 little endian.
123
124 \value Format_RGBX8888
125 The frame is stored in memory as the bytes R, G, B, A/X, with R at the lowest address and A/X at the highest address.
126
127 \value Format_AYUV
128 The frame is stored using a packed 32-bit AYUV format (0xAAYYUUVV).
129
130 \value Format_AYUV_Premultiplied
131 The frame is stored using a packed premultiplied 32-bit AYUV format (0xAAYYUUVV).
132
133 \value Format_YUV420P
134 The frame is stored using an 8-bit per component planar YUV format with the U and V planes
135 horizontally and vertically sub-sampled, i.e. the height and width of the U and V planes are
136 half that of the Y plane.
137
138 \value Format_YUV422P
139 The frame is stored using an 8-bit per component planar YUV format with the U and V planes
140 horizontally sub-sampled, i.e. the width of the U and V planes are
141 half that of the Y plane, and height of U and V planes is the same as Y.
142
143 \value Format_YV12
144 The frame is stored using an 8-bit per component planar YVU format with the V and U planes
145 horizontally and vertically sub-sampled, i.e. the height and width of the V and U planes are
146 half that of the Y plane.
147
148 \value Format_UYVY
149 The frame is stored using an 8-bit per component packed YUV format with the U and V planes
150 horizontally sub-sampled (U-Y-V-Y), i.e. two horizontally adjacent pixels are stored as a 32-bit
151 macropixel which has a Y value for each pixel and common U and V values.
152
153 \value Format_YUYV
154 The frame is stored using an 8-bit per component packed YUV format with the U and V planes
155 horizontally sub-sampled (Y-U-Y-V), i.e. two horizontally adjacent pixels are stored as a 32-bit
156 macropixel which has a Y value for each pixel and common U and V values.
157
158 \value Format_NV12
159 The frame is stored using an 8-bit per component semi-planar YUV format with a Y plane (Y)
160 followed by a horizontally and vertically sub-sampled, packed UV plane (U-V).
161
162 \value Format_NV21
163 The frame is stored using an 8-bit per component semi-planar YUV format with a Y plane (Y)
164 followed by a horizontally and vertically sub-sampled, packed VU plane (V-U).
165
166 \value Format_IMC1
167 The frame is stored using an 8-bit per component planar YUV format with the U and V planes
168 horizontally and vertically sub-sampled. This is similar to the Format_YUV420P type, except
169 that the bytes per line of the U and V planes are padded out to the same stride as the Y plane.
170
171 \value Format_IMC2
172 The frame is stored using an 8-bit per component planar YUV format with the U and V planes
173 horizontally and vertically sub-sampled. This is similar to the Format_YUV420P type, except
174 that the lines of the U and V planes are interleaved, i.e. each line of U data is followed by a
175 line of V data creating a single line of the same stride as the Y data.
176
177 \value Format_IMC3
178 The frame is stored using an 8-bit per component planar YVU format with the V and U planes
179 horizontally and vertically sub-sampled. This is similar to the Format_YV12 type, except that
180 the bytes per line of the V and U planes are padded out to the same stride as the Y plane.
181
182 \value Format_IMC4
183 The frame is stored using an 8-bit per component planar YVU format with the V and U planes
184 horizontally and vertically sub-sampled. This is similar to the Format_YV12 type, except that
185 the lines of the V and U planes are interleaved, i.e. each line of V data is followed by a line
186 of U data creating a single line of the same stride as the Y data.
187
188 \value Format_P010
189 The frame is stored using a 16-bit per component semi-planar YUV format with a Y plane (Y)
190 followed by a horizontally and vertically sub-sampled, packed UV plane (U-V). Only the 10 most
191 significant bits of each component are used.
192
193 \value Format_P016
194 The frame is stored using a 16-bit per component semi-planar YUV format with a Y plane (Y)
195 followed by a horizontally and vertically sub-sampled, packed UV plane (U-V).
196
197 \value Format_Y8
198 The frame is stored using an 8-bit greyscale format.
199
200 \value Format_Y16
201 The frame is stored using a 16-bit linear greyscale format. Little endian.
202
203 \value Format_Jpeg
204 The frame is stored in compressed Jpeg format.
205
206 \value Format_SamplerExternalOES
207 The frame is stored in external OES texture format. This is currently only being used on Android.
208
209 \value Format_SamplerRect
210 The frame is stored in rectangle texture format (GL_TEXTURE_RECTANGLE). This is only being used on
211 macOS with an OpenGL based Rendering Hardware Interface. The underlying pixel format stored in the
212 texture is Format_BGRA8888.
213
214 \value Format_YUV420P10
215 Similar to Format_YUV420P, but uses 16 bits per component, with only the 10 most significant bits used.
216*/
217
218/*!
219 \enum QVideoFrameFormat::Direction
220
221 Enumerates the layout direction of video scan lines.
222
223 \value TopToBottom Scan lines are arranged from the top of the frame to the bottom.
224 \value BottomToTop Scan lines are arranged from the bottom of the frame to the top.
225*/
226
227/*!
228 \enum QVideoFrameFormat::YCbCrColorSpace
229
230 \deprecated Use QVideoFrameFormat::ColorSpace instead.
231
232 Enumerates the Y'CbCr color space of video frames.
233
234 \value YCbCr_Undefined
235 No color space is specified.
236
237 \value YCbCr_BT601
238 A Y'CbCr color space defined by ITU-R recommendation BT.601
239 with Y value range from 16 to 235, and Cb/Cr range from 16 to 240.
240 Used mostly by older videos that were targeting CRT displays.
241
242 \value YCbCr_BT709
243 A Y'CbCr color space defined by ITU-R BT.709 with the same value range as YCbCr_BT601.
244 The most commonly used color space today.
245
246 \value YCbCr_xvYCC601
247 This value is deprecated. Please check the \l ColorRange instead.
248 The BT.601 color space with the value range extended to 0 to 255.
249 It is backward compatible with BT.601 and uses values outside BT.601 range to represent a
250 wider range of colors.
251
252 \value YCbCr_xvYCC709
253 This value is deprecated. Please check the \l ColorRange instead.
254 The BT.709 color space with the value range extended to 0 to 255.
255
256 \value YCbCr_JPEG
257 The full range Y'CbCr color space used in most JPEG files.
258
259 \value YCbCr_BT2020
260 The color space defined by ITU-R BT.2020. Used mainly for HDR videos.
261*/
262
263
264/*!
265 \enum QVideoFrameFormat::ColorSpace
266
267 Enumerates the color space of video frames.
268
269 \value ColorSpace_Undefined
270 No color space is specified.
271
272 \value ColorSpace_BT601
273 A color space defined by ITU-R recommendation BT.601
274 with Y value range from 16 to 235, and Cb/Cr range from 16 to 240.
275 Used mostly by older videos that were targeting CRT displays.
276
277 \value ColorSpace_BT709
278 A color space defined by ITU-R BT.709 with the same value range as ColorSpace_BT601.
279 The most commonly used color space today.
280
281 \value ColorSpace_AdobeRgb
282 The full range YUV color space used in most JPEG files.
283
284 \value ColorSpace_BT2020
285 The color space defined by ITU-R BT.2020. Used mainly for HDR videos.
286*/
287
288/*!
289 \enum QVideoFrameFormat::ColorTransfer
290
291 \value ColorTransfer_Unknown
292 The color transfer function is unknown.
293
294 \value ColorTransfer_BT709
295 Color values are encoded according to BT709. See also https://www.itu.int/rec/R-REC-BT.709/en.
296 This is close to, but not identical to a gamma curve of 2.2, and the same transfer curve as is
297 used in sRGB.
298
299 \value ColorTransfer_BT601
300 Color values are encoded according to BT601. See also https://www.itu.int/rec/R-REC-BT.601/en.
301
302 \value ColorTransfer_Linear
303 Color values are linear
304
305 \value ColorTransfer_Gamma22
306 Color values are encoded with a gamma of 2.2
307
308 \value ColorTransfer_Gamma28
309 Color values are encoded with a gamma of 2.8
310
311 \value ColorTransfer_ST2084
312 Color values are encoded using SMPTE ST 2084. This transfer function is the most common HDR
313 transfer function and often called the 'perceptual quantizer'. See also https://www.itu.int/rec/R-REC-BT.2100
314 and https://en.wikipedia.org/wiki/Perceptual_quantizer.
315
316
317 \value ColorTransfer_STD_B67
318 Color values are encoded using ARIB STD B67. This transfer function is also often referred to as 'hybrid log gamma'.
319 See also https://www.itu.int/rec/R-REC-BT.2100 and https://en.wikipedia.org/wiki/Hybrid_log–gamma.
320*/
321
322/*!
323 \enum QVideoFrameFormat::ColorRange
324
325 Describes the color range used by the video data. Video data usually comes in either full
326 color range, where all values are used, or a more limited range traditionally used in
327 YUV video formats, where only a subset of all values is used.
328
329 \value ColorRange_Unknown
330 The color range of the video is unknown.
331
332 \value ColorRange_Video
333
334 The color range traditionally used by most YUV video formats. For 8-bit formats, the Y component is
335 limited to values between 16 and 235. The U and V components are limited to values between 16 and 240.
336
337 For higher bit depths, multiply these values by 2^(depth-8).
338
339 \value ColorRange_Full
340
341 Full color range. All values from 0 to 2^depth - 1 are valid.
342*/
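A small illustration of the scaling rule above (helper names are hypothetical): the limited "video" range endpoints for the Y component at a given bit depth.

    constexpr int videoRangeMinY(int depth) { return 16  << (depth - 8); } // 16 for 8-bit, 64 for 10-bit
    constexpr int videoRangeMaxY(int depth) { return 235 << (depth - 8); } // 235 for 8-bit, 940 for 10-bit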
343
344/*!
345 Constructs a null video stream format.
346*/
347QVideoFrameFormat::QVideoFrameFormat()
348 : d(new QVideoFrameFormatPrivate)
349{
350 initResource();
351}
352
353/*!
354 Constructs a video stream format with the given frame \a size and pixel \a format.
355*/
356QVideoFrameFormat::QVideoFrameFormat(
357 const QSize& size, QVideoFrameFormat::PixelFormat format)
358 : d(new QVideoFrameFormatPrivate(size, format))
359{
360}
361
362/*!
363 Constructs a copy of \a other.
364*/
365QVideoFrameFormat::QVideoFrameFormat(const QVideoFrameFormat &other) = default;
366
367/*!
368 \fn QVideoFrameFormat::QVideoFrameFormat(QVideoFrameFormat &&other)
369
370 Constructs a QVideoFrameFormat by moving from \a other.
371*/
372
373/*!
374 \fn void QVideoFrameFormat::swap(QVideoFrameFormat &other) noexcept
375
376 Swaps the current video frame format with the \a other.
377*/
378
379/*!
380 Assigns the values of \a other to this object.
381*/
382QVideoFrameFormat &QVideoFrameFormat::operator =(const QVideoFrameFormat &other) = default;
383
384/*!
385 \fn QVideoFrameFormat &QVideoFrameFormat::operator =(QVideoFrameFormat &&other)
386
387 Moves \a other into this QVideoFrameFormat.
388*/
389
390/*!
391 Destroys a video stream description.
392*/
393QVideoFrameFormat::~QVideoFrameFormat() = default;
394
395/*!
396 Identifies if a video surface format has a valid pixel format and frame size.
397
398 Returns true if the format is valid, and false otherwise.
399*/
400bool QVideoFrameFormat::isValid() const
401{
402 return d->pixelFormat != Format_Invalid && d->frameSize.isValid();
403}
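Sketch of the validity rule: a default-constructed format stays invalid until it carries both a pixel format and a valid frame size.

    QVideoFrameFormat null;                                                   // isValid() == false
    QVideoFrameFormat good(QSize(640, 480), QVideoFrameFormat::Format_NV12); // isValid() == true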
404
405/*!
406 Returns true if \a other is the same as this video format, and false if they are different.
407*/
408bool QVideoFrameFormat::operator ==(const QVideoFrameFormat &other) const
409{
410 return d == other.d || *d == *other.d;
411}
412
413/*!
414 Returns true if \a other is different to this video format, and false if they are the same.
415*/
416bool QVideoFrameFormat::operator !=(const QVideoFrameFormat &other) const
417{
418 return d != other.d && !(*d == *other.d);
419}
420
421/*!
422 \internal
423*/
424void QVideoFrameFormat::detach()
425{
426 d.detach();
427}
428
429/*!
430 Returns the pixel format of frames in a video stream.
431*/
432QVideoFrameFormat::PixelFormat QVideoFrameFormat::pixelFormat() const
433{
434 return d->pixelFormat;
435}
436
437/*!
438 Returns the dimensions of frames in a video stream.
439
440 \sa frameWidth(), frameHeight()
441*/
442QSize QVideoFrameFormat::frameSize() const
443{
444 return d->frameSize;
445}
446
447/*!
448 Returns the width of frames in a video stream.
449
450 \sa frameSize(), frameHeight()
451*/
452int QVideoFrameFormat::frameWidth() const
453{
454 return d->frameSize.width();
455}
456
457/*!
458 Returns the height of frames in a video stream.
459*/
460int QVideoFrameFormat::frameHeight() const
461{
462 return d->frameSize.height();
463}
464
465/*!
466 Returns the number of planes used.
467 This number depends on the pixel format: it is
468 1 for RGB-based formats, and between 1 and 3 for
469 YUV-based formats.
470*/
471int QVideoFrameFormat::planeCount() const
472{
473 return QVideoTextureHelper::textureDescription(d->pixelFormat)->nplanes;
474}
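Illustrative plane counts for a few common formats (frame sizes chosen arbitrarily):

    QVideoFrameFormat(QSize(640, 480), QVideoFrameFormat::Format_RGBA8888).planeCount(); // 1 plane
    QVideoFrameFormat(QSize(640, 480), QVideoFrameFormat::Format_NV12).planeCount();     // 2 planes (Y, UV)
    QVideoFrameFormat(QSize(640, 480), QVideoFrameFormat::Format_YUV420P).planeCount();  // 3 planes (Y, U, V)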
475
476/*!
477 Sets the size of frames in a video stream to \a size.
478
479 This will reset the viewport() to fill the entire frame.
480*/
481void QVideoFrameFormat::setFrameSize(const QSize &size)
482{
483 detach();
484 d->frameSize = size;
485 d->viewport = QRect(QPoint(0, 0), size);
486}
487
488/*!
489 \overload
490
491 Sets the \a width and \a height of frames in a video stream.
492
493 This will reset the viewport() to fill the entire frame.
494*/
495void QVideoFrameFormat::setFrameSize(int width, int height)
496{
497 detach();
498 d->frameSize = QSize(width, height);
499 d->viewport = QRect(0, 0, width, height);
500}
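Sketch of the viewport reset described above:

    QVideoFrameFormat fmt(QSize(640, 480), QVideoFrameFormat::Format_YUV420P);
    fmt.setViewport(QRect(8, 0, 624, 480));
    fmt.setFrameSize(1280, 720);
    // fmt.viewport() is now QRect(0, 0, 1280, 720)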
501
502/*!
503 Returns the viewport of a video stream.
504
505 The viewport is the region of a video frame that is actually displayed.
506
507 By default the viewport covers an entire frame.
508*/
509QRect QVideoFrameFormat::viewport() const
510{
511 return d->viewport;
512}
513
514/*!
515 Sets the viewport of a video stream to \a viewport.
516*/
517void QVideoFrameFormat::setViewport(const QRect &viewport)
518{
519 detach();
520 d->viewport = viewport;
521}
522
523/*!
524 Returns the direction of scan lines.
525*/
526QVideoFrameFormat::Direction QVideoFrameFormat::scanLineDirection() const
527{
528 return d->scanLineDirection;
529}
530
531/*!
532 Sets the \a direction of scan lines.
533*/
534void QVideoFrameFormat::setScanLineDirection(Direction direction)
535{
536 detach();
537 d->scanLineDirection = direction;
538}
539
540#if QT_DEPRECATED_SINCE(6, 8)
541/*!
542 Returns the frame rate of a video stream in frames per second.
543*/
544qreal QVideoFrameFormat::frameRate() const
545{
546 return streamFrameRate();
547}
548
549/*!
550 Sets the frame \a rate of a video stream in frames per second.
551*/
552void QVideoFrameFormat::setFrameRate(qreal rate)
553{
554 setStreamFrameRate(rate);
555}
556#endif
557
558/*!
559 Returns the frame rate of a video stream in frames per second.
560*/
561qreal QVideoFrameFormat::streamFrameRate() const
562{
563 return d->frameRate;
564}
565
566/*!
567 Sets the frame \a rate of a video stream in frames per second.
568*/
569void QVideoFrameFormat::setStreamFrameRate(qreal rate)
570{
571 detach();
572 d->frameRate = rate;
573}
574
575#if QT_DEPRECATED_SINCE(6, 4)
576/*!
577 \deprecated Use colorSpace() instead
578
579 Returns the Y'CbCr color space of a video stream.
580*/
581QVideoFrameFormat::YCbCrColorSpace QVideoFrameFormat::yCbCrColorSpace() const
582{
583 return YCbCrColorSpace(d->colorSpace);
584}
585
586/*!
587 \deprecated Use setColorSpace() instead
588
589 Sets the Y'CbCr color \a space of a video stream.
590 It is only used with raw YUV frame types.
591*/
592void QVideoFrameFormat::setYCbCrColorSpace(QVideoFrameFormat::YCbCrColorSpace space)
593{
594 detach();
595 d->colorSpace = ColorSpace(space);
596}
597#endif // QT_DEPRECATED_SINCE(6, 4)
598
599/*!
600 Returns the color space of a video stream.
601*/
602QVideoFrameFormat::ColorSpace QVideoFrameFormat::colorSpace() const
603{
604 return d->colorSpace;
605}
606
607/*!
608 Sets the \a colorSpace of a video stream.
609*/
610void QVideoFrameFormat::setColorSpace(ColorSpace colorSpace)
611{
612 detach();
613 d->colorSpace = colorSpace;
614}
615
616/*!
617 Returns the color transfer function that should be used to render the
618 video stream.
619*/
620QVideoFrameFormat::ColorTransfer QVideoFrameFormat::colorTransfer() const
621{
622 return d->colorTransfer;
623}
624
625/*!
626 Sets the color transfer function that should be used to render the
627 video stream to \a colorTransfer.
628*/
629void QVideoFrameFormat::setColorTransfer(ColorTransfer colorTransfer)
630{
631 detach();
632 d->colorTransfer = colorTransfer;
633}
634
635/*!
636 Returns the color range that should be used to render the
637 video stream.
638*/
639QVideoFrameFormat::ColorRange QVideoFrameFormat::colorRange() const
640{
641 return d->colorRange;
642}
643
644/*!
645 Sets the color range that should be used to render the
646 video stream to \a range.
647*/
648void QVideoFrameFormat::setColorRange(ColorRange range)
649{
650 detach();
651 d->colorRange = range;
652}
653
654/*!
655 Returns \c true if the surface is mirrored around its vertical axis.
656
657 Transformations of \c QVideoFrameFormat, specifically,
658 rotation and mirroring, can be determined by the orientation of
659 the camera sensor, camera settings, or the orientation of
660 the video stream.
661
662 Mirroring is applied after rotation.
663
664 \note The mirroring here differs from QImage::mirrored, as a vertically mirrored QImage
665 will be mirrored around its x-axis.
666
667 \since 5.11
668 */
669bool QVideoFrameFormat::isMirrored() const
670{
671 return d->transformation.mirroredHorizontallyAfterRotation;
672}
673
674/*!
675 Sets whether the surface is \a mirrored around its vertical axis.
676
677 Transformations of \c QVideoFrameFormat, specifically,
678 rotation and mirroring, can be determined by the orientation of
679 the camera sensor, camera settings, or the orientation of
680 the video stream.
681
682 Mirroring is applied after rotation.
683
684 Default value is \c false.
685
686 \note The mirroring here differs from QImage::mirrored, as a vertically mirrored QImage
687 will be mirrored around its x-axis.
688
689 \since 5.11
690 */
691void QVideoFrameFormat::setMirrored(bool mirrored)
692{
693 detach();
694 d->transformation.mirroredHorizontallyAfterRotation = mirrored;
695}
696
697/*!
698 Returns the angle by which the surface is rotated clockwise.
699
700 Transformations of \c QVideoFrameFormat, specifically,
701 rotation and mirroring, can be determined by the orientation of
702 the camera sensor, camera settings, or the orientation of
703 the video stream.
704
705 Rotation is applied before mirroring.
706 */
707QtVideo::Rotation QVideoFrameFormat::rotation() const
708{
709 return d->transformation.rotation;
710}
711
712/*!
713 Sets the \a angle by which the surface is rotated clockwise.
714
715 Transformations of \c QVideoFrameFormat, specifically,
716 rotation and mirroring, can be determined by the orientation of
717 the camera sensor, camera settings, or the orientation of
718 the video stream.
719
720 Rotation is applied before mirroring.
721
722 Default value is \c QtVideo::Rotation::None.
723 */
724void QVideoFrameFormat::setRotation(QtVideo::Rotation angle)
725{
726 detach();
727 d->transformation.rotation = angle;
728}
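Sketch (values chosen for illustration): a portrait camera stream rotated 90 degrees clockwise and then mirrored around its vertical axis; rotation is applied before mirroring.

    QVideoFrameFormat fmt(QSize(720, 1280), QVideoFrameFormat::Format_NV12);
    fmt.setRotation(QtVideo::Rotation::Clockwise90);
    fmt.setMirrored(true);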
729
730/*!
731 \internal
732*/
733QString QVideoFrameFormat::vertexShaderFileName() const
734{
735 return QVideoTextureHelper::vertexShaderFileName(*this);
736}
737
738/*!
739 \internal
740*/
741QString QVideoFrameFormat::fragmentShaderFileName() const
742{
743 return QVideoTextureHelper::fragmentShaderFileName(*this, nullptr);
744}
745
746/*!
747 \internal
748*/
749void QVideoFrameFormat::updateUniformData(QByteArray *dst, const QVideoFrame &frame, const QMatrix4x4 &transform, float opacity) const
750{
751 QVideoTextureHelper::updateUniformData(dst, nullptr, *this, frame, transform, opacity);
752}
753
754/*!
755 \internal
756
757 The maximum luminance in nits as set by the HDR metadata. If the video doesn't have metadata, the returned value depends on the
758 maximum that can be encoded by the transfer function.
759*/
760float QVideoFrameFormat::maxLuminance() const
761{
762 if (d->maxLuminance <= 0) {
763 if (d->colorTransfer == ColorTransfer_ST2084)
764 return 10000.; // ST2084 can encode up to 10000 nits
765 if (d->colorTransfer == ColorTransfer_STD_B67)
766 return 1500.; // STD_B67 can encode up to 1200 nits, use a bit more for some headroom
767 return 100; // SDR
768 }
769 return d->maxLuminance;
770}
771/*!
772 Sets the maximum luminance to the given value, \a lum.
773*/
774void QVideoFrameFormat::setMaxLuminance(float lum)
775{
776 detach();
777 d->maxLuminance = lum;
778}
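Sketch of a typical HDR10 stream description (the luminance value is illustrative; it normally comes from the stream's HDR metadata):

    QVideoFrameFormat fmt(QSize(3840, 2160), QVideoFrameFormat::Format_P010);
    fmt.setColorSpace(QVideoFrameFormat::ColorSpace_BT2020);
    fmt.setColorTransfer(QVideoFrameFormat::ColorTransfer_ST2084);
    fmt.setColorRange(QVideoFrameFormat::ColorRange_Video);
    fmt.setMaxLuminance(1000.f); // mastering display peak in nits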
779
780
781/*!
782 Returns a video pixel format equivalent to an image \a format. If there is no equivalent
783 format QVideoFrameFormat::Format_Invalid is returned instead.
784
785 \note In general \l QImage does not handle YUV formats.
786
787*/
788QVideoFrameFormat::PixelFormat QVideoFrameFormat::pixelFormatFromImageFormat(QImage::Format format)
789{
790 switch (format) {
791#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN
792 case QImage::Format_RGB32:
793 return QVideoFrameFormat::Format_BGRX8888;
794 case QImage::Format_ARGB32:
795 return QVideoFrameFormat::Format_BGRA8888;
796 case QImage::Format_ARGB32_Premultiplied:
797 return QVideoFrameFormat::Format_BGRA8888_Premultiplied;
798#else
799 case QImage::Format_RGB32:
800 return QVideoFrameFormat::Format_XRGB8888;
801 case QImage::Format_ARGB32:
802 return QVideoFrameFormat::Format_ARGB8888;
803 case QImage::Format_ARGB32_Premultiplied:
804 return QVideoFrameFormat::Format_ARGB8888_Premultiplied;
805#endif
806 case QImage::Format_RGBA8888:
807 return QVideoFrameFormat::Format_RGBA8888;
808 case QImage::Format_RGBA8888_Premultiplied:
809 // QVideoFrameFormat::Format_RGBA8888_Premultiplied is to be added in 6.8
810 // Format_RGBX8888 suits the best as a workaround
811 return QVideoFrameFormat::Format_RGBX8888;
812 case QImage::Format_RGBX8888:
813 return QVideoFrameFormat::Format_RGBX8888;
814 case QImage::Format_Grayscale8:
815 return QVideoFrameFormat::Format_Y8;
816 case QImage::Format_Grayscale16:
817 return QVideoFrameFormat::Format_Y16;
818 default:
819 return QVideoFrameFormat::Format_Invalid;
820 }
821}
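Sketch: mapping a QImage's format before wrapping its pixels in a video frame; unsupported image formats yield Format_Invalid.

    QImage image(640, 480, QImage::Format_RGBA8888);
    QVideoFrameFormat::PixelFormat pf =
            QVideoFrameFormat::pixelFormatFromImageFormat(image.format()); // Format_RGBA8888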
822
823/*!
824 Returns an image format equivalent to a video frame pixel \a format. If there is no equivalent
825 format QImage::Format_Invalid is returned instead.
826
827 \note In general \l QImage does not handle YUV formats.
828
829*/
830QImage::Format QVideoFrameFormat::imageFormatFromPixelFormat(QVideoFrameFormat::PixelFormat format)
831{
832 switch (format) {
833#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN
834 case QVideoFrameFormat::Format_BGRA8888:
835 return QImage::Format_ARGB32;
836 case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
837 return QImage::Format_ARGB32_Premultiplied;
838 case QVideoFrameFormat::Format_BGRX8888:
839 return QImage::Format_RGB32;
840 case QVideoFrameFormat::Format_ARGB8888:
841 case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
842 case QVideoFrameFormat::Format_XRGB8888:
843 return QImage::Format_Invalid;
844#else
845 case QVideoFrameFormat::Format_ARGB8888:
846 return QImage::Format_ARGB32;
847 case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
848 return QImage::Format_ARGB32_Premultiplied;
849 case QVideoFrameFormat::Format_XRGB8888:
850 return QImage::Format_RGB32;
851 case QVideoFrameFormat::Format_BGRA8888:
852 case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
853 case QVideoFrameFormat::Format_BGRX8888:
854 return QImage::Format_Invalid;
855#endif
856 case QVideoFrameFormat::Format_RGBA8888:
857 return QImage::Format_RGBA8888;
858 case QVideoFrameFormat::Format_RGBX8888:
859 return QImage::Format_RGBX8888;
860 case QVideoFrameFormat::Format_Y8:
861 return QImage::Format_Grayscale8;
862 case QVideoFrameFormat::Format_Y16:
863 return QImage::Format_Grayscale16;
864 case QVideoFrameFormat::Format_ABGR8888:
865 case QVideoFrameFormat::Format_XBGR8888:
866 case QVideoFrameFormat::Format_AYUV:
867 case QVideoFrameFormat::Format_AYUV_Premultiplied:
868 case QVideoFrameFormat::Format_YUV420P:
869 case QVideoFrameFormat::Format_YUV420P10:
870 case QVideoFrameFormat::Format_YUV422P:
871 case QVideoFrameFormat::Format_YV12:
872 case QVideoFrameFormat::Format_UYVY:
873 case QVideoFrameFormat::Format_YUYV:
874 case QVideoFrameFormat::Format_NV12:
875 case QVideoFrameFormat::Format_NV21:
876 case QVideoFrameFormat::Format_IMC1:
877 case QVideoFrameFormat::Format_IMC2:
878 case QVideoFrameFormat::Format_IMC3:
879 case QVideoFrameFormat::Format_IMC4:
880 case QVideoFrameFormat::Format_P010:
881 case QVideoFrameFormat::Format_P016:
882 case QVideoFrameFormat::Format_Jpeg:
883 case QVideoFrameFormat::Format_Invalid:
884 case QVideoFrameFormat::Format_SamplerExternalOES:
885 case QVideoFrameFormat::Format_SamplerRect:
886 return QImage::Format_Invalid;
887 }
888 return QImage::Format_Invalid;
889}
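Sketch of the reverse mapping; YUV pixel formats have no QImage equivalent and map to QImage::Format_Invalid.

    QImage::Format rgba = QVideoFrameFormat::imageFormatFromPixelFormat(QVideoFrameFormat::Format_RGBA8888); // QImage::Format_RGBA8888
    QImage::Format nv12 = QVideoFrameFormat::imageFormatFromPixelFormat(QVideoFrameFormat::Format_NV12);     // QImage::Format_Invalid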
890
891// clang-format off
892
893QT_MM_MAKE_STRING_RESOLVER(QVideoFrameFormat::PixelFormat, QtMultimediaPrivate::EnumName,
894 (QVideoFrameFormat::Format_Invalid, "Invalid")
895 (QVideoFrameFormat::Format_ARGB8888, "ARGB8888")
896 (QVideoFrameFormat::Format_ARGB8888_Premultiplied, "ARGB8888 Premultiplied")
897 (QVideoFrameFormat::Format_XRGB8888, "XRGB8888")
898 (QVideoFrameFormat::Format_BGRA8888, "BGRA8888")
899 (QVideoFrameFormat::Format_BGRX8888, "BGRX8888")
900 (QVideoFrameFormat::Format_BGRA8888_Premultiplied, "BGRA8888 Premultiplied")
901 (QVideoFrameFormat::Format_RGBA8888, "RGBA8888")
902 (QVideoFrameFormat::Format_RGBX8888, "RGBX8888")
903 (QVideoFrameFormat::Format_ABGR8888, "ABGR8888")
904 (QVideoFrameFormat::Format_XBGR8888, "XBGR8888")
905 (QVideoFrameFormat::Format_AYUV, "AYUV")
906 (QVideoFrameFormat::Format_AYUV_Premultiplied, "AYUV Premultiplied")
907 (QVideoFrameFormat::Format_YUV420P, "YUV420P")
908 (QVideoFrameFormat::Format_YUV420P10, "YUV420P10")
909 (QVideoFrameFormat::Format_YUV422P, "YUV422P")
910 (QVideoFrameFormat::Format_YV12, "YV12")
911 (QVideoFrameFormat::Format_UYVY, "UYVY")
912 (QVideoFrameFormat::Format_YUYV, "YUYV")
913 (QVideoFrameFormat::Format_NV12, "NV12")
914 (QVideoFrameFormat::Format_NV21, "NV21")
915 (QVideoFrameFormat::Format_IMC1, "IMC1")
916 (QVideoFrameFormat::Format_IMC2, "IMC2")
917 (QVideoFrameFormat::Format_IMC3, "IMC3")
918 (QVideoFrameFormat::Format_IMC4, "IMC4")
919 (QVideoFrameFormat::Format_Y8, "Y8")
920 (QVideoFrameFormat::Format_Y16, "Y16")
921 (QVideoFrameFormat::Format_P010, "P010")
922 (QVideoFrameFormat::Format_P016, "P016")
923 (QVideoFrameFormat::Format_SamplerExternalOES, "SamplerExternalOES")
924 (QVideoFrameFormat::Format_Jpeg, "Jpeg")
925 (QVideoFrameFormat::Format_SamplerRect, "SamplerRect")
926);
927
928#ifndef QT_NO_DEBUG_STREAM
929# if QT_DEPRECATED_SINCE(6, 4)
930QT_MM_MAKE_STRING_RESOLVER(QVideoFrameFormat::YCbCrColorSpace, QtMultimediaPrivate::EnumName,
931 (QVideoFrameFormat::YCbCr_Undefined, "YCbCr_Undefined")
932 (QVideoFrameFormat::YCbCr_BT601, "YCbCr_BT601")
933 (QVideoFrameFormat::YCbCr_BT709, "YCbCr_BT709")
934 (QVideoFrameFormat::YCbCr_xvYCC601, "YCbCr_xvYCC601")
935 (QVideoFrameFormat::YCbCr_xvYCC709, "YCbCr_xvYCC709")
936 (QVideoFrameFormat::YCbCr_JPEG, "YCbCr_JPEG")
937 (QVideoFrameFormat::YCbCr_BT2020, "YCbCr_BT2020")
938);
939QT_MM_DEFINE_QDEBUG_ENUM(QVideoFrameFormat::YCbCrColorSpace);
940# endif // QT_DEPRECATED_SINCE(6, 4)
941
942QT_MM_MAKE_STRING_RESOLVER(QVideoFrameFormat::ColorSpace, QtMultimediaPrivate::EnumName,
943 (QVideoFrameFormat::ColorSpace_BT601, "ColorSpace_BT601")
944 (QVideoFrameFormat::ColorSpace_BT709, "ColorSpace_BT709")
945 (QVideoFrameFormat::ColorSpace_AdobeRgb, "ColorSpace_AdobeRgb")
946 (QVideoFrameFormat::ColorSpace_BT2020, "ColorSpace_BT2020")
947 (QVideoFrameFormat::ColorSpace_Undefined, "ColorSpace_Undefined")
948 );
949QT_MM_DEFINE_QDEBUG_ENUM(QVideoFrameFormat::ColorSpace);
950
951QT_MM_MAKE_STRING_RESOLVER(QVideoFrameFormat::ColorTransfer, QtMultimediaPrivate::EnumName,
952 (QVideoFrameFormat::ColorTransfer_Unknown, "ColorTransfer_Unknown")
953 (QVideoFrameFormat::ColorTransfer_BT709, "ColorTransfer_BT709")
954 (QVideoFrameFormat::ColorTransfer_BT601, "ColorTransfer_BT601")
955 (QVideoFrameFormat::ColorTransfer_Linear, "ColorTransfer_Linear")
956 (QVideoFrameFormat::ColorTransfer_Gamma22, "ColorTransfer_Gamma22")
957 (QVideoFrameFormat::ColorTransfer_Gamma28, "ColorTransfer_Gamma28")
958 (QVideoFrameFormat::ColorTransfer_ST2084, "ColorTransfer_ST2084")
959 (QVideoFrameFormat::ColorTransfer_STD_B67, "ColorTransfer_STD_B67")
960);
961QT_MM_DEFINE_QDEBUG_ENUM(QVideoFrameFormat::ColorTransfer);
962
963QT_MM_MAKE_STRING_RESOLVER(QVideoFrameFormat::ColorRange, QtMultimediaPrivate::EnumName,
964 (QVideoFrameFormat::ColorRange_Unknown, "ColorRange_Unknown")
965 (QVideoFrameFormat::ColorRange_Video, "ColorRange_Video")
966 (QVideoFrameFormat::ColorRange_Full, "ColorRange_Full")
967);
968QT_MM_DEFINE_QDEBUG_ENUM(QVideoFrameFormat::ColorRange);
969
970QT_MM_MAKE_STRING_RESOLVER(QVideoFrameFormat::Direction, QtMultimediaPrivate::EnumName,
971 (QVideoFrameFormat::TopToBottom, "TopToBottom")
972 (QVideoFrameFormat::BottomToTop, "BottomToTop")
973);
974QT_MM_DEFINE_QDEBUG_ENUM(QVideoFrameFormat::Direction);
975
976// clang-format on
977
978/*!
979 Returns a string representation of the given \a pixelFormat.
980*/
981
982Q_MULTIMEDIA_EXPORT QString QVideoFrameFormat::pixelFormatToString(QVideoFrameFormat::PixelFormat pixelFormat)
983{
984 auto str = QtMultimediaPrivate::StringResolver<QVideoFrameFormat::PixelFormat>::toQString(pixelFormat);
985 return str.value_or(QString());
986}
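Sketch: the returned string is the bare enum name without the "Format_" prefix, or an empty string for values outside the enum.

    QString s = QVideoFrameFormat::pixelFormatToString(QVideoFrameFormat::Format_NV12); // "NV12"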
987
988QDebug operator<<(QDebug dbg, const QVideoFrameFormat &f)
989{
990 QDebugStateSaver saver(dbg);
991 dbg.nospace();
992 dbg << "QVideoFrameFormat(" << f.pixelFormat() << ", " << f.frameSize()
993 << ", viewport=" << f.viewport()
994 << ", colorSpace=" << f.colorSpace()
995 << ')'
996 << "\n pixel format=" << f.pixelFormat()
997 << "\n frame size=" << f.frameSize()
998 << "\n viewport=" << f.viewport()
999 << "\n colorSpace=" << f.colorSpace()
1000 << "\n frameRate=" << f.streamFrameRate()
1001 << "\n mirrored=" << f.isMirrored()
1002 << "\n range=" << f.colorRange()
1003 << "\n colorTransfer=" << f.colorTransfer();
1004
1005 return dbg;
1006}
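Sketch: streaming a format through qDebug() uses the operators defined here.

    QVideoFrameFormat fmt(QSize(1920, 1080), QVideoFrameFormat::Format_NV12);
    qDebug() << fmt; // "QVideoFrameFormat(Format_NV12, QSize(1920, 1080) ..."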
1007
1008QDebug operator<<(QDebug dbg, QVideoFrameFormat::PixelFormat pf)
1009{
1010 QDebugStateSaver saver(dbg);
1011 dbg.nospace();
1012
1013 auto format = QVideoFrameFormat::pixelFormatToString(pf);
1014 if (format.isEmpty())
1015 return dbg;
1016
1017 dbg.noquote() << QStringLiteral("Format_") << format;
1018 return dbg;
1019}
1020#endif
1021
1022QT_END_NAMESPACE