Qt
Internal/Contributor docs for the Qt SDK. <b>Note:</b> These are NOT official API docs; those are found <a href='https://doc.qt.io/'>here</a>.
Loading...
Searching...
No Matches
qffmpegvideobuffer.cpp
Go to the documentation of this file.
1// Copyright (C) 2021 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
5#include "private/qvideotexturehelper_p.h"
6#include "private/qmultimediautils_p.h"
7#include "qffmpeghwaccel_p.h"
8#include "qloggingcategory.h"
9
10extern "C" {
11#include <libavutil/pixdesc.h>
12#include <libavutil/hdr_dynamic_metadata.h>
13#include <libavutil/mastering_display_metadata.h>
14}
15
17
18static bool isFrameFlipped(const AVFrame& frame) {
19 for (int i = 0; i < AV_NUM_DATA_POINTERS && frame.data[i]; ++i) {
20 if (frame.linesize[i] < 0)
21 return true;
22 }
23
24 return false;
25}
26
27Q_STATIC_LOGGING_CATEGORY(qLcFFmpegVideoBuffer, "qt.multimedia.ffmpeg.videobuffer");
28
// QFFmpegVideoBuffer constructor: takes ownership of the decoded AVFrame and
// classifies it as a hardware frame (hw_frames_ctx set) or a software frame.
// NOTE(review): this is a generated listing; the signature line (original
// line 29, declared below as QFFmpegVideoBuffer(AVFrameUPtr frame,
// AVRational pixelAspectRatio = {1, 1})) and original line 32 (presumably the
// m_size initializer built via qCalculateFrameSize from the frame size and
// the pixel aspect ratio — confirm against the original source) are missing.
30 : QHwVideoBuffer(QVideoFrame::NoHandle),
31 m_frame(frame.get()),
33 { pixelAspectRatio.num, pixelAspectRatio.den }))
34{
// Hardware-backed frame: keep it on the GPU; the Qt pixel format is derived
// from the hw-acceleration backend, not from frame->format.
35 if (frame->hw_frames_ctx) {
36 m_hwFrame = std::move(frame);
37 m_pixelFormat = toQtPixelFormat(QFFmpeg::HWAccel::format(m_hwFrame.get()));
38 return;
39 }
40
// Software frame: the pixel format comes straight from the AVFrame itself.
41 m_swFrame = std::move(frame);
42 m_pixelFormat = toQtPixelFormat(AVPixelFormat(m_swFrame->format));
43
// NOTE(review): original line 44 is missing from this extract.
45}
46
48
// Converts m_swFrame in place so it matches the buffer's advertised pixel
// format and size, and is not vertically flipped.
// NOTE(review): the function signature (presumably
// void QFFmpegVideoBuffer::convertSWFrame()) was dropped by the extraction.
50{
51 Q_ASSERT(m_swFrame);
52
53 const auto actualAVPixelFormat = AVPixelFormat(m_swFrame->format);
54 const auto targetAVPixelFormat = toAVPixelFormat(m_pixelFormat);
55
// Conversion is needed when the format differs, the frame is stored
// bottom-up, or the frame dimensions do not match the reported size.
56 if (actualAVPixelFormat != targetAVPixelFormat || isFrameFlipped(*m_swFrame)
57 || m_size != QSize(m_swFrame->width, m_swFrame->height)) {
58 Q_ASSERT(toQtPixelFormat(targetAVPixelFormat) == m_pixelFormat);
59 // convert the format into something we can handle
// NOTE(review): sws_getContext can return nullptr (unsupported
// format/size) and its result is not checked before sws_scale —
// worth verifying upstream that this cannot happen here.
60 SwsContext *c = sws_getContext(m_swFrame->width, m_swFrame->height, actualAVPixelFormat,
61 m_size.width(), m_size.height(), targetAVPixelFormat,
62 SWS_BICUBIC, nullptr, nullptr, nullptr);
63
64 auto newFrame = QFFmpeg::makeAVFrame();
65 newFrame->width = m_size.width();
66 newFrame->height = m_size.height();
67 newFrame->format = targetAVPixelFormat;
// NOTE(review): the av_frame_get_buffer return value is ignored;
// allocation failure would make sws_scale write into null planes.
68 av_frame_get_buffer(newFrame.get(), 0);
69
70 sws_scale(c, m_swFrame->data, m_swFrame->linesize, 0, m_swFrame->height, newFrame->data, newFrame->linesize);
// Keep the borrowed m_frame pointer in sync when it aliased m_swFrame.
71 if (m_frame == m_swFrame.get())
72 m_frame = newFrame.get();
73 m_swFrame = std::move(newFrame);
74 sws_freeContext(c);
75 }
76}
77
// Installs the texture converter used to turn the hardware frame into RHI
// textures, and primes it with the current HW frame.
// NOTE(review): generated listing; the signature line (original line 78,
// declared below as setTextureConverter(const QFFmpeg::TextureConverter &))
// and original line 82 are missing — confirm against the original source.
79{
80 m_textureConverter = converter;
81 m_textureConverter.init(m_hwFrame.get());
83}
84
89
94
99
// Scans the frame's side data for HDR mastering-display metadata and returns
// the maximum luminance in nits, or -1 when no such metadata is present.
// NOTE(review): the signature line (original line 100, presumably
// float QFFmpegVideoBuffer::maxNits()) was dropped by the extraction.
101{
102 float maxNits = -1;
103 for (int i = 0; i < m_frame->nb_side_data; ++i) {
104 AVFrameSideData *sd = m_frame->side_data[i];
105 // TODO: Longer term we might want to also support HDR10+ dynamic metadata
106 if (sd->type == AV_FRAME_DATA_MASTERING_DISPLAY_METADATA) {
107 auto *data = reinterpret_cast<AVMasteringDisplayMetadata *>(sd->data);
// Scale by 10'000 — presumably converting from the metadata's
// 0.0001 cd/m^2 units to nits; mul() returns nullopt on overflow
// or an invalid rational, in which case maxNits stays unchanged.
108 auto maybeLum = QFFmpeg::mul(10'000., data->max_luminance);
109 if (maybeLum)
110 maxNits = float(maybeLum.value());
111 }
112 }
113 return maxNits;
114}
115
// Maps the frame's planes into CPU-visible memory, transferring the frame
// from the GPU first when only a hardware frame exists. Returns an empty
// MapData on transfer failure.
// NOTE(review): generated listing; the signature line (declared below as
// MapData map(QtVideo::MapMode mode) override) and original lines 127,
// 132-133 (presumably the MapData/texture-description locals) and 143-144
// (presumably the write-mode condition guarding the block that resets
// m_hwFrame/m_textures) are missing — confirm against the original source.
117{
// Lazily download the HW frame to system memory on first map.
118 if (!m_swFrame) {
119 Q_ASSERT(m_hwFrame && m_hwFrame->hw_frames_ctx);
120 m_swFrame = QFFmpeg::makeAVFrame();
121 /* retrieve data from GPU to CPU */
122 int ret = av_hwframe_transfer_data(m_swFrame.get(), m_hwFrame.get(), 0);
123 if (ret < 0) {
124 qWarning() << "Error transferring the data to system memory:" << ret;
125 return {};
126 }
128 }
129
130 m_mode = mode;
131
// Fill in one entry per plane; convertSWFrame guarantees non-negative
// linesizes, hence the assertion below.
134 mapData.planeCount = desc->nplanes;
135 for (int i = 0; i < mapData.planeCount; ++i) {
136 Q_ASSERT(m_swFrame->linesize[i] >= 0);
137
138 mapData.data[i] = m_swFrame->data[i];
139 mapData.bytesPerLine[i] = m_swFrame->linesize[i];
140 mapData.dataSize[i] = mapData.bytesPerLine[i]*desc->heightForPlane(m_swFrame->height, i);
141 }
142
// Writable mapping invalidates the GPU-side copies of the frame.
145 m_hwFrame.reset();
146 if (m_textures) {
147 qCDebug(qLcFFmpegVideoBuffer)
148 << "Mapping of FFmpeg video buffer with write mode when "
149 "textures have been created. Visual artifacts might "
150 "happen if the frame is still in the rendering pipeline";
151 m_textures.reset();
152 }
153 }
154
155 return mapData;
156}
157
// Releases the mapping established by map().
// NOTE(review): generated listing; the signature line (void unmap()
// override) and original line 162 (presumably m_mode =
// QtVideo::MapMode::NotMapped;) are missing — confirm against the source.
159{
160 // nothing to do here for SW buffers.
161 // Set NotMapped mode to ensure map/unmap/mapMode consistency.
163}
164
165std::unique_ptr<QVideoFrameTextures> QFFmpegVideoBuffer::mapTextures(QRhi *)
166{
167 if (m_textures)
168 return {};
169 if (!m_hwFrame)
170 return {};
171 if (m_textureConverter.isNull()) {
172 m_textures = nullptr;
173 return {};
174 }
175
176 m_textures.reset(m_textureConverter.getTextures(m_hwFrame.get()));
177 if (!m_textures) {
178 static thread_local int lastFormat = 0;
179 if (std::exchange(lastFormat, m_hwFrame->format) != m_hwFrame->format) // prevent logging spam
180 qWarning() << " failed to get textures for frame; format:" << m_hwFrame->format;
181 }
182 return {};
183}
184
// Returns the native texture handle for the requested plane, or 0 when no
// texture set has been created yet.
// NOTE(review): the signature line (original line 185, declared below as
// quint64 textureHandle(QRhi *rhi, int plane) const override) is missing.
186{
187 return m_textures ? m_textures->textureHandle(rhi, plane) : 0;
188}
189
// Accessor for the Qt pixel format derived in the constructor.
// NOTE(review): the signature line (original line 190,
// QVideoFrameFormat::PixelFormat pixelFormat() const) is missing.
191{
192 return m_pixelFormat;
193}
194
// Accessor for the frame size computed in the constructor.
// NOTE(review): the signature line (original line 195, presumably
// QSize QFFmpegVideoBuffer::size() const) is missing.
196{
197 return m_size;
198}
199
// Maps an FFmpeg pixel format to the corresponding Qt pixel format.
// Formats listed in the switch map directly (needsConversion stays false);
// anything else falls through to a heuristic that picks a generic RGB or
// YUV format and sets *needsConversion to true.
// NOTE(review): generated listing — every "return QVideoFrameFormat::..."
// line inside the switch (original lines 210, 212, ..., 252) and the final
// return statements (original lines 261, 264-265) were dropped by the
// extraction; confirm the exact mappings against the original source.
200QVideoFrameFormat::PixelFormat QFFmpegVideoBuffer::toQtPixelFormat(AVPixelFormat avPixelFormat, bool *needsConversion)
201{
202 if (needsConversion)
203 *needsConversion = false;
204
205 switch (avPixelFormat) {
206 default:
207 break;
208 case AV_PIX_FMT_NONE:
209 Q_ASSERT(!"Invalid avPixelFormat!");
211 case AV_PIX_FMT_ARGB:
213 case AV_PIX_FMT_0RGB:
215 case AV_PIX_FMT_BGRA:
217 case AV_PIX_FMT_BGR0:
219 case AV_PIX_FMT_ABGR:
221 case AV_PIX_FMT_0BGR:
223 case AV_PIX_FMT_RGBA:
225 case AV_PIX_FMT_RGB0:
227
228 case AV_PIX_FMT_YUV422P:
230 case AV_PIX_FMT_YUV420P:
232 case AV_PIX_FMT_YUV420P10:
234 case AV_PIX_FMT_UYVY422:
236 case AV_PIX_FMT_YUYV422:
238 case AV_PIX_FMT_NV12:
240 case AV_PIX_FMT_NV21:
242 case AV_PIX_FMT_GRAY8:
244 case AV_PIX_FMT_GRAY16:
246
247 case AV_PIX_FMT_P010:
249 case AV_PIX_FMT_P016:
251 case AV_PIX_FMT_MEDIACODEC:
253 }
254
// No direct mapping: fall back on the pixel-format descriptor to choose a
// reasonable conversion target.
255 if (needsConversion)
256 *needsConversion = true;
257
258 const AVPixFmtDescriptor *descriptor = av_pix_fmt_desc_get(avPixelFormat);
259
260 if (descriptor->flags & AV_PIX_FMT_FLAG_RGB)
262
263 if (descriptor->comp[0].depth > 8)
266}
267
// Maps a Qt pixel format back to the FFmpeg pixel format used for software
// conversion; unsupported formats yield AV_PIX_FMT_NONE.
// NOTE(review): generated listing — the signature line (original line 268,
// declared below as static AVPixelFormat
// toAVPixelFormat(QVideoFrameFormat::PixelFormat)) and every
// "case QVideoFrameFormat::Format_*:" label line were dropped by the
// extraction, leaving only the return statements; confirm which Qt format
// each return belongs to against the original source.
269{
270 switch (pixelFormat) {
271 default:
280 return AV_PIX_FMT_NONE;
282 // We're using the data from the converted QImage here, which is in BGRA.
283 return AV_PIX_FMT_BGRA;
285 return AV_PIX_FMT_ARGB;
288 return AV_PIX_FMT_0RGB;
290 return AV_PIX_FMT_BGRA;
293 return AV_PIX_FMT_BGR0;
295 return AV_PIX_FMT_ABGR;
297 return AV_PIX_FMT_0BGR;
299 return AV_PIX_FMT_RGBA;
300 // to be added in 6.8:
301 // case QVideoFrameFormat::Format_RGBA8888_Premultiplied:
303 return AV_PIX_FMT_RGB0;
304
306 return AV_PIX_FMT_YUV422P;
308 return AV_PIX_FMT_YUV420P;
310 return AV_PIX_FMT_YUV420P10;
312 return AV_PIX_FMT_UYVY422;
314 return AV_PIX_FMT_YUYV422;
316 return AV_PIX_FMT_NV12;
318 return AV_PIX_FMT_NV21;
320 return AV_PIX_FMT_GRAY8;
322 return AV_PIX_FMT_GRAY16;
323
325 return AV_PIX_FMT_P010;
327 return AV_PIX_FMT_P016;
328
330 return AV_PIX_FMT_MEDIACODEC;
331 }
332}
333
QVideoFrameFormat::PixelFormat pixelFormat() const
QVideoFrameFormat::ColorSpace colorSpace() const
void unmap() override
Releases the memory mapped by the map() function.
QVideoFrameFormat::ColorTransfer colorTransfer() const
QFFmpeg::AVFrameUPtr AVFrameUPtr
void setTextureConverter(const QFFmpeg::TextureConverter &converter)
virtual std::unique_ptr< QVideoFrameTextures > mapTextures(QRhi *) override
static AVPixelFormat toAVPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat)
QVideoFrameFormat::ColorRange colorRange() const
MapData map(QtVideo::MapMode mode) override
Maps the planes of a video buffer to memory.
static QVideoFrameFormat::PixelFormat toQtPixelFormat(AVPixelFormat avPixelFormat, bool *needsConversion=nullptr)
QFFmpegVideoBuffer(AVFrameUPtr frame, AVRational pixelAspectRatio={ 1, 1 })
~QFFmpegVideoBuffer() override
virtual quint64 textureHandle(QRhi *rhi, int plane) const override
static AVPixelFormat format(AVFrame *frame)
TextureSet * getTextures(AVFrame *frame)
void init(AVFrame *frame)
QRhi * rhi() const
QVideoFrame::HandleType m_type
\inmodule QtGuiPrivate \inheaderfile rhi/qrhi.h
Definition qrhi.h:1805
\inmodule QtCore
Definition qsize.h:25
constexpr int height() const noexcept
Returns the height.
Definition qsize.h:133
constexpr int width() const noexcept
Returns the width.
Definition qsize.h:130
ColorSpace
Enumerates the color space of video frames.
ColorTransfer
\value ColorTransfer_Unknown The color transfer function is unknown.
PixelFormat
Enumerates video data types.
ColorRange
Describes the color range used by the video data.
The QVideoFrame class represents a frame of video data.
Definition qvideoframe.h:27
int width
the width of the widget excluding any window frame
Definition qwidget.h:114
int height
the height of the widget excluding any window frame
Definition qwidget.h:115
AVFrameUPtr makeAVFrame()
Definition qffmpeg_p.h:136
QVideoFrameFormat::ColorSpace fromAvColorSpace(AVColorSpace colorSpace)
Definition qffmpeg.cpp:646
QVideoFrameFormat::ColorTransfer fromAvColorTransfer(AVColorTransferCharacteristic colorTrc)
Definition qffmpeg.cpp:594
QVideoFrameFormat::ColorRange fromAvColorRange(AVColorRange colorRange)
Definition qffmpeg.cpp:689
std::optional< qint64 > mul(qint64 a, AVRational b)
Definition qffmpeg_p.h:39
Combined button and popup list for selecting options.
const TextureDescription * textureDescription(QVideoFrameFormat::PixelFormat format)
MapMode
Enumerates how a video buffer's data is mapped to system memory.
Definition qtvideo.h:25
static QDBusError::ErrorType get(const char *name)
static QT_BEGIN_NAMESPACE bool isFrameFlipped(const AVFrame &frame)
#define qWarning
Definition qlogging.h:167
#define qCDebug(category,...)
#define Q_STATIC_LOGGING_CATEGORY(name,...)
return ret
QSize qCalculateFrameSize(QSize resolution, Fraction par)
GLenum mode
GLint GLsizei GLsizei GLenum GLenum GLsizei void * data
const GLubyte * c
static QAbstractVideoBuffer::MapData mapData(const camera_frame_nv12_t &frame, unsigned char *baseAddress)
#define Q_ASSERT(cond)
Definition qrandom.cpp:47
unsigned long long quint64
Definition qtypes.h:61
QFrame frame
[0]
The QAbstractVideoBuffer::MapData structure describes the mapped plane layout.