Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
qffmpegvideobuffer.cpp
Go to the documentation of this file.
1// Copyright (C) 2021 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
5#include "private/qvideotexturehelper_p.h"
6#include "private/qmultimediautils_p.h"
9#include <QtCore/qthread.h>
10
11extern "C" {
12#include <libavutil/pixdesc.h>
13#include <libavutil/hdr_dynamic_metadata.h>
14#include <libavutil/mastering_display_metadata.h>
15}
16
17QT_BEGIN_NAMESPACE
18
19using namespace QFFmpeg;
20
21static bool isFrameFlipped(const AVFrame& frame) {
22 for (int i = 0; i < AV_NUM_DATA_POINTERS && frame.data[i]; ++i) {
23 if (frame.linesize[i] < 0)
24 return true;
25 }
26
27 return false;
28}
29
// Takes ownership of a decoded AVFrame. A frame backed by a hardware frames
// context is stored in m_hwFrame for later GPU texture mapping; a plain
// system-memory frame goes to m_swFrame. m_frame always aliases the owned frame.
// NOTE(review): the member-initializer list is truncated in this extraction —
// the initializers between the signature and m_frame (and any trailing ones)
// are missing; the code below is reproduced as found.
QFFmpegVideoBuffer::QFFmpegVideoBuffer(AVFrameUPtr frame, AVRational pixelAspectRatio)
    m_frame(frame.get()),
{
    if (frame->hw_frames_ctx) {
        // GPU-resident frame: the Qt pixel format comes from the hw accel context.
        m_hwFrame = std::move(frame);
        m_pixelFormat = toQtPixelFormat(HWAccel::format(m_hwFrame.get()));
        return;
    }

    // CPU-resident frame: map the FFmpeg format directly.
    m_swFrame = std::move(frame);
    m_pixelFormat = toQtPixelFormat(AVPixelFormat(m_swFrame->format));

}
47
49
51{
52 Q_ASSERT(m_swFrame);
53
54 const auto actualAVPixelFormat = AVPixelFormat(m_swFrame->format);
55 const auto targetAVPixelFormat = toAVPixelFormat(m_pixelFormat);
56
57 const QSize actualSize(m_swFrame->width, m_swFrame->height);
58 if (actualAVPixelFormat != targetAVPixelFormat || isFrameFlipped(*m_swFrame)
59 || m_size != actualSize) {
60 Q_ASSERT(toQtPixelFormat(targetAVPixelFormat) == m_pixelFormat);
61 // convert the format into something we can handle
62 SwsContextUPtr scaleContext = createSwsContext(actualSize, actualAVPixelFormat, m_size,
63 targetAVPixelFormat, SWS_BICUBIC);
64
65 auto newFrame = makeAVFrame();
66 newFrame->width = m_size.width();
67 newFrame->height = m_size.height();
68 newFrame->format = targetAVPixelFormat;
69 av_frame_get_buffer(newFrame.get(), 0);
70
71 sws_scale(scaleContext.get(), m_swFrame->data, m_swFrame->linesize, 0, m_swFrame->height,
72 newFrame->data, newFrame->linesize);
73 if (m_frame == m_swFrame.get())
74 m_frame = newFrame.get();
75 m_swFrame = std::move(newFrame);
76 }
77}
78
79void QFFmpegVideoBuffer::initTextureConverter(QRhi &rhi)
80{
81 if (!m_hwFrame)
82 return;
83
84 // don't use the result reference here
85 ensureTextureConverter(rhi);
86
87 // the type is to be clarified in the method mapTextures
88 m_type = m_hwFrame && TextureConverter::isBackendAvailable(*m_hwFrame, rhi)
89 ? QVideoFrame::RhiTextureHandle
90 : QVideoFrame::NoHandle;
91}
92
93QFFmpeg::TextureConverter &QFFmpegVideoBuffer::ensureTextureConverter(QRhi &rhi)
94{
95 Q_ASSERT(m_hwFrame);
96
97 HwFrameContextData &frameContextData = HwFrameContextData::ensure(*m_hwFrame);
98 TextureConverter *converter = frameContextData.textureConverterMapper.get(&rhi);
99
100 if (!converter) {
101 bool added = false;
102 std::tie(converter, added) =
103 frameContextData.textureConverterMapper.tryMap(rhi, TextureConverter(rhi));
104 // no issues are expected if it's already added in another thread, however,it's worth to
105 // check it
106 Q_ASSERT(converter && added);
107 }
108
109 return *converter;
110}
111
113{
114 if (!m_hwFrame)
115 return nullptr;
116
117 HwFrameContextData &frameContextData = HwFrameContextData::ensure(*m_hwFrame);
118 return frameContextData.textureConverterMapper.findRhi(
119 [](QRhi &rhi) { return rhi.thread()->isCurrentThread(); });
120}
121
123{
124 return fromAvColorSpace(m_frame->colorspace);
125}
126
128{
129 return fromAvColorTransfer(m_frame->color_trc);
130}
131
133{
134 return fromAvColorRange(m_frame->color_range);
135}
136
138{
139 float maxNits = -1;
140 for (int i = 0; i < m_frame->nb_side_data; ++i) {
141 AVFrameSideData *sd = m_frame->side_data[i];
142 // TODO: Longer term we might want to also support HDR10+ dynamic metadata
143 if (sd->type == AV_FRAME_DATA_MASTERING_DISPLAY_METADATA) {
144 auto *data = reinterpret_cast<AVMasteringDisplayMetadata *>(sd->data);
145 auto maybeLum = QFFmpeg::mul(qreal(10'000.), data->max_luminance);
146 if (maybeLum)
147 maxNits = float(maybeLum.value());
148 }
149 }
150 return maxNits;
151}
152
154{
155 if (!m_swFrame) {
156 Q_ASSERT(m_hwFrame && m_hwFrame->hw_frames_ctx);
157 m_swFrame = makeAVFrame();
158 /* retrieve data from GPU to CPU */
159 int ret = av_hwframe_transfer_data(m_swFrame.get(), m_hwFrame.get(), 0);
160 if (ret < 0) {
161 qWarning() << "Error transferring the data to system memory:" << ret;
162 return {};
163 }
165 }
166
167 m_mode = mode;
168
169 MapData mapData;
170 auto *desc = QVideoTextureHelper::textureDescription(pixelFormat());
171 mapData.planeCount = desc->nplanes;
172 for (int i = 0; i < mapData.planeCount; ++i) {
173 Q_ASSERT(m_swFrame->linesize[i] >= 0);
174
175 mapData.data[i] = m_swFrame->data[i];
176 mapData.bytesPerLine[i] = m_swFrame->linesize[i];
177 mapData.dataSize[i] = mapData.bytesPerLine[i]*desc->heightForPlane(m_swFrame->height, i);
178 }
179
180 if ((mode & QVideoFrame::WriteOnly) != 0 && m_hwFrame) {
181 m_type = QVideoFrame::NoHandle;
182 m_hwFrame.reset();
183 }
184
185 return mapData;
186}
187
189{
190 // nothing to do here for SW buffers.
191 // Set NotMapped mode to ensure map/unmap/mapMode consisteny.
192 m_mode = QVideoFrame::NotMapped;
193}
194
195QVideoFrameTexturesUPtr QFFmpegVideoBuffer::mapTextures(QRhi &rhi, QVideoFrameTexturesUPtr& oldTextures)
196{
197 Q_ASSERT(rhi.thread()->isCurrentThread());
198
199 QVideoFrameTexturesUPtr result = createTexturesFromHwFrame(rhi, oldTextures);
200
201 // update m_type according to the real result
202 m_type = result ? QVideoFrame::RhiTextureHandle : QVideoFrame::NoHandle;
203 return result;
204}
205
// Attempts to turn the hardware frame into RHI textures. Tries the direct
// texture path first, then falls back to raw texture handles wrapped via
// QVideoTextureHelper. Returns null when no converter is available or both
// paths fail. oldTextures may be consumed to recycle resources.
QVideoFrameTexturesUPtr QFFmpegVideoBuffer::createTexturesFromHwFrame(QRhi &rhi, QVideoFrameTexturesUPtr& oldTextures) {

    if (!m_hwFrame)
        return {};

    // QTBUG-132200:
    // We aim to set initTextureConverterForAnyRhi=true for as many platforms as we can,
    // and remove the check after all platforms work fine on CI. If the flag is enabled,
    // QVideoFrame::toImage can work faster, and we can test hw texture conversion on CI.
    // Currently, enabling the flag fails some CI platforms.
    constexpr bool initTextureConverterForAnyRhi = false;

    // Either create a converter on demand, or only use one that was already
    // registered for this rhi by initTextureConverter().
    TextureConverter *converter = initTextureConverterForAnyRhi
            ? &ensureTextureConverter(rhi)
            : HwFrameContextData::ensure(*m_hwFrame).textureConverterMapper.get(&rhi);

    if (!converter)
        return {};

    if (!converter->init(*m_hwFrame))
        return {};

    // Fast path: the converter produces ready-made textures directly.
    const QVideoFrameTextures *oldTexturesRaw = oldTextures.get();
    if (QVideoFrameTexturesUPtr newTextures = converter->createTextures(*m_hwFrame, oldTextures))
        return newTextures;

    // On failure, createTextures must not have consumed oldTextures.
    Q_ASSERT(oldTextures.get() == oldTexturesRaw);

    // Fallback path: obtain native texture handles (recycling the old ones)
    // and wrap them into QVideoFrameTextures ourselves.
    QVideoFrameTexturesHandlesUPtr oldTextureHandles =
            oldTextures ? oldTextures->takeHandles() : nullptr;
    QVideoFrameTexturesHandlesUPtr newTextureHandles =
            converter->createTextureHandles(*m_hwFrame, std::move(oldTextureHandles));

    if (newTextureHandles) {
        QVideoFrameTexturesUPtr newTextures = QVideoTextureHelper::createTexturesFromHandles(
                std::move(newTextureHandles), rhi, m_pixelFormat,
                { m_hwFrame->width, m_hwFrame->height });

        return newTextures;
    }

    // Warn once per distinct format per thread to avoid flooding the log.
    static thread_local int lastFormat = 0;
    if (std::exchange(lastFormat, m_hwFrame->format) != m_hwFrame->format) // prevent logging spam
        qWarning() << " failed to get textures for frame; format:" << m_hwFrame->format;

    return {};
}
253
255{
256 return m_pixelFormat;
257}
258
260{
261 return m_size;
262}
263
264QVideoFrameFormat::PixelFormat QFFmpegVideoBuffer::toQtPixelFormat(AVPixelFormat avPixelFormat, bool *needsConversion)
265{
266 if (needsConversion)
267 *needsConversion = false;
268
269 switch (avPixelFormat) {
270 default:
271 break;
272 case AV_PIX_FMT_NONE:
273 Q_ASSERT(!"Invalid avPixelFormat!");
274 return QVideoFrameFormat::Format_Invalid;
275 case AV_PIX_FMT_ARGB:
276 return QVideoFrameFormat::Format_ARGB8888;
277 case AV_PIX_FMT_0RGB:
278 return QVideoFrameFormat::Format_XRGB8888;
279 case AV_PIX_FMT_BGRA:
280 return QVideoFrameFormat::Format_BGRA8888;
281 case AV_PIX_FMT_BGR0:
282 return QVideoFrameFormat::Format_BGRX8888;
283 case AV_PIX_FMT_ABGR:
284 return QVideoFrameFormat::Format_ABGR8888;
285 case AV_PIX_FMT_0BGR:
286 return QVideoFrameFormat::Format_XBGR8888;
287 case AV_PIX_FMT_RGBA:
288 return QVideoFrameFormat::Format_RGBA8888;
289 case AV_PIX_FMT_RGB0:
290 return QVideoFrameFormat::Format_RGBX8888;
291
292 case AV_PIX_FMT_YUV422P:
293 return QVideoFrameFormat::Format_YUV422P;
294 case AV_PIX_FMT_YUV420P:
295 return QVideoFrameFormat::Format_YUV420P;
296 case AV_PIX_FMT_YUV420P10:
297 return QVideoFrameFormat::Format_YUV420P10;
298 case AV_PIX_FMT_UYVY422:
299 return QVideoFrameFormat::Format_UYVY;
300 case AV_PIX_FMT_YUYV422:
301 return QVideoFrameFormat::Format_YUYV;
302 case AV_PIX_FMT_NV12:
303 return QVideoFrameFormat::Format_NV12;
304 case AV_PIX_FMT_NV21:
305 return QVideoFrameFormat::Format_NV21;
306 case AV_PIX_FMT_GRAY8:
307 return QVideoFrameFormat::Format_Y8;
308 case AV_PIX_FMT_GRAY16:
309 return QVideoFrameFormat::Format_Y16;
310
311 case AV_PIX_FMT_P010:
312 return QVideoFrameFormat::Format_P010;
313 case AV_PIX_FMT_P016:
314 return QVideoFrameFormat::Format_P016;
315 case AV_PIX_FMT_MEDIACODEC:
316 return QVideoFrameFormat::Format_SamplerExternalOES;
317 }
318
319 if (needsConversion)
320 *needsConversion = true;
321
322 const AVPixFmtDescriptor *descriptor = av_pix_fmt_desc_get(avPixelFormat);
323
324 if (descriptor->flags & AV_PIX_FMT_FLAG_RGB)
325 return QVideoFrameFormat::Format_RGBA8888;
326
327 if (descriptor->comp[0].depth > 8)
328 return QVideoFrameFormat::Format_P016;
329 return QVideoFrameFormat::Format_YUV420P;
330}
331
// Maps a QVideoFrameFormat::PixelFormat to the corresponding FFmpeg
// AVPixelFormat. Qt formats with no FFmpeg counterpart (and any unknown
// value, via default:) map to AV_PIX_FMT_NONE. Premultiplied variants are
// folded onto their closest non-premultiplied FFmpeg format.
AVPixelFormat QFFmpegVideoBuffer::toAVPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat)
{
    switch (pixelFormat) {
    default:
    // The formats below have no FFmpeg equivalent.
    case QVideoFrameFormat::Format_Invalid:
    case QVideoFrameFormat::Format_AYUV:
    case QVideoFrameFormat::Format_AYUV_Premultiplied:
    case QVideoFrameFormat::Format_YV12:
    case QVideoFrameFormat::Format_IMC1:
    case QVideoFrameFormat::Format_IMC2:
    case QVideoFrameFormat::Format_IMC3:
    case QVideoFrameFormat::Format_IMC4:
        return AV_PIX_FMT_NONE;
    case QVideoFrameFormat::Format_Jpeg:
        // We're using the data from the converted QImage here, which is in BGRA.
        return AV_PIX_FMT_BGRA;
    case QVideoFrameFormat::Format_ARGB8888:
        return AV_PIX_FMT_ARGB;
    case QVideoFrameFormat::Format_ARGB8888_Premultiplied:
    case QVideoFrameFormat::Format_XRGB8888:
        return AV_PIX_FMT_0RGB;
    case QVideoFrameFormat::Format_BGRA8888:
        return AV_PIX_FMT_BGRA;
    case QVideoFrameFormat::Format_BGRA8888_Premultiplied:
    case QVideoFrameFormat::Format_BGRX8888:
        return AV_PIX_FMT_BGR0;
    case QVideoFrameFormat::Format_ABGR8888:
        return AV_PIX_FMT_ABGR;
    case QVideoFrameFormat::Format_XBGR8888:
        return AV_PIX_FMT_0BGR;
    case QVideoFrameFormat::Format_RGBA8888:
        return AV_PIX_FMT_RGBA;
    // to be added in 6.8:
    // case QVideoFrameFormat::Format_RGBA8888_Premultiplied:
    case QVideoFrameFormat::Format_RGBX8888:
        return AV_PIX_FMT_RGB0;

    case QVideoFrameFormat::Format_YUV422P:
        return AV_PIX_FMT_YUV422P;
    case QVideoFrameFormat::Format_YUV420P:
        return AV_PIX_FMT_YUV420P;
    case QVideoFrameFormat::Format_YUV420P10:
        return AV_PIX_FMT_YUV420P10;
    case QVideoFrameFormat::Format_UYVY:
        return AV_PIX_FMT_UYVY422;
    case QVideoFrameFormat::Format_YUYV:
        return AV_PIX_FMT_YUYV422;
    case QVideoFrameFormat::Format_NV12:
        return AV_PIX_FMT_NV12;
    case QVideoFrameFormat::Format_NV21:
        return AV_PIX_FMT_NV21;
    case QVideoFrameFormat::Format_Y8:
        return AV_PIX_FMT_GRAY8;
    case QVideoFrameFormat::Format_Y16:
        return AV_PIX_FMT_GRAY16;

    case QVideoFrameFormat::Format_P010:
        return AV_PIX_FMT_P010;
    case QVideoFrameFormat::Format_P016:
        return AV_PIX_FMT_P016;

    case QVideoFrameFormat::Format_SamplerExternalOES:
        return AV_PIX_FMT_MEDIACODEC;
    }
}
397
398QT_END_NAMESPACE
QVideoFrameFormat::PixelFormat pixelFormat() const
QVideoFrameFormat::ColorSpace colorSpace() const
QRhi * rhi() const override
void unmap() override
Releases the memory mapped by the map() function.
MapData map(QVideoFrame::MapMode mode) override
Maps the planes of a video buffer to memory.
QVideoFrameFormat::ColorTransfer colorTransfer() const
QVideoFrameFormat::ColorRange colorRange() const
QFFmpegVideoBuffer(AVFrameUPtr frame, AVRational pixelAspectRatio={ 1, 1 })
~QFFmpegVideoBuffer() override
static bool isFrameFlipped(const AVFrame &frame)