Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
qffmpegmediadataholder.cpp
Go to the documentation of this file.
1// Copyright (C) 2021 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
4#include "playbackengine/qffmpegmediadataholder_p.h"
5
9#include "qiodevice.h"
10#include "qdatetime.h"
12
13#include <QtMultimedia/qplaybackoptions.h>
14
15#include <math.h>
16#include <optional>
17
18extern "C" {
19#include "libavutil/display.h"
20}
21
22QT_BEGIN_NAMESPACE
23
24Q_STATIC_LOGGING_CATEGORY(qLcMediaDataHolder, "qt.multimedia.ffmpeg.mediadataholder")
25
26namespace QFFmpeg {
27
28static std::optional<TrackDuration> streamDuration(const AVStream &stream)
29{
30 if (stream.duration > 0)
31 return toTrackDuration(AVStreamDuration(stream.duration), &stream);
32
33 // In some cases ffmpeg reports negative duration that is definitely invalid.
34 // However, the correct duration may be read from the metadata.
35
36 if (stream.duration < 0 && stream.duration != AV_NOPTS_VALUE) {
37 qCWarning(qLcMediaDataHolder) << "AVStream duration" << stream.duration
38 << "is invalid. Taking it from the metadata";
39 }
40
41 if (const auto duration = av_dict_get(stream.metadata, "DURATION", nullptr, 0)) {
42 const auto time = QTime::fromString(QString::fromUtf8(duration->value));
43 return TrackDuration(qint64(1000) * time.msecsSinceStartOfDay());
44 }
45
46 return {};
47}
48
49static QTransform displayMatrixToTransform(const int32_t *displayMatrix)
50{
51 // displayMatrix is stored as
52 //
53 // . -- X axis
54 // |
55 // | | a b u |
56 // Y | c d v |
57 // axis | x y w |
58 //
59 // where a, b, c, d, x, y are 16.16 fixed-point values,
60 // and u, v, w are 30.2 point values.
61 // Only a, b, c, d impacts on mirroring and rotation,
62 // so it's enough to propagate them to QTransform.
63 //
64 // If we were interested in getting proper XY scales,
65 // we would divide a,b,c,d by 2^16. The whole scale doesn't
66 // impact mirroring and rotation, so we don't do so.
67
68 auto toRotateMirrorValue = [displayMatrix](int index) {
69 // toRotateScaleValue would be:
70 // return displayMatrix[index] / qreal(1 << 16);
71 return displayMatrix[index];
72 };
73
74 return QTransform(toRotateMirrorValue(0), toRotateMirrorValue(1),
75 toRotateMirrorValue(3), toRotateMirrorValue(4),
76 0, 0);
77}
78
79static VideoTransformation streamTransformation(const AVStream *stream)
80{
81 Q_ASSERT(stream);
82
83 using SideDataSize = decltype(AVPacketSideData::size);
84 constexpr SideDataSize displayMatrixSize = sizeof(int32_t) * 9;
85 const AVPacketSideData *sideData = streamSideData(stream, AV_PKT_DATA_DISPLAYMATRIX);
86 if (!sideData || sideData->size < displayMatrixSize)
87 return {};
88
89 const auto displayMatrix = reinterpret_cast<const int32_t *>(sideData->data);
90 const QTransform transform = displayMatrixToTransform(displayMatrix);
91 const VideoTransformationOpt result = qVideoTransformationFromMatrix(transform);
92 if (!result) {
93 qCWarning(qLcMediaDataHolder)
94 << "Video stream contains malformed display matrix" << transform;
95 return {};
96 }
97 return *result;
98}
99
100static bool colorTransferSupportsHdr(const AVStream *stream)
101{
102 if (!stream)
103 return false;
104
105 const AVCodecParameters *codecPar = stream->codecpar;
106 if (!codecPar)
107 return false;
108
109 const QVideoFrameFormat::ColorTransfer colorTransfer = fromAvColorTransfer(codecPar->color_trc);
110
111 // Assume that content is using HDR if the color transfer supports high
112 // dynamic range. The video may still not utilize the extended range,
113 // but we can't determine the actual range without decoding frames.
114 return colorTransfer == QVideoFrameFormat::ColorTransfer_ST2084
115 || colorTransfer == QVideoFrameFormat::ColorTransfer_STD_B67;
116}
117
119{
120 // TODO: Add QMediaMetaData::Mirrored and take from it and QMediaMetaData::Orientation:
121 // int orientation = m_metaData.value(QMediaMetaData::Orientation).toInt();
122 // return static_cast<QtVideo::Rotation>(orientation);
123
124 const int streamIndex = m_currentAVStreamIndex[QPlatformMediaPlayer::VideoStream];
125 if (streamIndex < 0)
126 return {};
127
128 return streamTransformation(m_context->streams[streamIndex]);
129}
130
132{
133 return m_context.get();
134}
135
136int MediaDataHolder::currentStreamIndex(QPlatformMediaPlayer::TrackType trackType) const
137{
138 return m_currentAVStreamIndex[trackType];
139}
140
170
171QPlatformMediaPlayer::TrackType MediaDataHolder::trackTypeFromMediaType(int mediaType)
172{
173 switch (mediaType) {
174 case AVMEDIA_TYPE_AUDIO:
175 return QPlatformMediaPlayer::AudioStream;
176 case AVMEDIA_TYPE_VIDEO:
177 return QPlatformMediaPlayer::VideoStream;
178 case AVMEDIA_TYPE_SUBTITLE:
179 return QPlatformMediaPlayer::SubtitleStream;
180 default:
181 return QPlatformMediaPlayer::NTrackTypes;
182 }
183}
184
185namespace {
186q23::expected<AVFormatContextUPtr, MediaDataHolder::ContextError>
187loadMedia(const QUrl &mediaUrl, QIODevice *stream, const QPlaybackOptions &playbackOptions,
188 const std::shared_ptr<ICancelToken> &cancelToken)
189{
190 using std::chrono::duration_cast;
191 using std::chrono::microseconds;
192 using std::chrono::milliseconds;
193
194 const QByteArray url = mediaUrl.toString(QUrl::PreferLocalFile).toUtf8();
195
196 AVFormatContextUPtr context{ avformat_alloc_context() };
197
198 if (stream) {
199 if (!stream->isOpen()) {
200 if (!stream->open(QIODevice::ReadOnly))
201 return q23::unexpected{
202 MediaDataHolder::ContextError{
203 QMediaPlayer::ResourceError,
204 QLatin1String("Could not open source device."),
205 },
206 };
207 }
208
209 auto seek = &seekQIODevice;
210
211 if (!stream->isSequential()) {
212 stream->seek(0);
213 } else {
214 context->ctx_flags |= AVFMTCTX_UNSEEKABLE;
215 seek = nullptr;
216 }
217
218 constexpr int bufferSize = 32768;
219 unsigned char *buffer = (unsigned char *)av_malloc(bufferSize);
220 context->pb = avio_alloc_context(buffer, bufferSize, false, stream, &readQIODevice, nullptr,
221 seek);
222 }
223
224 AVDictionaryHolder dict;
225 {
226 const milliseconds timeout = playbackOptions.networkTimeout();
227 av_dict_set_int(dict, "timeout", duration_cast<microseconds>(timeout).count(), 0);
228 qCDebug(qLcMediaDataHolder) << "Using custom network timeout:" << timeout;
229 }
230
231 {
232 const int probeSize = playbackOptions.probeSize();
233 if (probeSize != -1) {
234 constexpr int minProbeSizeFFmpeg = 32;
235 if (probeSize >= minProbeSizeFFmpeg) {
236 av_dict_set_int(dict, "probesize", probeSize, 0);
237 qCDebug(qLcMediaDataHolder) << "Using custom probesize" << probeSize;
238 }
239 else
240 qCWarning(qLcMediaDataHolder) << "Invalid probe size, using default";
241 }
242 }
243
244 const QByteArray protocolWhitelist = qgetenv("QT_FFMPEG_PROTOCOL_WHITELIST");
245 if (!protocolWhitelist.isNull())
246 av_dict_set(dict, "protocol_whitelist", protocolWhitelist.data(), 0);
247
248 if (playbackOptions.playbackIntent() == QPlaybackOptions::PlaybackIntent::LowLatencyStreaming) {
249 av_dict_set(dict, "fflags", "nobuffer", 0);
250 av_dict_set_int(dict, "flush_packets", 1, 0);
251 qCDebug(qLcMediaDataHolder) << "Enabled low latency streaming";
252 }
253
254 context->interrupt_callback.opaque = cancelToken.get();
255 context->interrupt_callback.callback = [](void *opaque) {
256 const auto *cancelToken = static_cast<const ICancelToken *>(opaque);
257 if (cancelToken && cancelToken->isCancelled())
258 return 1;
259 return 0;
260 };
261
262 int ret = 0;
263 {
264 AVFormatContext *contextRaw = context.release();
265 ret = avformat_open_input(&contextRaw, url.constData(), nullptr, dict);
266 context.reset(contextRaw);
267 }
268
269 if (ret < 0) {
270 auto code = QMediaPlayer::ResourceError;
271 if (ret == AVERROR(EACCES))
272 code = QMediaPlayer::AccessDeniedError;
273 else if (ret == AVERROR(EINVAL) || ret == AVERROR_INVALIDDATA)
274 code = QMediaPlayer::FormatError;
275
276 qCWarning(qLcMediaDataHolder)
277 << "Could not open media. FFmpeg error description:" << AVError(ret);
278
279 return q23::unexpected{
280 MediaDataHolder::ContextError{ code, QMediaPlayer::tr("Could not open file") },
281 };
282 }
283
284 ret = avformat_find_stream_info(context.get(), nullptr);
285 if (ret < 0) {
286 return q23::unexpected{
287 MediaDataHolder::ContextError{
288 QMediaPlayer::FormatError,
289 QMediaPlayer::tr("Could not find stream information for media file") },
290 };
291 }
292
293 if (qLcMediaDataHolder().isInfoEnabled())
294 av_dump_format(context.get(), 0, url.constData(), 0);
295
296
297 return context;
298}
299
300} // namespace
301
302MediaDataHolder::Maybe MediaDataHolder::create(const QUrl &url, QIODevice *stream,
303 const QPlaybackOptions &options,
304 const std::shared_ptr<ICancelToken> &cancelToken)
305{
306 q23::expected context = loadMedia(url, stream, options, cancelToken);
307 if (context) {
308 // MediaDataHolder is wrapped in a shared pointer to interop with signal/slot mechanism
309 return std::make_shared<MediaDataHolder>(
310 MediaDataHolder{ std::move(context.value()), cancelToken });
311 }
312 return q23::unexpected{ context.error() };
313}
314
// Takes ownership of an already-opened AVFormatContext and builds the
// per-track bookkeeping: the per-type stream maps, the requested/active
// stream indices, the overall duration, and the initial metadata.
// \a context must be non-null (asserted); \a cancelToken is retained for
// later cancellable operations.
MediaDataHolder::MediaDataHolder(AVFormatContextUPtr context,
                                 const std::shared_ptr<ICancelToken> &cancelToken)
    : m_cancelToken{ cancelToken }
{
    Q_ASSERT(context);

    m_context = std::move(context);
    // AVFMTCTX_UNSEEKABLE is set by loadMedia for sequential QIODevices.
    m_isSeekable = !(m_context->ctx_flags & AVFMTCTX_UNSEEKABLE);

    // Scan every stream in the container and sort usable ones into
    // m_streamMap, keyed by Qt track type.
    for (unsigned int i = 0; i < m_context->nb_streams; ++i) {

        const auto *stream = m_context->streams[i];
        const auto trackType = trackTypeFromMediaType(stream->codecpar->codec_type);

        // NTrackTypes marks media types with no Qt counterpart (data, attachments, ...).
        if (trackType == QPlatformMediaPlayer::NTrackTypes)
            continue;

        if (stream->disposition & AV_DISPOSITION_ATTACHED_PIC)
            continue; // Ignore attached picture streams because we treat them as metadata

        if (stream->time_base.num <= 0 || stream->time_base.den <= 0) {
            // An invalid stream timebase is not expected to be given by FFmpeg
            qCWarning(qLcMediaDataHolder) << "A stream for the track type" << trackType
                                          << "has an invalid timebase:" << stream->time_base;
            continue;
        }

        auto metaData = QFFmpegMetaData::fromAVMetaData(stream->metadata);
        const bool isDefault = stream->disposition & AV_DISPOSITION_DEFAULT;

        if (trackType != QPlatformMediaPlayer::SubtitleStream) {
            insertMediaData(metaData, trackType, stream);

            // The first stream flagged as default becomes the pre-selected
            // track for its type. The index recorded here is the position the
            // stream is about to take in m_streamMap (current size, appended below).
            if (isDefault && m_requestedStreams[trackType] < 0)
                m_requestedStreams[trackType] = m_streamMap[trackType].size();
        }

        if (auto duration = streamDuration(*stream)) {
            // The media duration is the longest duration over all streams.
            m_duration = qMax(m_duration, *duration);
            metaData.insert(QMediaMetaData::Duration, toUserDuration(*duration).get());
        }

        m_streamMap[trackType].append({ (int)i, isDefault, metaData });
    }

    // With some media files, streams may be lacking duration info. Let's
    // get it from ffmpeg's duration estimation instead.
    if (m_duration == TrackDuration(0) && m_context->duration > 0ll) {
        m_duration = toTrackDuration(AVContextDuration(m_context->duration));
    }

    // For audio and video (not subtitles), fall back to the first available
    // stream when no default was flagged, then resolve the requested map
    // position to the AVFormatContext stream index.
    for (auto trackType :
         { QPlatformMediaPlayer::VideoStream, QPlatformMediaPlayer::AudioStream }) {
        auto &requestedStream = m_requestedStreams[trackType];
        auto &streamMap = m_streamMap[trackType];

        if (requestedStream < 0 && !streamMap.empty())
            requestedStream = 0;

        if (requestedStream >= 0)
            m_currentAVStreamIndex[trackType] = streamMap[requestedStream].avStreamIndex;
    }

    updateMetaData();
}
380
381namespace {
382
383/*!
384 \internal
385
386 Attempt to find an attached picture from the context's streams.
387 This will find ID3v2 pictures on audio files, and also pictures
388 attached to videos.
389 */
390QImage getAttachedPicture(const AVFormatContext *context)
391{
392 if (!context)
393 return {};
394
395 for (unsigned int i = 0; i < context->nb_streams; ++i) {
396 const AVStream* stream = context->streams[i];
397 if (!stream || !(stream->disposition & AV_DISPOSITION_ATTACHED_PIC))
398 continue;
399
400 const AVPacket *compressedImage = &stream->attached_pic;
401 if (!compressedImage || !compressedImage->data || compressedImage->size <= 0)
402 continue;
403
404 // Feed raw compressed data to QImage::fromData, which will decompress it
405 // if it is a recognized format.
406 QImage image = QImage::fromData({ compressedImage->data, compressedImage->size });
407 if (!image.isNull())
408 return image;
409 }
410
411 return {};
412}
413
414} // namespace
415
416void MediaDataHolder::updateMetaData()
417{
418 m_metaData = {};
419
420 if (!m_context)
421 return;
422
423 m_metaData = QFFmpegMetaData::fromAVMetaData(m_context->metadata);
424 m_metaData.insert(QMediaMetaData::FileFormat,
425 QVariant::fromValue(QFFmpegMediaFormatInfo::fileFormatForAVInputFormat(
426 *m_context->iformat)));
427 m_metaData.insert(QMediaMetaData::Duration, toUserDuration(m_duration).get());
428
429 if (!m_cachedThumbnail.has_value())
430 m_cachedThumbnail = getAttachedPicture(m_context.get());
431
432 if (!m_cachedThumbnail->isNull())
433 m_metaData.insert(QMediaMetaData::ThumbnailImage, m_cachedThumbnail.value());
434
435 for (auto trackType :
436 { QPlatformMediaPlayer::AudioStream, QPlatformMediaPlayer::VideoStream }) {
437 const auto streamIndex = m_currentAVStreamIndex[trackType];
438 if (streamIndex >= 0)
439 insertMediaData(m_metaData, trackType, m_context->streams[streamIndex]);
440 }
441}
442
444{
445 if (!m_context)
446 return false;
447
449 streamNumber = -1;
451 return false;
454
456 qCDebug(qLcMediaDataHolder) << ">>>>> change track" << type << "from" << oldIndex << "to"
457 << avStreamIndex;
458
459 // TODO: maybe add additional verifications
461
463
464 return true;
465}
466
471
473 QPlatformMediaPlayer::TrackType trackType) const
474{
475 Q_ASSERT(trackType < QPlatformMediaPlayer::NTrackTypes);
476
477 return m_streamMap[trackType];
478}
479
480} // namespace QFFmpeg
481
482QT_END_NAMESPACE
int currentStreamIndex(QPlatformMediaPlayer::TrackType trackType) const
VideoTransformation transformation() const
const QList< StreamInfo > & streamInfo(QPlatformMediaPlayer::TrackType trackType) const
Definition qlist.h:80
static VideoTransformation streamTransformation(const AVStream *stream)
static bool colorTransferSupportsHdr(const AVStream *stream)
static void insertMediaData(QMediaMetaData &metaData, QPlatformMediaPlayer::TrackType trackType, const AVStream *stream)
static std::optional< TrackDuration > streamDuration(const AVStream &stream)
std::conditional_t< QT_FFMPEG_AVIO_WRITE_CONST, const uint8_t *, uint8_t * > AvioWriteBufferType
static QTransform displayMatrixToTransform(const int32_t *displayMatrix)
#define qCWarning(category,...)
#define qCDebug(category,...)
#define Q_STATIC_LOGGING_CATEGORY(name,...)
virtual bool isCancelled() const =0