Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
qffmpegmediadataholder.cpp
// Copyright (C) 2021 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include "playbackengine/qffmpegmediadataholder_p.h"

#include "qiodevice.h"
#include "qdatetime.h"

#include <QtMultimedia/qplaybackoptions.h>

#include <math.h>
#include <optional>

extern "C" {
#include "libavutil/display.h"
}

QT_BEGIN_NAMESPACE

Q_STATIC_LOGGING_CATEGORY(qLcMediaDataHolder, "qt.multimedia.ffmpeg.mediadataholder")

namespace QFFmpeg {

static std::optional<TrackDuration> streamDuration(const AVStream &stream)
{
    if (stream.duration > 0)
        return toTrackDuration(AVStreamDuration(stream.duration), &stream);

    // In some cases FFmpeg reports a negative duration, which is definitely invalid.
    // However, the correct duration may still be read from the metadata.

    if (stream.duration < 0 && stream.duration != AV_NOPTS_VALUE) {
        qCWarning(qLcMediaDataHolder) << "AVStream duration" << stream.duration
                                      << "is invalid. Taking it from the metadata";
    }
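
    // The "DURATION" tag is commonly written by Matroska/WebM muxers, e.g.
    // "00:04:03.123000000" (illustrative value). QTime::fromString() is
    // expected to parse such a value; the milliseconds since the start of the
    // day are then multiplied by 1000, suggesting TrackDuration counts
    // microseconds.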
    if (const auto duration = av_dict_get(stream.metadata, "DURATION", nullptr, 0)) {
        const auto time = QTime::fromString(QString::fromUtf8(duration->value));
        return TrackDuration(qint64(1000) * time.msecsSinceStartOfDay());
    }

    return {};
}

static QTransform displayMatrixToTransform(const int32_t *displayMatrix)
{
    // displayMatrix is stored as
    //
    //  . -- X axis
    //  |
    //  |      | a b u |
    //  Y      | c d v |
    //  axis   | x y w |
    //
    // where a, b, c, d, x, y are 16.16 fixed-point values,
    // and u, v, w are 2.30 fixed-point values.
    // Only a, b, c, d affect mirroring and rotation,
    // so it's enough to propagate them to QTransform.
    //
    // If we were interested in getting proper XY scales,
    // we would divide a, b, c, d by 2^16. A uniform scale doesn't
    // affect mirroring or rotation, so we don't bother.

    auto toRotateMirrorValue = [displayMatrix](int index) {
        // toRotateScaleValue would be:
        //     return displayMatrix[index] / qreal(1 << 16);
        return displayMatrix[index];
    };

    return QTransform(toRotateMirrorValue(0), toRotateMirrorValue(1),
                      toRotateMirrorValue(3), toRotateMirrorValue(4),
                      0, 0);
}
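
// Worked example (illustrative): av_display_rotation_set(matrix, 90) produces
// a = 0, b = 65536 (1.0 in 16.16 fixed point), c = -65536, d = 0, i.e. a
// 90-degree counterclockwise rotation in FFmpeg's convention. The function
// above turns that into QTransform(0, 65536, -65536, 0, 0, 0), from which
// qVideoTransformationFromMatrix derives rotation and mirroring; the common
// 2^16 scale factor cancels out, as explained in the comment above.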

static VideoTransformation streamTransformation(const AVStream *stream)
{
    Q_ASSERT(stream);

    using SideDataSize = decltype(AVPacketSideData::size);
    constexpr SideDataSize displayMatrixSize = sizeof(int32_t) * 9;
    const AVPacketSideData *sideData = streamSideData(stream, AV_PKT_DATA_DISPLAYMATRIX);
    if (!sideData || sideData->size < displayMatrixSize)
        return {};

    const auto displayMatrix = reinterpret_cast<const int32_t *>(sideData->data);
    const QTransform transform = displayMatrixToTransform(displayMatrix);
    const VideoTransformationOpt result = qVideoTransformationFromMatrix(transform);
    if (!result) {
        qCWarning(qLcMediaDataHolder)
                << "Video stream contains malformed display matrix" << transform;
        return {};
    }
    return *result;
}

static bool colorTransferSupportsHdr(const AVStream *stream)
{
    if (!stream)
        return false;

    const AVCodecParameters *codecPar = stream->codecpar;
    if (!codecPar)
        return false;

    const QVideoFrameFormat::ColorTransfer colorTransfer = fromAvColorTransfer(codecPar->color_trc);

    // Assume that content is using HDR if the color transfer supports high
    // dynamic range. The video may still not utilize the extended range,
    // but we can't determine the actual range without decoding frames.
    return colorTransfer == QVideoFrameFormat::ColorTransfer_ST2084
            || colorTransfer == QVideoFrameFormat::ColorTransfer_STD_B67;
}
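
// For reference: ColorTransfer_ST2084 corresponds to the SMPTE ST 2084
// perceptual quantizer (PQ) used by HDR10, and ColorTransfer_STD_B67 to the
// ARIB STD-B67 hybrid log-gamma (HLG) transfer function; both were designed
// for high-dynamic-range content.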

VideoTransformation MediaDataHolder::transformation() const
{
    // TODO: Add QMediaMetaData::Mirrored and derive the transformation from it
    // and QMediaMetaData::Orientation:
    //     int orientation = m_metaData.value(QMediaMetaData::Orientation).toInt();
    //     return static_cast<QtVideo::Rotation>(orientation);

    const int streamIndex = m_currentAVStreamIndex[QPlatformMediaPlayer::VideoStream];
    if (streamIndex < 0)
        return {};

    return streamTransformation(m_context->streams[streamIndex]);
}

AVFormatContext *MediaDataHolder::avContext()
{
    return m_context.get();
}

int MediaDataHolder::currentStreamIndex(QPlatformMediaPlayer::TrackType trackType) const
{
    return m_currentAVStreamIndex[trackType];
}

QPlatformMediaPlayer::TrackType MediaDataHolder::trackTypeFromMediaType(int mediaType)
{
    switch (mediaType) {
    case AVMEDIA_TYPE_AUDIO:
        return QPlatformMediaPlayer::AudioStream;
    case AVMEDIA_TYPE_VIDEO:
        return QPlatformMediaPlayer::VideoStream;
    case AVMEDIA_TYPE_SUBTITLE:
        return QPlatformMediaPlayer::SubtitleStream;
    default:
        return QPlatformMediaPlayer::NTrackTypes;
    }
}

namespace {
QMaybe<AVFormatContextUPtr, MediaDataHolder::ContextError>
loadMedia(const QUrl &mediaUrl, QIODevice *stream, const QPlaybackOptions &playbackOptions,
          const std::shared_ptr<ICancelToken> &cancelToken)
{
    using std::chrono::duration_cast;
    using std::chrono::microseconds;
    using std::chrono::milliseconds;

    const QByteArray url = mediaUrl.toString(QUrl::PreferLocalFile).toUtf8();

    AVFormatContextUPtr context{ avformat_alloc_context() };

    if (stream) {
        if (!stream->isOpen()) {
            if (!stream->open(QIODevice::ReadOnly))
                return { unexpect,
                         MediaDataHolder::ContextError{
                                 QMediaPlayer::ResourceError,
                                 QLatin1String("Could not open source device.") } };
        }

        auto seek = &seekQIODevice;

        if (!stream->isSequential()) {
            stream->seek(0);
        } else {
            context->ctx_flags |= AVFMTCTX_UNSEEKABLE;
            seek = nullptr;
        }

        constexpr int bufferSize = 32768;
        unsigned char *buffer = (unsigned char *)av_malloc(bufferSize);
        context->pb = avio_alloc_context(buffer, bufferSize, false, stream, &readQIODevice,
                                         nullptr, seek);
    }
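
    // Ownership note for the custom AVIO setup above: FFmpeg may internally
    // grow or replace the buffer passed to avio_alloc_context(), so the
    // AVIOContext and its final buffer must be freed together with the format
    // context; the AVFormatContextUPtr deleter used by this module is assumed
    // to take care of that.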

    AVDictionaryHolder dict;
    {
        const auto timeout = milliseconds(playbackOptions.networkTimeoutMs());
        av_dict_set_int(dict, "timeout", duration_cast<microseconds>(timeout).count(), 0);
        qCDebug(qLcMediaDataHolder) << "Using custom network timeout:" << timeout;
    }
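
    // FFmpeg's network protocols (e.g. tcp, http) interpret the "timeout"
    // AVOption in microseconds, hence the duration_cast above.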

    {
        const int probeSize = playbackOptions.probeSize();
        if (probeSize != -1) {
            constexpr int minProbeSizeFFmpeg = 32;
            if (probeSize >= minProbeSizeFFmpeg) {
                av_dict_set_int(dict, "probesize", probeSize, 0);
                qCDebug(qLcMediaDataHolder) << "Using custom probesize" << probeSize;
            } else {
                qCWarning(qLcMediaDataHolder) << "Invalid probe size, using default";
            }
        }
    }

    const QByteArray protocolWhitelist = qgetenv("QT_FFMPEG_PROTOCOL_WHITELIST");
    if (!protocolWhitelist.isNull())
        av_dict_set(dict, "protocol_whitelist", protocolWhitelist.data(), 0);
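
    // Illustrative (hypothetical) setting:
    //     QT_FFMPEG_PROTOCOL_WHITELIST=file,crypto,http,https,tcp,tls
    // restricts FFmpeg to exactly the listed protocols.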

    if (playbackOptions.playbackIntent() == QPlaybackOptions::LowLatencyStreaming) {
        av_dict_set(dict, "fflags", "nobuffer", 0);
        av_dict_set_int(dict, "flush_packets", 1, 0);
        qCDebug(qLcMediaDataHolder) << "Enabled low latency streaming";
    }

    context->interrupt_callback.opaque = cancelToken.get();
    context->interrupt_callback.callback = [](void *opaque) {
        const auto *cancelToken = static_cast<const ICancelToken *>(opaque);
        if (cancelToken && cancelToken->isCancelled())
            return 1;
        return 0;
    };
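
    // A non-zero return value makes FFmpeg abort the blocking operation in
    // progress and fail it with AVERROR_EXIT, which is what allows a
    // cancelled load to terminate promptly.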

    int ret = 0;
    {
        AVFormatContext *contextRaw = context.release();
        ret = avformat_open_input(&contextRaw, url.constData(), nullptr, dict);
        context.reset(contextRaw);
    }
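
    // The release()/reset() dance is needed because avformat_open_input()
    // frees the context and nulls the pointer on failure; ownership is handed
    // to the raw pointer for the duration of the call and reacquired
    // afterwards (on failure, reset() receives nullptr and the unique pointer
    // stays empty).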

    if (ret < 0) {
        auto code = QMediaPlayer::ResourceError;
        if (ret == AVERROR(EACCES))
            code = QMediaPlayer::AccessDeniedError;
        else if (ret == AVERROR(EINVAL) || ret == AVERROR_INVALIDDATA)
            code = QMediaPlayer::FormatError;

        qCWarning(qLcMediaDataHolder)
                << "Could not open media. FFmpeg error description:" << err2str(ret);

        return { unexpect,
                 MediaDataHolder::ContextError{ code, QMediaPlayer::tr("Could not open file") } };
    }

    ret = avformat_find_stream_info(context.get(), nullptr);
    if (ret < 0) {
        return { unexpect,
                 MediaDataHolder::ContextError{
                         QMediaPlayer::FormatError,
                         QMediaPlayer::tr("Could not find stream information for media file") } };
    }
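
    // av_dump_format() writes a human-readable summary of the container and
    // its streams to FFmpeg's log (stderr by default), so the work is done
    // only when info logging is enabled.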
    if (qLcMediaDataHolder().isInfoEnabled())
        av_dump_format(context.get(), 0, url.constData(), 0);

    return context;
}

} // namespace

MediaDataHolder::Maybe MediaDataHolder::create(const QUrl &url, QIODevice *stream,
                                               const QPlaybackOptions &options,
                                               const std::shared_ptr<ICancelToken> &cancelToken)
{
    QMaybe context = loadMedia(url, stream, options, cancelToken);
    if (context) {
        // MediaDataHolder is wrapped in a shared pointer to interop with the
        // signal/slot mechanism
        return QSharedPointer<MediaDataHolder>{ new MediaDataHolder{ std::move(context.value()),
                                                                     cancelToken } };
    }
    return { unexpect, context.error() };
}

MediaDataHolder::MediaDataHolder(AVFormatContextUPtr context,
                                 const std::shared_ptr<ICancelToken> &cancelToken)
    : m_cancelToken{ cancelToken }
{
    Q_ASSERT(context);

    m_context = std::move(context);
    m_isSeekable = !(m_context->ctx_flags & AVFMTCTX_UNSEEKABLE);

    for (unsigned int i = 0; i < m_context->nb_streams; ++i) {

        const auto *stream = m_context->streams[i];
        const auto trackType = trackTypeFromMediaType(stream->codecpar->codec_type);

        if (trackType == QPlatformMediaPlayer::NTrackTypes)
            continue;

        if (stream->disposition & AV_DISPOSITION_ATTACHED_PIC)
            continue; // Ignore attached picture streams because we treat them as metadata

        if (stream->time_base.num <= 0 || stream->time_base.den <= 0) {
            // FFmpeg is not expected to report an invalid stream time base
            qCWarning(qLcMediaDataHolder) << "A stream for the track type" << trackType
                                          << "has an invalid timebase:" << stream->time_base;
            continue;
        }

        auto metaData = QFFmpegMetaData::fromAVMetaData(stream->metadata);
        const bool isDefault = stream->disposition & AV_DISPOSITION_DEFAULT;

        if (trackType != QPlatformMediaPlayer::SubtitleStream) {
            insertMediaData(metaData, trackType, stream);

            if (isDefault && m_requestedStreams[trackType] < 0)
                m_requestedStreams[trackType] = m_streamMap[trackType].size();
        }

        if (auto duration = streamDuration(*stream)) {
            m_duration = qMax(m_duration, *duration);
            metaData.insert(QMediaMetaData::Duration, toUserDuration(*duration).get());
        }

        m_streamMap[trackType].append({ (int)i, isDefault, metaData });
    }

    // With some media files, streams may lack duration info. Fall back to
    // FFmpeg's duration estimate for the whole media instead.
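    // (AVFormatContext::duration is expressed in AV_TIME_BASE units, i.e.
    // microseconds; the AVContextDuration wrapper presumably accounts for
    // that.)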
    if (m_duration == TrackDuration(0) && m_context->duration > 0ll) {
        m_duration = toTrackDuration(AVContextDuration(m_context->duration));
    }

    for (auto trackType :
         { QPlatformMediaPlayer::VideoStream, QPlatformMediaPlayer::AudioStream }) {
        auto &requestedStream = m_requestedStreams[trackType];
        auto &streamMap = m_streamMap[trackType];

        if (requestedStream < 0 && !streamMap.empty())
            requestedStream = 0;

        if (requestedStream >= 0)
            m_currentAVStreamIndex[trackType] = streamMap[requestedStream].avStreamIndex;
    }

    updateMetaData();
}

namespace {

/*!
    \internal

    Attempt to find an attached picture from the context's streams.
    This will find ID3v2 pictures on audio files, and also pictures
    attached to videos.
 */
QImage getAttachedPicture(const AVFormatContext *context)
{
    if (!context)
        return {};

    for (unsigned int i = 0; i < context->nb_streams; ++i) {
        const AVStream *stream = context->streams[i];
        if (!stream || !(stream->disposition & AV_DISPOSITION_ATTACHED_PIC))
            continue;

        const AVPacket *compressedImage = &stream->attached_pic;
        if (!compressedImage || !compressedImage->data || compressedImage->size <= 0)
            continue;

        // Feed raw compressed data to QImage::fromData, which will decompress it
        // if it is a recognized format.
        QImage image = QImage::fromData({ compressedImage->data, compressedImage->size });
        if (!image.isNull())
            return image;
    }

    return {};
}

} // namespace

void MediaDataHolder::updateMetaData()
{
    m_metaData = {};

    if (!m_context)
        return;

    m_metaData = QFFmpegMetaData::fromAVMetaData(m_context->metadata);
    m_metaData.insert(QMediaMetaData::FileFormat,
                      QVariant::fromValue(QFFmpegMediaFormatInfo::fileFormatForAVInputFormat(
                              *m_context->iformat)));
    m_metaData.insert(QMediaMetaData::Duration, toUserDuration(m_duration).get());

    if (!m_cachedThumbnail.has_value())
        m_cachedThumbnail = getAttachedPicture(m_context.get());

    if (!m_cachedThumbnail->isNull())
        m_metaData.insert(QMediaMetaData::ThumbnailImage, m_cachedThumbnail.value());

    for (auto trackType :
         { QPlatformMediaPlayer::AudioStream, QPlatformMediaPlayer::VideoStream }) {
        const auto streamIndex = m_currentAVStreamIndex[trackType];
        if (streamIndex >= 0)
            insertMediaData(m_metaData, trackType, m_context->streams[streamIndex]);
    }
}

bool MediaDataHolder::setActiveTrack(QPlatformMediaPlayer::TrackType type, int streamNumber)
{
    if (!m_context)
        return false;

    if (streamNumber < 0 || streamNumber >= m_streamMap[type].size())
        streamNumber = -1;
    if (m_requestedStreams[type] == streamNumber)
        return false;
    m_requestedStreams[type] = streamNumber;
    const int avStreamIndex = m_streamMap[type].value(streamNumber).avStreamIndex;

    const int oldIndex = m_currentAVStreamIndex[type];
    qCDebug(qLcMediaDataHolder) << ">>>>> change track" << type << "from" << oldIndex << "to"
                                << avStreamIndex;

    // TODO: maybe add additional verifications
    m_currentAVStreamIndex[type] = avStreamIndex;

    updateMetaData();

    return true;
}

const QList<MediaDataHolder::StreamInfo> &MediaDataHolder::streamInfo(
        QPlatformMediaPlayer::TrackType trackType) const
{
    Q_ASSERT(trackType < QPlatformMediaPlayer::NTrackTypes);

    return m_streamMap[trackType];
}

} // namespace QFFmpeg

QT_END_NAMESPACE