Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
qffmpegmediadataholder.cpp
Go to the documentation of this file.
1// Copyright (C) 2021 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
4#include "playbackengine/qffmpegmediadataholder_p.h"
5
9#include "qiodevice.h"
10#include "qdatetime.h"
12
13#include <QtMultimedia/qplaybackoptions.h>
14#include <QtMultimedia/private/qmediametadata_p.h>
15
16#include <math.h>
17#include <optional>
18
19extern "C" {
20#include "libavutil/display.h"
21}
22
23QT_BEGIN_NAMESPACE
24
25Q_STATIC_LOGGING_CATEGORY(qLcMediaDataHolder, "qt.multimedia.ffmpeg.mediadataholder")
26
27namespace QFFmpeg {
28
29static std::optional<TrackDuration> streamDuration(const AVStream &stream)
30{
31 if (stream.duration > 0)
32 return toTrackDuration(AVStreamDuration(stream.duration), &stream);
33
34 // In some cases ffmpeg reports negative duration that is definitely invalid.
35 // However, the correct duration may be read from the metadata.
36
37 if (stream.duration < 0 && stream.duration != AV_NOPTS_VALUE) {
38 qCWarning(qLcMediaDataHolder) << "AVStream duration" << stream.duration
39 << "is invalid. Taking it from the metadata";
40 }
41
42 if (const auto duration = av_dict_get(stream.metadata, "DURATION", nullptr, 0)) {
43 const auto time = QTime::fromString(QString::fromUtf8(duration->value));
44 return TrackDuration(qint64(1000) * time.msecsSinceStartOfDay());
45 }
46
47 return {};
48}
49
50static QTransform displayMatrixToTransform(const int32_t *displayMatrix)
51{
52 // displayMatrix is stored as
53 //
54 // . -- X axis
55 // |
56 // | | a b u |
57 // Y | c d v |
58 // axis | x y w |
59 //
60 // where a, b, c, d, x, y are 16.16 fixed-point values,
61 // and u, v, w are 30.2 point values.
62 // Only a, b, c, d impacts on mirroring and rotation,
63 // so it's enough to propagate them to QTransform.
64 //
65 // If we were interested in getting proper XY scales,
66 // we would divide a,b,c,d by 2^16. The whole scale doesn't
67 // impact mirroring and rotation, so we don't do so.
68
69 auto toRotateMirrorValue = [displayMatrix](int index) {
70 // toRotateScaleValue would be:
71 // return displayMatrix[index] / qreal(1 << 16);
72 return displayMatrix[index];
73 };
74
75 return QTransform(toRotateMirrorValue(0), toRotateMirrorValue(1),
76 toRotateMirrorValue(3), toRotateMirrorValue(4),
77 0, 0);
78}
79
80static VideoTransformation streamTransformation(const AVStream *stream)
81{
82 Q_ASSERT(stream);
83
84 using SideDataSize = decltype(AVPacketSideData::size);
85 constexpr SideDataSize displayMatrixSize = sizeof(int32_t) * 9;
86 const AVPacketSideData *sideData = streamSideData(stream, AV_PKT_DATA_DISPLAYMATRIX);
87 if (!sideData || sideData->size < displayMatrixSize)
88 return {};
89
90 const auto displayMatrix = reinterpret_cast<const int32_t *>(sideData->data);
91 const QTransform transform = displayMatrixToTransform(displayMatrix);
92 const VideoTransformationOpt result = qVideoTransformationFromMatrix(transform);
93 if (!result) {
94 qCWarning(qLcMediaDataHolder)
95 << "Video stream contains malformed display matrix" << transform;
96 return {};
97 }
98 return *result;
99}
100
101static bool colorTransferSupportsHdr(const AVStream *stream)
102{
103 if (!stream)
104 return false;
105
106 const AVCodecParameters *codecPar = stream->codecpar;
107 if (!codecPar)
108 return false;
109
110 const QVideoFrameFormat::ColorTransfer colorTransfer = fromAvColorTransfer(codecPar->color_trc);
111
112 // Assume that content is using HDR if the color transfer supports high
113 // dynamic range. The video may still not utilize the extended range,
114 // but we can't determine the actual range without decoding frames.
115 return colorTransfer == QVideoFrameFormat::ColorTransfer_ST2084
116 || colorTransfer == QVideoFrameFormat::ColorTransfer_STD_B67;
117}
118
120{
121 // TODO: Add QMediaMetaData::Mirrored and take from it and QMediaMetaData::Orientation:
122 // int orientation = m_metaData.value(QMediaMetaData::Orientation).toInt();
123 // return static_cast<QtVideo::Rotation>(orientation);
124
125 const int streamIndex = m_currentAVStreamIndex[QPlatformMediaPlayer::VideoStream];
126 if (streamIndex < 0)
127 return {};
128
129 return streamTransformation(m_context->streams[streamIndex]);
130}
131
133{
134 return m_context.get();
135}
136
// Returns the AVFormatContext stream index currently selected for the given
// track type; a negative value means no stream of that type is active.
int MediaDataHolder::currentStreamIndex(QPlatformMediaPlayer::TrackType trackType) const
{
    return m_currentAVStreamIndex[trackType];
}
141
171
172QPlatformMediaPlayer::TrackType MediaDataHolder::trackTypeFromMediaType(int mediaType)
173{
174 switch (mediaType) {
175 case AVMEDIA_TYPE_AUDIO:
176 return QPlatformMediaPlayer::AudioStream;
177 case AVMEDIA_TYPE_VIDEO:
178 return QPlatformMediaPlayer::VideoStream;
179 case AVMEDIA_TYPE_SUBTITLE:
180 return QPlatformMediaPlayer::SubtitleStream;
181 default:
182 return QPlatformMediaPlayer::NTrackTypes;
183 }
184}
185
namespace {

// Opens and probes the media source, producing a demux-ready AVFormatContext
// or a MediaDataHolder::ContextError describing the failure.
//
// \a mediaUrl        locates the media (converted to a local path when possible)
// \a stream          optional QIODevice to read from instead of the URL target;
//                    when set, a custom AVIO context backed by the device is used
// \a playbackOptions tuning knobs (network timeout, probe size, latency intent)
//                    translated into FFmpeg demuxer options
// \a cancelToken     polled by FFmpeg's interrupt callback so a blocking open
//                    can be aborted
q23::expected<AVFormatContextUPtr, MediaDataHolder::ContextError>
loadMedia(const QUrl &mediaUrl, QIODevice *stream, const QPlaybackOptions &playbackOptions,
          const std::shared_ptr<ICancelToken> &cancelToken)
{
    using std::chrono::duration_cast;
    using std::chrono::microseconds;
    using std::chrono::milliseconds;

    const QByteArray url = mediaUrl.toString(QUrl::PreferLocalFile).toUtf8();

    AVFormatContextUPtr context{ avformat_alloc_context() };

    if (stream) {
        if (!stream->isOpen()) {
            if (!stream->open(QIODevice::ReadOnly))
                return q23::unexpected{
                    MediaDataHolder::ContextError{
                        QMediaPlayer::ResourceError,
                        QLatin1String("Could not open source device."),
                    },
                };
        }

        // Seeking is only possible on random-access devices; sequential
        // devices are flagged unseekable so FFmpeg does not attempt it.
        auto seek = &seekQIODevice;

        if (!stream->isSequential()) {
            stream->seek(0);
        } else {
            context->ctx_flags |= AVFMTCTX_UNSEEKABLE;
            seek = nullptr;
        }

        // NOTE(review): ownership of `buffer` passes to the AVIO context;
        // confirm that AVFormatContextUPtr's deleter also frees context->pb,
        // otherwise this custom AVIO context would leak.
        constexpr int bufferSize = 32768;
        unsigned char *buffer = (unsigned char *)av_malloc(bufferSize);
        context->pb = avio_alloc_context(buffer, bufferSize, false, stream, &readQIODevice, nullptr,
                                         seek);
    }

    AVDictionaryHolder dict;
    using RtmpProtocols = std::set<std::basic_string_view<char16_t>, std::less<>>;

    static const RtmpProtocols rtmpProtocols{
        u"rtmp", u"rtmpe", u"rtmps", u"rtmpt", u"rtmpse", u"rtmpte",
    };

    // for rtmp streams, the `timout` parameter implies acting as a server:
    // https://ffmpeg.org/ffmpeg-protocols.html#rtmp
    // This is not the semantics of QPlaybackOptions::networkTimeout, and will cause failures when
    // opening streams
    const bool setNetworkTimeout = rtmpProtocols.find(mediaUrl.scheme()) == rtmpProtocols.end();

    if (setNetworkTimeout) {
        const milliseconds timeout = playbackOptions.networkTimeout();
        // FFmpeg's "timeout" option is expressed in microseconds.
        av_dict_set_int(dict, "timeout", duration_cast<microseconds>(timeout).count(), 0);
        qCDebug(qLcMediaDataHolder) << "Using custom network timeout:" << timeout;
    }

    {
        const int probeSize = playbackOptions.probeSize();
        if (probeSize != -1) { // -1 means "use FFmpeg's default"
            constexpr int minProbeSizeFFmpeg = 32;
            if (probeSize >= minProbeSizeFFmpeg) {
                av_dict_set_int(dict, "probesize", probeSize, 0);
                qCDebug(qLcMediaDataHolder) << "Using custom probesize" << probeSize;
            } else
                qCWarning(qLcMediaDataHolder) << "Invalid probe size, using default";
        }
    }

    // Allow overriding the allowed protocol set from the environment.
    const QByteArray protocolWhitelist = qgetenv("QT_FFMPEG_PROTOCOL_WHITELIST");
    if (!protocolWhitelist.isNull())
        av_dict_set(dict, "protocol_whitelist", protocolWhitelist.data(), 0);

    if (playbackOptions.playbackIntent() == QPlaybackOptions::PlaybackIntent::LowLatencyStreaming) {
        av_dict_set(dict, "fflags", "nobuffer", 0);
        av_dict_set_int(dict, "flush_packets", 1, 0);
        qCDebug(qLcMediaDataHolder) << "Enabled low latency streaming";
    }

    // QTBUG-145590: for hls streams, we want to disable http persistent connections to allow FFmpeg
    // (before FFmpeg 8?) to mix raw and encrypted streams
    // compare https://trac.ffmpeg.org/ticket/10599
    if (avformat_version() < AV_VERSION_INT(62, 12, 100))
        av_dict_set_int(dict, "http_persistent", 0, 0);

    // FFmpeg polls this callback during blocking operations; returning 1
    // aborts the current open/probe when the token has been cancelled.
    context->interrupt_callback.opaque = cancelToken.get();
    context->interrupt_callback.callback = [](void *opaque) {
        const auto *cancelToken = static_cast<const ICancelToken *>(opaque);
        if (cancelToken && cancelToken->isCancelled())
            return 1;
        return 0;
    };

    int ret = 0;
    {
        // avformat_open_input frees/replaces the context on failure, so the
        // unique_ptr releases ownership around the call and re-adopts whatever
        // FFmpeg leaves behind.
        AVFormatContext *contextRaw = context.release();
        ret = avformat_open_input(&contextRaw, url.constData(), nullptr, dict);
        context.reset(contextRaw);
    }

    if (ret < 0) {
        // Map the most common FFmpeg failures to dedicated QMediaPlayer codes.
        auto code = QMediaPlayer::ResourceError;
        if (ret == AVERROR(EACCES))
            code = QMediaPlayer::AccessDeniedError;
        else if (ret == AVERROR(EINVAL) || ret == AVERROR_INVALIDDATA)
            code = QMediaPlayer::FormatError;

        qCWarning(qLcMediaDataHolder)
                << "Could not open media. FFmpeg error description:" << AVError(ret);

        return q23::unexpected{
            MediaDataHolder::ContextError{ code, QMediaPlayer::tr("Could not open file") },
        };
    }

    ret = avformat_find_stream_info(context.get(), nullptr);
    if (ret < 0) {
        return q23::unexpected{
            MediaDataHolder::ContextError{
                    QMediaPlayer::FormatError,
                    QMediaPlayer::tr("Could not find stream information for media file") },
        };
    }

    // Dump the demuxer layout to the log when info logging is enabled.
    if (qLcMediaDataHolder().isInfoEnabled())
        av_dump_format(context.get(), 0, url.constData(), 0);


    return context;
}

} // namespace
319
320MediaDataHolder::Maybe MediaDataHolder::create(const QUrl &url, QIODevice *stream,
321 const QPlaybackOptions &options,
322 const std::shared_ptr<ICancelToken> &cancelToken)
323{
324 q23::expected context = loadMedia(url, stream, options, cancelToken);
325 if (context) {
326 // MediaDataHolder is wrapped in a shared pointer to interop with signal/slot mechanism
327 return std::make_shared<MediaDataHolder>(
328 MediaDataHolder{ std::move(context.value()), cancelToken });
329 }
330 return q23::unexpected{ context.error() };
331}
332
// Takes ownership of the probed AVFormatContext and builds the track model:
// per-type stream lists, default track selection, overall duration, and the
// aggregated metadata.
MediaDataHolder::MediaDataHolder(AVFormatContextUPtr context,
                                 const std::shared_ptr<ICancelToken> &cancelToken)
    : m_cancelToken{ cancelToken }
{
    Q_ASSERT(context);

    m_context = std::move(context);
    m_isSeekable = !(m_context->ctx_flags & AVFMTCTX_UNSEEKABLE);

    for (unsigned int i = 0; i < m_context->nb_streams; ++i) {

        const auto *stream = m_context->streams[i];
        const auto trackType = trackTypeFromMediaType(stream->codecpar->codec_type);

        // Skip media types we do not expose as tracks (data, attachments, ...).
        if (trackType == QPlatformMediaPlayer::NTrackTypes)
            continue;

        if (stream->disposition & AV_DISPOSITION_ATTACHED_PIC)
            continue; // Ignore attached picture streams because we treat them as metadata

        if (stream->time_base.num <= 0 || stream->time_base.den <= 0) {
            // An invalid stream timebase is not expected to be given by FFmpeg
            qCWarning(qLcMediaDataHolder) << "A stream for the track type" << trackType
                                          << "has an invalid timebase:" << stream->time_base;
            continue;
        }

        auto metaData = QFFmpegMetaData::fromAVMetaData(stream->metadata);
        const bool isDefault = stream->disposition & AV_DISPOSITION_DEFAULT;

        if (trackType != QPlatformMediaPlayer::SubtitleStream) {
            insertMediaData(metaData, trackType, stream);

            // Remember the first default-flagged stream per track type; size()
            // is the index this stream will occupy once appended below.
            if (isDefault && m_requestedStreams[trackType] < 0)
                m_requestedStreams[trackType] = m_streamMap[trackType].size();
        }

        // Track the longest per-stream duration and expose it in the
        // stream's own metadata as well.
        if (auto duration = streamDuration(*stream)) {
            m_duration = qMax(m_duration, *duration);
            metaData.insert(QMediaMetaData::Duration, toUserDuration(*duration).get());
        }

        m_streamMap[trackType].append({ (int)i, isDefault, metaData });
    }

    // With some media files, streams may be lacking duration info. Let's
    // get it from ffmpeg's duration estimation instead.
    if (m_duration == TrackDuration(0) && m_context->duration > 0ll) {
        m_duration = toTrackDuration(AVContextDuration(m_context->duration));
    }

    // Fall back to the first available stream when no default was flagged,
    // then resolve the requested entry to a concrete AVStream index.
    for (auto trackType :
         { QPlatformMediaPlayer::VideoStream, QPlatformMediaPlayer::AudioStream }) {
        auto &requestedStream = m_requestedStreams[trackType];
        auto &streamMap = m_streamMap[trackType];

        if (requestedStream < 0 && !streamMap.empty())
            requestedStream = 0;

        if (requestedStream >= 0)
            m_currentAVStreamIndex[trackType] = streamMap[requestedStream].avStreamIndex;
    }

    updateMetaData();
}
398
399namespace {
400
401/*!
402 \internal
403
404 Attempt to find an attached picture from the context's streams.
405 This will find ID3v2 pictures on audio files, and also pictures
406 attached to videos.
407 */
408QImage getAttachedPicture(const AVFormatContext *context)
409{
410 if (!context)
411 return {};
412
413 for (unsigned int i = 0; i < context->nb_streams; ++i) {
414 const AVStream* stream = context->streams[i];
415 if (!stream || !(stream->disposition & AV_DISPOSITION_ATTACHED_PIC))
416 continue;
417
418 const AVPacket *compressedImage = &stream->attached_pic;
419 if (!compressedImage || !compressedImage->data || compressedImage->size <= 0)
420 continue;
421
422 // Feed raw compressed data to QImage::fromData, which will decompress it
423 // if it is a recognized format.
424 QImage image = QImage::fromData({ compressedImage->data, compressedImage->size });
425 if (!image.isNull())
426 return image;
427 }
428
429 return {};
430}
431
432} // namespace
433
// Rebuilds m_metaData from the demuxer context: container-level tags, file
// format, overall duration, cover art, and the per-stream data of the
// currently selected audio/video tracks.
void MediaDataHolder::updateMetaData()
{
    m_metaData = {};

    if (!m_context)
        return;

    m_metaData = QFFmpegMetaData::fromAVMetaData(m_context->metadata);
    m_metaData.insert(QMediaMetaData::FileFormat,
                      QVariant::fromValue(QFFmpegMediaFormatInfo::fileFormatForAVInputFormat(
                              *m_context->iformat)));
    m_metaData.insert(QMediaMetaData::Duration, toUserDuration(m_duration).get());

    // The attached picture is decoded at most once and cached for
    // subsequent metadata refreshes.
    if (!m_cachedThumbnail.has_value())
        m_cachedThumbnail = getAttachedPicture(m_context.get());

    QtMultimediaPrivate::setCoverArtImage(m_metaData, *m_cachedThumbnail);

    for (auto trackType :
         { QPlatformMediaPlayer::AudioStream, QPlatformMediaPlayer::VideoStream }) {
        const auto streamIndex = m_currentAVStreamIndex[trackType];
        if (streamIndex >= 0)
            insertMediaData(m_metaData, trackType, m_context->streams[streamIndex]);
    }
}
459
461{
462 if (!m_context)
463 return false;
464
466 streamNumber = -1;
468 return false;
471
473 qCDebug(qLcMediaDataHolder) << ">>>>> change track" << type << "from" << oldIndex << "to"
474 << avStreamIndex;
475
476 // TODO: maybe add additional verifications
478
480
481 return true;
482}
483
488
490 QPlatformMediaPlayer::TrackType trackType) const
491{
492 Q_ASSERT(trackType < QPlatformMediaPlayer::NTrackTypes);
493
494 return m_streamMap[trackType];
495}
496
497} // namespace QFFmpeg
498
499QT_END_NAMESPACE
int currentStreamIndex(QPlatformMediaPlayer::TrackType trackType) const
VideoTransformation transformation() const
const QList< StreamInfo > & streamInfo(QPlatformMediaPlayer::TrackType trackType) const
Definition qlist.h:81
static VideoTransformation streamTransformation(const AVStream *stream)
static bool colorTransferSupportsHdr(const AVStream *stream)
static void insertMediaData(QMediaMetaData &metaData, QPlatformMediaPlayer::TrackType trackType, const AVStream *stream)
static std::optional< TrackDuration > streamDuration(const AVStream &stream)
std::conditional_t< QT_FFMPEG_AVIO_WRITE_CONST, const uint8_t *, uint8_t * > AvioWriteBufferType
static QTransform displayMatrixToTransform(const int32_t *displayMatrix)
#define qCWarning(category,...)
#define qCDebug(category,...)
#define Q_STATIC_LOGGING_CATEGORY(name,...)
virtual bool isCancelled() const =0