Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
qffmpegmediadataholder.cpp
Go to the documentation of this file.
1// Copyright (C) 2021 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
4#include "playbackengine/qffmpegmediadataholder_p.h"
5
9#include "qiodevice.h"
10#include "qdatetime.h"
12
13#include <QtMultimedia/qplaybackoptions.h>
14#include <QtMultimedia/private/qmediametadata_p.h>
15
16#include <math.h>
17#include <optional>
18
19#include <QtCore/private/qminimalflatset_p.h>
20
21extern "C" {
22#include "libavutil/display.h"
23}
24
25QT_BEGIN_NAMESPACE
26
27Q_STATIC_LOGGING_CATEGORY(qLcMediaDataHolder, "qt.multimedia.ffmpeg.mediadataholder")
28
29namespace QFFmpeg {
30
31static std::optional<TrackDuration> streamDuration(const AVStream &stream)
32{
33 if (stream.duration > 0)
34 return toTrackDuration(AVStreamDuration(stream.duration), &stream);
35
36 // In some cases ffmpeg reports negative duration that is definitely invalid.
37 // However, the correct duration may be read from the metadata.
38
39 if (stream.duration < 0 && stream.duration != AV_NOPTS_VALUE) {
40 qCWarning(qLcMediaDataHolder) << "AVStream duration" << stream.duration
41 << "is invalid. Taking it from the metadata";
42 }
43
44 if (const auto duration = av_dict_get(stream.metadata, "DURATION", nullptr, 0)) {
45 const auto time = QTime::fromString(QString::fromUtf8(duration->value));
46 return TrackDuration(qint64(1000) * time.msecsSinceStartOfDay());
47 }
48
49 return {};
50}
51
52static QTransform displayMatrixToTransform(const int32_t *displayMatrix)
53{
54 // displayMatrix is stored as
55 //
56 // . -- X axis
57 // |
58 // | | a b u |
59 // Y | c d v |
60 // axis | x y w |
61 //
62 // where a, b, c, d, x, y are 16.16 fixed-point values,
63 // and u, v, w are 30.2 point values.
64 // Only a, b, c, d impacts on mirroring and rotation,
65 // so it's enough to propagate them to QTransform.
66 //
67 // If we were interested in getting proper XY scales,
68 // we would divide a,b,c,d by 2^16. The whole scale doesn't
69 // impact mirroring and rotation, so we don't do so.
70
71 auto toRotateMirrorValue = [displayMatrix](int index) {
72 // toRotateScaleValue would be:
73 // return displayMatrix[index] / qreal(1 << 16);
74 return displayMatrix[index];
75 };
76
77 return QTransform(toRotateMirrorValue(0), toRotateMirrorValue(1),
78 toRotateMirrorValue(3), toRotateMirrorValue(4),
79 0, 0);
80}
81
82static VideoTransformation streamTransformation(const AVStream *stream)
83{
84 Q_ASSERT(stream);
85
86 using SideDataSize = decltype(AVPacketSideData::size);
87 constexpr SideDataSize displayMatrixSize = sizeof(int32_t) * 9;
88 const AVPacketSideData *sideData = streamSideData(stream, AV_PKT_DATA_DISPLAYMATRIX);
89 if (!sideData || sideData->size < displayMatrixSize)
90 return {};
91
92 const auto displayMatrix = reinterpret_cast<const int32_t *>(sideData->data);
93 const QTransform transform = displayMatrixToTransform(displayMatrix);
94 const VideoTransformationOpt result = qVideoTransformationFromMatrix(transform);
95 if (!result) {
96 qCWarning(qLcMediaDataHolder)
97 << "Video stream contains malformed display matrix" << transform;
98 return {};
99 }
100 return *result;
101}
102
103static bool colorTransferSupportsHdr(const AVStream *stream)
104{
105 if (!stream)
106 return false;
107
108 const AVCodecParameters *codecPar = stream->codecpar;
109 if (!codecPar)
110 return false;
111
112 const QVideoFrameFormat::ColorTransfer colorTransfer = fromAvColorTransfer(codecPar->color_trc);
113
114 // Assume that content is using HDR if the color transfer supports high
115 // dynamic range. The video may still not utilize the extended range,
116 // but we can't determine the actual range without decoding frames.
117 return colorTransfer == QVideoFrameFormat::ColorTransfer_ST2084
118 || colorTransfer == QVideoFrameFormat::ColorTransfer_STD_B67;
119}
120
122{
123 // TODO: Add QMediaMetaData::Mirrored and take from it and QMediaMetaData::Orientation:
124 // int orientation = m_metaData.value(QMediaMetaData::Orientation).toInt();
125 // return static_cast<QtVideo::Rotation>(orientation);
126
127 const int streamIndex = m_currentAVStreamIndex[QPlatformMediaPlayer::VideoStream];
128 if (streamIndex < 0)
129 return {};
130
131 return streamTransformation(m_context->streams[streamIndex]);
132}
133
135{
136 return m_context.get();
137}
138
// Returns the AVFormatContext stream index currently selected for the given
// track type, or a negative value when no stream of that type is active.
int MediaDataHolder::currentStreamIndex(QPlatformMediaPlayer::TrackType trackType) const
{
    return m_currentAVStreamIndex[trackType];
}
143
173
174QPlatformMediaPlayer::TrackType MediaDataHolder::trackTypeFromMediaType(int mediaType)
175{
176 switch (mediaType) {
177 case AVMEDIA_TYPE_AUDIO:
178 return QPlatformMediaPlayer::AudioStream;
179 case AVMEDIA_TYPE_VIDEO:
180 return QPlatformMediaPlayer::VideoStream;
181 case AVMEDIA_TYPE_SUBTITLE:
182 return QPlatformMediaPlayer::SubtitleStream;
183 default:
184 return QPlatformMediaPlayer::NTrackTypes;
185 }
186}
187
188namespace {
/*!
    \internal

    Opens the media given by \a mediaUrl (or the custom IO device \a stream,
    when non-null) and returns a fully probed AVFormatContext, or a
    ContextError describing why opening failed.

    \a playbackOptions tunes network timeout, probe size, and low-latency
    behavior; \a cancelToken is polled by FFmpeg's interrupt callback so a
    blocking open can be aborted.
*/
q23::expected<AVFormatContextUPtr, MediaDataHolder::ContextError>
loadMedia(const QUrl &mediaUrl, QIODevice *stream, const QPlaybackOptions &playbackOptions,
          const std::shared_ptr<ICancelToken> &cancelToken)
{
    using std::chrono::duration_cast;
    using std::chrono::microseconds;
    using std::chrono::milliseconds;

    const QByteArray url = mediaUrl.toString(QUrl::PreferLocalFile).toUtf8();

    AVFormatContextUPtr context{ avformat_alloc_context() };

    // When a custom QIODevice is supplied, route all FFmpeg IO through it
    // via a custom AVIOContext instead of letting FFmpeg open the URL.
    if (stream) {
        if (!stream->isOpen()) {
            if (!stream->open(QIODevice::ReadOnly))
                return q23::unexpected{
                    MediaDataHolder::ContextError{
                            QMediaPlayer::ResourceError,
                            QLatin1String("Could not open source device."),
                    },
                };
        }

        auto seek = &seekQIODevice;

        // Sequential devices cannot seek; tell FFmpeg so it does not try.
        if (!stream->isSequential()) {
            stream->seek(0);
        } else {
            context->ctx_flags |= AVFMTCTX_UNSEEKABLE;
            seek = nullptr;
        }

        // Buffer ownership passes to the AVIOContext; FFmpeg frees it.
        constexpr int bufferSize = 32768;
        unsigned char *buffer = (unsigned char *)av_malloc(bufferSize);
        context->pb = avio_alloc_context(buffer, bufferSize, false, stream, &readQIODevice, nullptr,
                                         seek);
    }

    // Options below must be collected into the dictionary before
    // avformat_open_input() is called.
    AVDictionaryHolder dict;
    using RtmpProtocols =
            QMinimalVarLengthFlatSet<std::basic_string_view<char16_t>, 6, std::less<>>;

    static const RtmpProtocols rtmpProtocols{
        u"rtmp", u"rtmpe", u"rtmps", u"rtmpt", u"rtmpse", u"rtmpte",
    };

    // for rtmp streams, the `timeout` parameter implies acting as a server:
    // https://ffmpeg.org/ffmpeg-protocols.html#rtmp
    // This is not the semantics of QPlaybackOptions::networkTimeout, and will cause failures when
    // opening streams
    const bool setNetworkTimeout = !rtmpProtocols.contains(mediaUrl.scheme());

    if (setNetworkTimeout) {
        const milliseconds timeout = playbackOptions.networkTimeout();
        av_dict_set_int(dict, "timeout", duration_cast<microseconds>(timeout).count(), 0);
        qCDebug(qLcMediaDataHolder) << "Using custom network timeout:" << timeout;
    }

    {
        // probesize below FFmpeg's minimum of 32 bytes is rejected by FFmpeg.
        const int probeSize = playbackOptions.probeSize();
        if (probeSize != -1) {
            constexpr int minProbeSizeFFmpeg = 32;
            if (probeSize >= minProbeSizeFFmpeg) {
                av_dict_set_int(dict, "probesize", probeSize, 0);
                qCDebug(qLcMediaDataHolder) << "Using custom probesize" << probeSize;
            } else
                qCWarning(qLcMediaDataHolder) << "Invalid probe size, using default";
        }
    }

    // Allow users to extend the allowed protocol set via environment.
    const QByteArray protocolWhitelist = qgetenv("QT_FFMPEG_PROTOCOL_WHITELIST");
    if (!protocolWhitelist.isNull())
        av_dict_set(dict, "protocol_whitelist", protocolWhitelist.data(), 0);

    if (playbackOptions.playbackIntent() == QPlaybackOptions::PlaybackIntent::LowLatencyStreaming) {
        av_dict_set(dict, "fflags", "nobuffer", 0);
        av_dict_set_int(dict, "flush_packets", 1, 0);
        qCDebug(qLcMediaDataHolder) << "Enabled low latency streaming";
    }

    // QTBUG-145590: for hls streams, we want to disable http persistent connections to allow FFmpeg
    // (before FFmpeg 8?) to mix raw and encrypted streams
    // compare https://trac.ffmpeg.org/ticket/10599
    if (avformat_version() < AV_VERSION_INT(62, 12, 100))
        av_dict_set_int(dict, "http_persistent", 0, 0);

    // The interrupt callback lets FFmpeg abort blocking IO when the
    // cancel token is triggered (e.g. player destroyed mid-open).
    context->interrupt_callback.opaque = cancelToken.get();
    context->interrupt_callback.callback = [](void *opaque) {
        const auto *cancelToken = static_cast<const ICancelToken *>(opaque);
        if (cancelToken && cancelToken->isCancelled())
            return 1;
        return 0;
    };

    int ret = 0;
    {
        // avformat_open_input frees the context on failure and may replace
        // it on success, so temporarily release ownership from the unique_ptr.
        AVFormatContext *contextRaw = context.release();
        ret = avformat_open_input(&contextRaw, url.constData(), nullptr, dict);
        context.reset(contextRaw);
    }

    if (ret < 0) {
        // Map FFmpeg errno-style codes to the closest QMediaPlayer error.
        auto code = QMediaPlayer::ResourceError;
        if (ret == AVERROR(EACCES))
            code = QMediaPlayer::AccessDeniedError;
        else if (ret == AVERROR(EINVAL) || ret == AVERROR_INVALIDDATA)
            code = QMediaPlayer::FormatError;

        qCWarning(qLcMediaDataHolder)
                << "Could not open media. FFmpeg error description:" << AVError(ret);

        return q23::unexpected{
            MediaDataHolder::ContextError{ code, QMediaPlayer::tr("Could not open file") },
        };
    }

    // Probe the streams so codec parameters and durations are populated.
    ret = avformat_find_stream_info(context.get(), nullptr);
    if (ret < 0) {
        return q23::unexpected{
            MediaDataHolder::ContextError{
                    QMediaPlayer::FormatError,
                    QMediaPlayer::tr("Could not find stream information for media file") },
        };
    }

    if (qLcMediaDataHolder().isInfoEnabled())
        av_dump_format(context.get(), 0, url.constData(), 0);


    return context;
}
320
321} // namespace
322
323MediaDataHolder::Maybe MediaDataHolder::create(const QUrl &url, QIODevice *stream,
324 const QPlaybackOptions &options,
325 const std::shared_ptr<ICancelToken> &cancelToken)
326{
327 q23::expected context = loadMedia(url, stream, options, cancelToken);
328 if (context) {
329 // MediaDataHolder is wrapped in a shared pointer to interop with signal/slot mechanism
330 return std::make_shared<MediaDataHolder>(
331 MediaDataHolder{ std::move(context.value()), cancelToken });
332 }
333 return q23::unexpected{ context.error() };
334}
335
/*!
    \internal

    Builds the per-track stream maps, default track selection, and overall
    duration from an already opened \a context. \a cancelToken is retained so
    long-running operations can be aborted.

    Precondition: \a context is non-null (asserted).
*/
MediaDataHolder::MediaDataHolder(AVFormatContextUPtr context,
                                 const std::shared_ptr<ICancelToken> &cancelToken)
    : m_cancelToken{ cancelToken }
{
    Q_ASSERT(context);

    m_context = std::move(context);
    // AVFMTCTX_UNSEEKABLE is set by loadMedia for sequential QIODevices.
    m_isSeekable = !(m_context->ctx_flags & AVFMTCTX_UNSEEKABLE);

    // Enumerate all container streams and sort the usable ones into
    // m_streamMap, keyed by Qt track type.
    for (unsigned int i = 0; i < m_context->nb_streams; ++i) {

        const auto *stream = m_context->streams[i];
        const auto trackType = trackTypeFromMediaType(stream->codecpar->codec_type);

        // Skip media types Qt has no track concept for (data, attachments, ...).
        if (trackType == QPlatformMediaPlayer::NTrackTypes)
            continue;

        if (stream->disposition & AV_DISPOSITION_ATTACHED_PIC)
            continue; // Ignore attached picture streams because we treat them as metadata

        if (stream->time_base.num <= 0 || stream->time_base.den <= 0) {
            // An invalid stream timebase is not expected to be given by FFmpeg
            qCWarning(qLcMediaDataHolder) << "A stream for the track type" << trackType
                                          << "has an invalid timebase:" << stream->time_base;
            continue;
        }

        auto metaData = QFFmpegMetaData::fromAVMetaData(stream->metadata);
        const bool isDefault = stream->disposition & AV_DISPOSITION_DEFAULT;

        if (trackType != QPlatformMediaPlayer::SubtitleStream) {
            insertMediaData(metaData, trackType, stream);

            // First stream flagged DEFAULT wins; index is the position the
            // entry will get in m_streamMap below (appended at the end).
            if (isDefault && m_requestedStreams[trackType] < 0)
                m_requestedStreams[trackType] = m_streamMap[trackType].size();
        }

        if (auto duration = streamDuration(*stream)) {
            // Overall media duration is the longest of all stream durations.
            m_duration = qMax(m_duration, *duration);
            metaData.insert(QMediaMetaData::Duration, toUserDuration(*duration).get());
        }

        m_streamMap[trackType].append({ (int)i, isDefault, metaData });
    }

    // With some media files, streams may be lacking duration info. Let's
    // get it from ffmpeg's duration estimation instead.
    if (m_duration == TrackDuration(0) && m_context->duration > 0ll) {
        m_duration = toTrackDuration(AVContextDuration(m_context->duration));
    }

    // Resolve the initially active audio/video streams: fall back to the
    // first available stream when no DEFAULT-flagged stream was found.
    for (auto trackType :
         { QPlatformMediaPlayer::VideoStream, QPlatformMediaPlayer::AudioStream }) {
        auto &requestedStream = m_requestedStreams[trackType];
        auto &streamMap = m_streamMap[trackType];

        if (requestedStream < 0 && !streamMap.empty())
            requestedStream = 0;

        if (requestedStream >= 0)
            m_currentAVStreamIndex[trackType] = streamMap[requestedStream].avStreamIndex;
    }

    updateMetaData();
}
401
402namespace {
403
404/*!
405 \internal
406
407 Attempt to find an attached picture from the context's streams.
408 This will find ID3v2 pictures on audio files, and also pictures
409 attached to videos.
410 */
411QImage getAttachedPicture(const AVFormatContext *context)
412{
413 if (!context)
414 return {};
415
416 for (unsigned int i = 0; i < context->nb_streams; ++i) {
417 const AVStream* stream = context->streams[i];
418 if (!stream || !(stream->disposition & AV_DISPOSITION_ATTACHED_PIC))
419 continue;
420
421 const AVPacket *compressedImage = &stream->attached_pic;
422 if (!compressedImage || !compressedImage->data || compressedImage->size <= 0)
423 continue;
424
425 // Feed raw compressed data to QImage::fromData, which will decompress it
426 // if it is a recognized format.
427 QImage image = QImage::fromData({ compressedImage->data, compressedImage->size });
428 if (!image.isNull())
429 return image;
430 }
431
432 return {};
433}
434
435} // namespace
436
437void MediaDataHolder::updateMetaData()
438{
439 m_metaData = {};
440
441 if (!m_context)
442 return;
443
444 m_metaData = QFFmpegMetaData::fromAVMetaData(m_context->metadata);
445 m_metaData.insert(QMediaMetaData::FileFormat,
446 QVariant::fromValue(QFFmpegMediaFormatInfo::fileFormatForAVInputFormat(
447 *m_context->iformat)));
448 m_metaData.insert(QMediaMetaData::Duration, toUserDuration(m_duration).get());
449
450 if (!m_cachedThumbnail.has_value())
451 m_cachedThumbnail = getAttachedPicture(m_context.get());
452
453 QtMultimediaPrivate::setCoverArtImage(m_metaData, *m_cachedThumbnail);
454
455 for (auto trackType :
456 { QPlatformMediaPlayer::AudioStream, QPlatformMediaPlayer::VideoStream }) {
457 const auto streamIndex = m_currentAVStreamIndex[trackType];
458 if (streamIndex >= 0)
459 insertMediaData(m_metaData, trackType, m_context->streams[streamIndex]);
460 }
461}
462
464{
465 if (!m_context)
466 return false;
467
469 streamNumber = -1;
471 return false;
474
476 qCDebug(qLcMediaDataHolder) << ">>>>> change track" << type << "from" << oldIndex << "to"
477 << avStreamIndex;
478
479 // TODO: maybe add additional verifications
481
483
484 return true;
485}
486
491
493 QPlatformMediaPlayer::TrackType trackType) const
494{
495 Q_ASSERT(trackType < QPlatformMediaPlayer::NTrackTypes);
496
497 return m_streamMap[trackType];
498}
499
500} // namespace QFFmpeg
501
502QT_END_NAMESPACE
int currentStreamIndex(QPlatformMediaPlayer::TrackType trackType) const
VideoTransformation transformation() const
const QList< StreamInfo > & streamInfo(QPlatformMediaPlayer::TrackType trackType) const
Definition qlist.h:81
static VideoTransformation streamTransformation(const AVStream *stream)
static bool colorTransferSupportsHdr(const AVStream *stream)
static void insertMediaData(QMediaMetaData &metaData, QPlatformMediaPlayer::TrackType trackType, const AVStream *stream)
static std::optional< TrackDuration > streamDuration(const AVStream &stream)
std::conditional_t< QT_FFMPEG_AVIO_WRITE_CONST, const uint8_t *, uint8_t * > AvioWriteBufferType
static QTransform displayMatrixToTransform(const int32_t *displayMatrix)
#define qCWarning(category,...)
#define qCDebug(category,...)
#define Q_STATIC_LOGGING_CATEGORY(name,...)
virtual bool isCancelled() const =0