Qt
Internal/contributor documentation for the Qt SDK. Note: these are NOT the official API docs; those are published at https://doc.qt.io/
Loading...
Searching...
No Matches
qffmpegmediadataholder.cpp
Go to the documentation of this file.
1// Copyright (C) 2021 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
4#include "playbackengine/qffmpegmediadataholder_p.h"
5
9#include "qiodevice.h"
10#include "qdatetime.h"
12
13#include <math.h>
14#include <optional>
15
16extern "C" {
17#include "libavutil/display.h"
18}
19
20QT_BEGIN_NAMESPACE
21
22Q_STATIC_LOGGING_CATEGORY(qLcMediaDataHolder, "qt.multimedia.ffmpeg.mediadataholder")
23
24namespace QFFmpeg {
25
26static std::optional<TrackDuration> streamDuration(const AVStream &stream)
27{
28 if (stream.duration > 0)
29 return toTrackDuration(AVStreamDuration(stream.duration), &stream);
30
31 // In some cases ffmpeg reports negative duration that is definitely invalid.
32 // However, the correct duration may be read from the metadata.
33
34 if (stream.duration < 0 && stream.duration != AV_NOPTS_VALUE) {
35 qCWarning(qLcMediaDataHolder) << "AVStream duration" << stream.duration
36 << "is invalid. Taking it from the metadata";
37 }
38
39 if (const auto duration = av_dict_get(stream.metadata, "DURATION", nullptr, 0)) {
40 const auto time = QTime::fromString(QString::fromUtf8(duration->value));
41 return TrackDuration(qint64(1000) * time.msecsSinceStartOfDay());
42 }
43
44 return {};
45}
46
47static QTransform displayMatrixToTransform(const int32_t *displayMatrix)
48{
49 // displayMatrix is stored as
50 //
51 // . -- X axis
52 // |
53 // | | a b u |
54 // Y | c d v |
55 // axis | x y w |
56 //
57 // where a, b, c, d, x, y are 16.16 fixed-point values,
58 // and u, v, w are 30.2 point values.
59 // Only a, b, c, d impacts on mirroring and rotation,
60 // so it's enough to propagate them to QTransform.
61 //
62 // If we were interested in getting proper XY scales,
63 // we would divide a,b,c,d by 2^16. The whole scale doesn't
64 // impact mirroring and rotation, so we don't do so.
65
66 auto toRotateMirrorValue = [displayMatrix](int index) {
67 // toRotateScaleValue would be:
68 // return displayMatrix[index] / qreal(1 << 16);
69 return displayMatrix[index];
70 };
71
72 return QTransform(toRotateMirrorValue(0), toRotateMirrorValue(1),
73 toRotateMirrorValue(3), toRotateMirrorValue(4),
74 0, 0);
75}
76
77static VideoTransformation streamTransformation(const AVStream *stream)
78{
79 Q_ASSERT(stream);
80
81 using SideDataSize = decltype(AVPacketSideData::size);
82 constexpr SideDataSize displayMatrixSize = sizeof(int32_t) * 9;
83 const AVPacketSideData *sideData = streamSideData(stream, AV_PKT_DATA_DISPLAYMATRIX);
84 if (!sideData || sideData->size < displayMatrixSize)
85 return {};
86
87 const auto displayMatrix = reinterpret_cast<const int32_t *>(sideData->data);
88 const QTransform transform = displayMatrixToTransform(displayMatrix);
89 const VideoTransformationOpt result = qVideoTransformationFromMatrix(transform);
90 if (!result) {
91 qCWarning(qLcMediaDataHolder)
92 << "Video stream contains malformed display matrix" << transform;
93 return {};
94 }
95 return *result;
96}
97
98static bool colorTransferSupportsHdr(const AVStream *stream)
99{
100 if (!stream)
101 return false;
102
103 const AVCodecParameters *codecPar = stream->codecpar;
104 if (!codecPar)
105 return false;
106
107 const QVideoFrameFormat::ColorTransfer colorTransfer = fromAvColorTransfer(codecPar->color_trc);
108
109 // Assume that content is using HDR if the color transfer supports high
110 // dynamic range. The video may still not utilize the extended range,
111 // but we can't determine the actual range without decoding frames.
112 return colorTransfer == QVideoFrameFormat::ColorTransfer_ST2084
113 || colorTransfer == QVideoFrameFormat::ColorTransfer_STD_B67;
114}
115
117{
118 // TODO: Add QMediaMetaData::Mirrored and take from it and QMediaMetaData::Orientation:
119 // int orientation = m_metaData.value(QMediaMetaData::Orientation).toInt();
120 // return static_cast<QtVideo::Rotation>(orientation);
121
122 const int streamIndex = m_currentAVStreamIndex[QPlatformMediaPlayer::VideoStream];
123 if (streamIndex < 0)
124 return {};
125
126 return streamTransformation(m_context->streams[streamIndex]);
127}
128
130{
131 return m_context.get();
132}
133
134int MediaDataHolder::currentStreamIndex(QPlatformMediaPlayer::TrackType trackType) const
135{
136 return m_currentAVStreamIndex[trackType];
137}
138
168
169QPlatformMediaPlayer::TrackType MediaDataHolder::trackTypeFromMediaType(int mediaType)
170{
171 switch (mediaType) {
172 case AVMEDIA_TYPE_AUDIO:
173 return QPlatformMediaPlayer::AudioStream;
174 case AVMEDIA_TYPE_VIDEO:
175 return QPlatformMediaPlayer::VideoStream;
176 case AVMEDIA_TYPE_SUBTITLE:
177 return QPlatformMediaPlayer::SubtitleStream;
178 default:
179 return QPlatformMediaPlayer::NTrackTypes;
180 }
181}
182
namespace {
/*!
    \internal

    Opens the media addressed either by \a mediaUrl or by the custom
    \a stream and returns a fully probed AVFormatContext, or a ContextError
    describing why opening failed. \a cancelToken is polled by FFmpeg's
    interrupt callback so a blocking open/probe can be aborted.
 */
QMaybe<AVFormatContextUPtr, MediaDataHolder::ContextError>
loadMedia(const QUrl &mediaUrl, QIODevice *stream, const std::shared_ptr<ICancelToken> &cancelToken)
{
    const QByteArray url = mediaUrl.toString(QUrl::PreferLocalFile).toUtf8();

    AVFormatContextUPtr context{ avformat_alloc_context() };

    if (stream) {
        if (!stream->isOpen()) {
            if (!stream->open(QIODevice::ReadOnly))
                return { unexpect,
                         MediaDataHolder::ContextError{
                                 QMediaPlayer::ResourceError,
                                 QLatin1String("Could not open source device.") } };
        }

        // Custom I/O: route FFmpeg's reads (and seeks, when the device
        // supports them) through the QIODevice.
        auto seek = &seekQIODevice;

        if (!stream->isSequential()) {
            stream->seek(0);
        } else {
            // Sequential devices cannot seek: flag the context accordingly
            // and drop the seek callback.
            context->ctx_flags |= AVFMTCTX_UNSEEKABLE;
            seek = nullptr;
        }

        constexpr int bufferSize = 32768;
        unsigned char *buffer = (unsigned char *)av_malloc(bufferSize);
        // Ownership of 'buffer' passes to the AVIO context (write_flag is
        // false: read-only I/O).
        context->pb = avio_alloc_context(buffer, bufferSize, false, stream, &readQIODevice, nullptr,
                                         seek);
    }

    AVDictionaryHolder dict;
    constexpr auto NetworkTimeoutUs = "5000000"; // 5 s network I/O timeout, in microseconds
    av_dict_set(dict, "timeout", NetworkTimeoutUs, 0);

    // Allow users to extend the set of allowed protocols via an environment
    // variable (passed through to FFmpeg's protocol whitelist option).
    const QByteArray protocolWhitelist = qgetenv("QT_FFMPEG_PROTOCOL_WHITELIST");
    if (!protocolWhitelist.isNull())
        av_dict_set(dict, "protocol_whitelist", protocolWhitelist.data(), 0);

    // FFmpeg polls this callback during blocking operations; returning 1
    // aborts the operation once the token has been cancelled.
    context->interrupt_callback.opaque = cancelToken.get();
    context->interrupt_callback.callback = [](void *opaque) {
        const auto *cancelToken = static_cast<const ICancelToken *>(opaque);
        if (cancelToken && cancelToken->isCancelled())
            return 1;
        return 0;
    };

    int ret = 0;
    {
        // avformat_open_input frees the context and nulls the pointer on
        // failure, so release ownership around the call and re-wrap the
        // (possibly null) result afterwards to avoid a double free.
        AVFormatContext *contextRaw = context.release();
        ret = avformat_open_input(&contextRaw, url.constData(), nullptr, dict);
        context.reset(contextRaw);
    }

    if (ret < 0) {
        // Map the FFmpeg error onto the closest QMediaPlayer error category.
        auto code = QMediaPlayer::ResourceError;
        if (ret == AVERROR(EACCES))
            code = QMediaPlayer::AccessDeniedError;
        else if (ret == AVERROR(EINVAL) || ret == AVERROR_INVALIDDATA)
            code = QMediaPlayer::FormatError;

        qCWarning(qLcMediaDataHolder)
                << "Could not open media. FFmpeg error description:" << err2str(ret);

        return { unexpect,
                 MediaDataHolder::ContextError{ code, QMediaPlayer::tr("Could not open file") } };
    }

    // Probe the streams so codec parameters, durations etc. get populated.
    ret = avformat_find_stream_info(context.get(), nullptr);
    if (ret < 0) {
        return { unexpect,
                 MediaDataHolder::ContextError{
                         QMediaPlayer::FormatError,
                         QMediaPlayer::tr("Could not find stream information for media file") } };
    }

    if (qLcMediaDataHolder().isInfoEnabled())
        av_dump_format(context.get(), 0, url.constData(), 0);


    return context;
}

} // namespace
268
269MediaDataHolder::Maybe MediaDataHolder::create(const QUrl &url, QIODevice *stream,
270 const std::shared_ptr<ICancelToken> &cancelToken)
271{
272 QMaybe context = loadMedia(url, stream, cancelToken);
273 if (context) {
274 // MediaDataHolder is wrapped in a shared pointer to interop with signal/slot mechanism
275 return QSharedPointer<MediaDataHolder>{ new MediaDataHolder{ std::move(context.value()), cancelToken } };
276 }
277 return { unexpect, context.error() };
278}
279
// Takes ownership of the probed \a context and builds the per-track-type
// stream maps, selects default streams, accumulates the media duration and
// populates the initial metadata. \a cancelToken is retained for later
// cancellation checks.
MediaDataHolder::MediaDataHolder(AVFormatContextUPtr context,
                                 const std::shared_ptr<ICancelToken> &cancelToken)
    : m_cancelToken{ cancelToken }
{
    Q_ASSERT(context);

    m_context = std::move(context);
    m_isSeekable = !(m_context->ctx_flags & AVFMTCTX_UNSEEKABLE);

    for (unsigned int i = 0; i < m_context->nb_streams; ++i) {

        const auto *stream = m_context->streams[i];
        const auto trackType = trackTypeFromMediaType(stream->codecpar->codec_type);

        // Skip media types we don't expose as tracks.
        if (trackType == QPlatformMediaPlayer::NTrackTypes)
            continue;

        if (stream->disposition & AV_DISPOSITION_ATTACHED_PIC)
            continue; // Ignore attached picture streams because we treat them as metadata

        if (stream->time_base.num <= 0 || stream->time_base.den <= 0) {
            // An invalid stream timebase is not expected to be given by FFmpeg
            qCWarning(qLcMediaDataHolder) << "A stream for the track type" << trackType
                                          << "has an invalid timebase:" << stream->time_base;
            continue;
        }

        auto metaData = QFFmpegMetaData::fromAVMetaData(stream->metadata);
        const bool isDefault = stream->disposition & AV_DISPOSITION_DEFAULT;

        if (trackType != QPlatformMediaPlayer::SubtitleStream) {
            insertMediaData(metaData, trackType, stream);

            // NOTE: size() is read before the append below, so it equals the
            // index this stream will occupy in m_streamMap[trackType].
            if (isDefault && m_requestedStreams[trackType] < 0)
                m_requestedStreams[trackType] = m_streamMap[trackType].size();
        }

        if (auto duration = streamDuration(*stream)) {
            // Overall media duration is the longest of the stream durations.
            m_duration = qMax(m_duration, *duration);
            metaData.insert(QMediaMetaData::Duration, toUserDuration(*duration).get());
        }

        m_streamMap[trackType].append({ (int)i, isDefault, metaData });
    }

    // With some media files, streams may be lacking duration info. Let's
    // get it from ffmpeg's duration estimation instead.
    if (m_duration == TrackDuration(0) && m_context->duration > 0ll) {
        m_duration = toTrackDuration(AVContextDuration(m_context->duration));
    }

    // When no stream was flagged as default, fall back to the first one of
    // each type, then resolve the per-type choice to an AVStream index.
    for (auto trackType :
         { QPlatformMediaPlayer::VideoStream, QPlatformMediaPlayer::AudioStream }) {
        auto &requestedStream = m_requestedStreams[trackType];
        auto &streamMap = m_streamMap[trackType];

        if (requestedStream < 0 && !streamMap.empty())
            requestedStream = 0;

        if (requestedStream >= 0)
            m_currentAVStreamIndex[trackType] = streamMap[requestedStream].avStreamIndex;
    }

    updateMetaData();
}
345
346namespace {
347
348/*!
349 \internal
350
351 Attempt to find an attached picture from the context's streams.
352 This will find ID3v2 pictures on audio files, and also pictures
353 attached to videos.
354 */
355QImage getAttachedPicture(const AVFormatContext *context)
356{
357 if (!context)
358 return {};
359
360 for (unsigned int i = 0; i < context->nb_streams; ++i) {
361 const AVStream* stream = context->streams[i];
362 if (!stream || !(stream->disposition & AV_DISPOSITION_ATTACHED_PIC))
363 continue;
364
365 const AVPacket *compressedImage = &stream->attached_pic;
366 if (!compressedImage || !compressedImage->data || compressedImage->size <= 0)
367 continue;
368
369 // Feed raw compressed data to QImage::fromData, which will decompress it
370 // if it is a recognized format.
371 QImage image = QImage::fromData({ compressedImage->data, compressedImage->size });
372 if (!image.isNull())
373 return image;
374 }
375
376 return {};
377}
378
379} // namespace
380
// Rebuilds m_metaData from the container-level metadata, the attached
// picture (decoded lazily, at most once) and the currently selected
// audio/video streams.
void MediaDataHolder::updateMetaData()
{
    m_metaData = {};

    if (!m_context)
        return;

    m_metaData = QFFmpegMetaData::fromAVMetaData(m_context->metadata);
    m_metaData.insert(QMediaMetaData::FileFormat,
                      QVariant::fromValue(QFFmpegMediaFormatInfo::fileFormatForAVInputFormat(
                              *m_context->iformat)));
    m_metaData.insert(QMediaMetaData::Duration, toUserDuration(m_duration).get());

    // Decoding the attached picture can be expensive; cache the result so it
    // happens only on the first call.
    if (!m_cachedThumbnail.has_value())
        m_cachedThumbnail = getAttachedPicture(m_context.get());

    if (!m_cachedThumbnail->isNull())
        m_metaData.insert(QMediaMetaData::ThumbnailImage, m_cachedThumbnail.value());

    // Merge per-stream metadata for the active audio and video streams.
    for (auto trackType :
         { QPlatformMediaPlayer::AudioStream, QPlatformMediaPlayer::VideoStream }) {
        const auto streamIndex = m_currentAVStreamIndex[trackType];
        if (streamIndex >= 0)
            insertMediaData(m_metaData, trackType, m_context->streams[streamIndex]);
    }
}
407
409{
410 if (!m_context)
411 return false;
412
414 streamNumber = -1;
416 return false;
419
421 qCDebug(qLcMediaDataHolder) << ">>>>> change track" << type << "from" << oldIndex << "to"
422 << avStreamIndex;
423
424 // TODO: maybe add additional verifications
426
428
429 return true;
430}
431
436
438 QPlatformMediaPlayer::TrackType trackType) const
439{
440 Q_ASSERT(trackType < QPlatformMediaPlayer::NTrackTypes);
441
442 return m_streamMap[trackType];
443}
444
445} // namespace QFFmpeg
446
447QT_END_NAMESPACE
int currentStreamIndex(QPlatformMediaPlayer::TrackType trackType) const
VideoTransformation transformation() const
const QList< StreamInfo > & streamInfo(QPlatformMediaPlayer::TrackType trackType) const
Definition qlist.h:80
static VideoTransformation streamTransformation(const AVStream *stream)
static bool colorTransferSupportsHdr(const AVStream *stream)
static void insertMediaData(QMediaMetaData &metaData, QPlatformMediaPlayer::TrackType trackType, const AVStream *stream)
static std::optional< TrackDuration > streamDuration(const AVStream &stream)
static QTransform displayMatrixToTransform(const int32_t *displayMatrix)
#define qCWarning(category,...)
#define qCDebug(category,...)
#define Q_STATIC_LOGGING_CATEGORY(name,...)
virtual bool isCancelled() const =0