Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
qffmpegmediadataholder.cpp
// Copyright (C) 2021 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include "playbackengine/qffmpegmediadataholder_p.h"

#include "qiodevice.h"
#include "qdatetime.h"

#include <math.h>
#include <optional>

extern "C" {
#include "libavutil/display.h"
}

QT_BEGIN_NAMESPACE

Q_STATIC_LOGGING_CATEGORY(qLcMediaDataHolder, "qt.multimedia.ffmpeg.mediadataholder")

namespace QFFmpeg {

static std::optional<qint64> streamDuration(const AVStream &stream)
{
    const auto &factor = stream.time_base;

    if (stream.duration > 0 && factor.num > 0 && factor.den > 0) {
        return qint64(1000000) * stream.duration * factor.num / factor.den;
    }

    // In some cases FFmpeg reports a negative duration, which is definitely
    // invalid. However, the correct duration may be read from the metadata.

    if (stream.duration < 0) {
        qCWarning(qLcMediaDataHolder) << "AVStream duration" << stream.duration
                                      << "is invalid. Taking it from the metadata";
    }

    if (const auto duration = av_dict_get(stream.metadata, "DURATION", nullptr, 0)) {
        const auto time = QTime::fromString(QString::fromUtf8(duration->value));
        return qint64(1000) * time.msecsSinceStartOfDay();
    }

    return {};
}

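// Worked example (illustrative numbers, not taken from this file): for a stream
// with duration == 900000 and time_base == 1/90000, streamDuration() returns
// qint64(1000000) * 900000 * 1 / 90000 == 10'000'000, i.e. 10 seconds expressed
// in microseconds.
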
static QTransform displayMatrixToTransform(const int32_t *displayMatrix)
{
    // displayMatrix is stored as
    //
    //    . -- X axis
    //    |
    //    |      | a b u |
    //  Y |      | c d v |
    // axis      | x y w |
    //
    // where a, b, c, d, x, y are 16.16 fixed-point values,
    // and u, v, w are 2.30 fixed-point values.
    // Only a, b, c, d affect mirroring and rotation,
    // so it's enough to propagate just them to QTransform.
    //
    // If we were interested in getting proper XY scales,
    // we would divide a, b, c, d by 2^16. A uniform scale doesn't
    // affect mirroring or rotation, so we don't bother.

    auto toRotateMirrorValue = [displayMatrix](int index) {
        // toRotateScaleValue would be:
        //     return displayMatrix[index] / qreal(1 << 16);
        return displayMatrix[index];
    };

    return QTransform(toRotateMirrorValue(0), toRotateMirrorValue(1),
                      toRotateMirrorValue(3), toRotateMirrorValue(4),
                      0, 0);
}
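
// Illustrative example (values assumed, not part of this file): FFmpeg encodes a
// 90-degree rotation, as produced by av_display_rotation_set(matrix, 90), as
//     {          0,  1 << 16, 0,
//       -(1 << 16),        0, 0,
//                0,        0, 1 << 30 }
// and the function above maps it to QTransform(0, 65536, -65536, 0, 0, 0),
// a pure rotation that qVideoTransformationFromMatrix() can classify.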

static VideoTransformation streamTransformation(const AVStream *stream)
{
    Q_ASSERT(stream);

    using SideDataSize = decltype(AVPacketSideData::size);
    constexpr SideDataSize displayMatrixSize = sizeof(int32_t) * 9;
    const AVPacketSideData *sideData = streamSideData(stream, AV_PKT_DATA_DISPLAYMATRIX);
    if (!sideData || sideData->size < displayMatrixSize)
        return {};

    const auto displayMatrix = reinterpret_cast<const int32_t *>(sideData->data);
    const QTransform transform = displayMatrixToTransform(displayMatrix);
    const VideoTransformationOpt result = qVideoTransformationFromMatrix(transform);
    if (!result) {
        qCWarning(qLcMediaDataHolder)
                << "Video stream contains malformed display matrix" << transform;
        return {};
    }
    return *result;
}

static bool colorTransferSupportsHdr(const AVStream *stream)
{
    if (!stream)
        return false;

    const AVCodecParameters *codecPar = stream->codecpar;
    if (!codecPar)
        return false;

    const QVideoFrameFormat::ColorTransfer colorTransfer = fromAvColorTransfer(codecPar->color_trc);

    // Assume that content is using HDR if the color transfer supports high
    // dynamic range. The video may still not utilize the extended range,
    // but we can't determine the actual range without decoding frames.
    return colorTransfer == QVideoFrameFormat::ColorTransfer_ST2084
            || colorTransfer == QVideoFrameFormat::ColorTransfer_STD_B67;
}

VideoTransformation MediaDataHolder::transformation() const
{
    // TODO: Add QMediaMetaData::Mirrored and take from it and QMediaMetaData::Orientation:
    // int orientation = m_metaData.value(QMediaMetaData::Orientation).toInt();
    // return static_cast<QtVideo::Rotation>(orientation);

    const int streamIndex = m_currentAVStreamIndex[QPlatformMediaPlayer::VideoStream];
    if (streamIndex < 0)
        return {};

    return streamTransformation(m_context->streams[streamIndex]);
}

AVFormatContext *MediaDataHolder::avContext()
{
    return m_context.get();
}

int MediaDataHolder::currentStreamIndex(QPlatformMediaPlayer::TrackType trackType) const
{
    return m_currentAVStreamIndex[trackType];
}

QPlatformMediaPlayer::TrackType MediaDataHolder::trackTypeFromMediaType(int mediaType)
{
    switch (mediaType) {
    case AVMEDIA_TYPE_AUDIO:
        return QPlatformMediaPlayer::AudioStream;
    case AVMEDIA_TYPE_VIDEO:
        return QPlatformMediaPlayer::VideoStream;
    case AVMEDIA_TYPE_SUBTITLE:
        return QPlatformMediaPlayer::SubtitleStream;
    default:
        return QPlatformMediaPlayer::NTrackTypes;
    }
}

namespace {
QMaybe<AVFormatContextUPtr, MediaDataHolder::ContextError>
loadMedia(const QUrl &mediaUrl, QIODevice *stream, const std::shared_ptr<ICancelToken> &cancelToken)
{
    const QByteArray url = mediaUrl.toString(QUrl::PreferLocalFile).toUtf8();

    AVFormatContextUPtr context{ avformat_alloc_context() };

    if (stream) {
        if (!stream->isOpen()) {
            if (!stream->open(QIODevice::ReadOnly))
                return MediaDataHolder::ContextError{
                    QMediaPlayer::ResourceError, QLatin1String("Could not open source device.")
                };
        }
        if (!stream->isSequential())
            stream->seek(0);

        constexpr int bufferSize = 32768;
        unsigned char *buffer = (unsigned char *)av_malloc(bufferSize);
        context->pb = avio_alloc_context(buffer, bufferSize, false, stream, &readQIODevice,
                                         nullptr, &seekQIODevice);
    }

    AVDictionaryHolder dict;
    constexpr auto NetworkTimeoutUs = "5000000";
    av_dict_set(dict, "timeout", NetworkTimeoutUs, 0);

    const QByteArray protocolWhitelist = qgetenv("QT_FFMPEG_PROTOCOL_WHITELIST");
    if (!protocolWhitelist.isNull())
        av_dict_set(dict, "protocol_whitelist", protocolWhitelist.data(), 0);
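    // Example (assumed value, not part of this file): restricting FFmpeg to
    // local files and HTTPS-based streaming could look like
    //     QT_FFMPEG_PROTOCOL_WHITELIST=file,crypto,tcp,tls,http,https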

    context->interrupt_callback.opaque = cancelToken.get();
    context->interrupt_callback.callback = [](void *opaque) {
        const auto *cancelToken = static_cast<const ICancelToken *>(opaque);
        if (cancelToken && cancelToken->isCancelled())
            return 1;
        return 0;
    };

    int ret = 0;
    {
        AVFormatContext *contextRaw = context.release();
        ret = avformat_open_input(&contextRaw, url.constData(), nullptr, dict);
        context.reset(contextRaw);
    }

    if (ret < 0) {
        auto code = QMediaPlayer::ResourceError;
        if (ret == AVERROR(EACCES))
            code = QMediaPlayer::AccessDeniedError;
        else if (ret == AVERROR(EINVAL) || ret == AVERROR_INVALIDDATA)
            code = QMediaPlayer::FormatError;

        return MediaDataHolder::ContextError{ code, QMediaPlayer::tr("Could not open file") };
    }

    ret = avformat_find_stream_info(context.get(), nullptr);
    if (ret < 0) {
        return MediaDataHolder::ContextError{
            QMediaPlayer::FormatError,
            QMediaPlayer::tr("Could not find stream information for media file")
        };
    }

#ifndef QT_NO_DEBUG
    av_dump_format(context.get(), 0, url.constData(), 0);
#endif
    return context;
}

} // namespace

MediaDataHolder::Maybe MediaDataHolder::create(const QUrl &url, QIODevice *stream,
                                               const std::shared_ptr<ICancelToken> &cancelToken)
{
    QMaybe context = loadMedia(url, stream, cancelToken);
    if (context) {
        // MediaDataHolder is wrapped in a shared pointer to interoperate with
        // the signal/slot mechanism.
        return QSharedPointer<MediaDataHolder>{ new MediaDataHolder{ std::move(context.value()),
                                                                     cancelToken } };
    }
    return context.error();
}
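
// A minimal usage sketch (illustrative, not part of this file; assumes no custom
// QIODevice and no cancellation support is needed):
//
//     auto maybeHolder = MediaDataHolder::create(QUrl(QStringLiteral("file:///tmp/video.mp4")),
//                                                /*stream=*/nullptr,
//                                                /*cancelToken=*/nullptr);
//     if (maybeHolder)
//         qCDebug(qLcMediaDataHolder)
//                 << maybeHolder.value()->streamInfo(QPlatformMediaPlayer::VideoStream).size()
//                 << "video streams found";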

MediaDataHolder::MediaDataHolder(AVFormatContextUPtr context,
                                 const std::shared_ptr<ICancelToken> &cancelToken)
    : m_cancelToken{ cancelToken }
{
    Q_ASSERT(context);

    m_context = std::move(context);
    m_isSeekable = !(m_context->ctx_flags & AVFMTCTX_UNSEEKABLE);

    for (unsigned int i = 0; i < m_context->nb_streams; ++i) {

        const auto *stream = m_context->streams[i];
        const auto trackType = trackTypeFromMediaType(stream->codecpar->codec_type);

        if (trackType == QPlatformMediaPlayer::NTrackTypes)
            continue;

        if (stream->disposition & AV_DISPOSITION_ATTACHED_PIC)
            continue; // Ignore attached picture streams because we treat them as metadata

        auto metaData = QFFmpegMetaData::fromAVMetaData(stream->metadata);
        const bool isDefault = stream->disposition & AV_DISPOSITION_DEFAULT;

        if (trackType != QPlatformMediaPlayer::SubtitleStream) {
            insertMediaData(metaData, trackType, stream);

            if (isDefault && m_requestedStreams[trackType] < 0)
                m_requestedStreams[trackType] = m_streamMap[trackType].size();
        }

        if (auto duration = streamDuration(*stream)) {
            m_duration = qMax(m_duration, *duration);
            metaData.insert(QMediaMetaData::Duration, *duration / qint64(1000));
        }

        m_streamMap[trackType].append({ (int)i, isDefault, metaData });
    }

    // With some media files, streams may lack duration info. Let's
    // take it from ffmpeg's duration estimation instead.
    if (m_duration == 0 && m_context->duration > 0ll) {
        m_duration = m_context->duration;
    }

    for (auto trackType :
         { QPlatformMediaPlayer::VideoStream, QPlatformMediaPlayer::AudioStream }) {
        auto &requestedStream = m_requestedStreams[trackType];
        auto &streamMap = m_streamMap[trackType];

        if (requestedStream < 0 && !streamMap.empty())
            requestedStream = 0;

        if (requestedStream >= 0)
            m_currentAVStreamIndex[trackType] = streamMap[requestedStream].avStreamIndex;
    }

    updateMetaData();
}

namespace {

/*!
    \internal

    Attempt to find an attached picture from the context's streams.
    This will find ID3v2 pictures on audio files, and also pictures
    attached to videos.
 */
QImage getAttachedPicture(const AVFormatContext *context)
{
    if (!context)
        return {};

    for (unsigned int i = 0; i < context->nb_streams; ++i) {
        const AVStream *stream = context->streams[i];
        if (!stream || !(stream->disposition & AV_DISPOSITION_ATTACHED_PIC))
            continue;

        const AVPacket *compressedImage = &stream->attached_pic;
        if (!compressedImage || !compressedImage->data || compressedImage->size <= 0)
            continue;

        // Feed the raw compressed data to QImage::fromData, which will decompress it
        // if it is a recognized format.
        QImage image = QImage::fromData({ compressedImage->data, compressedImage->size });
        if (!image.isNull())
            return image;
    }

    return {};
}

} // namespace

void MediaDataHolder::updateMetaData()
{
    m_metaData = {};

    if (!m_context)
        return;

    m_metaData = QFFmpegMetaData::fromAVMetaData(m_context->metadata);
    m_metaData.insert(QMediaMetaData::FileFormat,
                      QVariant::fromValue(QFFmpegMediaFormatInfo::fileFormatForAVInputFormat(
                              m_context->iformat)));
    m_metaData.insert(QMediaMetaData::Duration, m_duration / qint64(1000));

    if (!m_cachedThumbnail.has_value())
        m_cachedThumbnail = getAttachedPicture(m_context.get());

    if (!m_cachedThumbnail->isNull())
        m_metaData.insert(QMediaMetaData::ThumbnailImage, m_cachedThumbnail.value());

    for (auto trackType :
         { QPlatformMediaPlayer::AudioStream, QPlatformMediaPlayer::VideoStream }) {
        const auto streamIndex = m_currentAVStreamIndex[trackType];
        if (streamIndex >= 0)
            insertMediaData(m_metaData, trackType, m_context->streams[streamIndex]);
    }
}

bool MediaDataHolder::setActiveTrack(QPlatformMediaPlayer::TrackType type, int streamNumber)
{
    if (!m_context)
        return false;

    if (streamNumber < 0 || streamNumber >= m_streamMap[type].size())
        streamNumber = -1;
    if (m_requestedStreams[type] == streamNumber)
        return false;
    m_requestedStreams[type] = streamNumber;
    const int avStreamIndex = m_streamMap[type].value(streamNumber).avStreamIndex;

    const int oldIndex = m_currentAVStreamIndex[type];
    qCDebug(qLcMediaDataHolder) << ">>>>> change track" << type << "from" << oldIndex << "to"
                                << avStreamIndex;

    // TODO: maybe add additional verifications
    m_currentAVStreamIndex[type] = avStreamIndex;

    updateMetaData();

    return true;
}
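
// Usage sketch (illustrative, not part of this file): switching to the second
// audio track, with indices as reported by streamInfo(QPlatformMediaPlayer::AudioStream):
//
//     if (holder.setActiveTrack(QPlatformMediaPlayer::AudioStream, 1))
//         qCDebug(qLcMediaDataHolder)
//                 << "active audio track is now"
//                 << holder.activeTrack(QPlatformMediaPlayer::AudioStream);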

int MediaDataHolder::activeTrack(QPlatformMediaPlayer::TrackType type) const
{
    return type < QPlatformMediaPlayer::NTrackTypes ? m_requestedStreams[type] : -1;
}

const QList<MediaDataHolder::StreamInfo> &MediaDataHolder::streamInfo(
        QPlatformMediaPlayer::TrackType trackType) const
{
    Q_ASSERT(trackType < QPlatformMediaPlayer::NTrackTypes);

    return m_streamMap[trackType];
}

} // namespace QFFmpeg

QT_END_NAMESPACE