4#include "playbackengine/qffmpegmediadataholder_p.h"
13#include <QtMultimedia/qplaybackoptions.h>
19#include "libavutil/display.h"
// streamDuration(): best-effort duration lookup for a single AVStream.
// Fast path: a positive AVStream::duration is authoritative and is
// converted straight to the internal TrackDuration representation.
30 if (stream.duration > 0)
31 return toTrackDuration(AVStreamDuration(stream.duration), &stream);
// A negative duration other than AV_NOPTS_VALUE (which just means
// "unknown") is invalid data from the demuxer: warn, then fall back
// to the metadata dictionary below.
36 if (stream.duration < 0 && stream.duration != AV_NOPTS_VALUE) {
37 qCWarning(qLcMediaDataHolder) <<
"AVStream duration" << stream.duration
38 <<
"is invalid. Taking it from the metadata";
// Fallback: some containers (e.g. Matroska) carry the duration as a
// "DURATION" metadata tag in textual time-of-day form.
41 if (
const auto duration = av_dict_get(stream.metadata,
"DURATION",
nullptr, 0)) {
// QTime gives milliseconds since midnight; scale by 1000 to the
// (microsecond-based) TrackDuration unit.
// NOTE(review): an unparsable tag yields an invalid QTime whose
// msecsSinceStartOfDay() is -1 -- presumably tolerated downstream; verify.
42 const auto time = QTime::fromString(QString::fromUtf8(duration->value));
43 return TrackDuration(qint64(1000) * time.msecsSinceStartOfDay());
// displayMatrixToTransform(): builds a QTransform from an FFmpeg
// display matrix (a 3x3 int32 array attached as stream side data).
// Helper reads one raw matrix coefficient by flat index.
68 auto toRotateMirrorValue = [displayMatrix](
int index) {
71 return displayMatrix[index];
// Only the upper-left 2x2 block (indices 0,1,3,4) -- the
// rotation/mirror part -- is consumed; translation is not relevant
// for deriving the video orientation here.
74 return QTransform(toRotateMirrorValue(0), toRotateMirrorValue(1),
75 toRotateMirrorValue(3), toRotateMirrorValue(4),
// streamTransformation(): derives the rotation/mirror transformation
// of a video stream from its AV_PKT_DATA_DISPLAYMATRIX side data.
83 using SideDataSize =
decltype(AVPacketSideData::size);
// A display matrix is nine int32 coefficients; anything smaller is
// malformed and is treated as "no transformation".
84 constexpr SideDataSize displayMatrixSize =
sizeof(int32_t) * 9;
85 const AVPacketSideData *sideData = streamSideData(stream, AV_PKT_DATA_DISPLAYMATRIX);
86 if (!sideData || sideData->size < displayMatrixSize)
// Size was validated above, so reinterpreting the raw side-data bytes
// as the int32 matrix is safe here.
89 const auto displayMatrix =
reinterpret_cast<
const int32_t *>(sideData->data);
90 const QTransform transform = displayMatrixToTransform(displayMatrix);
// qVideoTransformationFromMatrix() returns an empty optional when the
// matrix is not a pure rotation/mirror; that case is logged below.
91 const VideoTransformationOpt result = qVideoTransformationFromMatrix(transform);
94 <<
"Video stream contains malformed display matrix" << transform;
// colorTransferSupportsHdr(): true when the stream's color transfer
// characteristic is one of the HDR transfer functions.
105 const AVCodecParameters *codecPar = stream->codecpar;
109 const QVideoFrameFormat::ColorTransfer colorTransfer = fromAvColorTransfer(codecPar->color_trc);
// HDR iff the transfer is PQ (SMPTE ST 2084) or HLG (ARIB STD-B67).
114 return colorTransfer == QVideoFrameFormat::ColorTransfer_ST2084
115 || colorTransfer == QVideoFrameFormat::ColorTransfer_STD_B67;
// Transformation accessor: looks up the currently selected video
// stream and delegates to streamTransformation() for its
// rotation/mirror info.
124 const int streamIndex = m_currentAVStreamIndex[QPlatformMediaPlayer::VideoStream];
128 return streamTransformation(m_context->streams[streamIndex]);
// Non-owning access to the demuxer's AVFormatContext (ownership stays
// with m_context).
133 return m_context.get();
// Returns the AVStream index currently selected for the given track
// type (-1 presumably meaning "none selected" -- see the constructor).
138 return m_currentAVStreamIndex[trackType];
// trackTypeFromMediaType(): maps FFmpeg AVMediaType values onto the
// QPlatformMediaPlayer track types. Any other media type (data,
// attachment, ...) falls through to NTrackTypes, which callers treat
// as "unsupported track".
174 case AVMEDIA_TYPE_AUDIO:
175 return QPlatformMediaPlayer::AudioStream;
176 case AVMEDIA_TYPE_VIDEO:
177 return QPlatformMediaPlayer::VideoStream;
178 case AVMEDIA_TYPE_SUBTITLE:
179 return QPlatformMediaPlayer::SubtitleStream;
181 return QPlatformMediaPlayer::NTrackTypes;
// loadMedia(): opens a media source -- either a URL or a custom
// QIODevice -- through FFmpeg, applies the user's QPlaybackOptions as
// demuxer options, and probes the stream info. Returns either an
// owning AVFormatContextUPtr or a ContextError carrying the
// QMediaPlayer error code plus a user-visible message.
186QMaybe<AVFormatContextUPtr, MediaDataHolder::ContextError>
187loadMedia(
const QUrl &mediaUrl, QIODevice *stream,
const QPlaybackOptions &playbackOptions,
190 using std::chrono::duration_cast;
191 using std::chrono::microseconds;
192 using std::chrono::milliseconds;
// PreferLocalFile gives FFmpeg a plain filesystem path for file:// URLs.
194 const QByteArray url = mediaUrl.toString(QUrl::PreferLocalFile).toUtf8();
196 AVFormatContextUPtr context{ avformat_alloc_context() };
// --- Custom QIODevice input path ---------------------------------
// Make sure the device is readable before wiring it up as a custom
// AVIO source; failure maps to ResourceError.
199 if (!stream->isOpen()) {
200 if (!stream->open(QIODevice::ReadOnly))
202 MediaDataHolder::ContextError{
203 QMediaPlayer::ResourceError,
204 QLatin1String(
"Could not open source device.") } };
207 auto seek = &seekQIODevice;
// Sequential (non-seekable) devices: the context is flagged
// AVFMTCTX_UNSEEKABLE -- the constructor later derives m_isSeekable
// from this flag. NOTE(review): the surrounding branch structure is
// only partially visible here; confirm which branch sets the flag.
209 if (!stream->isSequential()) {
212 context->ctx_flags |= AVFMTCTX_UNSEEKABLE;
// AVIO buffer is owned by the AVIOContext after avio_alloc_context;
// the QIODevice is passed as the opaque pointer for readQIODevice.
216 constexpr int bufferSize = 32768;
217 unsigned char *buffer = (
unsigned char *)av_malloc(bufferSize);
218 context->pb = avio_alloc_context(buffer, bufferSize,
false, stream, &readQIODevice,
nullptr,
// --- Demuxer options from QPlaybackOptions -----------------------
222 AVDictionaryHolder dict;
// FFmpeg's "timeout" option is in microseconds; QPlaybackOptions
// exposes milliseconds, hence the duration_cast.
224 const auto timeout = milliseconds(playbackOptions.networkTimeoutMs());
225 av_dict_set_int(dict,
"timeout", duration_cast<microseconds>(timeout).count(), 0);
226 qCDebug(qLcMediaDataHolder) <<
"Using custom network timeout:" << timeout;
// -1 is the "not set" sentinel; FFmpeg rejects probesize values below
// 32, so smaller requests are ignored with a warning.
230 const int probeSize = playbackOptions.probeSize();
231 if (probeSize != -1) {
232 constexpr int minProbeSizeFFmpeg = 32;
233 if (probeSize >= minProbeSizeFFmpeg) {
234 av_dict_set_int(dict,
"probesize", probeSize, 0);
235 qCDebug(qLcMediaDataHolder) <<
"Using custom probesize" << probeSize;
238 qCWarning(qLcMediaDataHolder) <<
"Invalid probe size, using default";
// Optional escape hatch: allow extra protocols via environment.
242 const QByteArray protocolWhitelist = qgetenv(
"QT_FFMPEG_PROTOCOL_WHITELIST");
243 if (!protocolWhitelist.isNull())
244 av_dict_set(dict,
"protocol_whitelist", protocolWhitelist.data(), 0);
// Low-latency intent: disable demuxer buffering and flush packets
// immediately, trading throughput/robustness for latency.
246 if (playbackOptions.playbackIntent() == QPlaybackOptions::LowLatencyStreaming) {
247 av_dict_set(dict,
"fflags",
"nobuffer", 0);
248 av_dict_set_int(dict,
"flush_packets", 1, 0);
249 qCDebug(qLcMediaDataHolder) <<
"Enabled low latency streaming";
// Interrupt callback lets a cancelled token abort blocking FFmpeg I/O
// (e.g. a hung network open). The token outlives the context via the
// shared ownership held by the caller.
252 context->interrupt_callback.opaque = cancelToken.get();
253 context->interrupt_callback.callback = [](
void *opaque) {
254 const auto *cancelToken =
static_cast<
const ICancelToken *>(opaque);
// avformat_open_input frees the context on failure and may replace it
// on success, so ownership is handed over around the call and
// reclaimed immediately after.
262 AVFormatContext *contextRaw = context.release();
263 ret = avformat_open_input(&contextRaw, url.constData(),
nullptr, dict);
264 context.reset(contextRaw);
// Map the FFmpeg error code onto the closest QMediaPlayer error:
// EACCES -> AccessDeniedError, EINVAL/invalid data -> FormatError,
// everything else stays ResourceError.
268 auto code = QMediaPlayer::ResourceError;
269 if (ret == AVERROR(EACCES))
270 code = QMediaPlayer::AccessDeniedError;
271 else if (ret == AVERROR(EINVAL) || ret == AVERROR_INVALIDDATA)
272 code = QMediaPlayer::FormatError;
275 <<
"Could not open media. FFmpeg error description:" << err2str(ret);
278 MediaDataHolder::ContextError{ code, QMediaPlayer::tr(
"Could not open file") } };
// Probe the streams; without this the track/duration info consumed by
// the MediaDataHolder constructor would be incomplete.
281 ret = avformat_find_stream_info(context.get(),
nullptr);
284 MediaDataHolder::ContextError{
285 QMediaPlayer::FormatError,
286 QMediaPlayer::tr(
"Could not find stream information for media file") } };
// Diagnostic dump of the demuxed format, only when info logging is on
// (av_dump_format is relatively expensive/noisy).
289 if (qLcMediaDataHolder().isInfoEnabled())
290 av_dump_format(context.get(), 0, url.constData(), 0);
// Factory: opens the media via loadMedia() and wraps the resulting
// context in a shared MediaDataHolder; loadMedia() failures are
// forwarded unchanged as the error alternative.
299 const QPlaybackOptions &options,
302 QMaybe context = loadMedia(url, stream, options, cancelToken);
305 return QSharedPointer<MediaDataHolder>{
new MediaDataHolder{ std::move(context.value()), cancelToken } };
307 return { unexpect, context.error() };
// Constructor: takes ownership of the opened AVFormatContext, builds
// the per-track stream maps, computes the overall duration, and picks
// the initial audio/video streams.
312 : m_cancelToken{ cancelToken }
316 m_context = std::move(context);
// Seekability was decided in loadMedia() via ctx_flags.
317 m_isSeekable = !(m_context->ctx_flags & AVFMTCTX_UNSEEKABLE);
318 for (
unsigned int i = 0; i < m_context->nb_streams; ++i) {
321 const auto *stream = m_context->streams[i];
322 const auto trackType = trackTypeFromMediaType(stream->codecpar->codec_type);
// Skip unsupported media types and attached pictures (cover art is
// handled separately by getAttachedPicture(), not as a video track).
324 if (trackType == QPlatformMediaPlayer::NTrackTypes)
327 if (stream->disposition & AV_DISPOSITION_ATTACHED_PIC)
// A non-positive timebase cannot be used for timestamp conversion;
// warn (and, presumably, skip the stream -- the branch body is not
// fully visible here).
330 if (stream->time_base.num <= 0 || stream->time_base.den <= 0) {
332 qCWarning(qLcMediaDataHolder) <<
"A stream for the track type" << trackType
333 <<
"has an invalid timebase:" << stream->time_base;
337 auto metaData = QFFmpegMetaData::fromAVMetaData(stream->metadata);
338 const bool isDefault = stream->disposition & AV_DISPOSITION_DEFAULT;
// For audio/video (not subtitles): remember the first stream flagged
// DEFAULT as the requested stream, and fold each stream's duration
// into the media duration.
340 if (trackType != QPlatformMediaPlayer::SubtitleStream) {
343 if (isDefault && m_requestedStreams[trackType] < 0)
344 m_requestedStreams[trackType] = m_streamMap[trackType].size();
347 if (
auto duration = streamDuration(*stream)) {
// Overall duration is the maximum over all streams.
348 m_duration = qMax(m_duration, *duration);
349 metaData.insert(QMediaMetaData::Duration, toUserDuration(*duration).get());
352 m_streamMap[trackType].append({ (
int)i, isDefault, metaData });
// No per-stream duration found: fall back to the container duration.
357 if (m_duration == TrackDuration(0) && m_context->duration > 0ll) {
358 m_duration = toTrackDuration(AVContextDuration(m_context->duration));
// Resolve the active AVStream index per track type: use the requested
// (DEFAULT-flagged) stream, or fall back to the first available one.
361 for (
auto trackType :
362 { QPlatformMediaPlayer::VideoStream, QPlatformMediaPlayer::AudioStream }) {
363 auto &requestedStream = m_requestedStreams[trackType];
364 auto &streamMap = m_streamMap[trackType];
366 if (requestedStream < 0 && !streamMap.empty())
369 if (requestedStream >= 0)
370 m_currentAVStreamIndex[trackType] = streamMap[requestedStream].avStreamIndex;
379
380
381
382
383
384
// getAttachedPicture(): extracts embedded cover art. Scans the
// context's streams for one flagged ATTACHED_PIC and decodes its
// attached_pic packet into a QImage.
385QImage getAttachedPicture(
const AVFormatContext *context)
390 for (
unsigned int i = 0; i < context->nb_streams; ++i) {
391 const AVStream* stream = context->streams[i];
392 if (!stream || !(stream->disposition & AV_DISPOSITION_ATTACHED_PIC))
// Defensive: skip streams whose attached packet has no payload.
395 const AVPacket *compressedImage = &stream->attached_pic;
396 if (!compressedImage || !compressedImage->data || compressedImage->size <= 0)
// QImage::fromData auto-detects the compressed format (PNG/JPEG/...).
401 QImage image = QImage::fromData({ compressedImage->data, compressedImage->size });
// Metadata refresh: rebuilds m_metaData from the container-level
// AVMetaData, then layers on file format, duration, thumbnail, and
// per-track (audio/video) details.
418 m_metaData = QFFmpegMetaData::fromAVMetaData(m_context->metadata);
419 m_metaData.insert(QMediaMetaData::FileFormat,
420 QVariant::fromValue(QFFmpegMediaFormatInfo::fileFormatForAVInputFormat(
421 *m_context->iformat)));
422 m_metaData.insert(QMediaMetaData::Duration, toUserDuration(m_duration).get());
// Attached-picture extraction is cached so the (potentially costly)
// image decode happens at most once per media.
424 if (!m_cachedThumbnail.has_value())
425 m_cachedThumbnail = getAttachedPicture(m_context.get());
427 if (!m_cachedThumbnail->isNull())
428 m_metaData.insert(QMediaMetaData::ThumbnailImage, m_cachedThumbnail.value());
// Merge codec/stream-specific metadata for the currently selected
// audio and video streams (streamIndex < 0 means no such track).
430 for (
auto trackType :
431 { QPlatformMediaPlayer::AudioStream, QPlatformMediaPlayer::VideoStream }) {
432 const auto streamIndex = m_currentAVStreamIndex[trackType];
433 if (streamIndex >= 0)
434 insertMediaData(m_metaData, trackType, m_context->streams[streamIndex]);
// Accessor tail: returns the cached stream-info list for the given
// track type; trackType is asserted in range before indexing.
468 QPlatformMediaPlayer::TrackType trackType)
const
470 Q_ASSERT(trackType < QPlatformMediaPlayer::NTrackTypes);
472 return m_streamMap[trackType];
static VideoTransformation streamTransformation(const AVStream *stream)
static bool colorTransferSupportsHdr(const AVStream *stream)
static void insertMediaData(QMediaMetaData &metaData, QPlatformMediaPlayer::TrackType trackType, const AVStream *stream)
static std::optional< TrackDuration > streamDuration(const AVStream &stream)
static QTransform displayMatrixToTransform(const int32_t *displayMatrix)
#define qCWarning(category,...)
#define qCDebug(category,...)
#define Q_STATIC_LOGGING_CATEGORY(name,...)
virtual bool isCancelled() const =0