4#include "playbackengine/qffmpegmediadataholder_p.h"
13#include <QtMultimedia/qplaybackoptions.h>
19#include "libavutil/display.h"
// Fragment of streamDuration(const AVStream &): determines a track's duration.
// NOTE(review): lines are missing from this extract; comments describe only
// the visible code.
// A positive AVStream::duration is trusted as-is and converted to the
// track-duration unit.
30 if (stream.duration > 0)
31 return toTrackDuration(AVStreamDuration(stream.duration), &stream);
// A negative duration (other than the AV_NOPTS_VALUE sentinel) is malformed;
// warn and fall back to the stream's "DURATION" metadata tag below.
36 if (stream.duration < 0 && stream.duration != AV_NOPTS_VALUE) {
37 qCWarning(qLcMediaDataHolder) <<
"AVStream duration" << stream.duration
38 <<
"is invalid. Taking it from the metadata";
// Fallback: parse the "DURATION" metadata tag as a time-of-day string.
41 if (
const auto duration = av_dict_get(stream.metadata,
"DURATION",
nullptr, 0)) {
42 const auto time = QTime::fromString(QString::fromUtf8(duration->value));
// 1000 * msecs-since-midnight — presumably TrackDuration is in
// microseconds; TODO confirm against the TrackDuration definition.
43 return TrackDuration(qint64(1000) * time.msecsSinceStartOfDay());
// Fragment of displayMatrixToTransform(): builds a QTransform from FFmpeg's
// 3x3 display matrix (flat int32_t array).
// NOTE(review): lines are missing from this extract.
68 auto toRotateMirrorValue = [displayMatrix](
int index) {
// Helper returning the raw matrix entry at the given flat index.
71 return displayMatrix[index];
// Only matrix entries 0, 1, 3 and 4 (the rotation/mirror 2x2 sub-matrix)
// are visible here; the remaining QTransform arguments are outside this
// extract.
74 return QTransform(toRotateMirrorValue(0), toRotateMirrorValue(1),
75 toRotateMirrorValue(3), toRotateMirrorValue(4),
// Fragment of streamTransformation(): reads the AV_PKT_DATA_DISPLAYMATRIX
// side data of a stream and converts it to a video transformation.
// NOTE(review): lines are missing from this extract.
83 using SideDataSize =
decltype(AVPacketSideData::size);
// A display matrix is 9 int32 values (3x3); smaller side data is rejected.
84 constexpr SideDataSize displayMatrixSize =
sizeof(int32_t) * 9;
85 const AVPacketSideData *sideData = streamSideData(stream, AV_PKT_DATA_DISPLAYMATRIX);
86 if (!sideData || sideData->size < displayMatrixSize)
// The side-data blob is the raw int32 matrix; reinterpret accordingly.
89 const auto displayMatrix =
reinterpret_cast<
const int32_t *>(sideData->data);
90 const QTransform transform = displayMatrixToTransform(displayMatrix);
// The warning below reports matrices that could not be decomposed into a
// valid rotation/mirror transformation.
91 const VideoTransformationOpt result = qVideoTransformationFromMatrix(transform);
94 <<
"Video stream contains malformed display matrix" << transform;
// Fragment of colorTransferSupportsHdr(): true when the stream's color
// transfer characteristic is one of the HDR transfer functions.
105 const AVCodecParameters *codecPar = stream->codecpar;
109 const QVideoFrameFormat::ColorTransfer colorTransfer = fromAvColorTransfer(codecPar->color_trc);
// ST2084 (PQ) and STD-B67 (HLG) are the HDR transfer functions checked here.
114 return colorTransfer == QVideoFrameFormat::ColorTransfer_ST2084
115 || colorTransfer == QVideoFrameFormat::ColorTransfer_STD_B67;
// Fragment: looks up the currently selected video stream and returns the
// transformation derived from its display-matrix side data.
124 const int streamIndex = m_currentAVStreamIndex[QPlatformMediaPlayer::VideoStream];
128 return streamTransformation(m_context->streams[streamIndex]);
// Accessor fragment: exposes the raw AVFormatContext owned by m_context.
133 return m_context.get();
// Accessor fragment: the active AVStream index for the given track type.
138 return m_currentAVStreamIndex[trackType];
// Fragment of the AVMediaType -> QPlatformMediaPlayer::TrackType mapping;
// media types not listed here resolve to NTrackTypes ("no track").
174 case AVMEDIA_TYPE_AUDIO:
175 return QPlatformMediaPlayer::AudioStream;
176 case AVMEDIA_TYPE_VIDEO:
177 return QPlatformMediaPlayer::VideoStream;
178 case AVMEDIA_TYPE_SUBTITLE:
179 return QPlatformMediaPlayer::SubtitleStream;
181 return QPlatformMediaPlayer::NTrackTypes;
// loadMedia(): opens a media source (URL or custom QIODevice) with FFmpeg
// and yields the demuxer context, or a ContextError describing the failure.
// NOTE(review): lines are missing from this extract; comments cover only the
// visible code.
186q23::expected<AVFormatContextUPtr, MediaDataHolder::ContextError>
187loadMedia(
const QUrl &mediaUrl, QIODevice *stream,
const QPlaybackOptions &playbackOptions,
190 using std::chrono::duration_cast;
191 using std::chrono::microseconds;
192 using std::chrono::milliseconds;
// FFmpeg wants a C string; PreferLocalFile maps file:// URLs to plain paths.
194 const QByteArray url = mediaUrl.toString(QUrl::PreferLocalFile).toUtf8();
196 AVFormatContextUPtr context{ avformat_alloc_context() };
// Custom-IO path: ensure the device is readable before wiring it up.
199 if (!stream->isOpen()) {
200 if (!stream->open(QIODevice::ReadOnly))
201 return q23::unexpected{
202 MediaDataHolder::ContextError{
203 QMediaPlayer::ResourceError,
204 QLatin1String(
"Could not open source device."),
209 auto seek = &seekQIODevice;
// Sequential (non-seekable) devices: the context is flagged unseekable.
211 if (!stream->isSequential()) {
214 context->ctx_flags |= AVFMTCTX_UNSEEKABLE;
// 32 KiB AVIO buffer; per the avio API, ownership of the buffer passes to
// the AVIOContext.
218 constexpr int bufferSize = 32768;
219 unsigned char *buffer = (
unsigned char *)av_malloc(bufferSize);
220 context->pb = avio_alloc_context(buffer, bufferSize,
false, stream, &readQIODevice,
nullptr,
224 AVDictionaryHolder dict;
// The FFmpeg "timeout" option is passed in microseconds.
226 const milliseconds timeout = playbackOptions.networkTimeout();
227 av_dict_set_int(dict,
"timeout", duration_cast<microseconds>(timeout).count(), 0);
228 qCDebug(qLcMediaDataHolder) <<
"Using custom network timeout:" << timeout;
// Optional probe-size override; values below the FFmpeg minimum (32, per
// the constant below) are warned about and ignored.
232 const int probeSize = playbackOptions.probeSize();
233 if (probeSize != -1) {
234 constexpr int minProbeSizeFFmpeg = 32;
235 if (probeSize >= minProbeSizeFFmpeg) {
236 av_dict_set_int(dict,
"probesize", probeSize, 0);
237 qCDebug(qLcMediaDataHolder) <<
"Using custom probesize" << probeSize;
240 qCWarning(qLcMediaDataHolder) <<
"Invalid probe size, using default";
// Deployment escape hatch: extend FFmpeg's allowed protocol set via env.
244 const QByteArray protocolWhitelist = qgetenv(
"QT_FFMPEG_PROTOCOL_WHITELIST");
245 if (!protocolWhitelist.isNull())
246 av_dict_set(dict,
"protocol_whitelist", protocolWhitelist.data(), 0);
// Low-latency streaming: disable demuxer buffering and flush every packet.
248 if (playbackOptions.playbackIntent() == QPlaybackOptions::PlaybackIntent::LowLatencyStreaming) {
249 av_dict_set(dict,
"fflags",
"nobuffer", 0);
250 av_dict_set_int(dict,
"flush_packets", 1, 0);
251 qCDebug(qLcMediaDataHolder) <<
"Enabled low latency streaming";
// Cancellation: FFmpeg polls this callback during blocking operations.
254 context->interrupt_callback.opaque = cancelToken.get();
255 context->interrupt_callback.callback = [](
void *opaque) {
256 const auto *cancelToken =
static_cast<
const ICancelToken *>(opaque);
// avformat_open_input() frees and nulls the context on failure, so release
// ownership across the call and re-wrap whatever pointer comes back.
264 AVFormatContext *contextRaw = context.release();
265 ret = avformat_open_input(&contextRaw, url.constData(),
nullptr, dict);
266 context.reset(contextRaw);
// Map common FFmpeg errors onto QMediaPlayer error categories.
270 auto code = QMediaPlayer::ResourceError;
271 if (ret == AVERROR(EACCES))
272 code = QMediaPlayer::AccessDeniedError;
273 else if (ret == AVERROR(EINVAL) || ret == AVERROR_INVALIDDATA)
274 code = QMediaPlayer::FormatError;
277 <<
"Could not open media. FFmpeg error description:" << AVError(ret);
279 return q23::unexpected{
280 MediaDataHolder::ContextError{ code, QMediaPlayer::tr(
"Could not open file") },
// Probe the streams so codec parameters are populated before use.
284 ret = avformat_find_stream_info(context.get(),
nullptr);
286 return q23::unexpected{
287 MediaDataHolder::ContextError{
288 QMediaPlayer::FormatError,
289 QMediaPlayer::tr(
"Could not find stream information for media file") },
// Dump the demuxer layout when info-level logging is enabled.
293 if (qLcMediaDataHolder().isInfoEnabled())
294 av_dump_format(context.get(), 0, url.constData(), 0);
// Fragment of a MediaDataHolder factory: delegates to loadMedia() and
// propagates its ContextError on failure.
303 const QPlaybackOptions &options,
306 q23::expected context = loadMedia(url, stream, options, cancelToken);
312 return q23::unexpected{ context.error() };
// Fragment of the MediaDataHolder constructor: takes ownership of the opened
// context, scans all AVStreams to build per-track stream maps, resolves the
// default streams and computes the overall duration.
// NOTE(review): lines are missing from this extract.
317 : m_cancelToken{ cancelToken }
321 m_context = std::move(context);
// Seekability was decided while opening (AVFMTCTX_UNSEEKABLE flag).
322 m_isSeekable = !(m_context->ctx_flags & AVFMTCTX_UNSEEKABLE);
324 for (
unsigned int i = 0; i < m_context->nb_streams; ++i) {
326 const auto *stream = m_context->streams[i];
327 const auto trackType = trackTypeFromMediaType(stream->codecpar->codec_type);
// Skip media types we do not expose as tracks.
329 if (trackType == QPlatformMediaPlayer::NTrackTypes)
// Attached pictures (cover art) are not playable tracks.
332 if (stream->disposition & AV_DISPOSITION_ATTACHED_PIC)
// A non-positive timebase would corrupt timestamp conversions; warn.
335 if (stream->time_base.num <= 0 || stream->time_base.den <= 0) {
337 qCWarning(qLcMediaDataHolder) <<
"A stream for the track type" << trackType
338 <<
"has an invalid timebase:" << stream->time_base;
342 auto metaData = QFFmpegMetaData::fromAVMetaData(stream->metadata);
343 const bool isDefault = stream->disposition & AV_DISPOSITION_DEFAULT;
345 if (trackType != QPlatformMediaPlayer::SubtitleStream) {
// The first stream flagged AV_DISPOSITION_DEFAULT becomes the
// requested stream for its track type.
348 if (isDefault && m_requestedStreams[trackType] < 0)
349 m_requestedStreams[trackType] = m_streamMap[trackType].size();
// The longest per-stream duration becomes the media duration; it is
// also published in the track's metadata.
352 if (
auto duration = streamDuration(*stream)) {
353 m_duration = qMax(m_duration, *duration);
354 metaData.insert(QMediaMetaData::Duration, toUserDuration(*duration).get());
357 m_streamMap[trackType].append({ (
int)i, isDefault, metaData });
// Fall back to the container-level duration when no stream reported one.
362 if (m_duration == TrackDuration(0) && m_context->duration > 0ll) {
363 m_duration = toTrackDuration(AVContextDuration(m_context->duration));
// For audio/video: default to the first mapped stream when none was flagged
// default, then record the resolved AVStream index.
366 for (
auto trackType :
367 { QPlatformMediaPlayer::VideoStream, QPlatformMediaPlayer::AudioStream }) {
368 auto &requestedStream = m_requestedStreams[trackType];
369 auto &streamMap = m_streamMap[trackType];
371 if (requestedStream < 0 && !streamMap.empty())
374 if (requestedStream >= 0)
375 m_currentAVStreamIndex[trackType] = streamMap[requestedStream].avStreamIndex;
384
385
386
387
388
389
// getAttachedPicture(): scans the context's streams for attached-picture
// (cover art) data and decodes it into a QImage.
// NOTE(review): lines are missing from this extract.
390QImage getAttachedPicture(
const AVFormatContext *context)
395 for (
unsigned int i = 0; i < context->nb_streams; ++i) {
396 const AVStream* stream = context->streams[i];
397 if (!stream || !(stream->disposition & AV_DISPOSITION_ATTACHED_PIC))
// Guard against an absent or empty attached-picture packet.
400 const AVPacket *compressedImage = &stream->attached_pic;
401 if (!compressedImage || !compressedImage->data || compressedImage->size <= 0)
// QImage::fromData() auto-detects the compressed image format.
406 QImage image = QImage::fromData({ compressedImage->data, compressedImage->size });
// Fragment of updateMetaData(): rebuilds m_metaData from the container
// metadata, file format, duration, cached thumbnail and the currently
// selected audio/video streams.
423 m_metaData = QFFmpegMetaData::fromAVMetaData(m_context->metadata);
424 m_metaData.insert(QMediaMetaData::FileFormat,
425 QVariant::fromValue(QFFmpegMediaFormatInfo::fileFormatForAVInputFormat(
426 *m_context->iformat)));
427 m_metaData.insert(QMediaMetaData::Duration, toUserDuration(m_duration).get());
// Decode the attached picture at most once and cache the result.
429 if (!m_cachedThumbnail.has_value())
430 m_cachedThumbnail = getAttachedPicture(m_context.get());
432 if (!m_cachedThumbnail->isNull())
433 m_metaData.insert(QMediaMetaData::ThumbnailImage, m_cachedThumbnail.value());
// Per-track media data for the active audio and video streams.
435 for (
auto trackType :
436 { QPlatformMediaPlayer::AudioStream, QPlatformMediaPlayer::VideoStream }) {
437 const auto streamIndex = m_currentAVStreamIndex[trackType];
438 if (streamIndex >= 0)
439 insertMediaData(m_metaData, trackType, m_context->streams[streamIndex]);
// Fragment of streamInfo(trackType) const: returns the stream list collected
// for the given track type; the sentinel NTrackTypes is not a valid input.
473 QPlatformMediaPlayer::TrackType trackType)
const
475 Q_ASSERT(trackType < QPlatformMediaPlayer::NTrackTypes);
477 return m_streamMap[trackType];
// NOTE(review): the declarations below (missing semicolons, empty macro
// stubs) look like documentation-extraction residue rather than original
// file content — confirm against the real source before keeping them.
static VideoTransformation streamTransformation(const AVStream *stream)
static bool colorTransferSupportsHdr(const AVStream *stream)
static void insertMediaData(QMediaMetaData &metaData, QPlatformMediaPlayer::TrackType trackType, const AVStream *stream)
static std::optional< TrackDuration > streamDuration(const AVStream &stream)
std::conditional_t< QT_FFMPEG_AVIO_WRITE_CONST, const uint8_t *, uint8_t * > AvioWriteBufferType
static QTransform displayMatrixToTransform(const int32_t *displayMatrix)
#define qCWarning(category,...)
#define qCDebug(category,...)
#define Q_STATIC_LOGGING_CATEGORY(name,...)
virtual bool isCancelled() const =0