4#include "playbackengine/qffmpegmediadataholder_p.h"
13#include <QtMultimedia/qplaybackoptions.h>
14#include <QtMultimedia/private/qmediametadata_p.h>
19#include <QtCore/private/qminimalflatset_p.h>
22#include "libavutil/display.h"
// Fragment of streamDuration(const AVStream &): derives a track duration.
// A positive AVStream::duration is authoritative and is converted directly.
33 if (stream.duration > 0)
34 return toTrackDuration(AVStreamDuration(stream.duration), &stream);
// Negative durations other than AV_NOPTS_VALUE (which merely means
// "unknown") are malformed; warn and fall back to the stream metadata.
39 if (stream.duration < 0 && stream.duration != AV_NOPTS_VALUE) {
40 qCWarning(qLcMediaDataHolder) <<
"AVStream duration" << stream.duration
41 <<
"is invalid. Taking it from the metadata";
// Fallback: parse the "DURATION" metadata tag as a QTime and convert
// milliseconds-since-midnight to the 1000x finer TrackDuration unit.
44 if (
const auto duration = av_dict_get(stream.metadata,
"DURATION",
nullptr, 0)) {
45 const auto time = QTime::fromString(QString::fromUtf8(duration->value));
46 return TrackDuration(qint64(1000) * time.msecsSinceStartOfDay());
// Fragment of displayMatrixToTransform(): builds a QTransform from the
// int32 display matrix attached to a video stream's side data.
71 auto toRotateMirrorValue = [displayMatrix](
int index) {
74 return displayMatrix[index];
// Only matrix entries 0, 1, 3 and 4 (the rotation/mirror 2x2 part) are
// visibly fed into the QTransform in this fragment.
77 return QTransform(toRotateMirrorValue(0), toRotateMirrorValue(1),
78 toRotateMirrorValue(3), toRotateMirrorValue(4),
// Fragment of streamTransformation(const AVStream *): reads the stream's
// AV_PKT_DATA_DISPLAYMATRIX side data and converts it into a video
// transformation (rotation/mirroring).
86 using SideDataSize =
decltype(AVPacketSideData::size);
// A display matrix is nine int32 values (3x3); anything smaller is invalid.
87 constexpr SideDataSize displayMatrixSize =
sizeof(int32_t) * 9;
88 const AVPacketSideData *sideData = streamSideData(stream, AV_PKT_DATA_DISPLAYMATRIX);
89 if (!sideData || sideData->size < displayMatrixSize)
// NOTE(review): reinterpret_cast relies on FFmpeg storing this side data
// as a raw int32_t[9] blob — matches the size check above.
92 const auto displayMatrix =
reinterpret_cast<
const int32_t *>(sideData->data);
93 const QTransform transform = displayMatrixToTransform(displayMatrix);
94 const VideoTransformationOpt result = qVideoTransformationFromMatrix(transform);
// Warn when the matrix does not decompose into a supported rotate/mirror.
97 <<
"Video stream contains malformed display matrix" << transform;
// Fragment of colorTransferSupportsHdr(const AVStream *): true when the
// stream's color transfer characteristic is one of the HDR functions.
108 const AVCodecParameters *codecPar = stream->codecpar;
112 const QVideoFrameFormat::ColorTransfer colorTransfer = fromAvColorTransfer(codecPar->color_trc);
// ST2084 (PQ) and STD-B67 (HLG) are the two HDR transfer functions checked.
117 return colorTransfer == QVideoFrameFormat::ColorTransfer_ST2084
118 || colorTransfer == QVideoFrameFormat::ColorTransfer_STD_B67;
// Fragment: looks up the currently selected video AVStream and returns its
// rotation/mirror transformation via streamTransformation().
127 const int streamIndex = m_currentAVStreamIndex[QPlatformMediaPlayer::VideoStream];
131 return streamTransformation(m_context->streams[streamIndex]);
// Fragment of avContext(): exposes the held AVFormatContext (non-owning).
136 return m_context.get();
// Fragment of currentStreamIndex(trackType): the AVStream index selected
// for the track type — presumably negative when none is set; TODO confirm.
141 return m_currentAVStreamIndex[trackType];
// Fragment of trackTypeFromMediaType(): maps FFmpeg AVMediaType values to
// QPlatformMediaPlayer track types.
177 case AVMEDIA_TYPE_AUDIO:
178 return QPlatformMediaPlayer::AudioStream;
179 case AVMEDIA_TYPE_VIDEO:
180 return QPlatformMediaPlayer::VideoStream;
181 case AVMEDIA_TYPE_SUBTITLE:
182 return QPlatformMediaPlayer::SubtitleStream;
// Sentinel: NTrackTypes signals "not a playable track" to callers.
184 return QPlatformMediaPlayer::NTrackTypes;
// loadMedia(): opens a media source — a URL or a caller-supplied QIODevice —
// through FFmpeg's avformat API, applying QPlaybackOptions (network timeout,
// probe size, low-latency intent). Returns the opened AVFormatContext or a
// MediaDataHolder::ContextError. Fragment view: several lines of the
// original implementation are elided between the visible statements.
189q23::expected<AVFormatContextUPtr, MediaDataHolder::ContextError>
190loadMedia(
const QUrl &mediaUrl, QIODevice *stream,
const QPlaybackOptions &playbackOptions,
193 using std::chrono::duration_cast;
194 using std::chrono::microseconds;
195 using std::chrono::milliseconds;
197 const QByteArray url = mediaUrl.toString(QUrl::PreferLocalFile).toUtf8();
199 AVFormatContextUPtr context{ avformat_alloc_context() };
// Device-based playback: make sure the QIODevice is readable before wiring
// it into a custom AVIO context.
202 if (!stream->isOpen()) {
203 if (!stream->open(QIODevice::ReadOnly))
204 return q23::unexpected{
205 MediaDataHolder::ContextError{
206 QMediaPlayer::ResourceError,
207 QLatin1String(
"Could not open source device."),
212 auto seek = &seekQIODevice;
// Sequential devices cannot seek; the context is flagged unseekable instead.
214 if (!stream->isSequential()) {
217 context->ctx_flags |= AVFMTCTX_UNSEEKABLE;
// Custom AVIO layer: FFmpeg reads (and optionally seeks) via the QIODevice;
// the av_malloc'ed buffer is handed over to avio_alloc_context.
221 constexpr int bufferSize = 32768;
222 unsigned char *buffer = (
unsigned char *)av_malloc(bufferSize);
223 context->pb = avio_alloc_context(buffer, bufferSize,
false, stream, &readQIODevice,
nullptr,
227 AVDictionaryHolder dict;
// RTMP-family schemes are excluded from the generic "timeout" option below
// (see setNetworkTimeout); the flat set enumerates those URL schemes.
228 using RtmpProtocols =
229 QMinimalVarLengthFlatSet<std::basic_string_view<
char16_t>, 6, std::less<>>;
231 static const RtmpProtocols rtmpProtocols{
232 u"rtmp", u"rtmpe", u"rtmps", u"rtmpt", u"rtmpse", u"rtmpte",
239 const bool setNetworkTimeout = !rtmpProtocols.contains(mediaUrl.scheme());
241 if (setNetworkTimeout) {
// FFmpeg's "timeout" dictionary option is expressed in microseconds.
242 const milliseconds timeout = playbackOptions.networkTimeout();
243 av_dict_set_int(dict,
"timeout", duration_cast<microseconds>(timeout).count(), 0);
244 qCDebug(qLcMediaDataHolder) <<
"Using custom network timeout:" << timeout;
// Probe size: only forward values FFmpeg accepts (its minimum is 32 bytes);
// -1 means "use FFmpeg's default".
248 const int probeSize = playbackOptions.probeSize();
249 if (probeSize != -1) {
250 constexpr int minProbeSizeFFmpeg = 32;
251 if (probeSize >= minProbeSizeFFmpeg) {
252 av_dict_set_int(dict,
"probesize", probeSize, 0);
253 qCDebug(qLcMediaDataHolder) <<
"Using custom probesize" << probeSize;
255 qCWarning(qLcMediaDataHolder) <<
"Invalid probe size, using default";
// Environment escape hatch to restrict which FFmpeg protocols may be used.
259 const QByteArray protocolWhitelist = qgetenv(
"QT_FFMPEG_PROTOCOL_WHITELIST");
260 if (!protocolWhitelist.isNull())
261 av_dict_set(dict,
"protocol_whitelist", protocolWhitelist.data(), 0);
// Low-latency streaming: disable demuxer buffering and flush every packet.
263 if (playbackOptions.playbackIntent() == QPlaybackOptions::PlaybackIntent::LowLatencyStreaming) {
264 av_dict_set(dict,
"fflags",
"nobuffer", 0);
265 av_dict_set_int(dict,
"flush_packets", 1, 0);
266 qCDebug(qLcMediaDataHolder) <<
"Enabled low latency streaming";
// NOTE(review): presumably works around an http_persistent issue fixed in
// libavformat >= 62.12.100 — confirm against the FFmpeg changelog.
272 if (avformat_version() < AV_VERSION_INT(62, 12, 100))
273 av_dict_set_int(dict,
"http_persistent", 0, 0);
// Interrupt callback: lets the cancel token abort blocking I/O inside
// FFmpeg. The opaque pointer must stay valid for the open/read calls.
275 context->interrupt_callback.opaque = cancelToken.get();
276 context->interrupt_callback.callback = [](
void *opaque) {
277 const auto *cancelToken =
static_cast<
const ICancelToken *>(opaque);
// avformat_open_input() frees the context itself on failure, so ownership
// is released before the call and re-acquired afterwards.
285 AVFormatContext *contextRaw = context.release();
286 ret = avformat_open_input(&contextRaw, url.constData(),
nullptr, dict);
287 context.reset(contextRaw);
// Map common FFmpeg error codes onto QMediaPlayer error categories.
291 auto code = QMediaPlayer::ResourceError;
292 if (ret == AVERROR(EACCES))
293 code = QMediaPlayer::AccessDeniedError;
294 else if (ret == AVERROR(EINVAL) || ret == AVERROR_INVALIDDATA)
295 code = QMediaPlayer::FormatError;
298 <<
"Could not open media. FFmpeg error description:" << AVError(ret);
300 return q23::unexpected{
301 MediaDataHolder::ContextError{ code, QMediaPlayer::tr(
"Could not open file") },
// Probe the streams so codec parameters are populated before playback.
305 ret = avformat_find_stream_info(context.get(),
nullptr);
307 return q23::unexpected{
308 MediaDataHolder::ContextError{
309 QMediaPlayer::FormatError,
310 QMediaPlayer::tr(
"Could not find stream information for media file") },
// Dump the demuxer layout to the log when info-level logging is enabled.
314 if (qLcMediaDataHolder().isInfoEnabled())
315 av_dump_format(context.get(), 0, url.constData(), 0);
// Fragment of the MediaDataHolder constructor: loads the media, then walks
// every AVStream to build per-track stream maps, durations and defaults.
324 const QPlaybackOptions &options,
327 q23::expected context = loadMedia(url, stream, options, cancelToken);
// Propagate loadMedia()'s ContextError unchanged to the caller.
333 return q23::unexpected{ context.error() };
338 : m_cancelToken{ cancelToken }
342 m_context = std::move(context);
// Seekability derives from the flag set while opening the AVIO context.
343 m_isSeekable = !(m_context->ctx_flags & AVFMTCTX_UNSEEKABLE);
345 for (
unsigned int i = 0; i < m_context->nb_streams; ++i) {
347 const auto *stream = m_context->streams[i];
348 const auto trackType = trackTypeFromMediaType(stream->codecpar->codec_type);
// Skip data/unknown streams (NTrackTypes) ...
350 if (trackType == QPlatformMediaPlayer::NTrackTypes)
// ... and attached pictures, which are cover art, not a playable track.
353 if (stream->disposition & AV_DISPOSITION_ATTACHED_PIC)
// A non-positive time base cannot be used for timestamp conversion.
356 if (stream->time_base.num <= 0 || stream->time_base.den <= 0) {
358 qCWarning(qLcMediaDataHolder) <<
"A stream for the track type" << trackType
359 <<
"has an invalid timebase:" << stream->time_base;
363 auto metaData = QFFmpegMetaData::fromAVMetaData(stream->metadata);
364 const bool isDefault = stream->disposition & AV_DISPOSITION_DEFAULT;
366 if (trackType != QPlatformMediaPlayer::SubtitleStream) {
// Remember the first AV_DISPOSITION_DEFAULT stream per track type.
369 if (isDefault && m_requestedStreams[trackType] < 0)
370 m_requestedStreams[trackType] = m_streamMap[trackType].size();
// Track the longest stream duration and also publish it per stream.
373 if (
auto duration = streamDuration(*stream)) {
374 m_duration = qMax(m_duration, *duration);
375 metaData.insert(QMediaMetaData::Duration, toUserDuration(*duration).get());
378 m_streamMap[trackType].append({ (
int)i, isDefault, metaData });
// Fall back to the container-level duration when no stream reported one.
383 if (m_duration == TrackDuration(0) && m_context->duration > 0ll) {
384 m_duration = toTrackDuration(AVContextDuration(m_context->duration));
// Resolve the initially selected video and audio stream indices.
387 for (
auto trackType :
388 { QPlatformMediaPlayer::VideoStream, QPlatformMediaPlayer::AudioStream }) {
389 auto &requestedStream = m_requestedStreams[trackType];
390 auto &streamMap = m_streamMap[trackType];
// No default-flagged stream present: pick one (selection logic elided here).
392 if (requestedStream < 0 && !streamMap.empty())
395 if (requestedStream >= 0)
396 m_currentAVStreamIndex[trackType] = streamMap[requestedStream].avStreamIndex;
405
406
407
408
409
410
// Scans the container's streams for an attached picture (cover art) and
// decodes it into a QImage. Fragment view: loop plumbing partially elided.
411QImage getAttachedPicture(
const AVFormatContext *context)
416 for (
unsigned int i = 0; i < context->nb_streams; ++i) {
417 const AVStream* stream = context->streams[i];
// Only streams flagged AV_DISPOSITION_ATTACHED_PIC carry cover art.
418 if (!stream || !(stream->disposition & AV_DISPOSITION_ATTACHED_PIC))
421 const AVPacket *compressedImage = &stream->attached_pic;
// Guard against empty or corrupt attached packets before decoding.
422 if (!compressedImage || !compressedImage->data || compressedImage->size <= 0)
// QImage::fromData() probes the compressed bytes' format itself.
427 QImage image = QImage::fromData({ compressedImage->data, compressedImage->size });
// Fragment of updateMetaData(): rebuilds m_metaData from the container-level
// AVMetadata, then adds file format, duration, cover art and stream data.
444 m_metaData = QFFmpegMetaData::fromAVMetaData(m_context->metadata);
445 m_metaData.insert(QMediaMetaData::FileFormat,
446 QVariant::fromValue(QFFmpegMediaFormatInfo::fileFormatForAVInputFormat(
447 *m_context->iformat)));
448 m_metaData.insert(QMediaMetaData::Duration, toUserDuration(m_duration).get());
// Decode the attached picture once and cache it; decoding can be costly.
450 if (!m_cachedThumbnail.has_value())
451 m_cachedThumbnail = getAttachedPicture(m_context.get());
453 QtMultimediaPrivate::setCoverArtImage(m_metaData, *m_cachedThumbnail);
// Merge stream-specific metadata for the active audio and video streams.
455 for (
auto trackType :
456 { QPlatformMediaPlayer::AudioStream, QPlatformMediaPlayer::VideoStream }) {
457 const auto streamIndex = m_currentAVStreamIndex[trackType];
458 if (streamIndex >= 0)
459 insertMediaData(m_metaData, trackType, m_context->streams[streamIndex]);
// Fragment of streamInfo(trackType): returns the cached stream list for a
// valid (asserted in-range) track type.
493 QPlatformMediaPlayer::TrackType trackType)
const
495 Q_ASSERT(trackType < QPlatformMediaPlayer::NTrackTypes);
497 return m_streamMap[trackType];
// NOTE(review): the lines below appear to be documentation-extraction
// residue (bare declarations and logging-macro signatures), not part of the
// translation unit proper; retained verbatim.
static VideoTransformation streamTransformation(const AVStream *stream)
static bool colorTransferSupportsHdr(const AVStream *stream)
static void insertMediaData(QMediaMetaData &metaData, QPlatformMediaPlayer::TrackType trackType, const AVStream *stream)
static std::optional< TrackDuration > streamDuration(const AVStream &stream)
std::conditional_t< QT_FFMPEG_AVIO_WRITE_CONST, const uint8_t *, uint8_t * > AvioWriteBufferType
static QTransform displayMatrixToTransform(const int32_t *displayMatrix)
#define qCWarning(category,...)
#define qCDebug(category,...)
#define Q_STATIC_LOGGING_CATEGORY(name,...)
virtual bool isCancelled() const =0