4#include "playbackengine/qffmpegmediadataholder_p.h"
13#include <QtMultimedia/qplaybackoptions.h>
14#include <QtMultimedia/private/qmediametadata_p.h>
20#include "libavutil/display.h"
// Determines a single track's duration from an AVStream.
// Strategy: trust AVStream::duration when it is positive; otherwise fall back
// to the textual "DURATION" tag in the stream's metadata dictionary.
// NOTE(review): fragment — enclosing signature (per the trailing declaration
// listing) is `static std::optional<TrackDuration> streamDuration(const AVStream &stream)`.
31 if (stream.duration > 0)
32 return toTrackDuration(AVStreamDuration(stream.duration), &stream);
// A negative duration that is not AV_NOPTS_VALUE is malformed data from the
// demuxer; warn, then fall through to the metadata-based fallback below.
37 if (stream.duration < 0 && stream.duration != AV_NOPTS_VALUE) {
38 qCWarning(qLcMediaDataHolder) <<
"AVStream duration" << stream.duration
39 <<
"is invalid. Taking it from the metadata";
// Fallback: some containers store the duration as a "DURATION" metadata tag
// (a textual time value) rather than in AVStream::duration.
42 if (
const auto duration = av_dict_get(stream.metadata,
"DURATION",
nullptr, 0)) {
// Parse the tag as a time-of-day string and scale msecs by 1000.
// NOTE(review): this implies TrackDuration counts microseconds — confirm
// against the TrackDuration definition elsewhere in the project.
43 const auto time = QTime::fromString(QString::fromUtf8(duration->value));
44 return TrackDuration(qint64(1000) * time.msecsSinceStartOfDay());
// Converts an FFmpeg display matrix (9 int32 values, see libavutil/display.h)
// into a QTransform. Only the 2x2 rotation/mirror sub-matrix (indices 0, 1, 3,
// 4) is consumed by the visible code; translation entries are not read here.
69 auto toRotateMirrorValue = [displayMatrix](
int index) {
// NOTE(review): returns the raw fixed-point matrix entry; any normalization
// of the 16.16 fixed-point scale is not visible in this fragment — confirm.
72 return displayMatrix[index];
75 return QTransform(toRotateMirrorValue(0), toRotateMirrorValue(1),
76 toRotateMirrorValue(3), toRotateMirrorValue(4),
// Derives the video transformation (rotation/mirroring) for a stream from its
// AV_PKT_DATA_DISPLAYMATRIX packet side data, if present and well-formed.
84 using SideDataSize =
decltype(AVPacketSideData::size);
// A display matrix is a 3x3 grid of int32 values.
85 constexpr SideDataSize displayMatrixSize =
sizeof(int32_t) * 9;
86 const AVPacketSideData *sideData = streamSideData(stream, AV_PKT_DATA_DISPLAYMATRIX);
// Bail out when the side data is absent or too small to hold a full matrix.
87 if (!sideData || sideData->size < displayMatrixSize)
90 const auto displayMatrix =
reinterpret_cast<
const int32_t *>(sideData->data);
91 const QTransform transform = displayMatrixToTransform(displayMatrix);
// qVideoTransformationFromMatrix yields an empty optional when the matrix does
// not decompose into a supported rotate/mirror combination.
92 const VideoTransformationOpt result = qVideoTransformationFromMatrix(transform);
95 <<
"Video stream contains malformed display matrix" << transform;
// Reports whether the stream's color transfer characteristic indicates HDR
// content: PQ (SMPTE ST 2084) or HLG (ARIB STD-B67).
106 const AVCodecParameters *codecPar = stream->codecpar;
110 const QVideoFrameFormat::ColorTransfer colorTransfer = fromAvColorTransfer(codecPar->color_trc);
115 return colorTransfer == QVideoFrameFormat::ColorTransfer_ST2084
116 || colorTransfer == QVideoFrameFormat::ColorTransfer_STD_B67;
// Returns the transformation of the currently selected video stream by
// delegating to streamTransformation() for that stream.
125 const int streamIndex = m_currentAVStreamIndex[QPlatformMediaPlayer::VideoStream];
129 return streamTransformation(m_context->streams[streamIndex]);
// Accessor: raw (non-owning) pointer to the AVFormatContext held by m_context.
134 return m_context.get();
// Accessor: the AVStream index currently selected for the given track type.
139 return m_currentAVStreamIndex[trackType];
// Maps an FFmpeg AVMediaType onto the corresponding Qt track type.
// Unhandled media types (data, attachment, ...) fall through to NTrackTypes,
// which callers use as an "ignore this stream" sentinel.
175 case AVMEDIA_TYPE_AUDIO:
176 return QPlatformMediaPlayer::AudioStream;
177 case AVMEDIA_TYPE_VIDEO:
178 return QPlatformMediaPlayer::VideoStream;
179 case AVMEDIA_TYPE_SUBTITLE:
180 return QPlatformMediaPlayer::SubtitleStream;
182 return QPlatformMediaPlayer::NTrackTypes;
// Opens the media source (URL or QIODevice) with FFmpeg and returns a fully
// probed AVFormatContext, or a ContextError describing why it failed.
// Applies playback options (network timeout, probe size, low-latency flags)
// via an AVDictionary before avformat_open_input().
187q23::expected<AVFormatContextUPtr, MediaDataHolder::ContextError>
188loadMedia(
const QUrl &mediaUrl, QIODevice *stream,
const QPlaybackOptions &playbackOptions,
191 using std::chrono::duration_cast;
192 using std::chrono::microseconds;
193 using std::chrono::milliseconds;
195 const QByteArray url = mediaUrl.toString(QUrl::PreferLocalFile).toUtf8();
197 AVFormatContextUPtr context{ avformat_alloc_context() };
// --- QIODevice source path: open the device if needed and wire up custom I/O.
200 if (!stream->isOpen()) {
201 if (!stream->open(QIODevice::ReadOnly))
202 return q23::unexpected{
203 MediaDataHolder::ContextError{
204 QMediaPlayer::ResourceError,
205 QLatin1String(
"Could not open source device."),
210 auto seek = &seekQIODevice;
// Sequential devices cannot seek; mark the context unseekable in that case.
212 if (!stream->isSequential()) {
215 context->ctx_flags |= AVFMTCTX_UNSEEKABLE;
// Custom AVIO context bridging FFmpeg reads/seeks to the QIODevice. The
// buffer is owned by FFmpeg once handed to avio_alloc_context.
219 constexpr int bufferSize = 32768;
220 unsigned char *buffer = (
unsigned char *)av_malloc(bufferSize);
221 context->pb = avio_alloc_context(buffer, bufferSize,
false, stream, &readQIODevice,
nullptr,
225 AVDictionaryHolder dict;
// RTMP-family protocols interpret the "timeout" option differently (listen
// timeout), so the network timeout is only set for non-RTMP schemes.
226 using RtmpProtocols = std::set<std::basic_string_view<
char16_t>, std::less<>>;
228 static const RtmpProtocols rtmpProtocols{
229 u"rtmp", u"rtmpe", u"rtmps", u"rtmpt", u"rtmpse", u"rtmpte",
236 const bool setNetworkTimeout = rtmpProtocols.find(mediaUrl.scheme()) == rtmpProtocols.end();
238 if (setNetworkTimeout) {
239 const milliseconds timeout = playbackOptions.networkTimeout();
// FFmpeg's "timeout" option is expressed in microseconds.
240 av_dict_set_int(dict,
"timeout", duration_cast<microseconds>(timeout).count(), 0);
241 qCDebug(qLcMediaDataHolder) <<
"Using custom network timeout:" << timeout;
// Custom probe size: -1 means "use FFmpeg's default"; FFmpeg rejects values
// below 32, so smaller requests are ignored with a warning.
245 const int probeSize = playbackOptions.probeSize();
246 if (probeSize != -1) {
247 constexpr int minProbeSizeFFmpeg = 32;
248 if (probeSize >= minProbeSizeFFmpeg) {
249 av_dict_set_int(dict,
"probesize", probeSize, 0);
250 qCDebug(qLcMediaDataHolder) <<
"Using custom probesize" << probeSize;
252 qCWarning(qLcMediaDataHolder) <<
"Invalid probe size, using default";
// Optional protocol whitelist from the environment, forwarded verbatim.
256 const QByteArray protocolWhitelist = qgetenv(
"QT_FFMPEG_PROTOCOL_WHITELIST");
257 if (!protocolWhitelist.isNull())
258 av_dict_set(dict,
"protocol_whitelist", protocolWhitelist.data(), 0);
// Low-latency streaming: disable demuxer buffering and flush packets eagerly.
260 if (playbackOptions.playbackIntent() == QPlaybackOptions::PlaybackIntent::LowLatencyStreaming) {
261 av_dict_set(dict,
"fflags",
"nobuffer", 0);
262 av_dict_set_int(dict,
"flush_packets", 1, 0);
263 qCDebug(qLcMediaDataHolder) <<
"Enabled low latency streaming";
// Workaround: disable persistent HTTP connections on older libavformat.
// NOTE(review): presumably tied to an upstream FFmpeg bug fixed in 62.12.100 —
// confirm against the accompanying commit message / bug report.
269 if (avformat_version() < AV_VERSION_INT(62, 12, 100))
270 av_dict_set_int(dict,
"http_persistent", 0, 0);
// Interrupt callback lets a cancel token abort blocking FFmpeg I/O.
272 context->interrupt_callback.opaque = cancelToken.get();
273 context->interrupt_callback.callback = [](
void *opaque) {
274 const auto *cancelToken =
static_cast<
const ICancelToken *>(opaque);
// avformat_open_input frees the context on failure, so ownership is released
// around the call and re-acquired afterwards (contextRaw is null on error).
282 AVFormatContext *contextRaw = context.release();
283 ret = avformat_open_input(&contextRaw, url.constData(),
nullptr, dict);
284 context.reset(contextRaw);
// Map FFmpeg error codes onto QMediaPlayer error categories.
288 auto code = QMediaPlayer::ResourceError;
289 if (ret == AVERROR(EACCES))
290 code = QMediaPlayer::AccessDeniedError;
291 else if (ret == AVERROR(EINVAL) || ret == AVERROR_INVALIDDATA)
292 code = QMediaPlayer::FormatError;
295 <<
"Could not open media. FFmpeg error description:" << AVError(ret);
297 return q23::unexpected{
298 MediaDataHolder::ContextError{ code, QMediaPlayer::tr(
"Could not open file") },
// Probe the streams; failure here means the container is unreadable.
302 ret = avformat_find_stream_info(context.get(),
nullptr);
304 return q23::unexpected{
305 MediaDataHolder::ContextError{
306 QMediaPlayer::FormatError,
307 QMediaPlayer::tr(
"Could not find stream information for media file") },
// Dump the demuxer's view of the media when info logging is enabled.
311 if (qLcMediaDataHolder().isInfoEnabled())
312 av_dump_format(context.get(), 0, url.constData(), 0);
// Factory: loads the media via loadMedia() and, on failure, propagates the
// loader's ContextError to the caller unchanged.
321 const QPlaybackOptions &options,
324 q23::expected context = loadMedia(url, stream, options, cancelToken);
330 return q23::unexpected{ context.error() };
// Constructor: takes ownership of a successfully opened AVFormatContext and
// builds the per-track stream maps, default-stream selection, and duration.
335 : m_cancelToken{ cancelToken }
339 m_context = std::move(context);
340 m_isSeekable = !(m_context->ctx_flags & AVFMTCTX_UNSEEKABLE);
// Walk every stream in the container and classify it.
342 for (
unsigned int i = 0; i < m_context->nb_streams; ++i) {
344 const auto *stream = m_context->streams[i];
345 const auto trackType = trackTypeFromMediaType(stream->codecpar->codec_type);
// Skip unsupported media types and attached pictures (cover art is handled
// separately via getAttachedPicture).
347 if (trackType == QPlatformMediaPlayer::NTrackTypes)
350 if (stream->disposition & AV_DISPOSITION_ATTACHED_PIC)
// A non-positive timebase cannot produce valid timestamps; warn about it.
353 if (stream->time_base.num <= 0 || stream->time_base.den <= 0) {
355 qCWarning(qLcMediaDataHolder) <<
"A stream for the track type" << trackType
356 <<
"has an invalid timebase:" << stream->time_base;
360 auto metaData = QFFmpegMetaData::fromAVMetaData(stream->metadata);
361 const bool isDefault = stream->disposition & AV_DISPOSITION_DEFAULT;
// For audio/video, remember the first stream flagged default as the
// requested stream for its track type.
363 if (trackType != QPlatformMediaPlayer::SubtitleStream) {
366 if (isDefault && m_requestedStreams[trackType] < 0)
367 m_requestedStreams[trackType] = m_streamMap[trackType].size();
// Track the longest per-stream duration and expose it in the metadata.
370 if (
auto duration = streamDuration(*stream)) {
371 m_duration = qMax(m_duration, *duration);
372 metaData.insert(QMediaMetaData::Duration, toUserDuration(*duration).get());
375 m_streamMap[trackType].append({ (
int)i, isDefault, metaData });
// Fallback: if no stream carried a duration, use the container-level one.
380 if (m_duration == TrackDuration(0) && m_context->duration > 0ll) {
381 m_duration = toTrackDuration(AVContextDuration(m_context->duration));
// Finalize stream selection: if nothing was flagged default, fall back to the
// first mapped stream, then record the chosen AVStream index per track type.
384 for (
auto trackType :
385 { QPlatformMediaPlayer::VideoStream, QPlatformMediaPlayer::AudioStream }) {
386 auto &requestedStream = m_requestedStreams[trackType];
387 auto &streamMap = m_streamMap[trackType];
389 if (requestedStream < 0 && !streamMap.empty())
392 if (requestedStream >= 0)
393 m_currentAVStreamIndex[trackType] = streamMap[requestedStream].avStreamIndex;
402
403
404
405
406
407
// Extracts the container's embedded cover art (the first stream flagged
// AV_DISPOSITION_ATTACHED_PIC) and decodes it into a QImage.
408QImage getAttachedPicture(
const AVFormatContext *context)
413 for (
unsigned int i = 0; i < context->nb_streams; ++i) {
414 const AVStream* stream = context->streams[i];
415 if (!stream || !(stream->disposition & AV_DISPOSITION_ATTACHED_PIC))
// The attached picture is delivered as a pre-filled packet on the stream;
// guard against empty or malformed packet data before decoding.
418 const AVPacket *compressedImage = &stream->attached_pic;
419 if (!compressedImage || !compressedImage->data || compressedImage->size <= 0)
// QImage::fromData probes the compressed bytes (PNG/JPEG/...) and decodes.
424 QImage image = QImage::fromData({ compressedImage->data, compressedImage->size });
// Rebuilds m_metaData from the container: format-level tags, file format,
// overall duration, cached cover art, and per-track media data for the
// currently selected audio and video streams.
441 m_metaData = QFFmpegMetaData::fromAVMetaData(m_context->metadata);
442 m_metaData.insert(QMediaMetaData::FileFormat,
443 QVariant::fromValue(QFFmpegMediaFormatInfo::fileFormatForAVInputFormat(
444 *m_context->iformat)));
445 m_metaData.insert(QMediaMetaData::Duration, toUserDuration(m_duration).get());
// The attached picture is decoded once and cached; later calls reuse it.
447 if (!m_cachedThumbnail.has_value())
448 m_cachedThumbnail = getAttachedPicture(m_context.get());
450 QtMultimediaPrivate::setCoverArtImage(m_metaData, *m_cachedThumbnail);
// Merge stream-level data (codec, resolution, ...) for each active track.
452 for (
auto trackType :
453 { QPlatformMediaPlayer::AudioStream, QPlatformMediaPlayer::VideoStream }) {
454 const auto streamIndex = m_currentAVStreamIndex[trackType];
455 if (streamIndex >= 0)
456 insertMediaData(m_metaData, trackType, m_context->streams[streamIndex]);
// Accessor: the list of mapped streams for a track type. The assert guards
// against the NTrackTypes sentinel being used as an array index.
490 QPlatformMediaPlayer::TrackType trackType)
const
492 Q_ASSERT(trackType < QPlatformMediaPlayer::NTrackTypes);
494 return m_streamMap[trackType];
static VideoTransformation streamTransformation(const AVStream *stream)
static bool colorTransferSupportsHdr(const AVStream *stream)
static void insertMediaData(QMediaMetaData &metaData, QPlatformMediaPlayer::TrackType trackType, const AVStream *stream)
static std::optional< TrackDuration > streamDuration(const AVStream &stream)
std::conditional_t< QT_FFMPEG_AVIO_WRITE_CONST, const uint8_t *, uint8_t * > AvioWriteBufferType
static QTransform displayMatrixToTransform(const int32_t *displayMatrix)
#define qCWarning(category,...)
#define qCDebug(category,...)
#define Q_STATIC_LOGGING_CATEGORY(name,...)
virtual bool isCancelled() const =0