4#include "playbackengine/qffmpegmediadataholder_p.h"
17#include "libavutil/display.h"
    const auto &factor = stream.time_base;

    if (stream.duration > 0 && factor.num > 0 && factor.den > 0) {
        return qint64(1000000) * stream.duration * factor.num / factor.den;
    }
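    // FFmpeg sometimes reports a negative (invalid) stream duration; warn and
    // fall back to the "DURATION" metadata tag below.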
    if (stream.duration < 0) {
        qCWarning(qLcMediaDataHolder) << "AVStream duration" << stream.duration
                                      << "is invalid. Taking it from the metadata";
    }
    if (const auto duration = av_dict_get(stream.metadata, "DURATION", nullptr, 0)) {
        const auto time = QTime::fromString(QString::fromUtf8(duration->value));
        return qint64(1000) * time.msecsSinceStartOfDay();
    }
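    // FFmpeg stores stream rotation/mirroring in a 3x3 fixed-point display matrix;
    // only its rotate/mirror components are mapped onto a QTransform here.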
    auto toRotateMirrorValue = [displayMatrix](int index) {
        return displayMatrix[index];
    };

    return QTransform(toRotateMirrorValue(0), toRotateMirrorValue(1),
                      toRotateMirrorValue(3), toRotateMirrorValue(4),
                      0, 0);
    using SideDataSize = decltype(AVPacketSideData::size);
    constexpr SideDataSize displayMatrixSize = sizeof(int32_t) * 9;
    const AVPacketSideData *sideData = streamSideData(stream, AV_PKT_DATA_DISPLAYMATRIX);
    if (!sideData || sideData->size < displayMatrixSize)
        return {};

    const auto displayMatrix = reinterpret_cast<const int32_t *>(sideData->data);
    const QTransform transform = displayMatrixToTransform(displayMatrix);
    const VideoTransformationOpt result = qVideoTransformationFromMatrix(transform);
    if (!result) {
        qCWarning(qLcMediaDataHolder)
                << "Video stream contains malformed display matrix" << transform;
        return {};
    }
    const AVCodecParameters *codecPar = stream->codecpar;
    const QVideoFrameFormat::ColorTransfer colorTransfer = fromAvColorTransfer(codecPar->color_trc);
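    // ST 2084 (PQ) and ARIB STD-B67 (HLG) transfer functions indicate HDR content.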
    return colorTransfer == QVideoFrameFormat::ColorTransfer_ST2084
            || colorTransfer == QVideoFrameFormat::ColorTransfer_STD_B67;
    const int streamIndex = m_currentAVStreamIndex[QPlatformMediaPlayer::VideoStream];
    if (streamIndex < 0)
        return {};

    return streamTransformation(m_context->streams[streamIndex]);
    return m_context.get();
    return m_currentAVStreamIndex[trackType];
    case AVMEDIA_TYPE_AUDIO:
        return QPlatformMediaPlayer::AudioStream;
    case AVMEDIA_TYPE_VIDEO:
        return QPlatformMediaPlayer::VideoStream;
    case AVMEDIA_TYPE_SUBTITLE:
        return QPlatformMediaPlayer::SubtitleStream;
    default:
        return QPlatformMediaPlayer::NTrackTypes;
187QMaybe<AVFormatContextUPtr, MediaDataHolder::ContextError>
188loadMedia(
const QUrl &mediaUrl, QIODevice *stream,
const std::shared_ptr<ICancelToken> &cancelToken)
    const QByteArray url = mediaUrl.toString(QUrl::PreferLocalFile).toUtf8();

    AVFormatContextUPtr context{ avformat_alloc_context() };
195 if (!stream->isOpen()) {
196 if (!stream->open(QIODevice::ReadOnly))
197 return MediaDataHolder::ContextError{
198 QMediaPlayer::ResourceError, QLatin1String(
"Could not open source device.")
201 if (!stream->isSequential())
        constexpr int bufferSize = 32768;
        unsigned char *buffer = (unsigned char *)av_malloc(bufferSize);
        context->pb = avio_alloc_context(buffer, bufferSize, false, stream, &readQIODevice,
                                         nullptr, &seekQIODevice);
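    // Network timeout: "5000000" microseconds (5 seconds) is passed to FFmpeg's
    // "timeout" option below.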
    constexpr auto NetworkTimeoutUs = "5000000";
    av_dict_set(dict, "timeout", NetworkTimeoutUs, 0);
    const QByteArray protocolWhitelist = qgetenv("QT_FFMPEG_PROTOCOL_WHITELIST");
    if (!protocolWhitelist.isNull())
        av_dict_set(dict, "protocol_whitelist", protocolWhitelist.data(), 0);
218 context->interrupt_callback.opaque = cancelToken.get();
219 context->interrupt_callback.callback = [](
void *opaque) {
220 const auto *cancelToken =
static_cast<
const ICancelToken *>(opaque);
    AVFormatContext *contextRaw = context.release();
    ret = avformat_open_input(&contextRaw, url.constData(), nullptr, dict);
    context.reset(contextRaw);
    if (ret < 0) {
        auto code = QMediaPlayer::ResourceError;
        if (ret == AVERROR(EACCES))
            code = QMediaPlayer::AccessDeniedError;
        else if (ret == AVERROR(EINVAL) || ret == AVERROR_INVALIDDATA)
            code = QMediaPlayer::FormatError;

        return MediaDataHolder::ContextError{ code, QMediaPlayer::tr("Could not open file") };
    }
    ret = avformat_find_stream_info(context.get(), nullptr);
    if (ret < 0) {
        return MediaDataHolder::ContextError{
            QMediaPlayer::FormatError,
            QMediaPlayer::tr("Could not find stream information for media file")
        };
    }

    av_dump_format(context.get(), 0, url.constData(), 0);
        const std::shared_ptr<ICancelToken> &cancelToken)
{
    QMaybe context = loadMedia(url, stream, cancelToken);
    if (context)
        return QSharedPointer<MediaDataHolder>{ new MediaDataHolder{ std::move(context.value()),
                                                                     cancelToken } };

    return context.error();
}
        const std::shared_ptr<ICancelToken> &cancelToken)
{
    m_context = std::move(context);
    m_isSeekable = !(m_context->ctx_flags & AVFMTCTX_UNSEEKABLE);
    for (unsigned int i = 0; i < m_context->nb_streams; ++i) {
        const auto *stream = m_context->streams[i];
        const auto trackType = trackTypeFromMediaType(stream->codecpar->codec_type);
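        // Skip streams that do not map to a known track type; attached pictures
        // (cover art) are handled separately and are not selectable tracks.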
        if (trackType == QPlatformMediaPlayer::NTrackTypes)
            continue;

        if (stream->disposition & AV_DISPOSITION_ATTACHED_PIC)
            continue;
        auto metaData = QFFmpegMetaData::fromAVMetaData(stream->metadata);
        const bool isDefault = stream->disposition & AV_DISPOSITION_DEFAULT;

        if (trackType != QPlatformMediaPlayer::SubtitleStream) {
            insertMediaData(metaData, trackType, stream);

            if (isDefault && m_requestedStreams[trackType] < 0)
                m_requestedStreams[trackType] = m_streamMap[trackType].size();
        }
        if (auto duration = streamDuration(*stream)) {
            m_duration = qMax(m_duration, *duration);
            metaData.insert(QMediaMetaData::Duration, *duration / qint64(1000));
        }

        m_streamMap[trackType].append({ (int)i, isDefault, metaData });
    }
    if (m_duration == 0 && m_context->duration > 0ll) {
        m_duration = m_context->duration;
    }
    for (auto trackType :
         { QPlatformMediaPlayer::VideoStream, QPlatformMediaPlayer::AudioStream }) {
        auto &requestedStream = m_requestedStreams[trackType];
        auto &streamMap = m_streamMap[trackType];

        if (requestedStream < 0 && !streamMap.empty())
            requestedStream = 0;

        if (requestedStream >= 0)
            m_currentAVStreamIndex[trackType] = streamMap[requestedStream].avStreamIndex;
    }
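// Returns the first decodable attached picture (e.g. embedded cover art) found
// in the context's streams, or a null QImage if there is none.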
QImage getAttachedPicture(const AVFormatContext *context)
{
    for (unsigned int i = 0; i < context->nb_streams; ++i) {
        const AVStream *stream = context->streams[i];
        if (!stream || !(stream->disposition & AV_DISPOSITION_ATTACHED_PIC))
            continue;

        const AVPacket *compressedImage = &stream->attached_pic;
        if (!compressedImage || !compressedImage->data || compressedImage->size <= 0)
            continue;

        QImage image = QImage::fromData({ compressedImage->data, compressedImage->size });
    m_metaData = QFFmpegMetaData::fromAVMetaData(m_context->metadata);
    m_metaData.insert(QMediaMetaData::FileFormat,
                      QVariant::fromValue(QFFmpegMediaFormatInfo::fileFormatForAVInputFormat(
                              m_context->iformat)));
    m_metaData.insert(QMediaMetaData::Duration, m_duration / qint64(1000));
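    // The attached picture is decoded only once and cached for subsequent metadata updates.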
    if (!m_cachedThumbnail.has_value())
        m_cachedThumbnail = getAttachedPicture(m_context.get());

    if (!m_cachedThumbnail->isNull())
        m_metaData.insert(QMediaMetaData::ThumbnailImage, m_cachedThumbnail.value());
    for (auto trackType :
         { QPlatformMediaPlayer::AudioStream, QPlatformMediaPlayer::VideoStream }) {
        const auto streamIndex = m_currentAVStreamIndex[trackType];
        if (streamIndex >= 0)
            insertMediaData(m_metaData, trackType, m_context->streams[streamIndex]);
    }
    if (streamNumber < 0 || streamNumber >= m_streamMap[type].size())
        streamNumber = -1;

    if (m_requestedStreams[type] == streamNumber)
        return false;

    m_requestedStreams[type] = streamNumber;
    const int avStreamIndex = m_streamMap[type].value(streamNumber).avStreamIndex;

    const int oldIndex = m_currentAVStreamIndex[type];
    qCDebug(qLcMediaDataHolder) << ">>>>> change track" << type << "from" << oldIndex << "to"
                                << avStreamIndex;

    m_currentAVStreamIndex[type] = avStreamIndex;
    return type < QPlatformMediaPlayer::NTrackTypes ? m_requestedStreams[type] : -1;
        QPlatformMediaPlayer::TrackType trackType) const
{
    Q_ASSERT(trackType < QPlatformMediaPlayer::NTrackTypes);

    return m_streamMap[trackType];
}