7#include "private/qplatformaudiooutput_p.h"
8#include "private/qplatformvideosink_p.h"
9#include "private/qaudiobufferoutput_p.h"
11#include "playbackengine/qffmpegdemuxer_p.h"
12#include "playbackengine/qffmpegstreamdecoder_p.h"
13#include "playbackengine/qffmpegsubtitlerenderer_p.h"
14#include "playbackengine/qffmpegvideorenderer_p.h"
15#include "playbackengine/qffmpegaudiorenderer_p.h"
17#include <qloggingcategory.h>
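// Produces an array with one empty entry per track type (video, audio,
// subtitles); each entry is a null object pointer paired with a
// default-constructed deleter.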
template <typename Array>
static Array defaultObjectsArray()
{
    using T = typename Array::value_type;
    return { T{ {}, {} }, T{ {}, {} }, T{ {}, {} } };
}
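// The engine moves its worker objects to dedicated threads, so the types that
// travel through queued signal/slot connections (packets, frames, positions,
// durations) must be registered with the meta-type system up front.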
PlaybackEngine::PlaybackEngine()
    : m_streams(defaultObjectsArray<decltype(m_streams)>()),
      m_renderers(defaultObjectsArray<decltype(m_renderers)>())
{
    qCDebug(qLcPlaybackEngine) << "Create PlaybackEngine";
    qRegisterMetaType<QFFmpeg::Packet>();
    qRegisterMetaType<QFFmpeg::Frame>();
    qRegisterMetaType<QFFmpeg::TrackPosition>();
    qRegisterMetaType<QFFmpeg::TrackDuration>();
}
PlaybackEngine::~PlaybackEngine()
{
    qCDebug(qLcPlaybackEngine) << "Delete PlaybackEngine";

    forEachExistingObject([](auto &object) { object.reset(); });
    deleteFreeThreads();
}
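// End-of-stream handling: playback stops only once every existing renderer is
// at the end; an unfinished subtitle renderer keeps playback alive only when
// no audio or video stream exists.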
void PlaybackEngine::onRendererFinished()
{
    auto isAtEnd = [this](auto trackType) {
        return !m_renderers[trackType] || m_renderers[trackType]->isAtEnd();
    };

    if (!isAtEnd(QPlatformMediaPlayer::VideoStream))
        return;

    if (!isAtEnd(QPlatformMediaPlayer::AudioStream))
        return;

    if (!isAtEnd(QPlatformMediaPlayer::SubtitleStream) && !hasMediaStream())
        return;

    if (std::exchange(m_state, QMediaPlayer::StoppedState) == QMediaPlayer::StoppedState)
        return;

    finilizeTime(duration().asTimePoint());

    qCDebug(qLcPlaybackEngine) << "Playback engine end of stream";
}
void PlaybackEngine::onRendererLoopChanged(quint64 id, TrackPosition offset, int loopIndex)
{
    if (!hasRenderer(id))
        return;

    if (loopIndex > m_currentLoopOffset.loopIndex) {
        m_currentLoopOffset = { offset, loopIndex };
    } else if (loopIndex == m_currentLoopOffset.loopIndex
               && offset != m_currentLoopOffset.loopStartTimeUs) {
        qWarning() << "Unexpected offset for loop" << loopIndex << ":" << offset.get() << "vs"
                   << m_currentLoopOffset.loopStartTimeUs.get();
        m_currentLoopOffset.loopStartTimeUs = offset;
    }
}
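// Once the demuxer reports its first packet, the time controller is re-synced
// to absorb the demuxer start-up delay, and all renderers are started against
// the shared time controller.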
void PlaybackEngine::onFirsPacketFound(quint64 id, TrackPosition absSeekPos)
{
    if (!m_demuxer || m_demuxer->id() != id)
        return;

    if (m_shouldUpdateTimeOnFirstPacket) {
        const auto timePoint = SteadyClock::now();
        const SteadyClock::time_point expectedTimePoint =
                m_timeController.timeFromPosition(absSeekPos);
        const auto delay = std::chrono::duration_cast<std::chrono::microseconds>(
                timePoint - expectedTimePoint);
        qCDebug(qLcPlaybackEngine) << "Delay of demuxer initialization:" << delay;
        m_timeController.sync(timePoint, absSeekPos);

        m_shouldUpdateTimeOnFirstPacket = false;
    }

    forEachExistingObject<Renderer>([&](auto &renderer) { renderer->start(m_timeController); });
}
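// Renderer synchronization: the audio renderer acts as the master clock; its
// sync points update the time controller and are propagated as soft syncs to
// the remaining renderers.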
void PlaybackEngine::onRendererSynchronized(quint64 id, SteadyClock::time_point tp,
                                            TrackPosition pos)
{
    if (!hasRenderer(id))
        return;

    Q_ASSERT(m_renderers[QPlatformMediaPlayer::AudioStream]
             && m_renderers[QPlatformMediaPlayer::AudioStream]->id() == id);

    m_timeController.sync(tp, pos);

    forEachExistingObject<Renderer>([&](auto &renderer) {
        if (id != renderer->id())
            renderer->syncSoft(tp, pos);
    });
}
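// setState() drives the engine between playing, paused and stopped. A minimal
// driving sequence might look like the sketch below (illustrative only;
// `mediaHolder` and `videoSink` stand for objects prepared by the caller):
//
//     PlaybackEngine engine;
//     engine.setMedia(std::move(mediaHolder));
//     engine.setVideoSink(videoSink);
//     engine.setState(QMediaPlayer::PlayingState);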
void PlaybackEngine::setState(QMediaPlayer::PlaybackState state)
{
    if (!m_media.avContext())
        return;

    if (state == m_state)
        return;

    const auto prevState = std::exchange(m_state, state);

    if (m_state == QMediaPlayer::StoppedState) {
        finilizeTime(TrackPosition(0));
    }

    if (prevState == QMediaPlayer::StoppedState || m_state == QMediaPlayer::StoppedState)
        recreateObjects();

    if (prevState == QMediaPlayer::StoppedState)
        triggerStepIfNeeded();

    updateObjectsPausedState();
}
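// Pausing policy: renderers pause directly with the engine, while decoders and
// the demuxer are only paused when no renderer still needs frames to finish a
// forced step.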
void PlaybackEngine::updateObjectsPausedState()
{
    const auto paused = m_state != QMediaPlayer::PlayingState;
    m_timeController.setPaused(paused);

    forEachExistingObject([&](auto &object) {
        bool objectPaused = false;

        if constexpr (std::is_same_v<decltype(*object), Renderer &>)
            objectPaused = paused;
        else {
            auto streamPaused = [](bool p, auto &r) {
                const auto needMoreFrames = r && r->stepInProgress();
                return p && !needMoreFrames;
            };

            if constexpr (std::is_same_v<decltype(*object), StreamDecoder &>)
                objectPaused = streamPaused(paused, renderer(object->trackType()));
            else
                objectPaused = std::accumulate(m_renderers.begin(), m_renderers.end(), paused,
                                               streamPaused);
        }

        object->setPaused(objectPaused);
    });
}
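// Objects are released through a custom deleter; the first deletion after a
// rebuild marks the thread set dirty and queues deleteFreeThreads() on the
// engine's thread, so unused worker threads are cleaned up asynchronously.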
    if (!std::exchange(engine->m_threadsDirty, true))
        QMetaObject::invokeMethod(engine, &PlaybackEngine::deleteFreeThreads, Qt::QueuedConnection);
void PlaybackEngine::registerObject(PlaybackEngineObject &object)
{
    connect(&object, &PlaybackEngineObject::error, this, &PlaybackEngine::errorOccured);

    auto threadName = objectThreadName(object);
    auto &thread = m_threads[threadName];
    if (!thread) {
        thread = std::make_unique<QThread>();
        thread->setObjectName(threadName);
        thread->start();
    }

    Q_ASSERT(object.thread() != thread.get());
    object.moveToThread(thread.get());
}
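// A renderer is only created when a matching output is attached: a video sink
// for video and subtitles, an audio output or audio buffer output for audio.
// Otherwise an empty RendererPtr is returned.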
PlaybackEngine::RendererPtr PlaybackEngine::createRenderer(QPlatformMediaPlayer::TrackType trackType)
{
    switch (trackType) {
    case QPlatformMediaPlayer::VideoStream:
        return m_videoSink ? createPlaybackEngineObject<VideoRenderer>(
                                     m_timeController, m_videoSink, m_media.transformation())
                           : RendererPtr{ {}, {} };
    case QPlatformMediaPlayer::AudioStream:
        return m_audioOutput || m_audioBufferOutput
                ? createPlaybackEngineObject<AudioRenderer>(
                          m_timeController, m_audioOutput, m_audioBufferOutput, m_pitchCompensation)
                : RendererPtr{ {}, {} };
    case QPlatformMediaPlayer::SubtitleStream:
        return m_videoSink
                ? createPlaybackEngineObject<SubtitleRenderer>(m_timeController, m_videoSink)
                : RendererPtr{ {}, {} };
    default:
        return RendererPtr{ {}, {} };
    }
}
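// Applies the given action to every existing playback object (demuxer, stream
// decoders, renderers) whose type derives from the requested base class C.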
template<typename C, typename Action>
void PlaybackEngine::forEachExistingObject(Action &&action)
{
    auto handleNotNullObject = [&](auto &object) {
        if constexpr (std::is_base_of_v<C, std::remove_reference_t<decltype(*object)>>)
            if (object)
                action(object);
    };

    handleNotNullObject(m_demuxer);
    std::for_each(m_streams.begin(), m_streams.end(), handleNotNullObject);
    std::for_each(m_renderers.begin(), m_renderers.end(), handleNotNullObject);
}

template<typename Action>
void PlaybackEngine::forEachExistingObject(Action &&action)
{
    forEachExistingObject<PlaybackEngineObject>(std::forward<Action>(action));
}
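// Seeking pauses the time controller, re-syncs it to the requested position
// within the current loop, and marks the seek as pending so the next demuxer
// (re)creation starts from the right place.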
void PlaybackEngine::seek(TrackPosition pos)
{
    pos = boundPosition(pos);

    m_timeController.setPaused(true);
    m_timeController.sync(m_currentLoopOffset.loopStartTimeUs.asDuration() + pos);

    m_seekPending = true;
}
void PlaybackEngine::setLoops(int loops)
{
    if (!isSeekable()) {
        qWarning() << "Cannot set loops for non-seekable source";
        return;
    }

    if (std::exchange(m_loops, loops) == loops)
        return;

    qCDebug(qLcPlaybackEngine) << "set playback engine loops:" << loops << "prev loops:" << m_loops
                               << "index:" << m_currentLoopOffset.loopIndex;

    if (m_demuxer)
        m_demuxer->setLoops(loops);
}
void PlaybackEngine::triggerStepIfNeeded()
{
    if (m_state != QMediaPlayer::PausedState)
        return;

    if (m_renderers[QPlatformMediaPlayer::VideoStream])
        m_renderers[QPlatformMediaPlayer::VideoStream]->doForceStep();
}
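// Worker threads are keyed by object class name (plus track type for stream
// decoders), so objects of the same kind share one thread across rebuilds.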
QString PlaybackEngine::objectThreadName(const PlaybackEngineObject &object)
{
    QString result = QString::fromLatin1(object.metaObject()->className());
    if (auto stream = qobject_cast<const StreamDecoder *>(&object))
        result += QString::number(stream->trackType());

    return result;
}
void PlaybackEngine::setPlaybackRate(float rate)
{
    if (rate == playbackRate())
        return;

    m_timeController.setPlaybackRate(rate);
    forEachExistingObject<Renderer>([rate](auto &renderer) { renderer->setPlaybackRate(rate); });
}

float PlaybackEngine::playbackRate() const
{
    return m_timeController.playbackRate();
}
void PlaybackEngine::recreateObjects()
{
    m_timeController.setPaused(true);

    forEachExistingObject([](auto &object) { object.reset(); });

    createObjectsIfNeeded();
}
void PlaybackEngine::createObjectsIfNeeded()
{
    if (m_state == QMediaPlayer::StoppedState || !m_media.avContext())
        return;

    for (int i = 0; i < QPlatformMediaPlayer::NTrackTypes; ++i)
        createStreamAndRenderer(static_cast<QPlatformMediaPlayer::TrackType>(i));

    createDemuxer();

    triggerStepIfNeeded();
    updateObjectsPausedState();
}
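// Creates the decoder/renderer pair for one track and wires their handshake:
// the decoder asks the renderer to handle frames, and the renderer reports each
// processed frame back so the decoder can throttle its decoding.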
void PlaybackEngine::createStreamAndRenderer(QPlatformMediaPlayer::TrackType trackType)
{
    auto codecContext = codecContextForTrack(trackType);

    auto &renderer = m_renderers[trackType];

    if (!codecContext)
        return;

    if (!renderer) {
        renderer = createRenderer(trackType);
        if (!renderer)
            return;

        connect(renderer.get(), &Renderer::synchronized, this,
                &PlaybackEngine::onRendererSynchronized);

        connect(renderer.get(), &Renderer::loopChanged, this,
                &PlaybackEngine::onRendererLoopChanged);

        if constexpr (shouldPauseStreams)
            connect(renderer.get(), &Renderer::forceStepDone, this,
                    &PlaybackEngine::updateObjectsPausedState);

        connect(renderer.get(), &PlaybackEngineObject::atEnd, this,
                &PlaybackEngine::onRendererFinished);
    }

    auto &stream = m_streams[trackType] =
            createPlaybackEngineObject<StreamDecoder>(*codecContext, renderer->seekPosition());

    Q_ASSERT(trackType == stream->trackType());

    connect(stream.get(), &StreamDecoder::requestHandleFrame, renderer.get(), &Renderer::render);
    connect(stream.get(), &PlaybackEngineObject::atEnd, renderer.get(),
            &Renderer::onFinalFrameReceived);
    connect(renderer.get(), &Renderer::frameProcessed, stream.get(),
            &StreamDecoder::onFrameProcessed);
}
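// Codec contexts are created lazily the first time a track is used and cached
// in m_codecContexts; creation failures are reported through the errorOccured
// signal.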
std::optional<CodecContext>
PlaybackEngine::codecContextForTrack(QPlatformMediaPlayer::TrackType trackType)
{
    const auto streamIndex = m_media.currentStreamIndex(trackType);
    if (streamIndex < 0)
        return {};

    auto &codecContext = m_codecContexts[trackType];

    if (!codecContext) {
        qCDebug(qLcPlaybackEngine)
                << "Create codec for stream:" << streamIndex << "trackType:" << trackType;
        auto maybeCodecContext = CodecContext::create(m_media.avContext()->streams[streamIndex],
                                                      m_media.avContext(), m_options);

        if (!maybeCodecContext) {
            emit errorOccured(QMediaPlayer::FormatError,
                              u"Cannot create codec," + maybeCodecContext.error());
            return {};
        }

        codecContext = maybeCodecContext.value();
    }

    return codecContext;
}
bool PlaybackEngine::hasMediaStream() const
{
    return m_renderers[QPlatformMediaPlayer::AudioStream]
            || m_renderers[QPlatformMediaPlayer::VideoStream];
}
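// The demuxer is (re)created against the currently selected stream indexes and
// connected so that packets are routed per track type to the matching decoder,
// with processed-packet notifications flowing back for back-pressure.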
void PlaybackEngine::createDemuxer()
{
    std::array<int, QPlatformMediaPlayer::NTrackTypes> streamIndexes = { -1, -1, -1 };

    bool hasStreams = false;
    forEachExistingObject<StreamDecoder>([&](auto &stream) {
        hasStreams = true;
        const auto trackType = stream->trackType();
        streamIndexes[trackType] = m_media.currentStreamIndex(trackType);
    });

    if (!hasStreams)
        return;

    const TrackPosition currentLoopPosUs = currentPosition(false);

    m_demuxer = createPlaybackEngineObject<Demuxer>(m_media.avContext(), currentLoopPosUs,
                                                    m_seekPending, m_currentLoopOffset,
                                                    streamIndexes, m_loops);
    m_seekPending = false;

    connect(m_demuxer.get(), &Demuxer::packetsBuffered, this, &PlaybackEngine::buffered);

    forEachExistingObject<StreamDecoder>([&](auto &stream) {
        connect(m_demuxer.get(), Demuxer::signalByTrackType(stream->trackType()), stream.get(),
                &StreamDecoder::decode);
        connect(m_demuxer.get(), &PlaybackEngineObject::atEnd, stream.get(),
                &StreamDecoder::onFinalPacketReceived);
        connect(stream.get(), &StreamDecoder::packetProcessed, m_demuxer.get(),
                &Demuxer::onPacketProcessed);
    });

    m_shouldUpdateTimeOnFirstPacket = true;
    connect(m_demuxer.get(), &Demuxer::firstPacketFound, this,
            &PlaybackEngine::onFirsPacketFound);
}
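// Thread cleanup: threads still referenced by a live object are kept, the rest
// are quit and joined.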
void PlaybackEngine::deleteFreeThreads()
{
    m_threadsDirty = false;
    auto freeThreads = std::move(m_threads);

    forEachExistingObject([&](auto &object) {
        m_threads.insert(freeThreads.extract(objectThreadName(*object)));
    });

    for (auto &[name, thr] : freeThreads)
        thr->quit();

    for (auto &[name, thr] : freeThreads)
        thr->wait();
}
void PlaybackEngine::setMedia(MediaDataHolder media)
{
    Q_ASSERT(!m_media.avContext());
    Q_ASSERT(m_state == QMediaPlayer::StoppedState);
    Q_ASSERT(m_threads.empty());

    m_media = std::move(media);
    updateVideoSinkSize();
}
void PlaybackEngine::setVideoSink(QVideoSink *sink)
{
    auto prev = std::exchange(m_videoSink, sink);
    if (prev == sink)
        return;

    updateVideoSinkSize(prev);
    updateActiveVideoOutput(sink);

    if (!sink || !prev) {
        // Renderers exist only while a matching output is set, so gaining or
        // losing the sink requires rebuilding the playback objects.
        recreateObjects();
        updateObjectsPausedState();
    }
}
void PlaybackEngine::setAudioSink(QAudioOutput *output)
{
    QAudioOutput *prev = std::exchange(m_audioOutput, output);
    if (prev == output)
        return;

    updateActiveAudioOutput(output);

    if (!output || !prev) {
        // As for the video sink: the audio renderer has to be (re)created when
        // the output appears or disappears.
        recreateObjects();
        updateObjectsPausedState();
    }
}
void PlaybackEngine::setAudioBufferOutput(QAudioBufferOutput *output)
{
    QAudioBufferOutput *prev = std::exchange(m_audioBufferOutput, output);
    if (prev == output)
        return;

    updateActiveAudioOutput(output);
}
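// currentPosition(topPos): with topPos the furthest renderer position is
// reported, otherwise the lowest one; the subtitle renderer is ignored for the
// lowest position whenever an audio or video stream exists.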
TrackPosition PlaybackEngine::currentPosition(bool topPos) const
{
    std::optional<TrackPosition> pos;

    for (size_t i = 0; i < m_renderers.size(); ++i) {
        const auto &renderer = m_renderers[i];
        if (!renderer)
            continue;

        if (!topPos && i == QPlatformMediaPlayer::SubtitleStream && hasMediaStream())
            continue;

        const auto rendererPos = renderer->lastPosition();
        pos = !pos ? rendererPos
                   : topPos ? std::max(*pos, rendererPos)
                            : std::min(*pos, rendererPos);
    }

    if (!pos)
        pos = m_timeController.currentPosition();

    return boundPosition(*pos - m_currentLoopOffset.loopStartTimeUs.asDuration());
}
TrackDuration PlaybackEngine::duration() const
{
    return m_media.duration();
}

const QList<MediaDataHolder::StreamInfo> &
PlaybackEngine::streamInfo(QPlatformMediaPlayer::TrackType trackType) const
{
    return m_media.streamInfo(trackType);
}

const QMediaMetaData &PlaybackEngine::metaData() const
{
    return m_media.metaData();
}
    m_pitchCompensation = enabled;
void PlaybackEngine::setActiveTrack(QPlatformMediaPlayer::TrackType trackType, int streamNumber)
{
    if (!m_media.setActiveTrack(trackType, streamNumber))
        return;

    m_codecContexts[trackType] = {};

    m_renderers[trackType].reset();
    m_streams = defaultObjectsArray<decltype(m_streams)>();

    updateVideoSinkSize();
    createObjectsIfNeeded();
    updateObjectsPausedState();
}
    m_shouldUpdateTimeOnFirstPacket = false;
void PlaybackEngine::finilizeTime(TrackPosition pos)
{
    Q_ASSERT(pos >= TrackPosition(0) && pos <= duration().asTimePoint());

    m_timeController.setPaused(true);
    m_timeController.sync(pos);
    m_currentLoopOffset = {};
}
    if (m_audioBufferOutput)
        updateActiveAudioOutput(static_cast<QAudioBufferOutput *>(nullptr));

    updateActiveAudioOutput(static_cast<QAudioOutput *>(nullptr));
bool PlaybackEngine::hasRenderer(quint64 id) const
{
    return std::any_of(m_renderers.begin(), m_renderers.end(),
                       [id](auto &renderer) { return renderer && renderer->id() == id; });
}
template <typename AudioOutput>
void PlaybackEngine::updateActiveAudioOutput(AudioOutput *output)
{
    if (auto renderer = qobject_cast<AudioRenderer *>(
                m_renderers[QPlatformMediaPlayer::AudioStream].get()))
        renderer->setOutput(output);
}
void PlaybackEngine::updateActiveVideoOutput(QVideoSink *sink, bool cleanOutput)
{
    if (auto renderer = qobject_cast<SubtitleRenderer *>(
                m_renderers[QPlatformMediaPlayer::SubtitleStream].get()))
        renderer->setOutput(sink, cleanOutput);
    if (auto renderer = qobject_cast<VideoRenderer *>(
                m_renderers[QPlatformMediaPlayer::VideoStream].get()))
        renderer->setOutput(sink, cleanOutput);
}
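// The sink's native size is taken from the previous sink when switching sinks;
// otherwise it is computed from the stream's codec parameters and sample
// aspect ratio, with the media's rotation applied.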
void PlaybackEngine::updateVideoSinkSize(QVideoSink *prevSink)
{
    auto platformVideoSink = m_videoSink ? m_videoSink->platformVideoSink() : nullptr;
    if (!platformVideoSink)
        return;

    if (prevSink && prevSink->platformVideoSink())
        platformVideoSink->setNativeSize(prevSink->platformVideoSink()->nativeSize());
    else {
        const auto streamIndex = m_media.currentStreamIndex(QPlatformMediaPlayer::VideoStream);
        if (streamIndex >= 0) {
            const auto context = m_media.avContext();
            const auto stream = context->streams[streamIndex];
            const AVRational pixelAspectRatio =
                    av_guess_sample_aspect_ratio(context, stream, nullptr);
            const QSize size =
                    qCalculateFrameSize({ stream->codecpar->width, stream->codecpar->height },
                                        { pixelAspectRatio.num, pixelAspectRatio.den });

            platformVideoSink->setNativeSize(
                    qRotatedFrameSize(size, m_media.transformation().rotation));
        }
    }
}
TrackPosition PlaybackEngine::boundPosition(TrackPosition position) const
{
    position = qMax(position, TrackPosition(0));
    return duration() > TrackDuration(0) ? qMin(position, duration().asTimePoint()) : position;
}
    return qobject_cast<AudioRenderer *>(m_renderers[QPlatformMediaPlayer::AudioStream].get());
#include "moc_qffmpegplaybackengine_p.cpp"