7#include "private/qplatformaudiooutput_p.h"
8#include "private/qplatformvideosink_p.h"
9#include "private/qaudiobufferoutput_p.h"
11#include "playbackengine/qffmpegdemuxer_p.h"
12#include "playbackengine/qffmpegstreamdecoder_p.h"
13#include "playbackengine/qffmpegsubtitlerenderer_p.h"
14#include "playbackengine/qffmpegvideorenderer_p.h"
15#include "playbackengine/qffmpegaudiorenderer_p.h"
17#include <qloggingcategory.h>
// Builds an array of default-constructed engine objects (one slot per
// QPlatformMediaPlayer track type: video, audio, subtitle).  Each element is a
// {pointer, deleter}-style pair initialized empty.
// NOTE(review): extraction-garbled — leading integers are original file line
// numbers fused into the text; the signature/braces are missing from this
// view.  Verify against upstream qffmpegplaybackengine.cpp before editing.
27template <
typename Array>
 30 using T =
typename Array::value_type;
 31 return { T{ {}, {} }, T{ {}, {} }, T{ {}, {} } };
// PlaybackEngine constructor fragment: member-init list fills the per-track
// stream/renderer arrays with empty slots, then registers the metatypes used
// in queued cross-thread signal connections.
// NOTE(review): extraction-garbled fragment — signatures/braces missing; the
// fused leading integers are original source line numbers.
 36 m_streams(defaultObjectsArray<
decltype(m_streams)>()),
 37 m_renderers(defaultObjectsArray<
decltype(m_renderers)>()),
 40 qCDebug(qLcPlaybackEngine) <<
"Create PlaybackEngine";
 41 qRegisterMetaType<QFFmpeg::Packet>();
 42 qRegisterMetaType<QFFmpeg::Frame>();
 43 qRegisterMetaType<QFFmpeg::TrackPosition>();
 44 qRegisterMetaType<QFFmpeg::PlaybackEngineObjectID>();
// Destructor fragment: logs teardown and resets every still-existing engine
// object (demuxer, stream decoders, renderers) via forEachExistingObject.
 48 qCDebug(qLcPlaybackEngine) <<
"Delete PlaybackEngine";
 51 forEachExistingObject([](
auto &object) { object.reset(); });
// Slot invoked when a renderer signals it reached end-of-stream.  Once the
// video, audio, and (if no other media stream exists) subtitle renderers are
// all at end, the engine transitions to StoppedState and finalizes the clock
// at the media duration.
// NOTE(review): the bodies of the early-return `if`s (original lines 62-79)
// are missing from this extraction — presumably each bails out early; confirm
// against upstream before relying on control flow described here.
55void PlaybackEngine::onRendererFinished(
const PlaybackEngineObjectID &id)
// A track counts as "at end" when it has no renderer or its renderer reports
// isAtEnd().
 60 auto isAtEnd = [
this](
auto trackType) {
 61 return !m_renderers[trackType] || m_renderers[trackType]->isAtEnd();
 64 if (!isAtEnd(QPlatformMediaPlayer::VideoStream))
 67 if (!isAtEnd(QPlatformMediaPlayer::AudioStream))
 70 if (!isAtEnd(QPlatformMediaPlayer::SubtitleStream) && !hasMediaStream())
// std::exchange returns the prior state: if we were already stopped, nothing
// more to do.
 73 if (std::exchange(m_state, QMediaPlayer::StoppedState) == QMediaPlayer::StoppedState)
 76 finilizeTime(duration().asTimePoint());
 80 qCDebug(qLcPlaybackEngine) <<
"Playback engine end of stream";
// Slot invoked when a renderer crosses a loop boundary.  Advances the stored
// loop offset when a renderer reports a newer loop index; warns on an offset
// mismatch within the same loop index and resyncs to the reported offset.
// NOTE(review): the parameter list is truncated by extraction — a loopIndex
// parameter is clearly referenced in the body; verify the full signature
// upstream.
85void PlaybackEngine::onRendererLoopChanged(
const PlaybackEngineObjectID &id, TrackPosition offset,
 91 if (loopIndex > m_currentLoopOffset.loopIndex) {
 92 m_currentLoopOffset = { offset, loopIndex };
 94 }
else if (loopIndex == m_currentLoopOffset.loopIndex && offset != m_currentLoopOffset.loopStartTimeUs) {
 95 qWarning() <<
"Unexpected offset for loop" << loopIndex <<
":" << offset.get() <<
"vs"
 96 << m_currentLoopOffset.loopStartTimeUs.get();
 97 m_currentLoopOffset.loopStartTimeUs = offset;
// Slot invoked by the demuxer when the first packet after a (re)start/seek is
// found.  Measures how late demuxer initialization was relative to the time
// controller's expectation, resyncs and starts the clock at `absSeekPos`, and
// pushes the updated time controller to every existing renderer.
// NOTE(review): early-return bodies (e.g. after the checkObjectID and
// isStarted guards) are missing from this extraction; the declaration of the
// `delay` variable (original line 111) is also missing.
101void PlaybackEngine::onFirstPacketFound(
const PlaybackEngineObjectID &id, TrackPosition absSeekPos)
// Ignore stale notifications from a demuxer that is no longer current.
 103 if (!checkObjectID(m_demuxer, id))
 106 if (m_timeController.isStarted())
 109 const SteadyClock::time_point now = SteadyClock::now();
 110 const SteadyClock::time_point expectedTimePoint = m_timeController.timeFromPosition(absSeekPos);
 112 std::chrono::round<std::chrono::microseconds>(now - expectedTimePoint);
 113 qCDebug(qLcPlaybackEngine) <<
"Delay of demuxer initialization:" << delay;
 114 m_timeController.sync(now, absSeekPos);
 115 m_timeController.start();
// Propagate the freshly-started clock to all renderers.
 117 forEachExistingObject<Renderer>(
 118 [&](
auto &renderer) { renderer->setTimeController(m_timeController); });
// Slot invoked when a renderer (asserted to be the audio renderer, which acts
// as the sync master) reports a (time point, position) sync sample.  Soft-syncs
// every *other* renderer's clock to that sample and hard-syncs the engine's
// own time controller.
// NOTE(review): early-return body after hasRenderer and the closing braces are
// missing from this extraction.
121void PlaybackEngine::onRendererSynchronized(
const PlaybackEngineObjectID &id,
 122 SteadyClock::time_point tp, TrackPosition pos)
 124 if (!hasRenderer(id))
// Only the audio renderer is expected to drive synchronization here.
 127 Q_ASSERT(checkObjectID(m_renderers[QPlatformMediaPlayer::AudioStream], id));
 129 forEachExistingObject<Renderer>([&](
auto &renderer) {
 130 if (id.objectID != renderer->objectID()) {
 131 auto tc = m_timeController;
 132 tc.syncSoft(tp, pos);
 133 renderer->setTimeController(tc);
 137 m_timeController.sync(tp, pos);
// Body fragment of PlaybackEngine::setState(QMediaPlayer::PlaybackState) —
// the signature is missing from this extraction but is listed in the trailing
// doc dump.  Ignores state changes without loaded media or when the state is
// unchanged; on entering StoppedState finalizes the clock at position 0; on
// leaving/entering StoppedState triggers a forced video step if needed, then
// updates paused state on all objects.
 141 if (!m_media.avContext())
 144 if (state == m_state)
 147 const auto prevState = std::exchange(m_state, state);
 149 if (m_state == QMediaPlayer::StoppedState) {
 151 finilizeTime(TrackPosition(0));
// NOTE(review): the action taken when either side of this transition is
// StoppedState (original lines 155-156) is missing from this view.
 154 if (prevState == QMediaPlayer::StoppedState || m_state == QMediaPlayer::StoppedState)
 157 if (prevState == QMediaPlayer::StoppedState)
 158 triggerStepIfNeeded();
 160 updateObjectsPausedState();
// updateObjectsPausedState fragment: renderers are paused whenever the engine
// is not actively playing; every non-renderer object (demuxer, decoders) is
// explicitly unpaused so it can keep buffering.
 165 const bool paused = m_state != QMediaPlayer::PlayingState;
 166 m_timeController.setPaused(paused);
 168 forEachExistingObject([&](
auto &object) {
 169 if constexpr (std::is_same_v<
decltype(*object), Renderer &>)
 170 object->setPaused(paused);
 172 object->setPaused(
false);
// Deleter fragment (operator()(PlaybackEngineObject*) per the trailing doc
// dump): marks the engine's thread set dirty exactly once and queues
// deleteFreeThreads on the engine's own thread.
 179 if (!std::exchange(engine->m_threadsDirty,
true))
 180 QMetaObject::invokeMethod(engine, &PlaybackEngine::deleteFreeThreads, Qt::QueuedConnection);
// registerObject fragment: wires the object's error signal to the engine,
// lazily creates (and names) a dedicated QThread keyed by objectThreadName,
// and moves the object onto that thread.
 187 connect(&object, &PlaybackEngineObject::error,
this, &PlaybackEngine::errorOccured);
 189 auto threadName = objectThreadName(object);
// operator[] default-constructs an empty unique_ptr slot on first use.
 190 auto &thread = m_threads[threadName];
 192 thread = std::make_unique<QThread>();
 193 thread->setObjectName(threadName);
 197 Q_ASSERT(object.thread() != thread.get());
 198 object.moveToThread(thread.get());
// createRenderer fragment: switch over the track type.  Returns a concrete
// renderer only when its output target exists (video sink for video/subtitle,
// audio output or audio-buffer output for audio); otherwise an empty
// RendererPtr.
 205 case QPlatformMediaPlayer::VideoStream:
 206 return m_videoSink ? createPlaybackEngineObject<VideoRenderer>(
 207 m_timeController, m_videoSink, m_media.transformation())
 208 : RendererPtr{ {}, {} };
 209 case QPlatformMediaPlayer::AudioStream:
 210 return m_audioOutput || m_audioBufferOutput
 211 ? createPlaybackEngineObject<AudioRenderer>(
 212 m_timeController, m_audioOutput, m_audioBufferOutput, m_pitchCompensation)
 213 : RendererPtr{ {}, {} };
 214 case QPlatformMediaPlayer::SubtitleStream:
 216 ? createPlaybackEngineObject<SubtitleRenderer>(m_timeController, m_videoSink)
 217 : RendererPtr{ {}, {} };
// forEachExistingObject<C, Action>: applies `action` to every existing engine
// object whose pointee type derives from C — renderers first, then stream
// decoders, then the demuxer.  The if-constexpr filter makes the same lambda
// safely skip non-matching object kinds.
223template<
typename C,
typename Action>
 226 auto handleNotNullObject = [&](
auto &object) {
 227 if constexpr (std::is_base_of_v<C, std::remove_reference_t<
decltype(*object)>>)
 234 std::for_each(m_renderers.begin(), m_renderers.end(), handleNotNullObject);
 235 std::for_each(m_streams.begin(), m_streams.end(), handleNotNullObject);
 236 handleNotNullObject(m_demuxer);
// Convenience overload: iterate all objects with no type filter.
239template<
typename Action>
 242 forEachExistingObject<PlaybackEngineObject>(std::forward<Action>(action));
// seek fragment: clamps the target position, syncs the clock to the
// loop-adjusted absolute position, and — if the demuxer/media are not ready —
// records the seek as pending.  Otherwise bumps the session ID to invalidate
// in-flight work, deactivates/pauses the clock as appropriate, and dispatches
// the seek to every object (renderers get the time controller, others the raw
// position).
 247 pos = boundPosition(pos);
 249 m_timeController.sync(m_currentLoopOffset.loopStartTimeUs.asDuration() + pos);
 250 if (!m_demuxer || !m_media.avContext()) {
 251 m_seekPending =
true;
 255 m_seekPending =
false;
// New session ID makes stale frames/packets from before the seek discardable.
 256 ++m_currentID.sessionID;
 258 m_timeController.deactivate();
 259 m_timeController.setPaused(m_state != QMediaPlayer::PlayingState);
 261 forEachExistingObject([&](
auto &object) {
 262 if constexpr (std::is_same_v<
decltype(*object), Renderer &>)
 263 object->seek(m_currentID.sessionID, m_timeController, m_currentLoopOffset)
 265 object->seek(m_currentID.sessionID, pos, m_currentLoopOffset);
 268 triggerStepIfNeeded();
// setLoops fragment: rejects loop configuration on non-seekable sources,
// no-ops when unchanged, logs the change, and forwards the loop count to the
// demuxer if one exists.
// NOTE(review): the debug line prints `loops` as both new and "prev" value
// because m_loops was already exchanged on original line 278 — the "prev
// loops" label is misleading in the upstream code as extracted here.
 274 qWarning() <<
"Cannot set loops for non-seekable source";
 278 if (std::exchange(m_loops, loops) == loops)
 281 qCDebug(qLcPlaybackEngine) <<
"set playback engine loops:" << loops <<
"prev loops:" << m_loops
 282 <<
"index:" << m_currentLoopOffset.loopIndex;
 285 m_demuxer->setLoops(loops);
// triggerStepIfNeeded fragment: only in PausedState, force the video renderer
// (if any) to render one frame so seeks show the new position while paused.
 290 if (m_state != QMediaPlayer::PausedState)
 293 if (m_renderers[QPlatformMediaPlayer::VideoStream])
 294 m_renderers[QPlatformMediaPlayer::VideoStream]->doForceStep();
// objectThreadName: derives a per-object thread key from the object's class
// name; stream decoders additionally get their track type appended so each
// track decodes on its own thread.
301QString
PlaybackEngine::objectThreadName(
const PlaybackEngineObject &object)
 303 QString result = QString::fromLatin1(object.metaObject()->className());
 304 if (
auto stream = qobject_cast<
const StreamDecoder *>(&object))
 305 result += QString::number(stream->trackType());
// setPlaybackRate fragment: updates the master clock and pushes the rate to
// every renderer.
 314 m_timeController.setPlaybackRate(rate);
 315 forEachExistingObject<Renderer>([rate](
auto &renderer) { renderer->setPlaybackRate(rate); });
// playbackRate fragment: reads the rate back from the time controller.
 319 return m_timeController.playbackRate();
// Fragment of the object (re)creation path: deactivates the clock, resets all
// existing objects, then rebuilds them.  createObjectsIfNeeded bails out when
// stopped or without media; otherwise creates a stream decoder + renderer per
// track type, starts the clock, triggers a paused-step and refreshes paused
// state.  NOTE(review): signatures and several interior lines (e.g. demuxer
// creation around original lines 338-348) are missing from this extraction.
 324 m_timeController.deactivate();
 326 forEachExistingObject([](
auto &object) { object.reset(); });
 328 createObjectsIfNeeded();
 333 if (m_state == QMediaPlayer::StoppedState || !m_media.avContext())
 336 for (
int i = 0; i < QPlatformMediaPlayer::NTrackTypes; ++i)
 337 createStreamAndRenderer(
static_cast<QPlatformMediaPlayer::TrackType>(i));
 343 m_timeController.start();
 349 triggerStepIfNeeded();
 350 updateObjectsPausedState();
// createStreamAndRenderer: for one track type, obtains a codec context,
// lazily creates the renderer, wires its sync/loop/end signals into the
// engine, creates the stream decoder seeded from the renderer's seek
// position, and connects the decode -> render -> frame-processed pipeline.
// NOTE(review): early-return guards (e.g. on a missing codec context,
// original lines 356-366) are missing from this extraction.
353void PlaybackEngine::createStreamAndRenderer(QPlatformMediaPlayer::TrackType trackType)
 355 auto codecContext = codecContextForTrack(trackType);
 357 auto &renderer = m_renderers[trackType];
 363 renderer = createRenderer(trackType);
// Renderer -> engine notifications: clock sync samples, loop transitions,
// end-of-stream.
 368 connect(renderer.get(), &Renderer::synchronized,
this,
 369 &PlaybackEngine::onRendererSynchronized);
 371 connect(renderer.get(), &Renderer::loopChanged,
this,
 372 &PlaybackEngine::onRendererLoopChanged);
 374 connect(renderer.get(), &PlaybackEngineObject::atEnd,
this,
 375 &PlaybackEngine::onRendererFinished);
 378 auto &stream = m_streams[trackType] =
 379 createPlaybackEngineObject<StreamDecoder>(*codecContext, renderer->seekPosition());
 381 Q_ASSERT(trackType == stream->trackType());
// Decoder/renderer handshake: frames flow forward, processed-acks flow back
// (back-pressure).
 383 connect(stream.get(), &StreamDecoder::requestHandleFrame, renderer.get(), &Renderer::render);
 384 connect(stream.get(), &PlaybackEngineObject::atEnd, renderer.get(),
 385 &Renderer::onFinalFrameReceived);
 386 connect(renderer.get(), &Renderer::frameProcessed, stream.get(),
 387 &StreamDecoder::onFrameProcessed);
// codecContextForTrack: returns (creating and caching on demand) the codec
// context for the currently selected stream of `trackType`.  Emits
// FormatError and presumably returns nullopt when codec creation fails.
// NOTE(review): guards/returns between the visible lines (original lines
// 393-399, 407-412) are missing from this extraction.
390std::optional<CodecContext>
PlaybackEngine::codecContextForTrack(QPlatformMediaPlayer::TrackType trackType)
 392 const auto streamIndex = m_media.currentStreamIndex(trackType);
 396 auto &codecContext = m_codecContexts[trackType];
 400 <<
"Create codec for stream:" << streamIndex <<
"trackType:" << trackType;
 401 auto maybeCodecContext = CodecContext::create(m_media.avContext()->streams[streamIndex],
 402 m_media.avContext(), m_options);
 404 if (!maybeCodecContext) {
 405 emit errorOccured(QMediaPlayer::FormatError,
 406 u"Cannot create codec," + maybeCodecContext.error());
 410 codecContext = maybeCodecContext.value();
// hasMediaStream fragment: true when an audio or video renderer exists
// (subtitles alone don't count as a media stream).
 418 return m_renderers[QPlatformMediaPlayer::AudioStream]
 419 || m_renderers[QPlatformMediaPlayer::VideoStream];
// Demuxer creation fragment: collects the active stream index per track from
// the existing stream decoders, creates the Demuxer at the current loop
// position (carrying any pending seek and the loop configuration), then wires
// per-track packet delivery, end-of-stream, and packet-processed back-pressure
// between demuxer and each stream decoder, plus first-packet notification to
// the engine.  NOTE(review): the loops iterating over m_streams (around
// original lines 427-434 and 444-453) lost their `for` headers in extraction.
 424 std::array<
int, QPlatformMediaPlayer::NTrackTypes> streamIndexes = { -1, -1, -1 };
 426 bool hasStreams =
false;
 429 const auto trackType = stream->trackType();
 430 streamIndexes[trackType] = m_media.currentStreamIndex(trackType);
 436 const TrackPosition currentLoopPosUs = currentPosition(
false);
 438 m_demuxer = createPlaybackEngineObject<Demuxer>(m_media.avContext(), currentLoopPosUs,
 439 m_seekPending, m_currentLoopOffset,
 440 streamIndexes, m_loops);
// The pending seek is consumed by the demuxer construction above.
 442 m_seekPending =
false;
 447 connect(m_demuxer.get(), Demuxer::signalByTrackType(stream->trackType()), stream.get(),
 448 &StreamDecoder::decode);
 449 connect(m_demuxer.get(), &PlaybackEngineObject::atEnd, stream.get(),
 450 &StreamDecoder::onFinalPacketReceived);
 451 connect(stream.get(), &StreamDecoder::packetProcessed, m_demuxer.get(),
 452 &Demuxer::onPacketProcessed);
 455 connect(m_demuxer.get(), &Demuxer::firstPacketFound,
this, &PlaybackEngine::onFirstPacketFound);
// deleteFreeThreads fragment: clears the dirty flag, moves the whole thread
// map out, re-inserts (via node extraction) only the threads still owned by a
// live object, then quits and joins the remaining orphaned threads.
// NOTE(review): the quit/wait calls inside the two trailing for-loops
// (original lines 467 and 470) are missing from this extraction.
 459 m_threadsDirty =
false;
 460 auto freeThreads =
std::move(m_threads);
 462 forEachExistingObject([&](
auto &object) {
// extract() moves the map node back without reallocation; names not found
// stay in freeThreads and get shut down below.
 463 m_threads.insert(freeThreads.extract(objectThreadName(*object)));
 466 for (
auto &[name, thr] : freeThreads)
 469 for (
auto &[name, thr] : freeThreads)
// setMedia fragment: only legal when no media is loaded, the engine is
// stopped, and all worker threads are gone; takes ownership of the media and
// refreshes the sink's native size.
 475 Q_ASSERT(!m_media.avContext());
 476 Q_ASSERT(m_state == QMediaPlayer::StoppedState);
 477 Q_ASSERT(m_threads.empty());
 479 m_media =
std::move(media);
 480 updateVideoSinkSize();
// setVideoSink fragment: swaps the sink, propagates size from the previous
// sink, and — judging by the guard — recreates video objects only when the
// sink's presence actually changed.
 485 auto prev = std::exchange(m_videoSink, sink);
 489 updateVideoSinkSize(prev);
 492 if (!sink || !prev) {
// setAudioSink fragment: swap and rewire the active audio output; object
// recreation likewise appears gated on presence change (body truncated).
 504 QAudioOutput *prev = std::exchange(m_audioOutput, output);
 508 updateActiveAudioOutput(output);
 510 if (!output || !prev) {
// setAudioBufferOutput fragment: same pattern for the buffer output.
 518 QAudioBufferOutput *prev = std::exchange(m_audioBufferOutput, output);
 521 updateActiveAudioOutput(output);
// currentPosition fragment: when the clock is running, derives the position
// from renderer positions — max across renderers for `topPos`, min otherwise
// — skipping the subtitle renderer for topPos when real media streams exist;
// falls back to the time controller's position.  The result is made
// loop-relative and clamped via boundPosition.
 526 std::optional<TrackPosition> pos;
 528 if (m_timeController.isStarted()) {
 529 for (size_t i = 0; i < m_renderers.size(); ++i) {
 530 const auto &renderer = m_renderers[i];
 535 if (!topPos && i == QPlatformMediaPlayer::SubtitleStream && hasMediaStream())
 538 const auto rendererPos = renderer->lastPosition();
 539 pos = !pos ? rendererPos
 540 : topPos ?
std::max(*pos, rendererPos)
 541 :
std::min(*pos, rendererPos);
 547 pos = m_timeController.currentPosition();
 549 return boundPosition(*pos - m_currentLoopOffset.loopStartTimeUs.asDuration());
// Trivial accessor fragments: duration, per-track stream info, metadata, and
// the pitch-compensation setter (renderer propagation, if any, is not visible
// in this extraction).
 554 return m_media.duration();
 562 return m_media.streamInfo(trackType);
 567 return m_media.metaData();
 577 m_pitchCompensation = enabled;
// setActiveTrack fragment: delegates track selection to the media holder;
// on success drops the cached codec context, renderer, and all stream
// decoders for a clean rebuild, then refreshes sink size, recreates objects,
// and reapplies paused state.
 584 if (!m_media.setActiveTrack(trackType, streamNumber))
 587 m_codecContexts[trackType] = {};
 589 m_renderers[trackType].reset();
 590 m_streams = defaultObjectsArray<
decltype(m_streams)>();
 599 updateVideoSinkSize();
 600 createObjectsIfNeeded();
 601 updateObjectsPausedState();
// finilizeTime fragment (sic — upstream spelling): pins the clock to `pos`
// (which must be within [0, duration]) and clears the loop offset.
 606 Q_ASSERT(pos >= TrackPosition(0) && pos <= duration().asTimePoint());
 608 m_timeController.deactivate();
 609 m_timeController.sync(pos);
 610 m_currentLoopOffset = {};
// Teardown fragment (enclosing function's signature missing from this view —
// presumably audio-output finalization): detaches the buffer output and the
// audio output from the renderer by passing nullptr.
 615 if (m_audioBufferOutput)
 616 updateActiveAudioOutput(
static_cast<QAudioBufferOutput *>(
nullptr));
 618 updateActiveAudioOutput(
static_cast<QAudioOutput *>(
nullptr));
// hasRenderer: true if any existing renderer matches the given object ID.
622bool PlaybackEngine::hasRenderer(
const PlaybackEngineObjectID &id)
const
 624 return std::any_of(m_renderers.begin(), m_renderers.end(),
 625 [&](
auto &renderer) {
return checkObjectID(renderer, id); });
// updateActiveAudioOutput<AudioOutput>: forwards the new output (QAudioOutput
// or QAudioBufferOutput) to the audio renderer.  NOTE(review): the guard
// obtaining/checking the renderer (original lines 629-631) is missing here.
628template <
typename AudioOutput>
 632 renderer->setOutput(output);
// updateActiveVideoOutput fragment: pushes the sink (and a clean-output flag)
// to both the subtitle renderer and the video renderer, when present.
 637 if (
auto renderer = qobject_cast<SubtitleRenderer *>(
 638 m_renderers[QPlatformMediaPlayer::SubtitleStream].get()))
 639 renderer->setOutput(sink, cleanOutput);
 641 qobject_cast<VideoRenderer *>(m_renderers[QPlatformMediaPlayer::VideoStream].get()))
 642 renderer->setOutput(sink, cleanOutput);
// updateVideoSinkSize fragment: no-op without a platform sink.  Carries the
// native size over from a previous sink (if given), then — when a video
// stream is selected — recomputes the native size from the codec parameters
// and the guessed pixel aspect ratio, rotated per the media transformation.
// NOTE(review): the declaration receiving qCalculateFrameSize's result
// (original line 661, presumably `const QSize size =`) is missing here.
 647 auto platformVideoSink = m_videoSink ? m_videoSink->platformVideoSink() :
nullptr;
 648 if (!platformVideoSink)
 651 if (prevSink && prevSink->platformVideoSink())
 652 platformVideoSink->setNativeSize(prevSink->platformVideoSink()->nativeSize());
 654 const auto streamIndex = m_media.currentStreamIndex(QPlatformMediaPlayer::VideoStream);
 655 if (streamIndex >= 0) {
 656 const auto context = m_media.avContext();
 657 const auto stream = context->streams[streamIndex];
 658 const AVRational pixelAspectRatio =
 659 av_guess_sample_aspect_ratio(context, stream,
nullptr);
 662 qCalculateFrameSize({ stream->codecpar->width, stream->codecpar->height },
 663 { pixelAspectRatio.num, pixelAspectRatio.den });
 665 platformVideoSink->setNativeSize(
 666 qRotatedFrameSize(size, m_media.transformation().rotation));
// boundPosition: clamps a position to [0, duration]; when duration is unknown
// (<= 0) only the lower bound applies.
671TrackPosition
PlaybackEngine::boundPosition(TrackPosition position)
const
 673 position = qMax(position, TrackPosition(0));
 674 return duration() > TrackDuration(0) ? qMin(position, duration().asTimePoint()) : position;
// Accessor fragment: downcasts the audio-stream renderer slot to
// AudioRenderer (nullptr if absent or of another type).
 679 return qobject_cast<
AudioRenderer *>(m_renderers[QPlatformMediaPlayer::AudioStream].get());
686#include "moc_qffmpegplaybackengine_p.cpp"
void setPitchCompensation(bool enabled)
void seek(TrackPosition pos)
void setLoops(int loopsCount)
void setVideoSink(QVideoSink *sink)
void setState(QMediaPlayer::PlaybackState state)
void updateActiveAudioOutput(AudioOutput *output)
virtual RendererPtr createRenderer(QPlatformMediaPlayer::TrackType trackType)
void setAudioBufferOutput(QAudioBufferOutput *output)
void setPlaybackRate(float rate)
void setAudioSink(QAudioOutput *output)
void setPitchCompensation(bool enabled)
~PlaybackEngine() override
const QList< MediaDataHolder::StreamInfo > & streamInfo(QPlatformMediaPlayer::TrackType trackType) const
TrackDuration duration() const
void setActiveTrack(QPlatformMediaPlayer::TrackType type, int streamNumber)
void setMedia(MediaDataHolder media)
TrackPosition currentPosition(bool topPos=true) const
const QMediaMetaData & metaData() const
void updateActiveVideoOutput(QVideoSink *sink, bool cleanOutput=false)
float playbackRate() const
The QPlaybackOptions class enables low-level control of media playback options.
static Array defaultObjectsArray()
std::conditional_t< QT_FFMPEG_AVIO_WRITE_CONST, const uint8_t *, uint8_t * > AvioWriteBufferType
Combined button and popup list for selecting options.
#define qCDebug(category,...)
#define Q_STATIC_LOGGING_CATEGORY(name,...)
void operator()(PlaybackEngineObject *) const