Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
qffmpegplaybackengine.cpp
1// Copyright (C) 2021 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3#include "playbackengine/qffmpegplaybackengine_p.h"
4
5#include "qvideosink.h"
6#include "qaudiooutput.h"
7#include "private/qplatformaudiooutput_p.h"
8#include "private/qplatformvideosink_p.h"
9#include "private/qaudiobufferoutput_p.h"
10#include "qiodevice.h"
11#include "playbackengine/qffmpegdemuxer_p.h"
12#include "playbackengine/qffmpegstreamdecoder_p.h"
13#include "playbackengine/qffmpegsubtitlerenderer_p.h"
14#include "playbackengine/qffmpegvideorenderer_p.h"
15#include "playbackengine/qffmpegaudiorenderer_p.h"
16
17#include <qloggingcategory.h>
18
19QT_BEGIN_NAMESPACE
20
21namespace QFFmpeg {
22
23Q_STATIC_LOGGING_CATEGORY(qLcPlaybackEngine, "qt.multimedia.ffmpeg.playbackengine");
24
25// The helper is needed because, with some compilers, std::unique_ptr
26// has no default constructor when sizeof(CustomDeleter) > 0.
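// Each element is brace-initialized as T{ {}, {} } (pointer, then deleter), since the objects
// are held in unique_ptrs with a stateful deleter (PlaybackEngine::ObjectDeleter keeps a
// pointer back to the engine).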
27template <typename Array>
28inline static Array defaultObjectsArray()
29{
30 using T = typename Array::value_type;
31 return { T{ {}, {} }, T{ {}, {} }, T{ {}, {} } };
32}
33
34// TODO: investigate which option is better; profile it and try the network case.
35// Most likely, shouldPauseStreams = false is better because:
36// - packet and frame buffers are not big, so they saturate pretty quickly.
37// - after any pause the user has some preloaded buffers, so playback is
38// expected to be more stable with a weak processor or a bad internet connection.
39// - the code is simpler and its usage is more convenient.
40//
41static constexpr bool shouldPauseStreams = false;
42
43PlaybackEngine::PlaybackEngine(const QPlaybackOptions &options)
44 : m_demuxer({}, {}),
45 m_streams(defaultObjectsArray<decltype(m_streams)>()),
46 m_renderers(defaultObjectsArray<decltype(m_renderers)>()),
47 m_options{ options }
48{
49 qCDebug(qLcPlaybackEngine) << "Create PlaybackEngine";
50 qRegisterMetaType<QFFmpeg::Packet>();
51 qRegisterMetaType<QFFmpeg::Frame>();
52 qRegisterMetaType<QFFmpeg::TrackPosition>();
53 qRegisterMetaType<QFFmpeg::TrackDuration>();
54}
55
56PlaybackEngine::~PlaybackEngine() {
57 qCDebug(qLcPlaybackEngine) << "Delete PlaybackEngine";
58
59 finalizeOutputs();
60 forEachExistingObject([](auto &object) { object.reset(); });
61 deleteFreeThreads();
62}
63
64void PlaybackEngine::onRendererFinished()
65{
66 auto isAtEnd = [this](auto trackType) {
67 return !m_renderers[trackType] || m_renderers[trackType]->isAtEnd();
68 };
69
70 if (!isAtEnd(QPlatformMediaPlayer::VideoStream))
71 return;
72
73 if (!isAtEnd(QPlatformMediaPlayer::AudioStream))
74 return;
75
76 if (!isAtEnd(QPlatformMediaPlayer::SubtitleStream) && !hasMediaStream())
77 return;
78
79 if (std::exchange(m_state, QMediaPlayer::StoppedState) == QMediaPlayer::StoppedState)
80 return;
81
82 finilizeTime(duration().asTimePoint());
83
84 forceUpdate();
85
86 qCDebug(qLcPlaybackEngine) << "Playback engine end of stream";
87
88 emit endOfStream();
89}
90
91void PlaybackEngine::onRendererLoopChanged(quint64 id, TrackPosition offset, int loopIndex)
92{
93 if (!hasRenderer(id))
94 return;
95
96 if (loopIndex > m_currentLoopOffset.loopIndex) {
97 m_currentLoopOffset = { offset, loopIndex };
98 emit loopChanged();
99 } else if (loopIndex == m_currentLoopOffset.loopIndex && offset != m_currentLoopOffset.loopStartTimeUs) {
100 qWarning() << "Unexpected offset for loop" << loopIndex << ":" << offset.get() << "vs"
101 << m_currentLoopOffset.loopStartTimeUs.get();
102 m_currentLoopOffset.loopStartTimeUs = offset;
103 }
104}
105
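// Invoked when the demuxer reports its first packet after being (re)created: if needed, the
// time controller is re-synced to the packet's actual arrival time (compensating the demuxer's
// start-up latency), and all renderers are started from the shared time controller.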
106void PlaybackEngine::onFirsPacketFound(quint64 id, TrackPosition absSeekPos)
107{
108 if (!m_demuxer || m_demuxer->id() != id)
109 return;
110
111 if (m_shouldUpdateTimeOnFirstPacket) {
112 const auto timePoint = SteadyClock::now();
113 const SteadyClock::time_point expectedTimePoint =
114 m_timeController.timeFromPosition(absSeekPos);
115 const auto delay = std::chrono::duration_cast<std::chrono::microseconds>(
116 timePoint - expectedTimePoint);
117 qCDebug(qLcPlaybackEngine) << "Delay of demuxer initialization:" << delay;
118 m_timeController.sync(timePoint, absSeekPos);
119
120 m_shouldUpdateTimeOnFirstPacket = false; // reset the flag to keep the state consistent.
121 }
122
123 forEachExistingObject<Renderer>([&](auto &renderer) { renderer->start(m_timeController); });
124}
125
126void PlaybackEngine::onRendererSynchronized(quint64 id, SteadyClock::time_point tp, TrackPosition pos)
127{
128 if (!hasRenderer(id))
129 return;
130
131 Q_ASSERT(m_renderers[QPlatformMediaPlayer::AudioStream]
132 && m_renderers[QPlatformMediaPlayer::AudioStream]->id() == id);
133
134 m_timeController.sync(tp, pos);
135
136 forEachExistingObject<Renderer>([&](auto &renderer) {
137 if (id != renderer->id())
138 renderer->syncSoft(tp, pos);
139 });
140}
141
142void PlaybackEngine::setState(QMediaPlayer::PlaybackState state) {
143 if (!m_media.avContext())
144 return;
145
146 if (state == m_state)
147 return;
148
149 const auto prevState = std::exchange(m_state, state);
150
151 if (m_state == QMediaPlayer::StoppedState) {
152 finalizeOutputs();
153 finilizeTime(TrackPosition(0));
154 }
155
156 if (prevState == QMediaPlayer::StoppedState || m_state == QMediaPlayer::StoppedState)
157 recreateObjects();
158
159 if (prevState == QMediaPlayer::StoppedState)
160 triggerStepIfNeeded();
161
162 updateObjectsPausedState();
163}
164
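// Renderers (and the time controller) are paused whenever playback is not in PlayingState.
// Decoders and the demuxer keep running unless shouldPauseStreams is enabled, in which case
// they pause too, except while a renderer still has a forced step in progress.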
165void PlaybackEngine::updateObjectsPausedState()
166{
167 const auto paused = m_state != QMediaPlayer::PlayingState;
168 m_timeController.setPaused(paused);
169
170 forEachExistingObject([&](auto &object) {
171 bool objectPaused = false;
172
173 if constexpr (std::is_same_v<decltype(*object), Renderer &>)
174 objectPaused = paused;
175 else if constexpr (shouldPauseStreams) {
176 auto streamPaused = [](bool p, auto &r) {
177 const auto needMoreFrames = r && r->stepInProgress();
178 return p && !needMoreFrames;
179 };
180
181 if constexpr (std::is_same_v<decltype(*object), StreamDecoder &>)
182 objectPaused = streamPaused(paused, renderer(object->trackType()));
183 else
184 objectPaused = std::accumulate(m_renderers.begin(), m_renderers.end(), paused,
185 streamPaused);
186 }
187
188 object->setPaused(objectPaused);
189 });
190}
191
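// Custom deleter for playback engine objects: instead of deleting synchronously, it asks the
// object to shut down via kill() and schedules deleteFreeThreads() (once per batch) to reclaim
// the worker threads that no longer host any object.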
192void PlaybackEngine::ObjectDeleter::operator()(PlaybackEngineObject *object) const
193{
194 Q_ASSERT(engine);
195 if (!std::exchange(engine->m_threadsDirty, true))
196 QMetaObject::invokeMethod(engine, &PlaybackEngine::deleteFreeThreads, Qt::QueuedConnection);
197
198 object->kill();
199}
200
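// Every playback engine object runs on a named worker thread: one thread per class name (plus
// the track type for stream decoders, see objectThreadName()), created lazily here and reused
// for subsequently registered objects with the same name.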
201void PlaybackEngine::registerObject(PlaybackEngineObject &object)
202{
203 connect(&object, &PlaybackEngineObject::error, this, &PlaybackEngine::errorOccured);
204
205 auto threadName = objectThreadName(object);
206 auto &thread = m_threads[threadName];
207 if (!thread) {
208 thread = std::make_unique<QThread>();
209 thread->setObjectName(threadName);
210 thread->start();
211 }
212
213 Q_ASSERT(object.thread() != thread.get());
214 object.moveToThread(thread.get());
215}
216
217RendererPtr
218PlaybackEngine::createRenderer(QPlatformMediaPlayer::TrackType trackType)
219{
220 switch (trackType) {
221 case QPlatformMediaPlayer::VideoStream:
222 return m_videoSink ? createPlaybackEngineObject<VideoRenderer>(
223 m_timeController, m_videoSink, m_media.transformation())
224 : RendererPtr{ {}, {} };
225 case QPlatformMediaPlayer::AudioStream:
226 return m_audioOutput || m_audioBufferOutput
227 ? createPlaybackEngineObject<AudioRenderer>(
228 m_timeController, m_audioOutput, m_audioBufferOutput, m_pitchCompensation)
229 : RendererPtr{ {}, {} };
230 case QPlatformMediaPlayer::SubtitleStream:
231 return m_videoSink
232 ? createPlaybackEngineObject<SubtitleRenderer>(m_timeController, m_videoSink)
233 : RendererPtr{ {}, {} };
234 default:
235 return { {}, {} };
236 }
237}
238
239template<typename C, typename Action>
240void PlaybackEngine::forEachExistingObject(Action &&action)
241{
242 auto handleNotNullObject = [&](auto &object) {
243 if constexpr (std::is_base_of_v<C, std::remove_reference_t<decltype(*object)>>)
244 if (object)
245 action(object);
246 };
247
248 handleNotNullObject(m_demuxer);
249 std::for_each(m_streams.begin(), m_streams.end(), handleNotNullObject);
250 std::for_each(m_renderers.begin(), m_renderers.end(), handleNotNullObject);
251}
252
253template<typename Action>
254void PlaybackEngine::forEachExistingObject(Action &&action)
255{
256 forEachExistingObject<PlaybackEngineObject>(std::forward<Action>(action));
257}
258
259void PlaybackEngine::seek(TrackPosition pos)
260{
261 pos = boundPosition(pos);
262
263 m_timeController.setPaused(true);
264 m_timeController.sync(m_currentLoopOffset.loopStartTimeUs.asDuration() + pos);
265 m_seekPending = true;
266
267 forceUpdate();
268}
269
270void PlaybackEngine::setLoops(int loops)
271{
272 if (!isSeekable()) {
273 qWarning() << "Cannot set loops for non-seekable source";
274 return;
275 }
276
277 if (std::exchange(m_loops, loops) == loops)
278 return;
279
280 qCDebug(qLcPlaybackEngine) << "set playback engine loops:" << loops << "prev loops:" << m_loops
281 << "index:" << m_currentLoopOffset.loopIndex;
282
283 if (m_demuxer)
284 m_demuxer->setLoops(loops);
285}
286
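// Only relevant in PausedState: force the video renderer to deliver a single frame so that the
// picture at the current position becomes visible.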
287void PlaybackEngine::triggerStepIfNeeded()
288{
289 if (m_state != QMediaPlayer::PausedState)
290 return;
291
292 if (m_renderers[QPlatformMediaPlayer::VideoStream])
293 m_renderers[QPlatformMediaPlayer::VideoStream]->doForceStep();
294
295 // TODO: maybe trigger SubtitleStream as well.
296 // If we trigger it, we have to make seeking to the current subtitle frame more stable,
297 // or set some timeout for the seek.
298}
299
300QString PlaybackEngine::objectThreadName(const PlaybackEngineObject &object)
301{
302 QString result = QString::fromLatin1(object.metaObject()->className());
303 if (auto stream = qobject_cast<const StreamDecoder *>(&object))
304 result += QString::number(stream->trackType());
305
306 return result;
307}
308
309void PlaybackEngine::setPlaybackRate(float rate) {
310 if (rate == playbackRate())
311 return;
312
313 m_timeController.setPlaybackRate(rate);
314 forEachExistingObject<Renderer>([rate](auto &renderer) { renderer->setPlaybackRate(rate); });
315}
316
317float PlaybackEngine::playbackRate() const {
318 return m_timeController.playbackRate();
319}
320
321void PlaybackEngine::recreateObjects()
322{
323 m_timeController.setPaused(true);
324
325 forEachExistingObject([](auto &object) { object.reset(); });
326
327 createObjectsIfNeeded();
328}
329
330void PlaybackEngine::createObjectsIfNeeded()
331{
332 if (m_state == QMediaPlayer::StoppedState || !m_media.avContext())
333 return;
334
335 for (int i = 0; i < QPlatformMediaPlayer::NTrackTypes; ++i)
336 createStreamAndRenderer(static_cast<QPlatformMediaPlayer::TrackType>(i));
337
338 createDemuxer();
339}
340
341void PlaybackEngine::forceUpdate()
342{
343 recreateObjects();
344 triggerStepIfNeeded();
345 updateObjectsPausedState();
346}
347
348void PlaybackEngine::createStreamAndRenderer(QPlatformMediaPlayer::TrackType trackType)
349{
350 auto codecContext = codecContextForTrack(trackType);
351
352 auto &renderer = m_renderers[trackType];
353
354 if (!codecContext)
355 return;
356
357 if (!renderer) {
358 renderer = createRenderer(trackType);
359
360 if (!renderer)
361 return;
362
363 connect(renderer.get(), &Renderer::synchronized, this,
364 &PlaybackEngine::onRendererSynchronized);
365
366 connect(renderer.get(), &Renderer::loopChanged, this,
367 &PlaybackEngine::onRendererLoopChanged);
368
369 if constexpr (shouldPauseStreams)
370 connect(renderer.get(), &Renderer::forceStepDone, this,
371 &PlaybackEngine::updateObjectsPausedState);
372
373 connect(renderer.get(), &PlaybackEngineObject::atEnd, this,
374 &PlaybackEngine::onRendererFinished);
375 }
376
377 auto &stream = m_streams[trackType] =
378 createPlaybackEngineObject<StreamDecoder>(*codecContext, renderer->seekPosition());
379
380 Q_ASSERT(trackType == stream->trackType());
381
382 connect(stream.get(), &StreamDecoder::requestHandleFrame, renderer.get(), &Renderer::render);
383 connect(stream.get(), &PlaybackEngineObject::atEnd, renderer.get(),
384 &Renderer::onFinalFrameReceived);
385 connect(renderer.get(), &Renderer::frameProcessed, stream.get(),
386 &StreamDecoder::onFrameProcessed);
387}
388
389std::optional<CodecContext> PlaybackEngine::codecContextForTrack(QPlatformMediaPlayer::TrackType trackType)
390{
391 const auto streamIndex = m_media.currentStreamIndex(trackType);
392 if (streamIndex < 0)
393 return {};
394
395 auto &codecContext = m_codecContexts[trackType];
396
397 if (!codecContext) {
398 qCDebug(qLcPlaybackEngine)
399 << "Create codec for stream:" << streamIndex << "trackType:" << trackType;
400 auto maybeCodecContext = CodecContext::create(m_media.avContext()->streams[streamIndex],
401 m_media.avContext(), m_options);
402
403 if (!maybeCodecContext) {
404 emit errorOccured(QMediaPlayer::FormatError,
405 u"Cannot create codec," + maybeCodecContext.error());
406 return {};
407 }
408
409 codecContext = maybeCodecContext.value();
410 }
411
412 return codecContext;
413}
414
415bool PlaybackEngine::hasMediaStream() const
416{
417 return m_renderers[QPlatformMediaPlayer::AudioStream]
418 || m_renderers[QPlatformMediaPlayer::VideoStream];
419}
420
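// Creates the demuxer for the currently selected streams and wires it to the existing stream
// decoders: demuxed packets are dispatched per track type, end-of-stream is forwarded, and
// processed packets are reported back to the demuxer via onPacketProcessed.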
421void PlaybackEngine::createDemuxer()
422{
423 std::array<int, QPlatformMediaPlayer::NTrackTypes> streamIndexes = { -1, -1, -1 };
424
425 bool hasStreams = false;
426 forEachExistingObject<StreamDecoder>([&](auto &stream) {
427 hasStreams = true;
428 const auto trackType = stream->trackType();
429 streamIndexes[trackType] = m_media.currentStreamIndex(trackType);
430 });
431
432 if (!hasStreams)
433 return;
434
435 const TrackPosition currentLoopPosUs = currentPosition(false);
436
437 m_demuxer = createPlaybackEngineObject<Demuxer>(m_media.avContext(), currentLoopPosUs,
438 m_seekPending, m_currentLoopOffset,
439 streamIndexes, m_loops);
440
441 m_seekPending = false;
442
443 connect(m_demuxer.get(), &Demuxer::packetsBuffered, this, &PlaybackEngine::buffered);
444
445 forEachExistingObject<StreamDecoder>([&](auto &stream) {
446 connect(m_demuxer.get(), Demuxer::signalByTrackType(stream->trackType()), stream.get(),
447 &StreamDecoder::decode);
448 connect(m_demuxer.get(), &PlaybackEngineObject::atEnd, stream.get(),
449 &StreamDecoder::onFinalPacketReceived);
450 connect(stream.get(), &StreamDecoder::packetProcessed, m_demuxer.get(),
451 &Demuxer::onPacketProcessed);
452 });
453
454 m_shouldUpdateTimeOnFirstPacket = true;
455 connect(m_demuxer.get(), &Demuxer::firstPacketFound, this, &PlaybackEngine::onFirsPacketFound);
456}
457
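// Reclaims worker threads: threads whose name still matches a live object are kept in
// m_threads; all remaining threads are asked to quit and then joined.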
458void PlaybackEngine::deleteFreeThreads() {
459 m_threadsDirty = false;
460 auto freeThreads = std::move(m_threads);
461
462 forEachExistingObject([&](auto &object) {
463 m_threads.insert(freeThreads.extract(objectThreadName(*object)));
464 });
465
466 for (auto &[name, thr] : freeThreads)
467 thr->quit();
468
469 for (auto &[name, thr] : freeThreads)
470 thr->wait();
471}
472
473void PlaybackEngine::setMedia(MediaDataHolder media)
474{
475 Q_ASSERT(!m_media.avContext()); // Playback engine does not support reloading media
476 Q_ASSERT(m_state == QMediaPlayer::StoppedState);
477 Q_ASSERT(m_threads.empty());
478
479 m_media = std::move(media);
480 updateVideoSinkSize();
481}
482
483void PlaybackEngine::setVideoSink(QVideoSink *sink)
484{
485 auto prev = std::exchange(m_videoSink, sink);
486 if (prev == sink)
487 return;
488
489 updateVideoSinkSize(prev);
490 updateActiveVideoOutput(sink);
491
492 if (!sink || !prev) {
493 // might need some improvements
494 forceUpdate();
495 }
496}
497
498void PlaybackEngine::setAudioSink(QPlatformAudioOutput *output) {
499 setAudioSink(output ? output->q : nullptr);
500}
501
502void PlaybackEngine::setAudioSink(QAudioOutput *output)
503{
504 QAudioOutput *prev = std::exchange(m_audioOutput, output);
505 if (prev == output)
506 return;
507
508 updateActiveAudioOutput(output);
509
510 if (!output || !prev) {
511 // might need some improvements
512 forceUpdate();
513 }
514}
515
516void PlaybackEngine::setAudioBufferOutput(QAudioBufferOutput *output)
517{
518 QAudioBufferOutput *prev = std::exchange(m_audioBufferOutput, output);
519 if (prev == output)
520 return;
521 updateActiveAudioOutput(output);
522}
523
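// Reports the playback position as the max (topPos) or min of the renderers' last positions;
// for the min, the subtitle renderer is ignored when an audio or video renderer exists. The
// result is shifted out of the current loop and clamped to [0, duration].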
524TrackPosition PlaybackEngine::currentPosition(bool topPos) const
525{
526 std::optional<TrackPosition> pos;
527
528 for (size_t i = 0; i < m_renderers.size(); ++i) {
529 const auto &renderer = m_renderers[i];
530 if (!renderer)
531 continue;
532
533 // skip the subtitle stream when looking for the lowest rendering position
534 if (!topPos && i == QPlatformMediaPlayer::SubtitleStream && hasMediaStream())
535 continue;
536
537 const auto rendererPos = renderer->lastPosition();
538 pos = !pos ? rendererPos
539 : topPos ? std::max(*pos, rendererPos)
540 : std::min(*pos, rendererPos);
541 }
542
543 if (!pos)
544 pos = m_timeController.currentPosition();
545
546 return boundPosition(*pos - m_currentLoopOffset.loopStartTimeUs.asDuration());
547}
548
549TrackDuration PlaybackEngine::duration() const
550{
551 return m_media.duration();
552}
553
554bool PlaybackEngine::isSeekable() const { return m_media.isSeekable(); }
555
555
556const QList<MediaDataHolder::StreamInfo> &
557PlaybackEngine::streamInfo(QPlatformMediaPlayer::TrackType trackType) const
558{
559 return m_media.streamInfo(trackType);
560}
561
562const QMediaMetaData &PlaybackEngine::metaData() const
563{
564 return m_media.metaData();
565}
566
567int PlaybackEngine::activeTrack(QPlatformMediaPlayer::TrackType type) const
568{
569 return m_media.activeTrack(type);
570}
571
572void PlaybackEngine::setPitchCompensation(bool enabled)
573{
574 m_pitchCompensation = enabled;
575 if (AudioRenderer *renderer = getAudioRenderer())
576 renderer->setPitchCompensation(enabled);
577}
578
579void PlaybackEngine::setActiveTrack(QPlatformMediaPlayer::TrackType trackType, int streamNumber)
580{
581 if (!m_media.setActiveTrack(trackType, streamNumber))
582 return;
583
584 m_codecContexts[trackType] = {};
585
586 m_renderers[trackType].reset();
587 m_streams = defaultObjectsArray<decltype(m_streams)>();
588 m_demuxer.reset();
589
590 updateVideoSinkSize();
591 createObjectsIfNeeded();
592 updateObjectsPausedState();
593
594 // We strive for smooth playback when the active track changes, which means we
595 // don't want to do any time shifting. Instead, we rely on the renderers'
596 // non-empty buffers to compensate for the demuxer's lag.
597 m_shouldUpdateTimeOnFirstPacket = false;
598}
599
600void PlaybackEngine::finilizeTime(TrackPosition pos)
601{
602 Q_ASSERT(pos >= TrackPosition(0) && pos <= duration().asTimePoint());
603
604 m_timeController.setPaused(true);
605 m_timeController.sync(pos);
606 m_currentLoopOffset = {};
607}
608
609void PlaybackEngine::finalizeOutputs()
610{
611 if (m_audioBufferOutput)
612 updateActiveAudioOutput(static_cast<QAudioBufferOutput *>(nullptr));
613 if (m_audioOutput)
614 updateActiveAudioOutput(static_cast<QAudioOutput *>(nullptr));
615 updateActiveVideoOutput(nullptr, true);
616}
617
618bool PlaybackEngine::hasRenderer(quint64 id) const
619{
620 return std::any_of(m_renderers.begin(), m_renderers.end(),
621 [id](auto &renderer) { return renderer && renderer->id() == id; });
622}
623
624template <typename AudioOutput>
625void PlaybackEngine::updateActiveAudioOutput(AudioOutput *output)
626{
627 if (AudioRenderer *renderer = getAudioRenderer())
628 renderer->setOutput(output);
629}
630
631void PlaybackEngine::updateActiveVideoOutput(QVideoSink *sink, bool cleanOutput)
632{
633 if (auto renderer = qobject_cast<SubtitleRenderer *>(
634 m_renderers[QPlatformMediaPlayer::SubtitleStream].get()))
635 renderer->setOutput(sink, cleanOutput);
636 if (auto renderer =
637 qobject_cast<VideoRenderer *>(m_renderers[QPlatformMediaPlayer::VideoStream].get()))
638 renderer->setOutput(sink, cleanOutput);
639}
640
641void PlaybackEngine::updateVideoSinkSize(QVideoSink *prevSink)
642{
643 auto platformVideoSink = m_videoSink ? m_videoSink->platformVideoSink() : nullptr;
644 if (!platformVideoSink)
645 return;
646
647 if (prevSink && prevSink->platformVideoSink())
648 platformVideoSink->setNativeSize(prevSink->platformVideoSink()->nativeSize());
649 else {
650 const auto streamIndex = m_media.currentStreamIndex(QPlatformMediaPlayer::VideoStream);
651 if (streamIndex >= 0) {
652 const auto context = m_media.avContext();
653 const auto stream = context->streams[streamIndex];
654 const AVRational pixelAspectRatio =
655 av_guess_sample_aspect_ratio(context, stream, nullptr);
656 // auto size = metaData().value(QMediaMetaData::Resolution)
657 const QSize size =
658 qCalculateFrameSize({ stream->codecpar->width, stream->codecpar->height },
659 { pixelAspectRatio.num, pixelAspectRatio.den });
660
661 platformVideoSink->setNativeSize(
662 qRotatedFrameSize(size, m_media.transformation().rotation));
663 }
664 }
665}
666
667TrackPosition PlaybackEngine::boundPosition(TrackPosition position) const
668{
669 position = qMax(position, TrackPosition(0));
670 return duration() > TrackDuration(0) ? qMin(position, duration().asTimePoint()) : position;
671}
672
673AudioRenderer *PlaybackEngine::getAudioRenderer()
674{
675 return qobject_cast<AudioRenderer *>(m_renderers[QPlatformMediaPlayer::AudioStream].get());
676}
677
678} // namespace QFFmpeg
679
680QT_END_NAMESPACE
681
682#include "moc_qffmpegplaybackengine_p.cpp"