Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
qffmpegplaybackengine.cpp
Go to the documentation of this file.
1// Copyright (C) 2021 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
4
5#include "qvideosink.h"
6#include "qaudiooutput.h"
7#include "private/qplatformaudiooutput_p.h"
8#include "private/qplatformvideosink_p.h"
9#include "private/qaudiobufferoutput_p.h"
10#include "qiodevice.h"
11#include "playbackengine/qffmpegdemuxer_p.h"
12#include "playbackengine/qffmpegstreamdecoder_p.h"
13#include "playbackengine/qffmpegsubtitlerenderer_p.h"
14#include "playbackengine/qffmpegvideorenderer_p.h"
15#include "playbackengine/qffmpegaudiorenderer_p.h"
16
17#include <qloggingcategory.h>
18
20
21namespace QFFmpeg {
22
23Q_STATIC_LOGGING_CATEGORY(qLcPlaybackEngine, "qt.multimedia.ffmpeg.playbackengine");
24
// Helper producing an array of three empty pointers. It exists because, on
// some compilers, std::unique_ptr has no default constructor when
// sizeof(CustomDeleter) > 0, so each element must be brace-initialized
// explicitly with an empty pointer and an empty deleter.
template <typename Array>
inline static Array defaultObjectsArray()
{
    using Element = typename Array::value_type;
    return Array{ Element{ {}, {} }, Element{ {}, {} }, Element{ {}, {} } };
}
33
// Builds the engine in an idle state: the demuxer and the per-track stream /
// renderer arrays start out empty and are created lazily once media and a
// non-stopped state are set.
PlaybackEngine::PlaybackEngine(const QPlaybackOptions &options)
    : m_demuxer({}, {}),
      m_streams(defaultObjectsArray<decltype(m_streams)>()),
      m_renderers(defaultObjectsArray<decltype(m_renderers)>()),
      m_options{ options }
{
    qCDebug(qLcPlaybackEngine) << "Create PlaybackEngine";
    // Register the types exchanged between playback objects so they can travel
    // through queued (cross-thread) signal/slot connections.
    qRegisterMetaType<QFFmpeg::Packet>();
    qRegisterMetaType<QFFmpeg::Frame>();
    qRegisterMetaType<QFFmpeg::TrackPosition>();
    qRegisterMetaType<QFFmpeg::PlaybackEngineObjectID>();
}
46
    qCDebug(qLcPlaybackEngine) << "Delete PlaybackEngine";

    // Detach audio/video outputs first so no renderer touches them during
    // teardown, then drop every playback object and join the worker threads
    // that are left without objects.
    finalizeOutputs();
    forEachExistingObject([](auto &object) { object.reset(); });
    deleteFreeThreads();
}
54
55void PlaybackEngine::onRendererFinished(const PlaybackEngineObjectID &id)
56{
57 if (!hasRenderer(id))
58 return;
59
60 auto isAtEnd = [this](auto trackType) {
61 return !m_renderers[trackType] || m_renderers[trackType]->isAtEnd();
62 };
63
64 if (!isAtEnd(QPlatformMediaPlayer::VideoStream))
65 return;
66
67 if (!isAtEnd(QPlatformMediaPlayer::AudioStream))
68 return;
69
70 if (!isAtEnd(QPlatformMediaPlayer::SubtitleStream) && !hasMediaStream())
71 return;
72
73 if (std::exchange(m_state, QMediaPlayer::StoppedState) == QMediaPlayer::StoppedState)
74 return;
75
76 finilizeTime(duration().asTimePoint());
77
78 forceUpdate();
79
80 qCDebug(qLcPlaybackEngine) << "Playback engine end of stream";
81
82 emit endOfStream();
83}
84
// Invoked when a renderer wraps around into the next loop iteration. Keeps
// the loop offset shared by all renderers; the id check ignores stale
// notifications from already-deleted renderers.
void PlaybackEngine::onRendererLoopChanged(const PlaybackEngineObjectID &id, TrackPosition offset,
                                           int loopIndex)
{
    if (!hasRenderer(id))
        return;

    if (loopIndex > m_currentLoopOffset.loopIndex) {
        // First renderer to reach this loop iteration: adopt its offset.
        m_currentLoopOffset = { offset, loopIndex };
        emit loopChanged();
    } else if (loopIndex == m_currentLoopOffset.loopIndex && offset != m_currentLoopOffset.loopStartTimeUs) {
        // Renderers within one loop iteration should agree on the start time;
        // log the mismatch and take the most recent value.
        qWarning() << "Unexpected offset for loop" << loopIndex << ":" << offset.get() << "vs"
                   << m_currentLoopOffset.loopStartTimeUs.get();
        m_currentLoopOffset.loopStartTimeUs = offset;
    }
}
100
101void PlaybackEngine::onFirstPacketFound(const PlaybackEngineObjectID &id, TrackPosition absSeekPos)
102{
103 if (!checkObjectID(m_demuxer, id))
104 return;
105
106 if (m_timeController.isStarted())
107 return;
108
109 const SteadyClock::time_point now = SteadyClock::now();
110 const SteadyClock::time_point expectedTimePoint = m_timeController.timeFromPosition(absSeekPos);
111 const auto delay =
112 std::chrono::round<std::chrono::microseconds>(now - expectedTimePoint);
113 qCDebug(qLcPlaybackEngine) << "Delay of demuxer initialization:" << delay;
114 m_timeController.sync(now, absSeekPos);
115 m_timeController.start();
116
117 forEachExistingObject<Renderer>(
118 [&](auto &renderer) { renderer->setTimeController(m_timeController); });
119}
120
121void PlaybackEngine::onRendererSynchronized(const PlaybackEngineObjectID &id,
122 SteadyClock::time_point tp, TrackPosition pos)
123{
124 if (!hasRenderer(id))
125 return;
126
127 Q_ASSERT(checkObjectID(m_renderers[QPlatformMediaPlayer::AudioStream], id));
128
129 forEachExistingObject<Renderer>([&](auto &renderer) {
130 if (id.objectID != renderer->objectID()) {
131 auto tc = m_timeController;
132 tc.syncSoft(tp, pos);
133 renderer->setTimeController(tc);
134 }
135 });
136
137 m_timeController.sync(tp, pos);
138}
139
140void PlaybackEngine::setState(QMediaPlayer::PlaybackState state) {
141 if (!m_media.avContext())
142 return;
143
144 if (state == m_state)
145 return;
146
147 const auto prevState = std::exchange(m_state, state);
148
149 if (m_state == QMediaPlayer::StoppedState) {
150 finalizeOutputs();
151 finilizeTime(TrackPosition(0));
152 }
153
154 if (prevState == QMediaPlayer::StoppedState || m_state == QMediaPlayer::StoppedState)
155 recreateObjects();
156
157 if (prevState == QMediaPlayer::StoppedState)
158 triggerStepIfNeeded();
159
160 updateObjectsPausedState();
161}
162
163void PlaybackEngine::updateObjectsPausedState()
164{
165 const bool paused = m_state != QMediaPlayer::PlayingState;
166 m_timeController.setPaused(paused);
167
168 forEachExistingObject([&](auto &object) {
169 if constexpr (std::is_same_v<decltype(*object), Renderer &>)
170 object->setPaused(paused);
171 else
172 object->setPaused(false);
173 });
174}
175
// Custom deleter for playback-engine objects: instead of deleting in place it
// asks the object to kill itself (safely, on its own thread) and schedules a
// cleanup of worker threads that no longer host any object.
void PlaybackEngine::ObjectDeleter::operator()(PlaybackEngineObject *object) const
{
    Q_ASSERT(engine);
    // Queue deleteFreeThreads only once per "dirty" period; the flag is reset
    // when the cleanup actually runs.
    if (!std::exchange(engine->m_threadsDirty, true))
        QMetaObject::invokeMethod(engine, &PlaybackEngine::deleteFreeThreads, Qt::QueuedConnection);

    object->kill();
}
184
185void PlaybackEngine::registerObject(PlaybackEngineObject &object)
186{
187 connect(&object, &PlaybackEngineObject::error, this, &PlaybackEngine::errorOccured);
188
189 auto threadName = objectThreadName(object);
190 auto &thread = m_threads[threadName];
191 if (!thread) {
192 thread = std::make_unique<QThread>();
193 thread->setObjectName(threadName);
194 thread->start();
195 }
196
197 Q_ASSERT(object.thread() != thread.get());
198 object.moveToThread(thread.get());
199}
200
// Factory for the renderer matching trackType. Returns an empty RendererPtr
// when the output the renderer would feed (video sink / audio output) is not
// set, or for unknown track types.
PlaybackEngine::createRenderer(QPlatformMediaPlayer::TrackType trackType)
{
    switch (trackType) {
    case QPlatformMediaPlayer::VideoStream:
        return m_videoSink ? createPlaybackEngineObject<VideoRenderer>(
                                     m_timeController, m_videoSink, m_media.transformation())
                           : RendererPtr{ {}, {} };
    case QPlatformMediaPlayer::AudioStream:
        // Audio is rendered if either a device output or a buffer output exists.
        return m_audioOutput || m_audioBufferOutput
                ? createPlaybackEngineObject<AudioRenderer>(
                          m_timeController, m_audioOutput, m_audioBufferOutput, m_pitchCompensation)
                : RendererPtr{ {}, {} };
    case QPlatformMediaPlayer::SubtitleStream:
        // Subtitles are drawn onto the video sink as well.
        return m_videoSink
                ? createPlaybackEngineObject<SubtitleRenderer>(m_timeController, m_videoSink)
                : RendererPtr{ {}, {} };
    default:
        return { {}, {} };
    }
}
222
// Applies action to every live (non-null) playback object whose static type
// derives from C; C narrows the traversal at compile time (e.g. visit only
// Renderers or only StreamDecoders).
template<typename C, typename Action>
void PlaybackEngine::forEachExistingObject(Action &&action)
{
    auto handleNotNullObject = [&](auto &object) {
        if constexpr (std::is_base_of_v<C, std::remove_reference_t<decltype(*object)>>)
            if (object)
                action(object);
    };

    // The order Renderers => Demuxer is required for seek().
    // For other cases, it doesn't make any difference.
    std::for_each(m_renderers.begin(), m_renderers.end(), handleNotNullObject);
    std::for_each(m_streams.begin(), m_streams.end(), handleNotNullObject);
    handleNotNullObject(m_demuxer);
}
238
// Convenience overload: visits every kind of playback object.
template<typename Action>
void PlaybackEngine::forEachExistingObject(Action &&action)
{
    forEachExistingObject<PlaybackEngineObject>(std::forward<Action>(action));
}
244
// Seeks all playback objects to pos (clamped to the media bounds). If the
// demuxer does not exist yet the request is remembered in m_seekPending and
// replayed on demuxer creation.
void PlaybackEngine::seek(TrackPosition pos)
{
    pos = boundPosition(pos);

    // Sync the clock to the absolute position (including the loop offset)
    // even when the seek itself has to be deferred.
    m_timeController.sync(m_currentLoopOffset.loopStartTimeUs.asDuration() + pos);
    if (!m_demuxer || !m_media.avContext()) {
        m_seekPending = true;
        return;
    }

    m_seekPending = false;
    // Bump the session id so in-flight packets/frames produced before the
    // seek are recognized as stale and dropped.
    ++m_currentID.sessionID;

    m_timeController.deactivate();
    m_timeController.setPaused(m_state != QMediaPlayer::PlayingState);

    // Renderers are re-armed with the clock; demuxer/decoders with the target
    // position (traversal order Renderers => Demuxer matters here).
    forEachExistingObject([&](auto &object) {
        if constexpr (std::is_same_v<decltype(*object), Renderer &>)
            object->seek(m_currentID.sessionID, m_timeController, m_currentLoopOffset);
        else
            object->seek(m_currentID.sessionID, pos, m_currentLoopOffset);
    });

    triggerStepIfNeeded();
}
270
271void PlaybackEngine::setLoops(int loops)
272{
273 if (!isSeekable()) {
274 qWarning() << "Cannot set loops for non-seekable source";
275 return;
276 }
277
278 if (std::exchange(m_loops, loops) == loops)
279 return;
280
281 qCDebug(qLcPlaybackEngine) << "set playback engine loops:" << loops << "prev loops:" << m_loops
282 << "index:" << m_currentLoopOffset.loopIndex;
283
284 if (m_demuxer)
285 m_demuxer->setLoops(loops);
286}
287
288void PlaybackEngine::triggerStepIfNeeded()
289{
290 if (m_state != QMediaPlayer::PausedState)
291 return;
292
293 if (m_renderers[QPlatformMediaPlayer::VideoStream])
294 m_renderers[QPlatformMediaPlayer::VideoStream]->doForceStep();
295
296 // TODO: maybe trigger SubtitleStream.
297 // If trigger it, we have to make seeking for the current subtitle frame more stable.
298 // Or set some timeout for seeking.
299}
300
301QString PlaybackEngine::objectThreadName(const PlaybackEngineObject &object)
302{
303 QString result = QString::fromLatin1(object.metaObject()->className());
304 if (auto stream = qobject_cast<const StreamDecoder *>(&object))
305 result += QString::number(stream->trackType());
306
307 return result;
308}
309
    if (rate == playbackRate())
        return;

    // Propagate the new rate to the shared clock and to every live renderer.
    m_timeController.setPlaybackRate(rate);
    forEachExistingObject<Renderer>([rate](auto &renderer) { renderer->setPlaybackRate(rate); });
}
317
319 return m_timeController.playbackRate();
320}
321
// Destroys and re-creates the whole playback object graph (used when the
// stopped state toggles or outputs change). The clock is deactivated so the
// new objects re-synchronize from scratch.
void PlaybackEngine::recreateObjects()
{
    m_timeController.deactivate();

    forEachExistingObject([](auto &object) { object.reset(); });

    createObjectsIfNeeded();
}
330
331void PlaybackEngine::createObjectsIfNeeded()
332{
333 if (m_state == QMediaPlayer::StoppedState || !m_media.avContext())
334 return;
335
336 for (int i = 0; i < QPlatformMediaPlayer::NTrackTypes; ++i)
337 createStreamAndRenderer(static_cast<QPlatformMediaPlayer::TrackType>(i));
338
339 createDemuxer();
340
341 // temporary, to comply with the test disablingAllTracks_doesNotStopPlayback
342 if (!m_demuxer)
343 m_timeController.start();
344}
345
// Rebuilds the object graph and re-applies the step/pause state; called when
// outputs change or end-of-stream alters the required object set.
void PlaybackEngine::forceUpdate()
{
    recreateObjects();
    triggerStepIfNeeded();
    updateObjectsPausedState();
}
352
// Creates (or reuses) the renderer for trackType and always re-creates the
// matching stream decoder, then wires the decoder <-> renderer signal chain.
void PlaybackEngine::createStreamAndRenderer(QPlatformMediaPlayer::TrackType trackType)
{
    auto codecContext = codecContextForTrack(trackType);

    auto &renderer = m_renderers[trackType];

    // No codec => track disabled or codec creation failed; leave it alone.
    if (!codecContext)
        return;

    if (!renderer) {
        renderer = createRenderer(trackType);

        // No renderer (e.g. no sink/output set for this track) => nothing to feed.
        if (!renderer)
            return;

        connect(renderer.get(), &Renderer::synchronized, this,
                &PlaybackEngine::onRendererSynchronized);

        connect(renderer.get(), &Renderer::loopChanged, this,
                &PlaybackEngine::onRendererLoopChanged);

        connect(renderer.get(), &PlaybackEngineObject::atEnd, this,
                &PlaybackEngine::onRendererFinished);
    }

    // The new decoder resumes from the renderer's current position so a
    // re-created decoder continues seamlessly.
    auto &stream = m_streams[trackType] =
            createPlaybackEngineObject<StreamDecoder>(*codecContext, renderer->seekPosition());

    Q_ASSERT(trackType == stream->trackType());

    // Frame flow: decoder -> renderer; back-pressure flows the other way.
    connect(stream.get(), &StreamDecoder::requestHandleFrame, renderer.get(), &Renderer::render);
    connect(stream.get(), &PlaybackEngineObject::atEnd, renderer.get(),
            &Renderer::onFinalFrameReceived);
    connect(renderer.get(), &Renderer::frameProcessed, stream.get(),
            &StreamDecoder::onFrameProcessed);
}
389
390std::optional<CodecContext> PlaybackEngine::codecContextForTrack(QPlatformMediaPlayer::TrackType trackType)
391{
392 const auto streamIndex = m_media.currentStreamIndex(trackType);
393 if (streamIndex < 0)
394 return {};
395
396 auto &codecContext = m_codecContexts[trackType];
397
398 if (!codecContext) {
399 qCDebug(qLcPlaybackEngine)
400 << "Create codec for stream:" << streamIndex << "trackType:" << trackType;
401 auto maybeCodecContext = CodecContext::create(m_media.avContext()->streams[streamIndex],
402 m_media.avContext(), m_options);
403
404 if (!maybeCodecContext) {
405 emit errorOccured(QMediaPlayer::FormatError,
406 u"Cannot create codec," + maybeCodecContext.error());
407 return {};
408 }
409
410 codecContext = maybeCodecContext.value();
411 }
412
413 return codecContext;
414}
415
416bool PlaybackEngine::hasMediaStream() const
417{
418 return m_renderers[QPlatformMediaPlayer::AudioStream]
419 || m_renderers[QPlatformMediaPlayer::VideoStream];
420}
421
// Creates the demuxer once at least one stream decoder exists, and wires
// packet distribution, end-of-data and back-pressure connections.
void PlaybackEngine::createDemuxer()
{
    // -1 keeps a track disabled in the demuxer.
    std::array<int, QPlatformMediaPlayer::NTrackTypes> streamIndexes = { -1, -1, -1 };

    // Collect the active stream index for every track that has a decoder.
    bool hasStreams = false;
    forEachExistingObject<StreamDecoder>([&](auto &stream) {
        hasStreams = true;
        const auto trackType = stream->trackType();
        streamIndexes[trackType] = m_media.currentStreamIndex(trackType);
    });

    if (!hasStreams)
        return;

    const TrackPosition currentLoopPosUs = currentPosition(false);

    // A pending seek is handed over to the new demuxer and considered done.
    m_demuxer = createPlaybackEngineObject<Demuxer>(m_media.avContext(), currentLoopPosUs,
                                                    m_seekPending, m_currentLoopOffset,
                                                    streamIndexes, m_loops);

    m_seekPending = false;

    connect(m_demuxer.get(), &Demuxer::packetsBuffered, this, &PlaybackEngine::buffered);

    // Packet flow: demuxer -> decoder per track; processed notifications flow
    // back to the demuxer for buffering control.
    forEachExistingObject<StreamDecoder>([&](auto &stream) {
        connect(m_demuxer.get(), Demuxer::signalByTrackType(stream->trackType()), stream.get(),
                &StreamDecoder::decode);
        connect(m_demuxer.get(), &PlaybackEngineObject::atEnd, stream.get(),
                &StreamDecoder::onFinalPacketReceived);
        connect(stream.get(), &StreamDecoder::packetProcessed, m_demuxer.get(),
                &Demuxer::onPacketProcessed);
    });

    connect(m_demuxer.get(), &Demuxer::firstPacketFound, this, &PlaybackEngine::onFirstPacketFound);
}
457
// Joins and destroys worker threads that no longer host any playback object.
// Threads still in use are moved back into m_threads and kept alive.
void PlaybackEngine::deleteFreeThreads() {
    m_threadsDirty = false;
    auto freeThreads = std::move(m_threads);

    // Reclaim every thread that still has a living object.
    forEachExistingObject([&](auto &object) {
        m_threads.insert(freeThreads.extract(objectThreadName(*object)));
    });

    // Ask all unused threads to quit first, then wait on each: quitting them
    // all up front lets the shutdowns overlap instead of running serially.
    for (auto &[name, thr] : freeThreads)
        thr->quit();

    for (auto &[name, thr] : freeThreads)
        thr->wait();
}
472
// Installs the media to be played. May only be called once, while stopped and
// before any worker threads exist; reloading is not supported.
void PlaybackEngine::setMedia(MediaDataHolder media)
{
    Q_ASSERT(!m_media.avContext()); // Playback engine does not support reloading media
    Q_ASSERT(m_state == QMediaPlayer::StoppedState);
    Q_ASSERT(m_threads.empty());

    m_media = std::move(media);
    updateVideoSinkSize();
}
482
// Replaces the video sink. The native size is carried over from the previous
// sink, and the object graph is rebuilt when a sink appears or disappears
// (video/subtitle renderers only exist while a sink is set).
// NOTE(review): unlike setAudioSink, the new sink is not pushed to existing
// renderers here via updateActiveVideoOutput — confirm this is intentional or
// handled elsewhere (a line may have been lost in extraction).
void PlaybackEngine::setVideoSink(QVideoSink *sink)
{
    auto prev = std::exchange(m_videoSink, sink);
    if (prev == sink)
        return;

    updateVideoSinkSize(prev);

    if (!sink || !prev) {
        // might need some improvements
        forceUpdate();
    }
}
497
498void PlaybackEngine::setAudioSink(QPlatformAudioOutput *output) {
499 setAudioSink(output ? output->q : nullptr);
500}
501
502void PlaybackEngine::setAudioSink(QAudioOutput *output)
503{
504 QAudioOutput *prev = std::exchange(m_audioOutput, output);
505 if (prev == output)
506 return;
507
508 updateActiveAudioOutput(output);
509
510 if (!output || !prev) {
511 // might need some improvements
512 forceUpdate();
513 }
514}
515
516void PlaybackEngine::setAudioBufferOutput(QAudioBufferOutput *output)
517{
518 QAudioBufferOutput *prev = std::exchange(m_audioBufferOutput, output);
519 if (prev == output)
520 return;
521 updateActiveAudioOutput(output);
522}
523
{
    std::optional<TrackPosition> pos;

    // While the clock runs, derive the position from the renderers:
    // topPos selects the most advanced (max) position, otherwise the least.
    if (m_timeController.isStarted()) {
        for (size_t i = 0; i < m_renderers.size(); ++i) {
            const auto &renderer = m_renderers[i];
            if (!renderer)
                continue;

            // skip subtitle stream for finding lower rendering position
            if (!topPos && i == QPlatformMediaPlayer::SubtitleStream && hasMediaStream())
                continue;

            const auto rendererPos = renderer->lastPosition();
            pos = !pos ? rendererPos
                       : topPos ? std::max(*pos, rendererPos)
                                : std::min(*pos, rendererPos);
        }
    }
    // else we cannot reliably (without RC) get the current renderers' position after seeking

    // Fall back to the clock when no renderer provided a position.
    if (!pos)
        pos = m_timeController.currentPosition();

    // Convert the absolute position back into a position within the current
    // loop iteration, clamped to the media bounds.
    return boundPosition(*pos - m_currentLoopOffset.loopStartTimeUs.asDuration());
}
551
553{
554 return m_media.duration();
555}
556
557bool PlaybackEngine::isSeekable() const { return m_media.isSeekable(); }
558
560PlaybackEngine::streamInfo(QPlatformMediaPlayer::TrackType trackType) const
561{
562 return m_media.streamInfo(trackType);
563}
564
566{
567 return m_media.metaData();
568}
569
571{
572 return m_media.activeTrack(type);
573}
574
576{
577 m_pitchCompensation = enabled;
578 if (AudioRenderer *renderer = getAudioRenderer())
579 renderer->setPitchCompensation(enabled);
580}
581
// Switches the active stream for trackType and rebuilds the decoding chain
// (codec, all stream decoders, demuxer). Renderers of the other tracks and
// the running clock are kept so the switch stays smooth.
void PlaybackEngine::setActiveTrack(QPlatformMediaPlayer::TrackType trackType, int streamNumber)
{
    if (!m_media.setActiveTrack(trackType, streamNumber))
        return;

    // Drop everything tied to the old stream: its codec, its renderer, every
    // decoder and the demuxer (stream decoders are bound to the demuxer).
    m_codecContexts[trackType] = {};

    m_renderers[trackType].reset();
    m_streams = defaultObjectsArray<decltype(m_streams)>();
    m_demuxer.reset();

    // Don't deactivate m_timeController:
    //
    // We strive to have a smooth playback if we change the active track. It means that
    // we don't want to do any time shiftings. Instead, we rely on the fact that
    // buffers in renderers are not empty to compensate the demuxer's lag.

    updateVideoSinkSize();
    createObjectsIfNeeded();
    updateObjectsPausedState();
}
603
// Stops the clock and pins it to pos (0 when stopping, the media duration at
// end of stream), and resets the loop offset.
// NOTE(review): "finilize" is a typo for "finalize", but the name is part of
// the class declaration elsewhere, so it cannot be fixed here alone.
void PlaybackEngine::finilizeTime(TrackPosition pos)
{
    Q_ASSERT(pos >= TrackPosition(0) && pos <= duration().asTimePoint());

    m_timeController.deactivate();
    m_timeController.sync(pos);
    m_currentLoopOffset = {};
}
612
// Detaches every output (audio buffer, audio device, video sink) from the
// renderers before the engine stops or is destroyed.
void PlaybackEngine::finalizeOutputs()
{
    if (m_audioBufferOutput)
        updateActiveAudioOutput(static_cast<QAudioBufferOutput *>(nullptr));
    if (m_audioOutput)
        updateActiveAudioOutput(static_cast<QAudioOutput *>(nullptr));
    // cleanOutput = true: the video renderer also clears the sink's last frame.
    updateActiveVideoOutput(nullptr, true);
}
621
622bool PlaybackEngine::hasRenderer(const PlaybackEngineObjectID &id) const
623{
624 return std::any_of(m_renderers.begin(), m_renderers.end(),
625 [&](auto &renderer) { return checkObjectID(renderer, id); });
626}
627
// Forwards the given output (QAudioOutput or QAudioBufferOutput; null means
// detach) to the audio renderer, if one exists.
template <typename AudioOutput>
void PlaybackEngine::updateActiveAudioOutput(AudioOutput *output)
{
    if (AudioRenderer *renderer = getAudioRenderer())
        renderer->setOutput(output);
}
634
635void PlaybackEngine::updateActiveVideoOutput(QVideoSink *sink, bool cleanOutput)
636{
637 if (auto renderer = qobject_cast<SubtitleRenderer *>(
638 m_renderers[QPlatformMediaPlayer::SubtitleStream].get()))
639 renderer->setOutput(sink, cleanOutput);
640 if (auto renderer =
641 qobject_cast<VideoRenderer *>(m_renderers[QPlatformMediaPlayer::VideoStream].get()))
642 renderer->setOutput(sink, cleanOutput);
643}
644
// Pushes a native frame size to the current video sink: carried over from
// prevSink when given, otherwise computed from the active video stream's
// codec parameters with pixel aspect ratio and rotation applied.
void PlaybackEngine::updateVideoSinkSize(QVideoSink *prevSink)
{
    auto platformVideoSink = m_videoSink ? m_videoSink->platformVideoSink() : nullptr;
    if (!platformVideoSink)
        return;

    if (prevSink && prevSink->platformVideoSink())
        platformVideoSink->setNativeSize(prevSink->platformVideoSink()->nativeSize());
    else {
        const auto streamIndex = m_media.currentStreamIndex(QPlatformMediaPlayer::VideoStream);
        if (streamIndex >= 0) {
            const auto context = m_media.avContext();
            const auto stream = context->streams[streamIndex];
            // Let FFmpeg pick the effective sample aspect ratio for the stream.
            const AVRational pixelAspectRatio =
                    av_guess_sample_aspect_ratio(context, stream, nullptr);
            // auto size = metaData().value(QMediaMetaData::Resolution)
            const QSize size =
                    qCalculateFrameSize({ stream->codecpar->width, stream->codecpar->height },
                                        { pixelAspectRatio.num, pixelAspectRatio.den });

            platformVideoSink->setNativeSize(
                    qRotatedFrameSize(size, m_media.transformation().rotation));
        }
    }
}
670
671TrackPosition PlaybackEngine::boundPosition(TrackPosition position) const
672{
673 position = qMax(position, TrackPosition(0));
674 return duration() > TrackDuration(0) ? qMin(position, duration().asTimePoint()) : position;
675}
676
677AudioRenderer *PlaybackEngine::getAudioRenderer()
678{
679 return qobject_cast<AudioRenderer *>(m_renderers[QPlatformMediaPlayer::AudioStream].get());
680}
681
682} // namespace QFFmpeg
683
684QT_END_NAMESPACE
685
686#include "moc_qffmpegplaybackengine_p.cpp"
void setPitchCompensation(bool enabled)
void packetsBuffered()
void seek(TrackPosition pos)
void setVideoSink(QVideoSink *sink)
void setState(QMediaPlayer::PlaybackState state)
void updateActiveAudioOutput(AudioOutput *output)
virtual RendererPtr createRenderer(QPlatformMediaPlayer::TrackType trackType)
void setAudioBufferOutput(QAudioBufferOutput *output)
void setAudioSink(QAudioOutput *output)
void setPitchCompensation(bool enabled)
const QList< MediaDataHolder::StreamInfo > & streamInfo(QPlatformMediaPlayer::TrackType trackType) const
void setActiveTrack(QPlatformMediaPlayer::TrackType type, int streamNumber)
void setMedia(MediaDataHolder media)
TrackPosition currentPosition(bool topPos=true) const
const QMediaMetaData & metaData() const
void updateActiveVideoOutput(QVideoSink *sink, bool cleanOutput=false)
The QPlaybackOptions class enables low-level control of media playback options.
static Array defaultObjectsArray()
std::conditional_t< QT_FFMPEG_AVIO_WRITE_CONST, const uint8_t *, uint8_t * > AvioWriteBufferType
Combined button and popup list for selecting options.
#define qCDebug(category,...)
#define Q_STATIC_LOGGING_CATEGORY(name,...)
void operator()(PlaybackEngineObject *) const