Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
qffmpegplaybackengine.cpp
// Copyright (C) 2021 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
#include "qffmpegplaybackengine_p.h"

#include "qvideosink.h"
#include "qaudiooutput.h"
#include "private/qplatformaudiooutput_p.h"
#include "private/qplatformvideosink_p.h"
#include "private/qaudiobufferoutput_p.h"
#include "qiodevice.h"
#include "playbackengine/qffmpegdemuxer_p.h"
#include "playbackengine/qffmpegstreamdecoder_p.h"
#include "playbackengine/qffmpegsubtitlerenderer_p.h"
#include "playbackengine/qffmpegvideorenderer_p.h"
#include "playbackengine/qffmpegaudiorenderer_p.h"

#include <qloggingcategory.h>

QT_BEGIN_NAMESPACE

namespace QFFmpeg {

Q_STATIC_LOGGING_CATEGORY(qLcPlaybackEngine, "qt.multimedia.ffmpeg.playbackengine");

// The helper is needed since, on some compilers, std::unique_ptr
// doesn't have a default constructor when sizeof(CustomDeleter) > 0
template <typename Array>
inline static Array defaultObjectsArray()
{
    using T = typename Array::value_type;
    return { T{ {}, {} }, T{ {}, {} }, T{ {}, {} } };
}
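// The engine owns a demuxer, one stream decoder per track type and one renderer per track
// type (m_demuxer, m_streams, m_renderers), each moved to its own worker thread in
// registerObject(). The metatypes registered in the constructor are needed because frames,
// packets and object IDs travel between those threads through queued connections.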
PlaybackEngine::PlaybackEngine(const QPlaybackOptions &options)
    : m_demuxer({}, {}),
      m_streams(defaultObjectsArray<decltype(m_streams)>()),
      m_renderers(defaultObjectsArray<decltype(m_renderers)>()),
      m_options{ options }
{
    qCDebug(qLcPlaybackEngine) << "Create PlaybackEngine";
    qRegisterMetaType<QFFmpeg::Packet>();
    qRegisterMetaType<QFFmpeg::Frame>();
    qRegisterMetaType<QFFmpeg::TrackPosition>();
    qRegisterMetaType<QFFmpeg::PlaybackEngineObjectID>();
}

PlaybackEngine::~PlaybackEngine() {
    qCDebug(qLcPlaybackEngine) << "Delete PlaybackEngine";

    finalizeOutputs();
    forEachExistingObject([](auto &object) { object.reset(); });
    deleteFreeThreads();
}
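// Invoked when a renderer reports that it has reached the end of its stream. Playback stops
// once the audio and video renderers (if any) are done; the subtitle renderer is only waited
// for when there is no audio or video stream at all. The position is then finalized at the
// media duration and endOfStream() is emitted.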
void PlaybackEngine::onRendererFinished(const PlaybackEngineObjectID &id)
{
    if (!hasRenderer(id))
        return;

    auto isAtEnd = [this](auto trackType) {
        return !m_renderers[trackType] || m_renderers[trackType]->isAtEnd();
    };

    if (!isAtEnd(QPlatformMediaPlayer::VideoStream))
        return;

    if (!isAtEnd(QPlatformMediaPlayer::AudioStream))
        return;

    if (!isAtEnd(QPlatformMediaPlayer::SubtitleStream) && !hasMediaStream())
        return;

    if (std::exchange(m_state, QMediaPlayer::StoppedState) == QMediaPlayer::StoppedState)
        return;

    finilizeTime(duration().asTimePoint());

    forceUpdate();

    qCDebug(qLcPlaybackEngine) << "Playback engine end of stream";

    emit endOfStream();
}

void PlaybackEngine::onRendererLoopChanged(const PlaybackEngineObjectID &id, TrackPosition offset,
                                           int loopIndex)
{
    if (!hasRenderer(id))
        return;

    if (loopIndex > m_currentLoopOffset.loopIndex) {
        m_currentLoopOffset = { offset, loopIndex };
        emit loopChanged();
    } else if (loopIndex == m_currentLoopOffset.loopIndex && offset != m_currentLoopOffset.loopStartTimeUs) {
        qWarning() << "Unexpected offset for loop" << loopIndex << ":" << offset.get() << "vs"
                   << m_currentLoopOffset.loopStartTimeUs.get();
        m_currentLoopOffset.loopStartTimeUs = offset;
    }
}
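// Invoked by the demuxer once it has found the first packet after (re)creation. The time
// controller is synced to the actual demuxing start time (accounting for the initialization
// delay logged below) and the updated controller is handed to all renderers.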
void PlaybackEngine::onFirstPacketFound(const PlaybackEngineObjectID &id, TrackPosition absSeekPos)
{
    if (!checkObjectID(m_demuxer, id))
        return;

    if (m_timeController.isStarted())
        return;

    const SteadyClock::time_point now = SteadyClock::now();
    const SteadyClock::time_point expectedTimePoint = m_timeController.timeFromPosition(absSeekPos);
    const auto delay =
            std::chrono::round<std::chrono::microseconds>(now - expectedTimePoint);
    qCDebug(qLcPlaybackEngine) << "Delay of demuxer initialization:" << delay;
    m_timeController.sync(now, absSeekPos);
    m_timeController.start();

    forEachExistingObject<Renderer>(
            [&](auto &renderer) { renderer->setTimeController(m_timeController); });
}
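// The audio renderer acts as the synchronization source (see the assert below): when it reports
// its clock, the other renderers are soft-synced to the same time point and the engine's own
// time controller is re-synced.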
void PlaybackEngine::onRendererSynchronized(const PlaybackEngineObjectID &id,
                                            SteadyClock::time_point tp, TrackPosition pos)
{
    if (!hasRenderer(id))
        return;

    Q_ASSERT(checkObjectID(m_renderers[QPlatformMediaPlayer::AudioStream], id));

    forEachExistingObject<Renderer>([&](auto &renderer) {
        if (id.objectID != renderer->objectID()) {
            auto tc = m_timeController;
            tc.syncSoft(tp, pos);
            renderer->setTimeController(tc);
        }
    });

    m_timeController.sync(tp, pos);
}

void PlaybackEngine::setState(QMediaPlayer::PlaybackState state) {
    if (!m_media.avContext())
        return;

    if (state == m_state)
        return;

    const auto prevState = std::exchange(m_state, state);

    if (m_state == QMediaPlayer::StoppedState) {
        finalizeOutputs();
        finilizeTime(TrackPosition(0));
    }

    if (prevState == QMediaPlayer::StoppedState || m_state == QMediaPlayer::StoppedState)
        recreateObjects();

    if (prevState == QMediaPlayer::StoppedState)
        triggerStepIfNeeded();

    updateObjectsPausedState();
}

void PlaybackEngine::updateObjectsPausedState()
{
    const bool paused = m_state != QMediaPlayer::PlayingState;
    m_timeController.setPaused(paused);

    forEachExistingObject([&](auto &object) {
        if constexpr (std::is_same_v<decltype(*object), Renderer &>)
            object->setPaused(paused);
        else
            object->setPaused(false);
    });
}
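// Custom deleter for playback engine objects: instead of deleting the object directly, it asks
// the object to kill() itself and schedules a deferred deleteFreeThreads() call so that worker
// threads left without any object get cleaned up.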
void PlaybackEngine::ObjectDeleter::operator()(PlaybackEngineObject *object) const
{
    Q_ASSERT(engine);
    if (!std::exchange(engine->m_threadsDirty, true))
        QMetaObject::invokeMethod(engine, &PlaybackEngine::deleteFreeThreads, Qt::QueuedConnection);

    object->kill();
}
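// Every playback engine object is moved to a worker thread keyed by its class name (plus the
// track type for stream decoders, see objectThreadName()); threads are created lazily and kept
// around for reuse until deleteFreeThreads() finds them without objects.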
void PlaybackEngine::registerObject(PlaybackEngineObject &object)
{
    connect(&object, &PlaybackEngineObject::error, this, &PlaybackEngine::errorOccured);

    auto threadName = objectThreadName(object);
    auto &thread = m_threads[threadName];
    if (!thread) {
        thread = std::make_unique<QThread>();
        thread->setObjectName(threadName);
        thread->start();
    }

    Q_ASSERT(object.thread() != thread.get());
    object.moveToThread(thread.get());
}

PlaybackEngine::RendererPtr
PlaybackEngine::createRenderer(QPlatformMediaPlayer::TrackType trackType)
{
    switch (trackType) {
    case QPlatformMediaPlayer::VideoStream:
        return m_videoSink ? createPlaybackEngineObject<VideoRenderer>(
                                     m_timeController, m_videoSink, m_media.transformation())
                           : RendererPtr{ {}, {} };
    case QPlatformMediaPlayer::AudioStream:
        return m_audioOutput || m_audioBufferOutput
                ? createPlaybackEngineObject<AudioRenderer>(
                        m_timeController, m_audioOutput, m_audioBufferOutput, m_pitchCompensation)
                : RendererPtr{ {}, {} };
    case QPlatformMediaPlayer::SubtitleStream:
        return m_videoSink
                ? createPlaybackEngineObject<SubtitleRenderer>(m_timeController, m_videoSink)
                : RendererPtr{ {}, {} };
    default:
        return { {}, {} };
    }
}
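// Applies the given action to every existing object (demuxer, stream decoders, renderers)
// whose type derives from C; null entries are skipped.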
template<typename C, typename Action>
void PlaybackEngine::forEachExistingObject(Action &&action)
{
    auto handleNotNullObject = [&](auto &object) {
        if constexpr (std::is_base_of_v<C, std::remove_reference_t<decltype(*object)>>)
            if (object)
                action(object);
    };

    handleNotNullObject(m_demuxer);
    std::for_each(m_streams.begin(), m_streams.end(), handleNotNullObject);
    std::for_each(m_renderers.begin(), m_renderers.end(), handleNotNullObject);
}

template<typename Action>
void PlaybackEngine::forEachExistingObject(Action &&action)
{
    forEachExistingObject<PlaybackEngineObject>(std::forward<Action>(action));
}

void PlaybackEngine::seek(TrackPosition pos)
{
    pos = boundPosition(pos);

    m_timeController.deactivate();
    m_timeController.sync(m_currentLoopOffset.loopStartTimeUs.asDuration() + pos);
    m_seekPending = true;

    forceUpdate();
}

void PlaybackEngine::setLoops(int loops)
{
    if (!isSeekable()) {
        qWarning() << "Cannot set loops for non-seekable source";
        return;
    }

    if (std::exchange(m_loops, loops) == loops)
        return;

    qCDebug(qLcPlaybackEngine) << "set playback engine loops:" << loops << "prev loops:" << m_loops
                               << "index:" << m_currentLoopOffset.loopIndex;

    if (m_demuxer)
        m_demuxer->setLoops(loops);
}
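// While paused, force the video renderer to render a single frame, presumably so that the frame
// at the current position becomes visible; called when leaving the stopped state and from
// forceUpdate().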
void PlaybackEngine::triggerStepIfNeeded()
{
    if (m_state != QMediaPlayer::PausedState)
        return;

    if (m_renderers[QPlatformMediaPlayer::VideoStream])
        m_renderers[QPlatformMediaPlayer::VideoStream]->doForceStep();

    // TODO: maybe trigger SubtitleStream.
    // If we trigger it, we have to make seeking for the current subtitle frame more stable.
    // Or set some timeout for seeking.
}

QString PlaybackEngine::objectThreadName(const PlaybackEngineObject &object)
{
    QString result = QString::fromLatin1(object.metaObject()->className());
    if (auto stream = qobject_cast<const StreamDecoder *>(&object))
        result += QString::number(stream->trackType());

    return result;
}
void PlaybackEngine::setPlaybackRate(float rate) {
    if (rate == playbackRate())
        return;

    m_timeController.setPlaybackRate(rate);
    forEachExistingObject<Renderer>([rate](auto &renderer) { renderer->setPlaybackRate(rate); });
}

float PlaybackEngine::playbackRate() const {
    return m_timeController.playbackRate();
}
void PlaybackEngine::recreateObjects()
{
    m_timeController.deactivate();

    forEachExistingObject([](auto &object) { object.reset(); });

    createObjectsIfNeeded();
}

void PlaybackEngine::createObjectsIfNeeded()
{
    if (m_state == QMediaPlayer::StoppedState || !m_media.avContext())
        return;

    for (int i = 0; i < QPlatformMediaPlayer::NTrackTypes; ++i)
        createStreamAndRenderer(static_cast<QPlatformMediaPlayer::TrackType>(i));

    createDemuxer();

    // temporary, to comply with the test disablingAllTracks_doesNotStopPlayback
    if (!m_demuxer)
        m_timeController.start();
}

void PlaybackEngine::forceUpdate()
{
    recreateObjects();
    triggerStepIfNeeded();
    updateObjectsPausedState();
}
void PlaybackEngine::createStreamAndRenderer(QPlatformMediaPlayer::TrackType trackType)
{
    auto codecContext = codecContextForTrack(trackType);

    auto &renderer = m_renderers[trackType];

    if (!codecContext)
        return;

    if (!renderer) {
        renderer = createRenderer(trackType);

        if (!renderer)
            return;

        connect(renderer.get(), &Renderer::synchronized, this,
                &PlaybackEngine::onRendererSynchronized);

        connect(renderer.get(), &Renderer::loopChanged, this,
                &PlaybackEngine::onRendererLoopChanged);

        connect(renderer.get(), &PlaybackEngineObject::atEnd, this,
                &PlaybackEngine::onRendererFinished);
    }

    auto &stream = m_streams[trackType] =
            createPlaybackEngineObject<StreamDecoder>(*codecContext, renderer->seekPosition());

    Q_ASSERT(trackType == stream->trackType());

    connect(stream.get(), &StreamDecoder::requestHandleFrame, renderer.get(), &Renderer::render);
    connect(stream.get(), &PlaybackEngineObject::atEnd, renderer.get(),
            &Renderer::onFinalFrameReceived);
    connect(renderer.get(), &Renderer::frameProcessed, stream.get(),
            &StreamDecoder::onFrameProcessed);
}
std::optional<CodecContext> PlaybackEngine::codecContextForTrack(QPlatformMediaPlayer::TrackType trackType)
{
    const auto streamIndex = m_media.currentStreamIndex(trackType);
    if (streamIndex < 0)
        return {};

    auto &codecContext = m_codecContexts[trackType];

    if (!codecContext) {
        qCDebug(qLcPlaybackEngine)
                << "Create codec for stream:" << streamIndex << "trackType:" << trackType;
        auto maybeCodecContext = CodecContext::create(m_media.avContext()->streams[streamIndex],
                                                      m_media.avContext(), m_options);

        if (!maybeCodecContext) {
            emit errorOccured(QMediaPlayer::FormatError,
                              u"Cannot create codec," + maybeCodecContext.error());
            return {};
        }

        codecContext = maybeCodecContext.value();
    }

    return codecContext;
}

bool PlaybackEngine::hasMediaStream() const
{
    return m_renderers[QPlatformMediaPlayer::AudioStream]
            || m_renderers[QPlatformMediaPlayer::VideoStream];
}
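// Creates the demuxer for the currently selected stream indexes and wires it up: per-track
// packet signals go to the matching stream decoders, end-of-data is forwarded, and processed
// packets are reported back to the demuxer.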
void PlaybackEngine::createDemuxer()
{
    std::array<int, QPlatformMediaPlayer::NTrackTypes> streamIndexes = { -1, -1, -1 };

    bool hasStreams = false;
    forEachExistingObject<StreamDecoder>([&](auto &stream) {
        hasStreams = true;
        const auto trackType = stream->trackType();
        streamIndexes[trackType] = m_media.currentStreamIndex(trackType);
    });

    if (!hasStreams)
        return;

    const TrackPosition currentLoopPosUs = currentPosition(false);

    m_demuxer = createPlaybackEngineObject<Demuxer>(m_media.avContext(), currentLoopPosUs,
                                                    m_seekPending, m_currentLoopOffset,
                                                    streamIndexes, m_loops);

    m_seekPending = false;

    connect(m_demuxer.get(), &Demuxer::packetsBuffered, this, &PlaybackEngine::buffered);

    forEachExistingObject<StreamDecoder>([&](auto &stream) {
        connect(m_demuxer.get(), Demuxer::signalByTrackType(stream->trackType()), stream.get(),
                &StreamDecoder::decode);
        connect(m_demuxer.get(), &PlaybackEngineObject::atEnd, stream.get(),
                &StreamDecoder::onFinalPacketReceived);
        connect(stream.get(), &StreamDecoder::packetProcessed, m_demuxer.get(),
                &Demuxer::onPacketProcessed);
    });

    connect(m_demuxer.get(), &Demuxer::firstPacketFound, this, &PlaybackEngine::onFirstPacketFound);
}
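// Reaps worker threads: threads that still host a live object are kept in m_threads, the
// remaining ones are asked to quit and then joined.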
void PlaybackEngine::deleteFreeThreads() {
    m_threadsDirty = false;
    auto freeThreads = std::move(m_threads);

    forEachExistingObject([&](auto &object) {
        m_threads.insert(freeThreads.extract(objectThreadName(*object)));
    });

    for (auto &[name, thr] : freeThreads)
        thr->quit();

    for (auto &[name, thr] : freeThreads)
        thr->wait();
}

void PlaybackEngine::setMedia(MediaDataHolder media)
{
    Q_ASSERT(!m_media.avContext()); // Playback engine does not support reloading media
    Q_ASSERT(m_state == QMediaPlayer::StoppedState);
    Q_ASSERT(m_threads.empty());

    m_media = std::move(media);
    updateVideoSinkSize();
}

void PlaybackEngine::setVideoSink(QVideoSink *sink)
{
    auto prev = std::exchange(m_videoSink, sink);
    if (prev == sink)
        return;

    updateVideoSinkSize(prev);
    updateActiveVideoOutput(sink);

    if (!sink || !prev) {
        // might need some improvements
        forceUpdate();
    }
}

void PlaybackEngine::setAudioSink(QPlatformAudioOutput *output) {
    setAudioSink(output ? output->q : nullptr);
}

void PlaybackEngine::setAudioSink(QAudioOutput *output)
{
    QAudioOutput *prev = std::exchange(m_audioOutput, output);
    if (prev == output)
        return;

    updateActiveAudioOutput(output);

    if (!output || !prev) {
        // might need some improvements
        forceUpdate();
    }
}

void PlaybackEngine::setAudioBufferOutput(QAudioBufferOutput *output)
{
    QAudioBufferOutput *prev = std::exchange(m_audioBufferOutput, output);
    if (prev == output)
        return;
    updateActiveAudioOutput(output);
}
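// Returns the current playback position relative to the current loop: the maximum
// (topPos == true) or minimum position reported by the existing renderers, ignoring the
// subtitle renderer for the minimum when audio/video streams are present, and falling back
// to the time controller when no renderer exists.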
TrackPosition PlaybackEngine::currentPosition(bool topPos) const
{
    std::optional<TrackPosition> pos;

    for (size_t i = 0; i < m_renderers.size(); ++i) {
        const auto &renderer = m_renderers[i];
        if (!renderer)
            continue;

        // skip the subtitle stream when looking for the lowest rendering position
        if (!topPos && i == QPlatformMediaPlayer::SubtitleStream && hasMediaStream())
            continue;

        const auto rendererPos = renderer->lastPosition();
        pos = !pos ? rendererPos
                   : topPos ? std::max(*pos, rendererPos)
                            : std::min(*pos, rendererPos);
    }

    if (!pos)
        pos = m_timeController.currentPosition();

    return boundPosition(*pos - m_currentLoopOffset.loopStartTimeUs.asDuration());
}

TrackDuration PlaybackEngine::duration() const
{
    return m_media.duration();
}

bool PlaybackEngine::isSeekable() const { return m_media.isSeekable(); }

const QList<MediaDataHolder::StreamInfo> &
PlaybackEngine::streamInfo(QPlatformMediaPlayer::TrackType trackType) const
{
    return m_media.streamInfo(trackType);
}

const QMediaMetaData &PlaybackEngine::metaData() const
{
    return m_media.metaData();
}

int PlaybackEngine::activeTrack(QPlatformMediaPlayer::TrackType type) const
{
    return m_media.activeTrack(type);
}

void PlaybackEngine::setPitchCompensation(bool enabled)
{
    m_pitchCompensation = enabled;
    if (AudioRenderer *renderer = getAudioRenderer())
        renderer->setPitchCompensation(enabled);
}

void PlaybackEngine::setActiveTrack(QPlatformMediaPlayer::TrackType trackType, int streamNumber)
{
    if (!m_media.setActiveTrack(trackType, streamNumber))
        return;

    m_codecContexts[trackType] = {};

    m_renderers[trackType].reset();
    m_streams = defaultObjectsArray<decltype(m_streams)>();
    m_demuxer.reset();

    // Don't deactivate m_timeController:
    //
    // We strive for smooth playback when the active track changes, which means we don't
    // want to do any time shifting. Instead, we rely on the renderers' buffers not being
    // empty to compensate for the demuxer's lag.

    updateVideoSinkSize();
    createObjectsIfNeeded();
    updateObjectsPausedState();
}

void PlaybackEngine::finilizeTime(TrackPosition pos)
{
    Q_ASSERT(pos >= TrackPosition(0) && pos <= duration().asTimePoint());

    m_timeController.deactivate();
    m_timeController.sync(pos);
    m_currentLoopOffset = {};
}

void PlaybackEngine::finalizeOutputs()
{
    if (m_audioBufferOutput)
        updateActiveAudioOutput(static_cast<QAudioBufferOutput *>(nullptr));
    if (m_audioOutput)
        updateActiveAudioOutput(static_cast<QAudioOutput *>(nullptr));
    updateActiveVideoOutput(nullptr, true);
}

bool PlaybackEngine::hasRenderer(const PlaybackEngineObjectID &id) const
{
    return std::any_of(m_renderers.begin(), m_renderers.end(),
                       [&](auto &renderer) { return checkObjectID(renderer, id); });
}

template <typename AudioOutput>
void PlaybackEngine::updateActiveAudioOutput(AudioOutput *output)
{
    if (AudioRenderer *renderer = getAudioRenderer())
        renderer->setOutput(output);
}

void PlaybackEngine::updateActiveVideoOutput(QVideoSink *sink, bool cleanOutput)
{
    if (auto renderer = qobject_cast<SubtitleRenderer *>(
                m_renderers[QPlatformMediaPlayer::SubtitleStream].get()))
        renderer->setOutput(sink, cleanOutput);
    if (auto renderer =
                qobject_cast<VideoRenderer *>(m_renderers[QPlatformMediaPlayer::VideoStream].get()))
        renderer->setOutput(sink, cleanOutput);
}
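// Propagates a native frame size to the current video sink: either taken over from the previous
// sink, or computed from the video stream's codec parameters and guessed sample aspect ratio,
// taking the stream rotation into account.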
void PlaybackEngine::updateVideoSinkSize(QVideoSink *prevSink)
{
    auto platformVideoSink = m_videoSink ? m_videoSink->platformVideoSink() : nullptr;
    if (!platformVideoSink)
        return;

    if (prevSink && prevSink->platformVideoSink())
        platformVideoSink->setNativeSize(prevSink->platformVideoSink()->nativeSize());
    else {
        const auto streamIndex = m_media.currentStreamIndex(QPlatformMediaPlayer::VideoStream);
        if (streamIndex >= 0) {
            const auto context = m_media.avContext();
            const auto stream = context->streams[streamIndex];
            const AVRational pixelAspectRatio =
                    av_guess_sample_aspect_ratio(context, stream, nullptr);
            // auto size = metaData().value(QMediaMetaData::Resolution)
            const QSize size =
                    qCalculateFrameSize({ stream->codecpar->width, stream->codecpar->height },
                                        { pixelAspectRatio.num, pixelAspectRatio.den });

            platformVideoSink->setNativeSize(
                    qRotatedFrameSize(size, m_media.transformation().rotation));
        }
    }
}

TrackPosition PlaybackEngine::boundPosition(TrackPosition position) const
{
    position = qMax(position, TrackPosition(0));
    return duration() > TrackDuration(0) ? qMin(position, duration().asTimePoint()) : position;
}

AudioRenderer *PlaybackEngine::getAudioRenderer()
{
    return qobject_cast<AudioRenderer *>(m_renderers[QPlatformMediaPlayer::AudioStream].get());
}

} // namespace QFFmpeg

QT_END_NAMESPACE

#include "moc_qffmpegplaybackengine_p.cpp"