Qt SDK internal/contributor source listing of evrcustompresenter.cpp. Note: this is NOT official API documentation; that is found at https://doc.qt.io/
1// Copyright (C) 2016 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
5
7#include "evrhelpers_p.h"
8#include <private/qwindowsmultimediautils_p.h>
9#include <private/qplatformvideosink_p.h>
10
11#include <rhi/qrhi.h>
12
13#include <QtCore/qmutex.h>
14#include <QtCore/qvarlengtharray.h>
15#include <QtCore/qrect.h>
16#include <qthread.h>
17#include <qcoreapplication.h>
18#include <qmath.h>
19#include <qloggingcategory.h>
20
21#include <mutex>
22
23#include <float.h>
24#include <evcode.h>
25
27
28Q_STATIC_LOGGING_CATEGORY(qLcEvrCustomPresenter, "qt.multimedia.evrcustompresenter");
29
// File-scope tunables shared by the Scheduler / EVRCustomPresenter code below.
30const static MFRatio g_DefaultFrameRate = { 30, 1 };
// Milliseconds to wait for the scheduler thread to acknowledge a flush.
31static const DWORD SCHEDULER_TIMEOUT = 5000;
// MFTIME is expressed in 100-ns units: 10,000,000 ticks per second.
32static const MFTIME ONE_SECOND = 10000000;
33static const LONG ONE_MSEC = 1000;
34
// Sentinel clock-start offset meaning "resume at the current position".
35#define QMM_PRESENTATION_CURRENT_POSITION 0x7fffffffffffffff
36
37// Function declarations.
38static HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect& nrcSource);
40
41static inline LONG MFTimeToMsec(const LONGLONG& time)
42{
43 return (LONG)(time / (ONE_SECOND / ONE_MSEC));
44}
45
46bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter)
47{
48 if (!evr || !presenter)
49 return false;
50
51 HRESULT result = E_FAIL;
52
53 IMFVideoRenderer *renderer = NULL;
54 if (SUCCEEDED(evr->QueryInterface(IID_PPV_ARGS(&renderer)))) {
55 result = renderer->InitializeRenderer(NULL, presenter);
56 renderer->Release();
57 }
58
59 return result == S_OK;
60}
61
// NOTE(review): the class declaration line (original 62) and the constructor's
// init list (original 66) were lost in extraction; presumably this is a QEvent
// subclass that carries one IMFSample to the sink thread — confirm upstream.
63{
64public:
65 explicit PresentSampleEvent(const ComPtr<IMFSample> &sample)
67 {
68 }
69
    // The sample to present (shared COM reference, immutable after construction).
70 ComPtr<IMFSample> sample() const { return m_sample; }
71
72private:
73 const ComPtr<IMFSample> m_sample;
74};
75
// NOTE(review): the Scheduler constructor/destructor signature lines (original
// 76, 80 and 84) were lost in extraction; only init list and bodies remain.
77 : m_presenter(presenter)
78 , m_threadID(0)
79 , m_playbackRate(1.0f)
81{
82}
83
// Destructor body: drop any samples still queued for presentation.
85{
86 m_scheduledSamples.clear();
87}
88
89void Scheduler::setFrameRate(const MFRatio& fps)
90{
91 UINT64 AvgTimePerFrame = 0;
92
93 // Convert to a duration.
94 MFFrameRateToAverageTimePerFrame(fps.Numerator, fps.Denominator, &AvgTimePerFrame);
95
96 // Calculate 1/4th of this value, because we use it frequently.
97 m_perFrame_1_4th = AvgTimePerFrame / 4;
98}
99
100HRESULT Scheduler::startScheduler(ComPtr<IMFClock> clock)
101{
102 if (m_schedulerThread)
103 return E_UNEXPECTED;
104
105 HRESULT hr = S_OK;
106 DWORD dwID = 0;
107 HANDLE hObjects[2];
108 DWORD dwWait = 0;
109
110 m_clock = clock;
111
112 // Set a high the timer resolution (ie, short timer period).
113 timeBeginPeriod(1);
114
115 // Create an event to wait for the thread to start.
116 m_threadReadyEvent = EventHandle{ CreateEvent(NULL, FALSE, FALSE, NULL) };
117 if (!m_threadReadyEvent) {
118 hr = HRESULT_FROM_WIN32(GetLastError());
119 goto done;
120 }
121
122 // Create an event to wait for flush commands to complete.
123 m_flushEvent = EventHandle{ CreateEvent(NULL, FALSE, FALSE, NULL) };
124 if (!m_flushEvent) {
125 hr = HRESULT_FROM_WIN32(GetLastError());
126 goto done;
127 }
128
129 // Create the scheduler thread.
130 m_schedulerThread = ThreadHandle{ CreateThread(NULL, 0, schedulerThreadProc, (LPVOID)this, 0, &dwID) };
131 if (!m_schedulerThread) {
132 hr = HRESULT_FROM_WIN32(GetLastError());
133 goto done;
134 }
135
136 // Wait for the thread to signal the "thread ready" event.
137 hObjects[0] = m_threadReadyEvent.get();
138 hObjects[1] = m_schedulerThread.get();
139 dwWait = WaitForMultipleObjects(2, hObjects, FALSE, INFINITE); // Wait for EITHER of these handles.
140 if (WAIT_OBJECT_0 != dwWait) {
141 // The thread terminated early for some reason. This is an error condition.
142 m_schedulerThread = {};
143
144 hr = E_UNEXPECTED;
145 goto done;
146 }
147
148 m_threadID = dwID;
149
150done:
151 // Regardless success/failure, we are done using the "thread ready" event.
152 m_threadReadyEvent = {};
153
154 return hr;
155}
156
// Scheduler::stopScheduler (signature line lost in extraction): terminate the
// worker thread, release its handles, discard queued samples and restore the
// system timer resolution raised in startScheduler(). Safe to call when the
// thread was never started (early S_OK return, timer resolution untouched).
158{
159 if (!m_schedulerThread)
160 return S_OK;
161
162 // Ask the scheduler thread to exit.
163 PostThreadMessage(m_threadID, Terminate, 0, 0);
164
165 // Wait for the thread to exit.
166 WaitForSingleObject(m_schedulerThread.get(), INFINITE);
167
168 // Close handles.
169 m_schedulerThread = {};
170 m_flushEvent = {};
171
172 // Discard samples.
173 m_mutex.lock();
174 m_scheduledSamples.clear();
175 m_mutex.unlock();
176
177 // Restore the timer resolution.
178 timeEndPeriod(1);
179
180 return S_OK;
181}
182
// Scheduler::flush (signature line lost in extraction): ask the worker thread
// to drop all queued samples, then wait (bounded by SCHEDULER_TIMEOUT) for the
// flush event or for the thread to die. No-op when the thread is not running.
184{
185 if (m_schedulerThread) {
186 // Ask the scheduler thread to flush.
187 PostThreadMessage(m_threadID, Flush, 0 , 0);
188
189 // Wait for the scheduler thread to signal the flush event,
190 // OR for the thread to terminate.
191 HANDLE objects[] = { m_flushEvent.get(), m_schedulerThread.get() };
192
193 WaitForMultipleObjects(ARRAYSIZE(objects), objects, FALSE, SCHEDULER_TIMEOUT);
194 }
195
196 return S_OK;
197}
198
// (signature line lost in extraction) Returns whether any samples are still
// waiting in the scheduler queue; takes the queue mutex for the check.
200{
201 QMutexLocker locker(&m_mutex);
202 return m_scheduledSamples.count() > 0;
203}
204
205HRESULT Scheduler::scheduleSample(const ComPtr<IMFSample> &sample, bool presentNow)
206{
207 if (!m_schedulerThread)
208 return MF_E_NOT_INITIALIZED;
209
210 HRESULT hr = S_OK;
211 DWORD dwExitCode = 0;
212
213 GetExitCodeThread(m_schedulerThread.get(), &dwExitCode);
214 if (dwExitCode != STILL_ACTIVE)
215 return E_FAIL;
216
217 if (presentNow || !m_clock) {
218 m_presenter->presentSample(sample);
219 } else {
220 if (m_playbackRate > 0.0f && qt_evr_isSampleTimePassed(m_clock.Get(), sample.Get())) {
221 qCDebug(qLcEvrCustomPresenter) << "Discard the sample, it came too late";
222 return hr;
223 }
224
225 // Queue the sample and ask the scheduler thread to wake up.
226 m_mutex.lock();
227 m_scheduledSamples.enqueue(sample);
228 m_mutex.unlock();
229
230 if (SUCCEEDED(hr))
231 PostThreadMessage(m_threadID, Schedule, 0, 0);
232 }
233
234 return hr;
235}
236
// Scheduler::processSamplesInQueue (signature line lost in extraction;
// presumably `HRESULT Scheduler::processSamplesInQueue(LONG *nextSleep)` —
// confirm upstream). Presents every sample that is due, re-queues the first
// one that is early, and reports via *nextSleep how long the worker thread
// should sleep (INFINITE when the queue drained).
238{
239 HRESULT hr = S_OK;
240 LONG wait = 0;
241
// Drain the shared queue into a local one so presentation happens unlocked.
242 QQueue<ComPtr<IMFSample>> scheduledSamples;
243
244 m_mutex.lock();
245 m_scheduledSamples.swap(scheduledSamples);
246 m_mutex.unlock();
247
248 // Process samples until the queue is empty or until the wait time > 0.
249 while (!scheduledSamples.isEmpty()) {
250 ComPtr<IMFSample> sample = scheduledSamples.dequeue();
251
252 // Process the next sample in the queue. If the sample is not ready
253 // for presentation. the value returned in wait is > 0, which
254 // means the scheduler should sleep for that amount of time.
255 if (isSampleReadyToPresent(sample.Get(), &wait)) {
256 m_presenter->presentSample(sample.Get());
257 continue;
258 }
259
260 if (wait > 0) {
261 // return the sample to scheduler
262 scheduledSamples.prepend(sample);
263 break;
264 }
265 }
266
// Merge anything enqueued meanwhile behind the leftovers, preserving order.
267 m_mutex.lock();
268 scheduledSamples.append(std::move(m_scheduledSamples));
269 m_scheduledSamples.swap(scheduledSamples);
270 m_mutex.unlock();
271
272 // If the wait time is zero, it means we stopped because the queue is
273 // empty (or an error occurred). Set the wait time to infinite; this will
274 // make the scheduler thread sleep until it gets another thread message.
275 if (wait == 0)
276 wait = INFINITE;
277
278 *nextSleep = wait;
279 return hr;
280}
281
282bool Scheduler::isSampleReadyToPresent(IMFSample *sample, LONG *pNextSleep) const
283{
284 *pNextSleep = 0;
285 if (!m_clock)
286 return true;
287
288 MFTIME hnsPresentationTime = 0;
289 MFTIME hnsTimeNow = 0;
290 MFTIME hnsSystemTime = 0;
291
292 // Get the sample's time stamp. It is valid for a sample to
293 // have no time stamp.
294 HRESULT hr = sample->GetSampleTime(&hnsPresentationTime);
295
296 // Get the clock time. (But if the sample does not have a time stamp,
297 // we don't need the clock time.)
298 if (SUCCEEDED(hr))
299 hr = m_clock->GetCorrelatedTime(0, &hnsTimeNow, &hnsSystemTime);
300
301 // Calculate the time until the sample's presentation time.
302 // A negative value means the sample is late.
303 MFTIME hnsDelta = hnsPresentationTime - hnsTimeNow;
304 if (m_playbackRate < 0) {
305 // For reverse playback, the clock runs backward. Therefore, the
306 // delta is reversed.
307 hnsDelta = - hnsDelta;
308 }
309
310 if (hnsDelta < - m_perFrame_1_4th) {
311 // This sample is late - skip.
312 return false;
313 } else if (hnsDelta > (3 * m_perFrame_1_4th)) {
314 // This sample came too early - reschedule
315 *pNextSleep = MFTimeToMsec(hnsDelta - (3 * m_perFrame_1_4th));
316
317 // Adjust the sleep time for the clock rate. (The presentation clock runs
318 // at m_fRate, but sleeping uses the system clock.)
319 if (m_playbackRate != 0)
320 *pNextSleep = (LONG)(*pNextSleep / qFabs(m_playbackRate));
321 return *pNextSleep == 0;
322 } else {
323 // This sample can be presented right now
324 return true;
325 }
326}
327
328DWORD WINAPI Scheduler::schedulerThreadProc(LPVOID parameter)
329{
330 Scheduler* scheduler = reinterpret_cast<Scheduler*>(parameter);
331 if (!scheduler)
332 return -1;
333 return scheduler->schedulerThreadProcPrivate();
334}
335
336DWORD Scheduler::schedulerThreadProcPrivate()
337{
338 HRESULT hr = S_OK;
339 MSG msg;
340 LONG wait = INFINITE;
341 bool exitThread = false;
342
343 // Force the system to create a message queue for this thread.
344 // (See MSDN documentation for PostThreadMessage.)
345 PeekMessage(&msg, NULL, WM_USER, WM_USER, PM_NOREMOVE);
346
347 // Signal to the scheduler that the thread is ready.
348 SetEvent(m_threadReadyEvent.get());
349
350 while (!exitThread) {
351 // Wait for a thread message OR until the wait time expires.
352 DWORD result = MsgWaitForMultipleObjects(0, NULL, FALSE, wait, QS_POSTMESSAGE);
353
354 if (result == WAIT_TIMEOUT) {
355 // If we timed out, then process the samples in the queue
356 hr = processSamplesInQueue(&wait);
357 if (FAILED(hr))
358 exitThread = true;
359 }
360
361 while (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) {
362 bool processSamples = true;
363
364 switch (msg.message) {
365 case Terminate:
366 exitThread = true;
367 break;
368 case Flush:
369 // Flushing: Clear the sample queue and set the event.
370 m_mutex.lock();
371 m_scheduledSamples.clear();
372 m_mutex.unlock();
373 wait = INFINITE;
374 SetEvent(m_flushEvent.get());
375 break;
376 case Schedule:
377 // Process as many samples as we can.
378 if (processSamples) {
379 hr = processSamplesInQueue(&wait);
380 if (FAILED(hr))
381 exitThread = true;
382 processSamples = (wait != (LONG)INFINITE);
383 }
384 break;
385 }
386 }
387
388 }
389
390 return (SUCCEEDED(hr) ? 0 : 1);
391}
392
393
394SamplePool::SamplePool()
395 : m_initialized(false)
396{
397}
398
// SamplePool destructor (signature line lost in extraction): release all
// pooled samples and reset the initialized flag via clear().
400{
401 clear();
402}
403
// SamplePool::takeSample (signature line lost in extraction; presumably
// returns ComPtr<IMFSample> — confirm upstream): pop one free sample from
// the pool, or null when uninitialized/empty.
405{
406 QMutexLocker locker(&m_mutex);
407
408 if (!m_initialized) {
409 qCWarning(qLcEvrCustomPresenter) << "SamplePool is not initialized yet";
410 return nullptr;
411 }
412
413 if (m_videoSampleQueue.isEmpty()) {
414 qCDebug(qLcEvrCustomPresenter) << "SamplePool is empty";
415 return nullptr;
416 }
417
418 // Get a sample from the allocated queue.
419
420 // It doesn't matter if we pull them from the head or tail of the list,
421 // but when we get it back, we want to re-insert it onto the opposite end.
422 // (see returnSample)
423
424 return m_videoSampleQueue.takeFirst();
425}
426
427void SamplePool::returnSample(const ComPtr<IMFSample> &sample)
428{
429 QMutexLocker locker(&m_mutex);
430
431 Q_ASSERT(m_initialized);
432 if (!m_initialized) {
433 qCWarning(qLcEvrCustomPresenter) << "SamplePool is not initialized yet";
434 return;
435 }
436
437 m_videoSampleQueue.append(sample);
438}
439
440HRESULT SamplePool::initialize(QList<ComPtr<IMFSample>> &&samples)
441{
442 QMutexLocker locker(&m_mutex);
443
444 if (m_initialized)
445 return MF_E_INVALIDREQUEST;
446
447 // Move these samples into our allocated queue.
448 m_videoSampleQueue.append(std::move(samples));
449
450 m_initialized = true;
451
452 return S_OK;
453}
454
// SamplePool::clear (signature line lost in extraction): release every pooled
// sample and return the pool to the uninitialized state. Always succeeds.
456{
457 QMutexLocker locker(&m_mutex);
458
459 m_videoSampleQueue.clear();
460 m_initialized = false;
461
462 return S_OK;
463}
464
465
// EVRCustomPresenter constructor (signature line and several member
// initializers — original lines 466, 468, 470, 477, 481 — were lost in
// extraction; presumably m_renderState, m_clock/m_mixer and the present
// engine among them — confirm upstream).
467 : QObject()
469 , m_refCount(1)
471 , m_scheduler(this)
472 , m_tokenCounter(0)
473 , m_sampleNotify(false)
474 , m_prerolled(false)
475 , m_endStreaming(false)
476 , m_playbackRate(1.0f)
478 , m_mediaType(0)
479 , m_videoSink(0)
480 , m_canRenderToSurface(false)
482{
483 // Initial source rectangle = (0,0,1,1)
484 m_sourceRect.top = 0;
485 m_sourceRect.left = 0;
486 m_sourceRect.bottom = 1;
487 m_sourceRect.right = 1;
488
489 setSink(sink);
490}
491
// Destructor (signature line lost in extraction): stop the scheduler, drop
// pooled samples and free the raw-owned present engine.
493{
494 m_scheduler.flush();
495 m_scheduler.stopScheduler();
496 m_samplePool.clear();
497
498 delete m_presentEngine;
499}
500
501HRESULT EVRCustomPresenter::QueryInterface(REFIID riid, void ** ppvObject)
502{
503 if (!ppvObject)
504 return E_POINTER;
505 if (riid == IID_IMFGetService) {
506 *ppvObject = static_cast<IMFGetService*>(this);
507 } else if (riid == IID_IMFTopologyServiceLookupClient) {
508 *ppvObject = static_cast<IMFTopologyServiceLookupClient*>(this);
509 } else if (riid == IID_IMFVideoDeviceID) {
510 *ppvObject = static_cast<IMFVideoDeviceID*>(this);
511 } else if (riid == IID_IMFVideoPresenter) {
512 *ppvObject = static_cast<IMFVideoPresenter*>(this);
513 } else if (riid == IID_IMFRateSupport) {
514 *ppvObject = static_cast<IMFRateSupport*>(this);
515 } else if (riid == IID_IUnknown) {
516 *ppvObject = static_cast<IUnknown*>(static_cast<IMFGetService*>(this));
517 } else if (riid == IID_IMFClockStateSink) {
518 *ppvObject = static_cast<IMFClockStateSink*>(this);
519 } else {
520 *ppvObject = NULL;
521 return E_NOINTERFACE;
522 }
523 AddRef();
524 return S_OK;
525}
526
527ULONG EVRCustomPresenter::AddRef()
528{
529 return InterlockedIncrement(&m_refCount);
530}
531
532ULONG EVRCustomPresenter::Release()
533{
534 ULONG uCount = InterlockedDecrement(&m_refCount);
535 if (uCount == 0)
536 deleteLater();
537 return uCount;
538}
539
540HRESULT EVRCustomPresenter::GetService(REFGUID guidService, REFIID riid, LPVOID *ppvObject)
541{
542 HRESULT hr = S_OK;
543
544 if (!ppvObject)
545 return E_POINTER;
546
547 // The only service GUID that we support is MR_VIDEO_RENDER_SERVICE.
548 if (guidService != MR_VIDEO_RENDER_SERVICE)
549 return MF_E_UNSUPPORTED_SERVICE;
550
551 // First try to get the service interface from the D3DPresentEngine object.
552 hr = m_presentEngine->getService(guidService, riid, ppvObject);
553 if (FAILED(hr))
554 // Next, check if this object supports the interface.
555 hr = QueryInterface(riid, ppvObject);
556
557 return hr;
558}
559
// IMFVideoDeviceID::GetDeviceID (signature line lost in extraction): this
// presenter renders through Direct3D 9, so report IID_IDirect3DDevice9.
561{
562 if (!deviceID)
563 return E_POINTER;
564
565 *deviceID = IID_IDirect3DDevice9;
566
567 return S_OK;
568}
569
570HRESULT EVRCustomPresenter::InitServicePointers(IMFTopologyServiceLookup *lookup)
571{
572 if (!lookup)
573 return E_POINTER;
574
575 HRESULT hr = S_OK;
576 DWORD objectCount = 0;
577
578 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
579
580 // Do not allow initializing when playing or paused.
581 if (isActive())
582 return MF_E_INVALIDREQUEST;
583
584 m_clock.Reset();
585 m_mixer.Reset();
586 m_mediaEventSink.Reset();
587
588 // Ask for the clock. Optional, because the EVR might not have a clock.
589 objectCount = 1;
590
591 lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
592 MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_clock),
593 &objectCount
594 );
595
596 // Ask for the mixer. (Required.)
597 objectCount = 1;
598
599 hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
600 MR_VIDEO_MIXER_SERVICE, IID_PPV_ARGS(&m_mixer),
601 &objectCount
602 );
603
604 if (FAILED(hr))
605 return hr;
606
607 // Make sure that we can work with this mixer.
608 hr = configureMixer(m_mixer.Get());
609 if (FAILED(hr))
610 return hr;
611
612 // Ask for the EVR's event-sink interface. (Required.)
613 objectCount = 1;
614
615 hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
616 MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_mediaEventSink),
617 &objectCount
618 );
619
620 if (SUCCEEDED(hr))
621 m_renderState = RenderStopped;
622
623 return hr;
624}
625
// ReleaseServicePointers (signature line lost in extraction): enter shutdown,
// flush pending samples, clear the media type and drop the clock/mixer/event
// sink acquired in InitServicePointers.
627{
628 // Enter the shut-down state.
629 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
630
631 m_renderState = RenderShutdown;
632
633 // Flush any samples that were scheduled.
634 flush();
635
636 // Clear the media type and release related resources.
637 setMediaType(NULL);
638
639 // Release all services that were acquired from InitServicePointers.
640 m_clock.Reset();
641 m_mixer.Reset();
642 m_mediaEventSink.Reset();
643
644 return S_OK;
645}
646
// (signature line lost in extraction; presumably a validity/capability query)
// True when the D3D present engine is usable and a compatible render format
// was found (see supportedFormatsChanged).
648{
649 return m_presentEngine->isValid() && m_canRenderToSurface;
650}
651
652HRESULT EVRCustomPresenter::ProcessMessage(MFVP_MESSAGE_TYPE message, ULONG_PTR param)
653{
654 HRESULT hr = S_OK;
655
656 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
657
658 hr = checkShutdown();
659 if (FAILED(hr))
660 return hr;
661
662 switch (message) {
663 // Flush all pending samples.
664 case MFVP_MESSAGE_FLUSH:
665 hr = flush();
666 break;
667
668 // Renegotiate the media type with the mixer.
669 case MFVP_MESSAGE_INVALIDATEMEDIATYPE:
670 hr = renegotiateMediaType();
671 break;
672
673 // The mixer received a new input sample.
674 case MFVP_MESSAGE_PROCESSINPUTNOTIFY:
675 hr = processInputNotify();
676 break;
677
678 // Streaming is about to start.
679 case MFVP_MESSAGE_BEGINSTREAMING:
680 hr = beginStreaming();
681 break;
682
683 // Streaming has ended. (The EVR has stopped.)
684 case MFVP_MESSAGE_ENDSTREAMING:
685 hr = endStreaming();
686 break;
687
688 // All input streams have ended.
689 case MFVP_MESSAGE_ENDOFSTREAM:
690 // Set the EOS flag.
691 m_endStreaming = true;
692 // Check if it's time to send the EC_COMPLETE event to the EVR.
693 hr = checkEndOfStream();
694 break;
695
696 // Frame-stepping is starting.
697 case MFVP_MESSAGE_STEP:
698 hr = prepareFrameStep(DWORD(param));
699 break;
700
701 // Cancels frame-stepping.
702 case MFVP_MESSAGE_CANCELSTEP:
703 hr = cancelFrameStep();
704 break;
705
706 default:
707 hr = E_INVALIDARG; // Unknown message. This case should never occur.
708 break;
709 }
710
711 return hr;
712}
713
714HRESULT EVRCustomPresenter::GetCurrentMediaType(IMFVideoMediaType **mediaType)
715{
716 HRESULT hr = S_OK;
717
718 if (!mediaType)
719 return E_POINTER;
720
721 *mediaType = NULL;
722
723 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
724
725 hr = checkShutdown();
726 if (FAILED(hr))
727 return hr;
728
729 if (!m_mediaType)
730 return MF_E_NOT_INITIALIZED;
731
732 return m_mediaType->QueryInterface(IID_PPV_ARGS(mediaType));
733}
734
735HRESULT EVRCustomPresenter::OnClockStart(MFTIME, LONGLONG clockStartOffset)
736{
737 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
738
739 // We cannot start after shutdown.
740 HRESULT hr = checkShutdown();
741 if (FAILED(hr))
742 return hr;
743
744 // Check if the clock is already active (not stopped).
745 if (isActive()) {
746 m_renderState = RenderStarted;
747
748 // If the clock position changes while the clock is active, it
749 // is a seek request. We need to flush all pending samples.
750 if (clockStartOffset != QMM_PRESENTATION_CURRENT_POSITION)
751 flush();
752 } else {
753 m_renderState = RenderStarted;
754
755 // The clock has started from the stopped state.
756
757 // Possibly we are in the middle of frame-stepping OR have samples waiting
758 // in the frame-step queue. Deal with these two cases first:
759 hr = startFrameStep();
760 if (FAILED(hr))
761 return hr;
762 }
763
764 // Now try to get new output samples from the mixer.
765 processOutputLoop();
766
767 return hr;
768}
769
// OnClockRestart (signature line lost in extraction): resume from pause —
// handle pending frame-step work, then restart the output loop.
771{
772 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
773
774 HRESULT hr = checkShutdown();
775 if (FAILED(hr))
776 return hr;
777
778 // The EVR calls OnClockRestart only while paused.
779
780 m_renderState = RenderStarted;
781
782 // Possibly we are in the middle of frame-stepping OR we have samples waiting
783 // in the frame-step queue. Deal with these two cases first:
784 hr = startFrameStep();
785 if (FAILED(hr))
786 return hr;
787
788 // Now resume the presentation loop.
789 processOutputLoop();
790
791 return hr;
792}
793
// OnClockStop (signature line lost in extraction): transition to the stopped
// state, flush pending samples and cancel any in-flight frame-step.
795{
796 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
797
798 HRESULT hr = checkShutdown();
799 if (FAILED(hr))
800 return hr;
801
802 if (m_renderState != RenderStopped) {
803 m_renderState = RenderStopped;
804 flush();
805
806 // If we are in the middle of frame-stepping, cancel it now.
807 if (m_frameStep.state != FrameStepNone)
808 cancelFrameStep();
809 }
810
811 return S_OK;
812}
813
// OnClockPause (signature line lost in extraction): move to RenderPaused
// unless the presenter has already shut down.
815{
816 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
817
818 // We cannot pause the clock after shutdown.
819 HRESULT hr = checkShutdown();
820
821 if (SUCCEEDED(hr))
822 m_renderState = RenderPaused;
823
824 return hr;
825}
826
// OnClockSetRate (signature line lost in extraction): adopt the new playback
// rate and forward it to the scheduler; leaving scrub mode (rate 0 -> non-0)
// aborts any pending frame-step.
828{
829 // Note:
830 // The presenter reports its maximum rate through the IMFRateSupport interface.
831 // Here, we assume that the EVR honors the maximum rate.
832
833 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
834
835 HRESULT hr = checkShutdown();
836 if (FAILED(hr))
837 return hr;
838
839 // If the rate is changing from zero (scrubbing) to non-zero, cancel the
840 // frame-step operation.
841 if ((m_playbackRate == 0.0f) && (rate != 0.0f)) {
842 cancelFrameStep();
843 m_frameStep.samples.clear();
844 }
845
846 m_playbackRate = rate;
847
848 // Tell the scheduler about the new rate.
849 m_scheduler.setClockRate(rate);
850
851 return S_OK;
852}
853
// GetSlowestRate (signature line lost in extraction): there is no minimum
// playback rate, so *rate is set to zero.
// NOTE(review): this returns S_OK even when checkShutdown() failed, leaving
// *rate untouched — GetFastestRate propagates the failure instead; confirm
// whether that asymmetry is intentional.
855{
856 if (!rate)
857 return E_POINTER;
858
859 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
860
861 HRESULT hr = checkShutdown();
862
863 if (SUCCEEDED(hr)) {
864 // There is no minimum playback rate, so the minimum is zero.
865 *rate = 0;
866 }
867
868 return S_OK;
869}
870
871HRESULT EVRCustomPresenter::GetFastestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate)
872{
873 if (!rate)
874 return E_POINTER;
875
876 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
877
878 float maxRate = 0.0f;
879
880 HRESULT hr = checkShutdown();
881 if (FAILED(hr))
882 return hr;
883
884 // Get the maximum *forward* rate.
885 maxRate = getMaxRate(thin);
886
887 // For reverse playback, it's the negative of maxRate.
888 if (direction == MFRATE_REVERSE)
889 maxRate = -maxRate;
890
891 *rate = maxRate;
892
893 return S_OK;
894}
895
896HRESULT EVRCustomPresenter::IsRateSupported(BOOL thin, float rate, float *nearestSupportedRate)
897{
898 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
899
900 float maxRate = 0.0f;
901 float nearestRate = rate; // If we support rate, that is the nearest.
902
903 HRESULT hr = checkShutdown();
904 if (FAILED(hr))
905 return hr;
906
907 // Find the maximum forward rate.
908 // Note: We have no minimum rate (that is, we support anything down to 0).
909 maxRate = getMaxRate(thin);
910
911 if (qFabs(rate) > maxRate) {
912 // The (absolute) requested rate exceeds the maximum rate.
913 hr = MF_E_UNSUPPORTED_RATE;
914
915 // The nearest supported rate is maxRate.
916 nearestRate = maxRate;
917 if (rate < 0) {
918 // Negative for reverse playback.
919 nearestRate = -nearestRate;
920 }
921 }
922
923 // Return the nearest supported rate.
924 if (nearestSupportedRate)
925 *nearestSupportedRate = nearestRate;
926
927 return hr;
928}
929
// (signature line lost in extraction; presumably supportedFormatsChanged)
// Re-evaluate whether the present engine accepts any of the sink's pixel
// formats; the result is cached in m_canRenderToSurface.
931{
932 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
933
934 m_canRenderToSurface = false;
935
936 // check if we can render to the surface (compatible formats)
937 if (m_videoSink) {
938 for (int f = 0; f < QVideoFrameFormat::NPixelFormats; ++f) {
939 // ### set a better preference order
940 QVideoFrameFormat::PixelFormat format = QVideoFrameFormat::PixelFormat(f);
941 if (SUCCEEDED(m_presentEngine->checkFormat(qt_evr_D3DFormatFromPixelFormat(format)))) {
942 m_canRenderToSurface = true;
943 break;
944 }
945 }
946 }
947
948 // TODO: if media type already set, renegotiate?
949}
950
// Attach the QVideoSink, forwarding it to the present engine under the lock.
// NOTE(review): original line 958 was lost in extraction (numbering jumps
// 957 -> 959); presumably a call made after the unlock — confirm upstream.
951void EVRCustomPresenter::setSink(QVideoSink *sink)
952{
953 m_mutex.lock();
954 m_videoSink = sink;
955 m_presentEngine->setSink(sink);
956 m_mutex.unlock();
957
959}
960
961void EVRCustomPresenter::setCropRect(QRect cropRect)
962{
963 m_mutex.lock();
964 m_cropRect = cropRect;
965 m_mutex.unlock();
966}
967
// Push the current normalized source rectangle to the mixer so cropping/zoom
// takes effect on its output.
968HRESULT EVRCustomPresenter::configureMixer(IMFTransform *mixer)
969{
970 // Set the zoom rectangle (ie, the source clipping rectangle).
971 return setMixerSourceRect(mixer, m_sourceRect);
972}
973
974HRESULT EVRCustomPresenter::renegotiateMediaType()
975{
976 HRESULT hr = S_OK;
977 bool foundMediaType = false;
978
979 IMFMediaType *mixerType = NULL;
980 IMFMediaType *optimalType = NULL;
981
982 if (!m_mixer)
983 return MF_E_INVALIDREQUEST;
984
985 // Loop through all of the mixer's proposed output types.
986 DWORD typeIndex = 0;
987 while (!foundMediaType && (hr != MF_E_NO_MORE_TYPES)) {
988 qt_evr_safe_release(&mixerType);
989 qt_evr_safe_release(&optimalType);
990
991 // Step 1. Get the next media type supported by mixer.
992 hr = m_mixer->GetOutputAvailableType(0, typeIndex++, &mixerType);
993 if (FAILED(hr))
994 break;
995
996 // From now on, if anything in this loop fails, try the next type,
997 // until we succeed or the mixer runs out of types.
998
999 // Step 2. Check if we support this media type.
1000 if (SUCCEEDED(hr))
1001 hr = isMediaTypeSupported(mixerType);
1002
1003 // Step 3. Adjust the mixer's type to match our requirements.
1004 if (SUCCEEDED(hr))
1005 hr = createOptimalVideoType(mixerType, &optimalType);
1006
1007 // Step 4. Check if the mixer will accept this media type.
1008 if (SUCCEEDED(hr))
1009 hr = m_mixer->SetOutputType(0, optimalType, MFT_SET_TYPE_TEST_ONLY);
1010
1011 // Step 5. Try to set the media type on ourselves.
1012 if (SUCCEEDED(hr))
1013 hr = setMediaType(optimalType);
1014
1015 // Step 6. Set output media type on mixer.
1016 if (SUCCEEDED(hr)) {
1017 hr = m_mixer->SetOutputType(0, optimalType, 0);
1018
1019 // If something went wrong, clear the media type.
1020 if (FAILED(hr))
1021 setMediaType(NULL);
1022 }
1023
1024 if (SUCCEEDED(hr))
1025 foundMediaType = true;
1026 }
1027
1028 qt_evr_safe_release(&mixerType);
1029 qt_evr_safe_release(&optimalType);
1030
1031 return hr;
1032}
1033
1034HRESULT EVRCustomPresenter::flush()
1035{
1036 m_prerolled = false;
1037
1038 // The scheduler might have samples that are waiting for
1039 // their presentation time. Tell the scheduler to flush.
1040
1041 // This call blocks until the scheduler threads discards all scheduled samples.
1042 m_scheduler.flush();
1043
1044 // Flush the frame-step queue.
1045 m_frameStep.samples.clear();
1046
1047 if (m_renderState == RenderStopped && m_videoSink) {
1048 // Repaint with black.
1049 presentSample(nullptr);
1050 }
1051
1052 return S_OK;
1053}
1054
1055HRESULT EVRCustomPresenter::processInputNotify()
1056{
1057 HRESULT hr = S_OK;
1058
1059 // Set the flag that says the mixer has a new sample.
1060 m_sampleNotify = true;
1061
1062 if (!m_mediaType) {
1063 // We don't have a valid media type yet.
1064 hr = MF_E_TRANSFORM_TYPE_NOT_SET;
1065 } else {
1066 // Try to process an output sample.
1067 processOutputLoop();
1068 }
1069 return hr;
1070}
1071
1072HRESULT EVRCustomPresenter::beginStreaming()
1073{
1074 HRESULT hr = S_OK;
1075
1076 // Start the scheduler thread.
1077 hr = m_scheduler.startScheduler(m_clock);
1078
1079 return hr;
1080}
1081
1082HRESULT EVRCustomPresenter::endStreaming()
1083{
1084 HRESULT hr = S_OK;
1085
1086 // Stop the scheduler thread.
1087 hr = m_scheduler.stopScheduler();
1088
1089 return hr;
1090}
1091
// Once the EOS flag is set and neither the mixer nor the scheduler holds
// outstanding samples, report EC_COMPLETE to the EVR.
// NOTE(review): original line 1113 was lost in extraction (numbering jumps
// 1112 -> 1114); presumably a cleanup call made after clearing the EOS flag
// — confirm upstream before relying on this listing.
1092HRESULT EVRCustomPresenter::checkEndOfStream()
1093{
1094 if (!m_endStreaming) {
1095 // The EVR did not send the MFVP_MESSAGE_ENDOFSTREAM message.
1096 return S_OK;
1097 }
1098
1099 if (m_sampleNotify) {
1100 // The mixer still has input.
1101 return S_OK;
1102 }
1103
1104 if (m_scheduler.areSamplesScheduled()) {
1105 // Samples are still scheduled for rendering.
1106 return S_OK;
1107 }
1108
1109 // Everything is complete. Now we can tell the EVR that we are done.
1110 notifyEvent(EC_COMPLETE, (LONG_PTR)S_OK, 0);
1111 m_endStreaming = false;
1112
1114 return S_OK;
1115}
1116
1117HRESULT EVRCustomPresenter::prepareFrameStep(DWORD steps)
1118{
1119 HRESULT hr = S_OK;
1120
1121 // Cache the step count.
1122 m_frameStep.steps += steps;
1123
1124 // Set the frame-step state.
1125 m_frameStep.state = FrameStepWaitingStart;
1126
1127 // If the clock is are already running, we can start frame-stepping now.
1128 // Otherwise, we will start when the clock starts.
1129 if (m_renderState == RenderStarted)
1130 hr = startFrameStep();
1131
1132 return hr;
1133}
1134
1135HRESULT EVRCustomPresenter::startFrameStep()
1136{
1137 if (m_frameStep.state == FrameStepWaitingStart) {
1138 // We have a frame-step request, and are waiting for the clock to start.
1139 // Set the state to "pending," which means we are waiting for samples.
1140 m_frameStep.state = FrameStepPending;
1141
1142 // If the frame-step queue already has samples, process them now.
1143 while (!m_frameStep.samples.isEmpty() && (m_frameStep.state == FrameStepPending)) {
1144 const ComPtr<IMFSample> sample = m_frameStep.samples.takeFirst();
1145
1146 const HRESULT hr = deliverFrameStepSample(sample.Get());
1147 if (FAILED(hr))
1148 return hr;
1149
1150 // We break from this loop when:
1151 // (a) the frame-step queue is empty, or
1152 // (b) the frame-step operation is complete.
1153 }
1154 } else if (m_frameStep.state == FrameStepNone) {
1155 // We are not frame stepping. Therefore, if the frame-step queue has samples,
1156 // we need to process them normally.
1157 while (!m_frameStep.samples.isEmpty()) {
1158 const ComPtr<IMFSample> sample = m_frameStep.samples.takeFirst();
1159
1160 const HRESULT hr = deliverSample(sample.Get());
1161 if (FAILED(hr))
1162 return hr;
1163 }
1164 }
1165
1166 return S_OK;
1167}
1168
1169HRESULT EVRCustomPresenter::completeFrameStep(const ComPtr<IMFSample> &sample)
1170{
1171 HRESULT hr = S_OK;
1172 MFTIME sampleTime = 0;
1173 MFTIME systemTime = 0;
1174
1175 // Update our state.
1176 m_frameStep.state = FrameStepComplete;
1177 m_frameStep.sampleNoRef = 0;
1178
1179 // Notify the EVR that the frame-step is complete.
1180 notifyEvent(EC_STEP_COMPLETE, FALSE, 0); // FALSE = completed (not cancelled)
1181
1182 // If we are scrubbing (rate == 0), also send the "scrub time" event.
1183 if (isScrubbing()) {
1184 // Get the time stamp from the sample.
1185 hr = sample->GetSampleTime(&sampleTime);
1186 if (FAILED(hr)) {
1187 // No time stamp. Use the current presentation time.
1188 if (m_clock)
1189 m_clock->GetCorrelatedTime(0, &sampleTime, &systemTime);
1190
1191 hr = S_OK; // (Not an error condition.)
1192 }
1193
1194 notifyEvent(EC_SCRUB_TIME, DWORD(sampleTime), DWORD(((sampleTime) >> 32) & 0xffffffff));
1195 }
1196 return hr;
1197}
1198
1199HRESULT EVRCustomPresenter::cancelFrameStep()
1200{
1201 FrameStepState oldState = m_frameStep.state;
1202
1203 m_frameStep.state = FrameStepNone;
1204 m_frameStep.steps = 0;
1205 m_frameStep.sampleNoRef = 0;
1206 // Don't clear the frame-step queue yet, because we might frame step again.
1207
1208 if (oldState > FrameStepNone && oldState < FrameStepComplete) {
1209 // We were in the middle of frame-stepping when it was cancelled.
1210 // Notify the EVR.
1211 notifyEvent(EC_STEP_COMPLETE, TRUE, 0); // TRUE = cancelled
1212 }
1213 return S_OK;
1214}
1215
1216HRESULT EVRCustomPresenter::createOptimalVideoType(IMFMediaType *proposedType, IMFMediaType **optimalType)
1217{
1218 HRESULT hr = S_OK;
1219
1220 RECT rcOutput;
1221 ZeroMemory(&rcOutput, sizeof(rcOutput));
1222
1223 MFVideoArea displayArea;
1224 ZeroMemory(&displayArea, sizeof(displayArea));
1225
1226 IMFMediaType *mtOptimal = NULL;
1227
1228 UINT64 size;
1229 int width;
1230 int height;
1231
1232 // Clone the proposed type.
1233
1234 hr = MFCreateMediaType(&mtOptimal);
1235 if (FAILED(hr))
1236 goto done;
1237
1238 hr = proposedType->CopyAllItems(mtOptimal);
1239 if (FAILED(hr))
1240 goto done;
1241
1242 // Modify the new type.
1243
1244 hr = proposedType->GetUINT64(MF_MT_FRAME_SIZE, &size);
1245 width = int(HI32(size));
1246 height = int(LO32(size));
1247
1248 if (m_cropRect.isValid()) {
1249 rcOutput.left = m_cropRect.x();
1250 rcOutput.top = m_cropRect.y();
1251 rcOutput.right = m_cropRect.x() + m_cropRect.width();
1252 rcOutput.bottom = m_cropRect.y() + m_cropRect.height();
1253
1254 m_sourceRect.left = float(m_cropRect.x()) / width;
1255 m_sourceRect.top = float(m_cropRect.y()) / height;
1256 m_sourceRect.right = float(m_cropRect.x() + m_cropRect.width()) / width;
1257 m_sourceRect.bottom = float(m_cropRect.y() + m_cropRect.height()) / height;
1258
1259 if (m_mixer)
1260 configureMixer(m_mixer.Get());
1261 } else {
1262 rcOutput.left = 0;
1263 rcOutput.top = 0;
1264 rcOutput.right = width;
1265 rcOutput.bottom = height;
1266 }
1267
1268 // Set the geometric aperture, and disable pan/scan.
1269 displayArea = qt_evr_makeMFArea(0, 0, rcOutput.right - rcOutput.left,
1270 rcOutput.bottom - rcOutput.top);
1271
1272 hr = mtOptimal->SetUINT32(MF_MT_PAN_SCAN_ENABLED, FALSE);
1273 if (FAILED(hr))
1274 goto done;
1275
1276 hr = mtOptimal->SetBlob(MF_MT_GEOMETRIC_APERTURE, reinterpret_cast<UINT8*>(&displayArea),
1277 sizeof(displayArea));
1278 if (FAILED(hr))
1279 goto done;
1280
1281 // Set the pan/scan aperture and the minimum display aperture. We don't care
1282 // about them per se, but the mixer will reject the type if these exceed the
1283 // frame dimentions.
1284 hr = mtOptimal->SetBlob(MF_MT_PAN_SCAN_APERTURE, reinterpret_cast<UINT8*>(&displayArea),
1285 sizeof(displayArea));
1286 if (FAILED(hr))
1287 goto done;
1288
1289 hr = mtOptimal->SetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE, reinterpret_cast<UINT8*>(&displayArea),
1290 sizeof(displayArea));
1291 if (FAILED(hr))
1292 goto done;
1293
1294 // Return the pointer to the caller.
1295 *optimalType = mtOptimal;
1296 (*optimalType)->AddRef();
1297
1298done:
1299 qt_evr_safe_release(&mtOptimal);
1300 return hr;
1301
1302}
1303
1304HRESULT EVRCustomPresenter::setMediaType(IMFMediaType *mediaType)
1305{
1306 // Note: mediaType can be NULL (to clear the type)
1307
1308 // Clearing the media type is allowed in any state (including shutdown).
1309 if (!mediaType) {
1311 m_mediaType.Reset();
1312 releaseResources();
1313 return S_OK;
1314 }
1315
1316 MFRatio fps = { 0, 0 };
1317 QList<ComPtr<IMFSample>> sampleQueue;
1318
1319 // Cannot set the media type after shutdown.
1320 HRESULT hr = checkShutdown();
1321 if (FAILED(hr))
1322 goto done;
1323
1324 // Check if the new type is actually different.
1325 // Note: This function safely handles NULL input parameters.
1326 if (qt_evr_areMediaTypesEqual(m_mediaType.Get(), mediaType))
1327 goto done; // Nothing more to do.
1328
1329 // We're really changing the type. First get rid of the old type.
1330 m_mediaType.Reset();
1331 releaseResources();
1332
1333 // Initialize the presenter engine with the new media type.
1334 // The presenter engine allocates the samples.
1335
1336 hr = m_presentEngine->createVideoSamples(mediaType, sampleQueue, m_cropRect.size());
1337 if (FAILED(hr))
1338 goto done;
1339
1340 // Mark each sample with our token counter. If this batch of samples becomes
1341 // invalid, we increment the counter, so that we know they should be discarded.
1342 for (auto sample : std::as_const(sampleQueue)) {
1343 hr = sample->SetUINT32(MFSamplePresenter_SampleCounter, m_tokenCounter);
1344 if (FAILED(hr))
1345 goto done;
1346 }
1347
1348 // Add the samples to the sample pool.
1349 hr = m_samplePool.initialize(std::move(sampleQueue));
1350 if (FAILED(hr))
1351 goto done;
1352
1353 // Set the frame rate on the scheduler.
1354 if (SUCCEEDED(qt_evr_getFrameRate(mediaType, &fps)) && (fps.Numerator != 0) && (fps.Denominator != 0)) {
1355 m_scheduler.setFrameRate(fps);
1356 } else {
1357 // NOTE: The mixer's proposed type might not have a frame rate, in which case
1358 // we'll use an arbitrary default. (Although it's unlikely the video source
1359 // does not have a frame rate.)
1360 m_scheduler.setFrameRate(g_DefaultFrameRate);
1361 }
1362
1363 // Store the media type.
1364 m_mediaType = mediaType;
1365 m_mediaType->AddRef();
1366
1368
1369done:
1370 if (FAILED(hr))
1371 releaseResources();
1372 return hr;
1373}
1374
1375HRESULT EVRCustomPresenter::isMediaTypeSupported(IMFMediaType *proposed)
1376{
1377 D3DFORMAT d3dFormat = D3DFMT_UNKNOWN;
1378 BOOL compressed = FALSE;
1379 MFVideoInterlaceMode interlaceMode = MFVideoInterlace_Unknown;
1380 MFVideoArea videoCropArea;
1381 UINT32 width = 0, height = 0;
1382
1383 // Validate the format.
1384 HRESULT hr = qt_evr_getFourCC(proposed, reinterpret_cast<DWORD*>(&d3dFormat));
1385 if (FAILED(hr))
1386 return hr;
1387
1388 QVideoFrameFormat::PixelFormat pixelFormat = pixelFormatFromMediaType(proposed);
1389 if (pixelFormat == QVideoFrameFormat::Format_Invalid)
1390 return MF_E_INVALIDMEDIATYPE;
1391
1392 // Reject compressed media types.
1393 hr = proposed->IsCompressedFormat(&compressed);
1394 if (FAILED(hr))
1395 return hr;
1396
1397 if (compressed)
1398 return MF_E_INVALIDMEDIATYPE;
1399
1400 // The D3DPresentEngine checks whether surfaces can be created using this format
1401 hr = m_presentEngine->checkFormat(d3dFormat);
1402 if (FAILED(hr))
1403 return hr;
1404
1405 // Reject interlaced formats.
1406 hr = proposed->GetUINT32(MF_MT_INTERLACE_MODE, reinterpret_cast<UINT32*>(&interlaceMode));
1407 if (FAILED(hr))
1408 return hr;
1409
1410 if (interlaceMode != MFVideoInterlace_Progressive)
1411 return MF_E_INVALIDMEDIATYPE;
1412
1413 hr = MFGetAttributeSize(proposed, MF_MT_FRAME_SIZE, &width, &height);
1414 if (FAILED(hr))
1415 return hr;
1416
1417 // Validate the various apertures (cropping regions) against the frame size.
1418 // Any of these apertures may be unspecified in the media type, in which case
1419 // we ignore it. We just want to reject invalid apertures.
1420
1421 if (SUCCEEDED(proposed->GetBlob(MF_MT_PAN_SCAN_APERTURE,
1422 reinterpret_cast<UINT8*>(&videoCropArea),
1423 sizeof(videoCropArea), nullptr))) {
1424 hr = qt_evr_validateVideoArea(videoCropArea, width, height);
1425 }
1426 if (SUCCEEDED(proposed->GetBlob(MF_MT_GEOMETRIC_APERTURE,
1427 reinterpret_cast<UINT8*>(&videoCropArea),
1428 sizeof(videoCropArea), nullptr))) {
1429 hr = qt_evr_validateVideoArea(videoCropArea, width, height);
1430 }
1431 if (SUCCEEDED(proposed->GetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE,
1432 reinterpret_cast<UINT8*>(&videoCropArea),
1433 sizeof(videoCropArea), nullptr))) {
1434 hr = qt_evr_validateVideoArea(videoCropArea, width, height);
1435 }
1436 return hr;
1437}
1438
1439void EVRCustomPresenter::processOutputLoop()
1440{
1441 HRESULT hr = S_OK;
1442
1443 // Process as many samples as possible.
1444 while (hr == S_OK) {
1445 // If the mixer doesn't have a new input sample, break from the loop.
1446 if (!m_sampleNotify) {
1447 hr = MF_E_TRANSFORM_NEED_MORE_INPUT;
1448 break;
1449 }
1450
1451 // Try to process a sample.
1452 hr = processOutput();
1453
1454 // NOTE: ProcessOutput can return S_FALSE to indicate it did not
1455 // process a sample. If so, break out of the loop.
1456 }
1457
1458 if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
1459 // The mixer has run out of input data. Check for end-of-stream.
1460 checkEndOfStream();
1461 }
1462}
1463
// Pull one composed sample from the mixer and hand it to the scheduler.
// Returns S_OK when a sample was processed, S_FALSE when no sample could be
// processed right now (no free sample in the pool, or already prerolled
// while the clock is not running), or a failure code from the mixer.
HRESULT EVRCustomPresenter::processOutput()
{
    // If the clock is not running, we present the first sample,
    // and then don't present any more until the clock starts.
    if ((m_renderState != RenderStarted) && m_prerolled)
        return S_FALSE;

    // Make sure we have a pointer to the mixer.
    if (!m_mixer)
        return MF_E_INVALIDREQUEST;

    // Try to get a free sample from the video sample pool.
    ComPtr<IMFSample> sample = m_samplePool.takeSample();
    if (!sample)
        return S_FALSE; // No free samples. Try again when a sample is released.

    // From now on, we have a valid video sample pointer, where the mixer will
    // write the video data.

    LONGLONG mixerStartTime = 0, mixerEndTime = 0;
    MFTIME systemTime = 0;

    if (m_clock) {
        // Latency: Record the starting time for ProcessOutput.
        m_clock->GetCorrelatedTime(0, &mixerStartTime, &systemTime);
    }

    // Now we are ready to get an output sample from the mixer.
    DWORD status = 0;
    MFT_OUTPUT_DATA_BUFFER dataBuffer = {};
    dataBuffer.pSample = sample.Get();
    HRESULT hr = m_mixer->ProcessOutput(0, 1, &dataBuffer, &status);
    // Important: Release any events returned from the ProcessOutput method.
    qt_evr_safe_release(&dataBuffer.pEvents);

    if (FAILED(hr)) {
        // Return the sample to the pool.
        m_samplePool.returnSample(sample);

        // Handle some known error codes from ProcessOutput.
        if (hr == MF_E_TRANSFORM_TYPE_NOT_SET) {
            // The mixer's format is not set. Negotiate a new format.
            hr = renegotiateMediaType();
        } else if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
            // There was a dynamic media type change. Clear our media type.
            setMediaType(NULL);
        } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
            // The mixer needs more input.
            // We have to wait for the mixer to get more input.
            m_sampleNotify = false;
        }

        return hr;
    }

    // We got an output sample from the mixer.
    if (m_clock) {
        // Latency: Record the ending time for the ProcessOutput operation,
        // and notify the EVR of the latency.

        m_clock->GetCorrelatedTime(0, &mixerEndTime, &systemTime);

        LONGLONG latencyTime = mixerEndTime - mixerStartTime;
        notifyEvent(EC_PROCESSING_LATENCY, reinterpret_cast<LONG_PTR>(&latencyTime), 0);
    }

    // Set up notification for when the sample is released.
    hr = trackSample(sample);
    if (FAILED(hr))
        return hr;

    // Schedule the sample.
    if (m_frameStep.state == FrameStepNone)
        hr = deliverSample(sample);
    else // We are frame-stepping
        hr = deliverFrameStepSample(sample);

    if (FAILED(hr))
        return hr;

    m_prerolled = true; // We have presented at least one sample now.
    return S_OK;
}
1547
1548HRESULT EVRCustomPresenter::deliverSample(const ComPtr<IMFSample> &sample)
1549{
1550 // If we are not actively playing, OR we are scrubbing (rate = 0),
1551 // then we need to present the sample immediately. Otherwise,
1552 // schedule it normally.
1553
1554 bool presentNow = ((m_renderState != RenderStarted) || isScrubbing());
1555
1556 HRESULT hr = m_scheduler.scheduleSample(sample, presentNow);
1557
1558 if (FAILED(hr)) {
1559 // Notify the EVR that we have failed during streaming. The EVR will notify the
1560 // pipeline.
1561
1562 notifyEvent(EC_ERRORABORT, hr, 0);
1563 }
1564
1565 return hr;
1566}
1567
// Handle a mixer output sample while a frame-step (or scrub) is in progress.
// Depending on the frame-step state, the sample is discarded, queued for a
// later step, or delivered as the stepped frame. When a sample is delivered,
// its IUnknown identity pointer is cached (without AddRef) so onSampleFree()
// can recognize it and complete the frame step.
HRESULT EVRCustomPresenter::deliverFrameStepSample(const ComPtr<IMFSample> &sample)
{
    HRESULT hr = S_OK;
    IUnknown *unk = NULL;

    // For rate 0, discard any sample that ends earlier than the clock time.
    if (isScrubbing() && m_clock && qt_evr_isSampleTimePassed(m_clock.Get(), sample.Get())) {
        // Discard this sample.
    } else if (m_frameStep.state >= FrameStepScheduled) {
        // A frame was already submitted. Put this sample on the frame-step queue,
        // in case we are asked to step to the next frame. If frame-stepping is
        // cancelled, this sample will be processed normally.
        m_frameStep.samples.append(sample);
    } else {
        // We're ready to frame-step.

        // Decrement the number of steps.
        if (m_frameStep.steps > 0)
            m_frameStep.steps--;

        if (m_frameStep.steps > 0) {
            // This is not the last step. Discard this sample.
        } else if (m_frameStep.state == FrameStepWaitingStart) {
            // This is the right frame, but the clock hasn't started yet. Put the
            // sample on the frame-step queue. When the clock starts, the sample
            // will be processed.
            m_frameStep.samples.append(sample);
        } else {
            // This is the right frame *and* the clock has started. Deliver this sample.
            hr = deliverSample(sample);
            if (FAILED(hr))
                goto done;

            // Query for IUnknown so that we can identify the sample later.
            // Per COM rules, an object always returns the same pointer when QI'ed for IUnknown.
            hr = sample->QueryInterface(IID_PPV_ARGS(&unk));
            if (FAILED(hr))
                goto done;

            m_frameStep.sampleNoRef = reinterpret_cast<DWORD_PTR>(unk); // No add-ref.

            // NOTE: We do not AddRef the IUnknown pointer, because that would prevent the
            // sample from invoking the OnSampleFree callback after the sample is presented.
            // We use this IUnknown pointer purely to identify the sample later; we never
            // attempt to dereference the pointer.

            m_frameStep.state = FrameStepScheduled;
        }
    }
done:
    // Releases the reference taken by QueryInterface above (safe on NULL).
    qt_evr_safe_release(&unk);
    return hr;
}
1621
1622HRESULT EVRCustomPresenter::trackSample(const ComPtr<IMFSample> &sample)
1623{
1624 IMFTrackedSample *tracked = NULL;
1625
1626 HRESULT hr = sample->QueryInterface(IID_PPV_ARGS(&tracked));
1627
1628 if (SUCCEEDED(hr))
1629 hr = tracked->SetAllocator(&m_sampleFreeCB, NULL);
1630
1631 qt_evr_safe_release(&tracked);
1632 return hr;
1633}
1634
1635void EVRCustomPresenter::releaseResources()
1636{
1637 // Increment the token counter to indicate that all existing video samples
1638 // are "stale." As these samples get released, we'll dispose of them.
1639 //
1640 // Note: The token counter is required because the samples are shared
1641 // between more than one thread, and they are returned to the presenter
1642 // through an asynchronous callback (onSampleFree). Without the token, we
1643 // might accidentally re-use a stale sample after the ReleaseResources
1644 // method returns.
1645
1646 m_tokenCounter++;
1647
1648 flush();
1649
1650 m_samplePool.clear();
1651
1652 m_presentEngine->releaseResources();
1653}
1654
// Async callback invoked (via m_sampleFreeCB) when the presenter engine
// releases a video sample. If the freed sample is the one submitted for a
// frame step, the step is completed. The sample is then returned to the pool
// — unless its token shows it belongs to a stale batch — and output
// processing resumes. On any failure, EC_ERRORABORT is sent to the EVR.
HRESULT EVRCustomPresenter::onSampleFree(IMFAsyncResult *result)
{
    IUnknown *object = NULL;
    IMFSample *sample = NULL;
    IUnknown *unk = NULL;
    UINT32 token;

    // Get the sample from the async result object.
    HRESULT hr = result->GetObject(&object);
    if (FAILED(hr))
        goto done;

    hr = object->QueryInterface(IID_PPV_ARGS(&sample));
    if (FAILED(hr))
        goto done;

    // If this sample was submitted for a frame-step, the frame step operation
    // is complete.

    if (m_frameStep.state == FrameStepScheduled) {
        // Query the sample for IUnknown and compare it to our cached value.
        hr = sample->QueryInterface(IID_PPV_ARGS(&unk));
        if (FAILED(hr))
            goto done;

        // m_frameStep.sampleNoRef is a non-owning identity value set in
        // deliverFrameStepSample(); compare only, never dereference.
        if (m_frameStep.sampleNoRef == reinterpret_cast<DWORD_PTR>(unk)) {
            // Notify the EVR.
            hr = completeFrameStep(sample);
            if (FAILED(hr))
                goto done;
        }

        // Note: Although object is also an IUnknown pointer, it is not
        // guaranteed to be the exact pointer value returned through
        // QueryInterface. Therefore, the second QueryInterface call is
        // required.
    }

    m_mutex.lock();

    // A token mismatch means the sample belongs to a stale batch (see
    // releaseResources()); in that case we simply let it be released.
    token = MFGetAttributeUINT32(sample, MFSamplePresenter_SampleCounter, (UINT32)-1);

    if (token == m_tokenCounter) {
        // Return the sample to the sample pool.
        m_samplePool.returnSample(sample);
        // A free sample is available. Process more data if possible.
        processOutputLoop();
    }

    m_mutex.unlock();

done:
    if (FAILED(hr))
        notifyEvent(EC_ERRORABORT, hr, 0);
    qt_evr_safe_release(&object);
    qt_evr_safe_release(&sample);
    qt_evr_safe_release(&unk);
    return hr;
}
1714
1715float EVRCustomPresenter::getMaxRate(bool thin)
1716{
1717 // Non-thinned:
1718 // If we have a valid frame rate and a monitor refresh rate, the maximum
1719 // playback rate is equal to the refresh rate. Otherwise, the maximum rate
1720 // is unbounded (FLT_MAX).
1721
1722 // Thinned: The maximum rate is unbounded.
1723
1724 float maxRate = FLT_MAX;
1725 MFRatio fps = { 0, 0 };
1726 UINT monitorRateHz = 0;
1727
1728 if (!thin && m_mediaType) {
1729 qt_evr_getFrameRate(m_mediaType.Get(), &fps);
1730 monitorRateHz = m_presentEngine->refreshRate();
1731
1732 if (fps.Denominator && fps.Numerator && monitorRateHz) {
1733 // Max Rate = Refresh Rate / Frame Rate
1734 maxRate = (float)MulDiv(monitorRateHz, fps.Denominator, fps.Numerator);
1735 }
1736 }
1737
1738 return maxRate;
1739}
1740
1741bool EVRCustomPresenter::event(QEvent *e)
1742{
1743 switch (int(e->type())) {
1744 case StartSurface:
1746 return true;
1747 case StopSurface:
1749 return true;
1750 case PresentSample:
1751 presentSample(static_cast<PresentSampleEvent *>(e)->sample());
1752 return true;
1753 default:
1754 break;
1755 }
1756 return QObject::event(e);
1757}
1758
1760{
1761 if (thread() != QThread::currentThread()) {
1762 QCoreApplication::postEvent(this, new QEvent(QEvent::Type(StartSurface)));
1763 return;
1764 }
1765}
1766
1768{
1769 if (thread() != QThread::currentThread()) {
1770 QCoreApplication::postEvent(this, new QEvent(QEvent::Type(StopSurface)));
1771 return;
1772 }
1773}
1774
1775void EVRCustomPresenter::presentSample(const ComPtr<IMFSample> &sample)
1776{
1777 if (thread() != QThread::currentThread()) {
1778 QCoreApplication::postEvent(this, new PresentSampleEvent(sample));
1779 return;
1780 }
1781
1782 if (!m_videoSink || !m_presentEngine->videoSurfaceFormat().isValid())
1783 return;
1784
1785 QtVideo::Rotation rotation = [&] {
1786 ComPtr<IMFMediaType> inputStreamType;
1787 if (SUCCEEDED(m_mixer->GetInputCurrentType(0, inputStreamType.GetAddressOf()))) {
1788 auto rotation = static_cast<MFVideoRotationFormat>(
1789 MFGetAttributeUINT32(inputStreamType.Get(), MF_MT_VIDEO_ROTATION, 0));
1790 switch (rotation) {
1791 case MFVideoRotationFormat_90:
1792 return QtVideo::Rotation::Clockwise90;
1793 case MFVideoRotationFormat_180:
1794 return QtVideo::Rotation::Clockwise180;
1795 case MFVideoRotationFormat_270:
1796 return QtVideo::Rotation::Clockwise270;
1797 case MFVideoRotationFormat_0:
1798 default:
1799 return QtVideo::Rotation::None;
1800 }
1801 }
1802 return QtVideo::Rotation::None;
1803 }();
1804
1805 QVideoFrame frame = m_presentEngine->makeVideoFrame(sample, rotation);
1806
1807 // Since start/end times are related to a position when the clock is started,
1808 // to have times from the beginning, need to adjust it by adding seeked position.
1809 if (m_positionOffset) {
1810 if (frame.startTime())
1811 frame.setStartTime(frame.startTime() + m_positionOffset);
1812 if (frame.endTime())
1813 frame.setEndTime(frame.endTime() + m_positionOffset);
1814 }
1815
1816 m_videoSink->platformVideoSink()->setVideoFrame(frame);
1817}
1818
// Record the current seek position so presentSample() can convert
// clock-relative frame times into stream-absolute ones.
// The factor of 1000 scales the incoming position — presumably milliseconds —
// to the microsecond scale used by QVideoFrame start/end times (TODO confirm
// the caller's units).
void EVRCustomPresenter::positionChanged(qint64 position)
{
    m_positionOffset = position * 1000;
}
1823
1824HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect &sourceRect)
1825{
1826 if (!mixer)
1827 return E_POINTER;
1828
1829 IMFAttributes *attributes = NULL;
1830
1831 HRESULT hr = mixer->GetAttributes(&attributes);
1832 if (SUCCEEDED(hr)) {
1833 hr = attributes->SetBlob(VIDEO_ZOOM_RECT, reinterpret_cast<const UINT8*>(&sourceRect),
1834 sizeof(sourceRect));
1835 attributes->Release();
1836 }
1837 return hr;
1838}
1839
1841{
1842 GUID majorType;
1843 if (FAILED(type->GetMajorType(&majorType)))
1844 return QVideoFrameFormat::Format_Invalid;
1845 if (majorType != MFMediaType_Video)
1846 return QVideoFrameFormat::Format_Invalid;
1847
1848 GUID subtype;
1849 if (FAILED(type->GetGUID(MF_MT_SUBTYPE, &subtype)))
1850 return QVideoFrameFormat::Format_Invalid;
1851
1852 return QWindowsMultimediaUtils::pixelFormatFromMediaSubtype(subtype);
1853}
1854
1855QT_END_NAMESPACE
STDMETHODIMP GetDeviceID(IID *deviceID) override
STDMETHODIMP GetSlowestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate) override
STDMETHODIMP OnClockStop(MFTIME systemTime) override
STDMETHODIMP ReleaseServicePointers() override
STDMETHODIMP QueryInterface(REFIID riid, void **ppv) override
STDMETHODIMP OnClockRestart(MFTIME systemTime) override
STDMETHODIMP InitServicePointers(IMFTopologyServiceLookup *lookup) override
STDMETHODIMP GetService(REFGUID guidService, REFIID riid, LPVOID *ppvObject) override
void setCropRect(QRect cropRect)
STDMETHODIMP IsRateSupported(BOOL thin, float rate, float *nearestSupportedRate) override
STDMETHODIMP OnClockSetRate(MFTIME systemTime, float rate) override
STDMETHODIMP GetCurrentMediaType(IMFVideoMediaType **mediaType) override
void setSink(QVideoSink *sink)
STDMETHODIMP GetFastestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate) override
void presentSample(const ComPtr< IMFSample > &sample)
STDMETHODIMP OnClockStart(MFTIME systemTime, LONGLONG clockStartOffset) override
STDMETHODIMP OnClockPause(MFTIME systemTime) override
STDMETHODIMP ProcessMessage(MFVP_MESSAGE_TYPE message, ULONG_PTR param) override
EVRCustomPresenter(QVideoSink *sink=0)
ComPtr< IMFSample > sample() const
PresentSampleEvent(const ComPtr< IMFSample > &sample)
\inmodule QtCore
Definition qmutex.h:346
\inmodule QtCore
Definition qmutex.h:342
ComPtr< IMFSample > takeSample()
void returnSample(const ComPtr< IMFSample > &sample)
HRESULT initialize(QList< ComPtr< IMFSample > > &&samples)
HRESULT stopScheduler()
Scheduler(EVRCustomPresenter *presenter)
HRESULT startScheduler(ComPtr< IMFClock > clock)
void setFrameRate(const MFRatio &fps)
HRESULT processSamplesInQueue(LONG *nextSleep)
HRESULT scheduleSample(const ComPtr< IMFSample > &sample, bool presentNow)
static LONG MFTimeToMsec(const LONGLONG &time)
static QVideoFrameFormat::PixelFormat pixelFormatFromMediaType(IMFMediaType *type)
static HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect &nrcSource)
#define QMM_PRESENTATION_CURRENT_POSITION
static const DWORD SCHEDULER_TIMEOUT
bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter)
static const LONG ONE_MSEC
static const MFTIME ONE_SECOND
static const MFRatio g_DefaultFrameRate
#define qCWarning(category,...)
#define qCDebug(category,...)
#define Q_STATIC_LOGGING_CATEGORY(name,...)