8#include <private/qwindowsmultimediautils_p.h>
9#include <private/qplatformvideosink_p.h>
13#include <QtCore/qmutex.h>
14#include <QtCore/qvarlengtharray.h>
15#include <QtCore/qrect.h>
17#include <qcoreapplication.h>
19#include <qloggingcategory.h>
35#define QMM_PRESENTATION_CURRENT_POSITION 0x7fffffffffffffff
// MFTimeToMsec (fragment): converts a Media Foundation timestamp (100-ns
// units) to milliseconds. ONE_SECOND / ONE_MSEC is the number of 100-ns
// ticks per millisecond (constants declared elsewhere in this file).
43 return (LONG)(time / (ONE_SECOND / ONE_MSEC));
48 if (!evr || !presenter)
51 HRESULT result = E_FAIL;
53 IMFVideoRenderer *renderer = NULL;
54 if (SUCCEEDED(evr->QueryInterface(IID_PPV_ARGS(&renderer)))) {
55 result = renderer->InitializeRenderer(NULL, presenter);
59 return result == S_OK;
// ---------------------------------------------------------------------------
// Scheduler (fragmented extraction).
// NOTE(review): original file line numbers are fused into the text below and
// the lines between them are missing; code is left byte-identical, comments
// only. Fragments cover: a sample member, the constructor init-list,
// setFrameRate, startScheduler, stopScheduler, flush, areSamplesScheduled,
// scheduleSample and processSamplesInQueue.
// ---------------------------------------------------------------------------
73 const ComPtr<IMFSample> m_sample;
77 : m_presenter(presenter)
79 , m_playbackRate(1.0f)
86 m_scheduledSamples.clear();
// setFrameRate: derive a quarter-frame duration (in 100-ns units) from the
// average time per frame; used by isSampleReadyToPresent as the late/early
// tolerance window.
91 UINT64 AvgTimePerFrame = 0;
94 MFFrameRateToAverageTimePerFrame(fps.Numerator, fps.Denominator, &AvgTimePerFrame);
97 m_perFrame_1_4th = AvgTimePerFrame / 4;
// startScheduler: presumably a no-op / early return if the worker thread
// already exists — TODO confirm against the missing lines.
102 if (m_schedulerThread)
// Auto-reset events (bManualReset = FALSE): one signals thread readiness,
// one signals flush completion.
116 m_threadReadyEvent = EventHandle{ CreateEvent(NULL, FALSE, FALSE, NULL) };
117 if (!m_threadReadyEvent) {
118 hr = HRESULT_FROM_WIN32(GetLastError());
123 m_flushEvent = EventHandle{ CreateEvent(NULL, FALSE, FALSE, NULL) };
125 hr = HRESULT_FROM_WIN32(GetLastError());
// Launch the worker thread; its id (dwID) is used later for
// PostThreadMessage-based control (Schedule/Flush/Terminate).
130 m_schedulerThread = ThreadHandle{ CreateThread(NULL, 0, schedulerThreadProc, (LPVOID)
this, 0, &dwID) };
131 if (!m_schedulerThread) {
132 hr = HRESULT_FROM_WIN32(GetLastError());
// Wait until the thread signals readiness OR the thread handle itself
// signals (i.e. the thread exited early) — the latter is treated as failure.
137 hObjects[0] = m_threadReadyEvent.get();
138 hObjects[1] = m_schedulerThread.get();
139 dwWait = WaitForMultipleObjects(2, hObjects, FALSE, INFINITE);
140 if (WAIT_OBJECT_0 != dwWait) {
142 m_schedulerThread = {};
152 m_threadReadyEvent = {};
// stopScheduler: nothing to do when no worker thread exists.
159 if (!m_schedulerThread)
// Ask the worker to exit and join it before releasing the handle.
163 PostThreadMessage(m_threadID, Terminate, 0, 0);
166 WaitForSingleObject(m_schedulerThread.get(), INFINITE);
169 m_schedulerThread = {};
174 m_scheduledSamples.clear();
// flush: post a Flush message and wait (bounded by SCHEDULER_TIMEOUT) for
// either the flush-done event or thread termination.
185 if (m_schedulerThread) {
187 PostThreadMessage(m_threadID, Flush, 0 , 0);
191 HANDLE objects[] = { m_flushEvent.get(), m_schedulerThread.get() };
193 WaitForMultipleObjects(ARRAYSIZE(objects), objects, FALSE, SCHEDULER_TIMEOUT);
202 return m_scheduledSamples.count() > 0;
// scheduleSample: fails when the scheduler was never started, or when the
// worker thread has already died.
207 if (!m_schedulerThread)
208 return MF_E_NOT_INITIALIZED;
211 DWORD dwExitCode = 0;
213 GetExitCodeThread(m_schedulerThread.get(), &dwExitCode);
214 if (dwExitCode != STILL_ACTIVE)
// Present immediately when asked to, or when there is no presentation clock.
217 if (presentNow || !m_clock) {
218 m_presenter->presentSample(sample);
// Forward playback only: drop samples whose presentation time has already
// passed instead of queueing them.
220 if (m_playbackRate > 0.0f && qt_evr_isSampleTimePassed(m_clock.Get(), sample.Get())) {
221 qCDebug(qLcEvrCustomPresenter) <<
"Discard the sample, it came too late";
// Otherwise enqueue and nudge the worker thread.
227 m_scheduledSamples.enqueue(sample);
231 PostThreadMessage(m_threadID, Schedule, 0, 0);
// processSamplesInQueue: take ownership of the queue via swap, present every
// sample that is ready, and put a not-yet-ready sample back at the front.
242 QQueue<ComPtr<IMFSample>> scheduledSamples;
245 m_scheduledSamples.swap(scheduledSamples);
249 while (!scheduledSamples.isEmpty()) {
250 ComPtr<IMFSample> sample = scheduledSamples.dequeue();
255 if (isSampleReadyToPresent(sample.Get(), &wait)) {
256 m_presenter->presentSample(sample.Get());
262 scheduledSamples.prepend(sample);
// Re-merge anything enqueued concurrently, then restore the member queue.
268 scheduledSamples.append(std::move(m_scheduledSamples));
269 m_scheduledSamples.swap(scheduledSamples);
// Decide whether `sample` should be presented now, or how long to sleep
// before re-checking (via *pNextSleep, in milliseconds).
// NOTE(review): fragmented extraction — original line numbers fused into the
// text; intervening lines (early-outs, final return) are missing.
282bool Scheduler::isSampleReadyToPresent(IMFSample *sample, LONG *pNextSleep)
const
288 MFTIME hnsPresentationTime = 0;
289 MFTIME hnsTimeNow = 0;
290 MFTIME hnsSystemTime = 0;
// Sample timestamp and current presentation-clock time, both in 100-ns units.
294 HRESULT hr = sample->GetSampleTime(&hnsPresentationTime);
299 hr = m_clock->GetCorrelatedTime(0, &hnsTimeNow, &hnsSystemTime);
// Time remaining until the sample's presentation time; negative = late.
303 MFTIME hnsDelta = hnsPresentationTime - hnsTimeNow;
304 if (m_playbackRate < 0) {
// Reverse playback: the clock runs backward, so negate the delta.
307 hnsDelta = - hnsDelta;
// More than a quarter-frame late: (presumably) present immediately —
// the branch body is not visible here.
310 if (hnsDelta < - m_perFrame_1_4th) {
313 }
else if (hnsDelta > (3 * m_perFrame_1_4th)) {
// More than three quarter-frames early: sleep until roughly
// three-quarters of a frame before the presentation time.
315 *pNextSleep = MFTimeToMsec(hnsDelta - (3 * m_perFrame_1_4th));
// Scale the sleep by the playback rate: the presentation clock runs at
// m_playbackRate while the sleep uses wall-clock time.
319 if (m_playbackRate != 0)
320 *pNextSleep = (LONG)(*pNextSleep / qFabs(m_playbackRate));
// Ready only if the computed sleep rounded down to zero.
321 return *pNextSleep == 0;
// Worker-thread entry points (fragmented extraction; line numbers fused in).
// The static thunk forwards to the member function.
328DWORD WINAPI Scheduler::schedulerThreadProc(LPVOID parameter)
330 Scheduler* scheduler =
reinterpret_cast<Scheduler*>(parameter);
333 return scheduler->schedulerThreadProcPrivate();
// Message-pump loop: waits for posted thread messages (Schedule / Flush /
// Terminate) or for the per-sample sleep timeout computed by
// processSamplesInQueue.
336DWORD
Scheduler::schedulerThreadProcPrivate()
340 LONG wait = INFINITE;
341 bool exitThread =
false;
// Force creation of this thread's message queue before signalling readiness,
// so PostThreadMessage from other threads cannot be lost.
345 PeekMessage(&msg, NULL, WM_USER, WM_USER, PM_NOREMOVE);
348 SetEvent(m_threadReadyEvent.get());
350 while (!exitThread) {
// Wake on a posted message, or after `wait` ms to re-check the queue.
352 DWORD result = MsgWaitForMultipleObjects(0, NULL, FALSE, wait, QS_POSTMESSAGE);
354 if (result == WAIT_TIMEOUT) {
356 hr = processSamplesInQueue(&wait);
// Drain all pending thread messages.
361 while (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) {
362 bool processSamples =
true;
364 switch (msg.message) {
// (Flush case, presumably:) drop everything queued and signal completion.
371 m_scheduledSamples.clear();
374 SetEvent(m_flushEvent.get());
378 if (processSamples) {
379 hr = processSamplesInQueue(&wait);
// Only keep processing messages eagerly while a finite sleep is pending.
382 processSamples = (wait != (LONG)INFINITE);
// Thread exit code: 0 on success, 1 on failure.
390 return (SUCCEEDED(hr) ? 0 : 1);
// ---------------------------------------------------------------------------
// SamplePool (fragmented extraction; line numbers fused into the text).
// A simple pool of video samples: initialize() seeds it, takeSample() /
// returnSample() check samples out and in, clear() tears it down.
// ---------------------------------------------------------------------------
395 : m_initialized(
false)
// takeSample: invalid before initialize(); warns and (presumably) returns
// a null sample — the return line is not visible here.
408 if (!m_initialized) {
409 qCWarning(qLcEvrCustomPresenter) <<
"SamplePool is not initialized yet";
413 if (m_videoSampleQueue.isEmpty()) {
414 qCDebug(qLcEvrCustomPresenter) <<
"SamplePool is empty";
// Hand out the oldest pooled sample.
424 return m_videoSampleQueue.takeFirst();
// returnSample: asserts (debug) and warns (release) if used uninitialized.
431 Q_ASSERT(m_initialized);
432 if (!m_initialized) {
433 qCWarning(qLcEvrCustomPresenter) <<
"SamplePool is not initialized yet";
437 m_videoSampleQueue.append(sample);
// initialize: rejects double-initialization, then takes ownership of the
// provided samples by move.
445 return MF_E_INVALIDREQUEST;
448 m_videoSampleQueue.append(std::move(samples));
450 m_initialized =
true;
// clear: drop all samples and reset to the uninitialized state.
459 m_videoSampleQueue.clear();
460 m_initialized =
false;
// ---------------------------------------------------------------------------
// EVRCustomPresenter: constructor/destructor, IUnknown, IMFGetService,
// IMFVideoDeviceID and IMFTopologyServiceLookupClient (fragmented
// extraction; line numbers fused into the text).
// ---------------------------------------------------------------------------
473 , m_sampleNotify(
false)
475 , m_endStreaming(
false)
476 , m_playbackRate(1.0f)
480 , m_canRenderToSurface(
false)
// Default source rectangle: the full normalized frame (0,0)-(1,1).
484 m_sourceRect.top = 0;
485 m_sourceRect.left = 0;
486 m_sourceRect.bottom = 1;
487 m_sourceRect.right = 1;
// Destructor: stop the scheduler thread, release pooled samples, and delete
// the owned present engine (raw owning pointer).
495 m_scheduler.stopScheduler();
496 m_samplePool.clear();
498 delete m_presentEngine;
// QueryInterface: standard COM identity dance. Each supported interface is
// returned through the matching static_cast so the correct vtable slice is
// handed out; IUnknown goes through IMFGetService to pick one unambiguous
// base. The trailing AddRef/return lines are not visible in this extraction.
505 if (riid == IID_IMFGetService) {
506 *ppvObject =
static_cast<IMFGetService*>(
this);
507 }
else if (riid == IID_IMFTopologyServiceLookupClient) {
508 *ppvObject =
static_cast<IMFTopologyServiceLookupClient*>(
this);
509 }
else if (riid == IID_IMFVideoDeviceID) {
510 *ppvObject =
static_cast<IMFVideoDeviceID*>(
this);
511 }
else if (riid == IID_IMFVideoPresenter) {
512 *ppvObject =
static_cast<IMFVideoPresenter*>(
this);
513 }
else if (riid == IID_IMFRateSupport) {
514 *ppvObject =
static_cast<IMFRateSupport*>(
this);
515 }
else if (riid == IID_IUnknown) {
516 *ppvObject =
static_cast<IUnknown*>(
static_cast<IMFGetService*>(
this));
517 }
else if (riid == IID_IMFClockStateSink) {
518 *ppvObject =
static_cast<IMFClockStateSink*>(
this);
521 return E_NOINTERFACE;
// Thread-safe COM reference counting.
529 return InterlockedIncrement(&m_refCount);
534 ULONG uCount = InterlockedDecrement(&m_refCount);
// GetService: only the video-render service is supported; try the present
// engine first, then fall back to this object's own interfaces.
548 if (guidService != MR_VIDEO_RENDER_SERVICE)
549 return MF_E_UNSUPPORTED_SERVICE;
552 hr = m_presentEngine->getService(guidService, riid, ppvObject);
555 hr = QueryInterface(riid, ppvObject);
// GetDeviceID: this presenter renders via Direct3D 9.
565 *deviceID = IID_IDirect3DDevice9;
// InitServicePointers: look up the clock, the mixer and the media event
// sink from the EVR's topology service; previous pointers are released
// first (m_mediaEventSink.Reset()).
576 DWORD objectCount = 0;
582 return MF_E_INVALIDREQUEST;
586 m_mediaEventSink.Reset();
591 lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
592 MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_clock),
599 hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
600 MR_VIDEO_MIXER_SERVICE, IID_PPV_ARGS(&m_mixer),
// Push the current source rectangle into the freshly obtained mixer.
608 hr = configureMixer(m_mixer.Get());
615 hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
616 MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_mediaEventSink),
621 m_renderState = RenderStopped;
// ReleaseServicePointers: enter shutdown state and drop held services.
631 m_renderState = RenderShutdown;
642 m_mediaEventSink.Reset();
// isValid: usable only when the D3D present engine is healthy and at least
// one pixel format can be rendered (see supportedFormatsChanged).
649 return m_presentEngine->isValid() && m_canRenderToSurface;
// ---------------------------------------------------------------------------
// ProcessMessage, GetCurrentMediaType, IMFClockStateSink callbacks and
// IMFRateSupport (fragmented extraction; line numbers fused into the text).
// ---------------------------------------------------------------------------
// ProcessMessage: refuse everything after shutdown, then dispatch on the
// EVR message type.
658 hr = checkShutdown();
664 case MFVP_MESSAGE_FLUSH:
669 case MFVP_MESSAGE_INVALIDATEMEDIATYPE:
670 hr = renegotiateMediaType();
674 case MFVP_MESSAGE_PROCESSINPUTNOTIFY:
675 hr = processInputNotify();
679 case MFVP_MESSAGE_BEGINSTREAMING:
680 hr = beginStreaming();
684 case MFVP_MESSAGE_ENDSTREAMING:
689 case MFVP_MESSAGE_ENDOFSTREAM:
// Remember that EOS was signalled, then check whether playback is done.
691 m_endStreaming =
true;
693 hr = checkEndOfStream();
697 case MFVP_MESSAGE_STEP:
698 hr = prepareFrameStep(DWORD(param));
702 case MFVP_MESSAGE_CANCELSTEP:
703 hr = cancelFrameStep();
// GetCurrentMediaType: no type negotiated yet -> MF_E_NOT_INITIALIZED.
725 hr = checkShutdown();
730 return MF_E_NOT_INITIALIZED;
732 return m_mediaType->QueryInterface(IID_PPV_ARGS(mediaType));
// OnClockStart: enter the started state; a pending frame step is kicked off
// via startFrameStep().
740 HRESULT hr = checkShutdown();
746 m_renderState = RenderStarted;
753 m_renderState = RenderStarted;
759 hr = startFrameStep();
// OnClockRestart: same transition as start after a pause.
774 HRESULT hr = checkShutdown();
780 m_renderState = RenderStarted;
784 hr = startFrameStep();
// OnClockStop: transition to stopped; cancel any frame step in flight.
798 HRESULT hr = checkShutdown();
802 if (m_renderState != RenderStopped) {
803 m_renderState = RenderStopped;
807 if (m_frameStep.state != FrameStepNone)
// OnClockPause.
819 HRESULT hr = checkShutdown();
822 m_renderState = RenderPaused;
// OnClockSetRate: leaving scrub mode (rate 0 -> nonzero) discards samples
// queued for stepping; the new rate is forwarded to the scheduler.
835 HRESULT hr = checkShutdown();
841 if ((m_playbackRate == 0.0f) && (rate != 0.0f)) {
843 m_frameStep.samples.clear();
846 m_playbackRate = rate;
849 m_scheduler.setClockRate(rate);
// GetSlowestRate (presumably — only the shutdown check is visible).
861 HRESULT hr = checkShutdown();
// GetFastestRate: maximum rate from frame rate vs monitor refresh; negated
// for reverse playback.
878 float maxRate = 0.0f;
880 HRESULT hr = checkShutdown();
885 maxRate = getMaxRate(thin);
888 if (direction == MFRATE_REVERSE)
// IsRateSupported: clamp the requested rate to +/- maxRate and report the
// nearest supported rate; out-parameter is optional.
900 float maxRate = 0.0f;
901 float nearestRate = rate;
903 HRESULT hr = checkShutdown();
909 maxRate = getMaxRate(thin);
911 if (qFabs(rate) > maxRate) {
913 hr = MF_E_UNSUPPORTED_RATE;
916 nearestRate = maxRate;
// Preserve the sign (direction) of the requested rate.
919 nearestRate = -nearestRate;
924 if (nearestSupportedRate)
925 *nearestSupportedRate = nearestRate;
// ---------------------------------------------------------------------------
// supportedFormatsChanged, setSink, setCropRect, configureMixer,
// renegotiateMediaType, flush, processInputNotify, begin/endStreaming
// (fragmented extraction; line numbers fused into the text).
// ---------------------------------------------------------------------------
// supportedFormatsChanged: probe every QVideoFrameFormat pixel format
// against the present engine; renderable iff at least one is accepted.
934 m_canRenderToSurface =
false;
938 for (
int f = 0; f < QVideoFrameFormat::NPixelFormats; ++f) {
940 QVideoFrameFormat::PixelFormat format = QVideoFrameFormat::PixelFormat(f);
941 if (SUCCEEDED(m_presentEngine->checkFormat(qt_evr_D3DFormatFromPixelFormat(format)))) {
942 m_canRenderToSurface =
true;
// setSink / setCropRect: forward to the present engine / remember the crop.
955 m_presentEngine->setSink(sink);
964 m_cropRect = cropRect;
// configureMixer: push the normalized source rectangle into the mixer.
971 return setMixerSourceRect(mixer, m_sourceRect);
// renegotiateMediaType: iterate the mixer's available output types until one
// is supported, can be optimized, passes a test-set on the mixer, and is
// accepted by setMediaType; then commit it with a real SetOutputType.
977 bool foundMediaType =
false;
979 IMFMediaType *mixerType = NULL;
980 IMFMediaType *optimalType = NULL;
983 return MF_E_INVALIDREQUEST;
987 while (!foundMediaType && (hr != MF_E_NO_MORE_TYPES)) {
// Release the previous iteration's raw pointers before reuse.
988 qt_evr_safe_release(&mixerType);
989 qt_evr_safe_release(&optimalType);
992 hr = m_mixer->GetOutputAvailableType(0, typeIndex++, &mixerType);
1001 hr = isMediaTypeSupported(mixerType);
1005 hr = createOptimalVideoType(mixerType, &optimalType);
// Dry-run first (MFT_SET_TYPE_TEST_ONLY), then set for real.
1009 hr = m_mixer->SetOutputType(0, optimalType, MFT_SET_TYPE_TEST_ONLY);
1013 hr = setMediaType(optimalType);
1016 if (SUCCEEDED(hr)) {
1017 hr = m_mixer->SetOutputType(0, optimalType, 0);
1025 foundMediaType =
true;
1028 qt_evr_safe_release(&mixerType);
1029 qt_evr_safe_release(&optimalType);
// flush: drop preroll state, flush the scheduler, clear stepping samples;
// when stopped, push a null sample so the sink shows an empty frame.
1036 m_prerolled =
false;
1042 m_scheduler.flush();
1045 m_frameStep.samples.clear();
1047 if (m_renderState == RenderStopped && m_videoSink) {
1049 presentSample(
nullptr);
// processInputNotify: the mixer has input; only pull output once a media
// type has been set.
1060 m_sampleNotify =
true;
1064 hr = MF_E_TRANSFORM_TYPE_NOT_SET;
1067 processOutputLoop();
// beginStreaming / endStreaming: start/stop the scheduler worker thread.
1077 hr = m_scheduler.startScheduler(m_clock);
1087 hr = m_scheduler.stopScheduler();
// ---------------------------------------------------------------------------
// checkEndOfStream and the frame-step state machine (fragmented extraction;
// line numbers fused into the text).
// ---------------------------------------------------------------------------
// checkEndOfStream: EC_COMPLETE is only sent once EOS was signalled, the
// mixer has no more input pending, and no samples remain scheduled.
1094 if (!m_endStreaming) {
1099 if (m_sampleNotify) {
1104 if (m_scheduler.areSamplesScheduled()) {
1110 notifyEvent(EC_COMPLETE, (LONG_PTR)S_OK, 0);
1111 m_endStreaming =
false;
// prepareFrameStep: accumulate requested steps; stepping begins immediately
// only if the clock is already running.
1122 m_frameStep.steps += steps;
1125 m_frameStep.state = FrameStepWaitingStart;
1129 if (m_renderState == RenderStarted)
1130 hr = startFrameStep();
// startFrameStep: drain samples queued while waiting — either through the
// stepping path (FrameStepPending) or, if stepping was cancelled
// (FrameStepNone), through normal delivery.
1137 if (m_frameStep.state == FrameStepWaitingStart) {
1140 m_frameStep.state = FrameStepPending;
1143 while (!m_frameStep.samples.isEmpty() && (m_frameStep.state == FrameStepPending)) {
1144 const ComPtr<IMFSample> sample = m_frameStep.samples.takeFirst();
1146 const HRESULT hr = deliverFrameStepSample(sample.Get());
1154 }
else if (m_frameStep.state == FrameStepNone) {
1157 while (!m_frameStep.samples.isEmpty()) {
1158 const ComPtr<IMFSample> sample = m_frameStep.samples.takeFirst();
1160 const HRESULT hr = deliverSample(sample.Get());
// completeFrameStep: mark the step done, notify EC_STEP_COMPLETE, and while
// scrubbing also report the sample's clock-corrected time via EC_SCRUB_TIME
// (low 32 bits in param1, high 32 bits in param2).
1172 MFTIME sampleTime = 0;
1173 MFTIME systemTime = 0;
1176 m_frameStep.state = FrameStepComplete;
1177 m_frameStep.sampleNoRef = 0;
1180 notifyEvent(EC_STEP_COMPLETE, FALSE, 0);
1183 if (isScrubbing()) {
1185 hr = sample->GetSampleTime(&sampleTime);
// Fall back to the presentation clock if the sample has no timestamp —
// TODO confirm; the branch structure is not fully visible.
1189 m_clock->GetCorrelatedTime(0, &sampleTime, &systemTime);
1194 notifyEvent(EC_SCRUB_TIME, DWORD(sampleTime), DWORD(((sampleTime) >> 32) & 0xffffffff));
// cancelFrameStep: reset the state machine; TRUE = step was cancelled.
1203 m_frameStep.state = FrameStepNone;
1204 m_frameStep.steps = 0;
1205 m_frameStep.sampleNoRef = 0;
1211 notifyEvent(EC_STEP_COMPLETE, TRUE, 0);
// createOptimalVideoType: clone the mixer's proposed type and adjust its
// geometry (apertures, pan/scan) to the output/crop rectangle.
// NOTE(review): fragmented extraction — original line numbers fused into the
// text; intervening lines are missing. Code left byte-identical.
1216HRESULT
EVRCustomPresenter::createOptimalVideoType(IMFMediaType *proposedType, IMFMediaType **optimalType)
1221 ZeroMemory(&rcOutput,
sizeof(rcOutput));
1223 MFVideoArea displayArea;
1224 ZeroMemory(&displayArea,
sizeof(displayArea));
1226 IMFMediaType *mtOptimal = NULL;
// Start from a copy of the proposed type.
1234 hr = MFCreateMediaType(&mtOptimal);
1238 hr = proposedType->CopyAllItems(mtOptimal);
// Frame size is packed as a UINT64: width in the high 32 bits, height low.
1244 hr = proposedType->GetUINT64(MF_MT_FRAME_SIZE, &size);
1245 width =
int(HI32(size));
1246 height =
int(LO32(size));
1248 if (m_cropRect.isValid()) {
// Output rectangle = crop rectangle in pixels...
1249 rcOutput.left = m_cropRect.x();
1250 rcOutput.top = m_cropRect.y();
1251 rcOutput.right = m_cropRect.x() + m_cropRect.width();
1252 rcOutput.bottom = m_cropRect.y() + m_cropRect.height();
// ...and the mixer's source rectangle is the same crop, normalized to
// [0,1] relative to the full frame.
1254 m_sourceRect.left =
float(m_cropRect.x()) / width;
1255 m_sourceRect.top =
float(m_cropRect.y()) / height;
1256 m_sourceRect.right =
float(m_cropRect.x() + m_cropRect.width()) / width;
1257 m_sourceRect.bottom =
float(m_cropRect.y() + m_cropRect.height()) / height;
1260 configureMixer(m_mixer.Get());
// No crop: output covers the full frame.
1264 rcOutput.right = width;
1265 rcOutput.bottom = height;
1269 displayArea = qt_evr_makeMFArea(0, 0, rcOutput.right - rcOutput.left,
1270 rcOutput.bottom - rcOutput.top);
// Disable pan/scan and set all three aperture attributes to the display
// area so downstream components agree on the visible region.
1272 hr = mtOptimal->SetUINT32(MF_MT_PAN_SCAN_ENABLED, FALSE);
1276 hr = mtOptimal->SetBlob(MF_MT_GEOMETRIC_APERTURE,
reinterpret_cast<UINT8*>(&displayArea),
1277 sizeof(displayArea));
1284 hr = mtOptimal->SetBlob(MF_MT_PAN_SCAN_APERTURE,
reinterpret_cast<UINT8*>(&displayArea),
1285 sizeof(displayArea));
1289 hr = mtOptimal->SetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE,
reinterpret_cast<UINT8*>(&displayArea),
1290 sizeof(displayArea));
// Hand the type to the caller with its own reference; the local raw pointer
// is released unconditionally below.
1295 *optimalType = mtOptimal;
1296 (*optimalType)->AddRef();
1299 qt_evr_safe_release(&mtOptimal);
// setMediaType: (null type clears the current one).
1311 m_mediaType.Reset();
1316 MFRatio fps = { 0, 0 };
1317 QList<ComPtr<IMFSample>> sampleQueue;
1320 HRESULT hr = checkShutdown();
// No work if the new type equals the current one.
1326 if (qt_evr_areMediaTypesEqual(m_mediaType.Get(), mediaType))
1330 m_mediaType.Reset();
// Allocate the sample pool for the new type; tag each sample with the
// current token so stale samples from an old type can be recognized.
1336 hr = m_presentEngine->createVideoSamples(mediaType, sampleQueue, m_cropRect.size());
1342 for (
auto sample : std::as_const(sampleQueue)) {
1343 hr = sample->SetUINT32(MFSamplePresenter_SampleCounter, m_tokenCounter);
1349 hr = m_samplePool.initialize(std::move(sampleQueue));
// Use the type's frame rate for scheduling, or a default if absent.
1354 if (SUCCEEDED(qt_evr_getFrameRate(mediaType, &fps)) && (fps.Numerator != 0) && (fps.Denominator != 0)) {
1355 m_scheduler.setFrameRate(fps);
1360 m_scheduler.setFrameRate(g_DefaultFrameRate);
1364 m_mediaType = mediaType;
1365 m_mediaType->AddRef();
// isMediaTypeSupported: validate a proposed mixer output type.
// Rejects (MF_E_INVALIDMEDIATYPE) unknown pixel formats, compressed formats,
// non-progressive interlacing, formats the D3D engine cannot render, and
// aperture rectangles that fall outside the frame.
// NOTE(review): fragmented extraction — line numbers fused in; code left
// byte-identical, comments only.
1377 D3DFORMAT d3dFormat = D3DFMT_UNKNOWN;
1378 BOOL compressed = FALSE;
1379 MFVideoInterlaceMode interlaceMode = MFVideoInterlace_Unknown;
1380 MFVideoArea videoCropArea;
1381 UINT32 width = 0, height = 0;
// The FOURCC doubles as the D3D format code.
1384 HRESULT hr = qt_evr_getFourCC(proposed,
reinterpret_cast<DWORD*>(&d3dFormat));
1388 QVideoFrameFormat::PixelFormat pixelFormat = pixelFormatFromMediaType(proposed);
1389 if (pixelFormat == QVideoFrameFormat::Format_Invalid)
1390 return MF_E_INVALIDMEDIATYPE;
1393 hr = proposed->IsCompressedFormat(&compressed);
1398 return MF_E_INVALIDMEDIATYPE;
// Can the present engine actually render this format?
1401 hr = m_presentEngine->checkFormat(d3dFormat);
1406 hr = proposed->GetUINT32(MF_MT_INTERLACE_MODE,
reinterpret_cast<UINT32*>(&interlaceMode));
1410 if (interlaceMode != MFVideoInterlace_Progressive)
1411 return MF_E_INVALIDMEDIATYPE;
1413 hr = MFGetAttributeSize(proposed, MF_MT_FRAME_SIZE, &width, &height);
// Validate every aperture that happens to be present on the type.
1421 if (SUCCEEDED(proposed->GetBlob(MF_MT_PAN_SCAN_APERTURE,
1422 reinterpret_cast<UINT8*>(&videoCropArea),
1423 sizeof(videoCropArea),
nullptr))) {
1424 hr = qt_evr_validateVideoArea(videoCropArea, width, height);
1426 if (SUCCEEDED(proposed->GetBlob(MF_MT_GEOMETRIC_APERTURE,
1427 reinterpret_cast<UINT8*>(&videoCropArea),
1428 sizeof(videoCropArea),
nullptr))) {
1429 hr = qt_evr_validateVideoArea(videoCropArea, width, height);
1431 if (SUCCEEDED(proposed->GetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE,
1432 reinterpret_cast<UINT8*>(&videoCropArea),
1433 sizeof(videoCropArea),
nullptr))) {
1434 hr = qt_evr_validateVideoArea(videoCropArea, width, height);
// ---------------------------------------------------------------------------
// processOutputLoop, processOutput, deliverSample, deliverFrameStepSample
// (fragmented extraction; line numbers fused into the text).
// ---------------------------------------------------------------------------
// processOutputLoop: keep pulling frames from the mixer until it needs more
// input (or an error occurs).
1444 while (hr == S_OK) {
1446 if (!m_sampleNotify) {
1447 hr = MF_E_TRANSFORM_NEED_MORE_INPUT;
1452 hr = processOutput();
1458 if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
// processOutput: once prerolled, only pull new frames while started.
1468 if ((m_renderState != RenderStarted) && m_prerolled)
1473 return MF_E_INVALIDREQUEST;
// Borrow a free sample from the pool to receive the mixer's output.
1476 ComPtr<IMFSample> sample = m_samplePool.takeSample();
// Measure mixer latency against the presentation clock.
1483 LONGLONG mixerStartTime = 0, mixerEndTime = 0;
1484 MFTIME systemTime = 0;
1488 m_clock->GetCorrelatedTime(0, &mixerStartTime, &systemTime);
1493 MFT_OUTPUT_DATA_BUFFER dataBuffer = {};
1494 dataBuffer.pSample = sample.Get();
1495 HRESULT hr = m_mixer->ProcessOutput(0, 1, &dataBuffer, &status);
// ProcessOutput may return an events object even on failure — release it.
1497 qt_evr_safe_release(&dataBuffer.pEvents);
// On failure the borrowed sample goes back to the pool.
1501 m_samplePool.returnSample(sample);
1504 if (hr == MF_E_TRANSFORM_TYPE_NOT_SET) {
1506 hr = renegotiateMediaType();
1507 }
else if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
1510 }
else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
// Mixer is drained; wait for the next PROCESSINPUTNOTIFY.
1513 m_sampleNotify =
false;
// Success path: report mixer latency, register the sample-free callback,
// then route the sample through normal or frame-step delivery.
1524 m_clock->GetCorrelatedTime(0, &mixerEndTime, &systemTime);
1526 LONGLONG latencyTime = mixerEndTime - mixerStartTime;
1527 notifyEvent(EC_PROCESSING_LATENCY,
reinterpret_cast<LONG_PTR>(&latencyTime), 0);
1531 hr = trackSample(sample);
1536 if (m_frameStep.state == FrameStepNone)
1537 hr = deliverSample(sample);
1539 hr = deliverFrameStepSample(sample);
// deliverSample: outside of started playback (or while scrubbing) the
// sample is presented immediately instead of being scheduled.
1554 bool presentNow = ((m_renderState != RenderStarted) || isScrubbing());
1556 HRESULT hr = m_scheduler.scheduleSample(sample, presentNow);
1562 notifyEvent(EC_ERRORABORT, hr, 0);
// deliverFrameStepSample: while scrubbing, late samples are dropped; while
// a step is scheduled, further samples are queued; otherwise consume one
// step and, on the final step, remember the sample's identity (as a
// non-owning pointer value in sampleNoRef) so onSampleFree can detect when
// it has actually been presented.
1571 IUnknown *unk = NULL;
1574 if (isScrubbing() && m_clock && qt_evr_isSampleTimePassed(m_clock.Get(), sample.Get())) {
1576 }
else if (m_frameStep.state >= FrameStepScheduled) {
1580 m_frameStep.samples.append(sample);
1585 if (m_frameStep.steps > 0)
1586 m_frameStep.steps--;
1588 if (m_frameStep.steps > 0) {
1590 }
else if (m_frameStep.state == FrameStepWaitingStart) {
1594 m_frameStep.samples.append(sample);
1597 hr = deliverSample(sample);
// The IUnknown pointer value identifies the sample without keeping a
// reference (stored as DWORD_PTR, released immediately below).
1603 hr = sample->QueryInterface(IID_PPV_ARGS(&unk));
1607 m_frameStep.sampleNoRef =
reinterpret_cast<DWORD_PTR>(unk);
1614 m_frameStep.state = FrameStepScheduled;
1618 qt_evr_safe_release(&unk);
// ---------------------------------------------------------------------------
// trackSample, releaseResources, onSampleFree, getMaxRate (fragmented
// extraction; line numbers fused into the text).
// ---------------------------------------------------------------------------
// trackSample: arrange for m_sampleFreeCB (-> onSampleFree) to be invoked
// when the sample's last reference is released.
1624 IMFTrackedSample *tracked = NULL;
1626 HRESULT hr = sample->QueryInterface(IID_PPV_ARGS(&tracked));
1629 hr = tracked->SetAllocator(&m_sampleFreeCB, NULL);
1631 qt_evr_safe_release(&tracked);
// releaseResources: drop pooled samples and device resources.
1650 m_samplePool.clear();
1652 m_presentEngine->releaseResources();
// onSampleFree: async callback fired when a presented sample is free again.
1657 IUnknown *object = NULL;
1658 IMFSample *sample = NULL;
1659 IUnknown *unk = NULL;
1663 HRESULT hr = result->GetObject(&object);
1667 hr = object->QueryInterface(IID_PPV_ARGS(&sample));
// If this is the sample a frame step was waiting on (compared by pointer
// identity, see deliverFrameStepSample), complete the step.
1674 if (m_frameStep.state == FrameStepScheduled) {
1676 hr = sample->QueryInterface(IID_PPV_ARGS(&unk));
1680 if (m_frameStep.sampleNoRef ==
reinterpret_cast<DWORD_PTR>(unk)) {
1682 hr = completeFrameStep(sample);
// Only samples carrying the current token belong to the active media type;
// stale ones are not returned to the pool.
1695 token = MFGetAttributeUINT32(sample, MFSamplePresenter_SampleCounter, (UINT32)-1);
1697 if (token == m_tokenCounter) {
1699 m_samplePool.returnSample(sample);
// A sample became available — try to pull more output from the mixer.
1701 processOutputLoop();
1708 notifyEvent(EC_ERRORABORT, hr, 0);
1709 qt_evr_safe_release(&object);
1710 qt_evr_safe_release(&sample);
1711 qt_evr_safe_release(&unk);
// getMaxRate: for non-thinned playback the rate is bounded by how many
// source frames fit into the monitor refresh rate; otherwise unbounded.
1724 float maxRate = FLT_MAX;
1725 MFRatio fps = { 0, 0 };
1726 UINT monitorRateHz = 0;
1728 if (!thin && m_mediaType) {
1729 qt_evr_getFrameRate(m_mediaType.Get(), &fps);
1730 monitorRateHz = m_presentEngine->refreshRate();
1732 if (fps.Denominator && fps.Numerator && monitorRateHz) {
// maxRate = refreshRate / sourceFrameRate.
1734 maxRate = (
float)MulDiv(monitorRateHz, fps.Denominator, fps.Numerator);
// ---------------------------------------------------------------------------
// Qt-side delivery: event(), startSurface, stopSurface, presentSample,
// positionChanged (fragmented extraction; line numbers fused into the text).
// ---------------------------------------------------------------------------
// event(): dispatch custom QEvent types posted from MF threads onto the
// object's own (GUI) thread.
1743 switch (
int(e->type())) {
1751 presentSample(
static_cast<PresentSampleEvent *>(e)->sample());
1756 return QObject::event(e);
// startSurface / stopSurface: marshal to the object's thread via postEvent
// when called from a foreign (Media Foundation worker) thread.
1761 if (thread() != QThread::currentThread()) {
1762 QCoreApplication::postEvent(
this,
new QEvent(QEvent::Type(StartSurface)));
1769 if (thread() != QThread::currentThread()) {
1770 QCoreApplication::postEvent(
this,
new QEvent(QEvent::Type(StopSurface)));
// presentSample: likewise marshalled; a PresentSampleEvent carries the
// sample across threads.
1777 if (thread() != QThread::currentThread()) {
1778 QCoreApplication::postEvent(
this,
new PresentSampleEvent(sample));
// Nothing to do without a sink or a valid negotiated surface format.
1782 if (!m_videoSink || !m_presentEngine->videoSurfaceFormat().isValid())
// Read the source's rotation from the mixer's current input type and map it
// to the QtVideo::Rotation enum (default: no rotation).
1785 QtVideo::Rotation rotation = [&] {
1786 ComPtr<IMFMediaType> inputStreamType;
1787 if (SUCCEEDED(m_mixer->GetInputCurrentType(0, inputStreamType.GetAddressOf()))) {
1788 auto rotation =
static_cast<MFVideoRotationFormat>(
1789 MFGetAttributeUINT32(inputStreamType.Get(), MF_MT_VIDEO_ROTATION, 0));
1791 case MFVideoRotationFormat_90:
1792 return QtVideo::Rotation::Clockwise90;
1793 case MFVideoRotationFormat_180:
1794 return QtVideo::Rotation::Clockwise180;
1795 case MFVideoRotationFormat_270:
1796 return QtVideo::Rotation::Clockwise270;
1797 case MFVideoRotationFormat_0:
1799 return QtVideo::Rotation::None;
1802 return QtVideo::Rotation::None;
1805 QVideoFrame frame = m_presentEngine->makeVideoFrame(sample, rotation);
// Shift frame timestamps by the stream position offset (see
// positionChanged below) so they are absolute, not segment-relative.
1809 if (m_positionOffset) {
1810 if (frame.startTime())
1811 frame.setStartTime(frame.startTime() + m_positionOffset);
1812 if (frame.endTime())
1813 frame.setEndTime(frame.endTime() + m_positionOffset);
1816 m_videoSink->platformVideoSink()->setVideoFrame(frame);
// positionChanged: position is in ms (presumably), stored in microseconds
// to match QVideoFrame timestamps — TODO confirm units against the caller.
1821 m_positionOffset = position * 1000;
// Push a normalized source rectangle into the mixer by writing the
// VIDEO_ZOOM_RECT blob on its attribute store.
// NOTE(review): fragmented extraction — the trailing `return hr;` and the
// null-check of `mixer` (if any) are not visible here.
1824HRESULT setMixerSourceRect(IMFTransform *mixer,
const MFVideoNormalizedRect &sourceRect)
1829 IMFAttributes *attributes = NULL;
1831 HRESULT hr = mixer->GetAttributes(&attributes);
1832 if (SUCCEEDED(hr)) {
1833 hr = attributes->SetBlob(VIDEO_ZOOM_RECT,
reinterpret_cast<
const UINT8*>(&sourceRect),
1834 sizeof(sourceRect));
// Balance the reference returned by GetAttributes.
1835 attributes->Release();
1843 if (FAILED(type->GetMajorType(&majorType)))
1844 return QVideoFrameFormat::Format_Invalid;
1845 if (majorType != MFMediaType_Video)
1846 return QVideoFrameFormat::Format_Invalid;
1849 if (FAILED(type->GetGUID(MF_MT_SUBTYPE, &subtype)))
1850 return QVideoFrameFormat::Format_Invalid;
1852 return QWindowsMultimediaUtils::pixelFormatFromMediaSubtype(subtype);
STDMETHODIMP GetDeviceID(IID *deviceID) override
STDMETHODIMP GetSlowestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate) override
STDMETHODIMP OnClockStop(MFTIME systemTime) override
STDMETHODIMP ReleaseServicePointers() override
STDMETHODIMP QueryInterface(REFIID riid, void **ppv) override
STDMETHODIMP OnClockRestart(MFTIME systemTime) override
STDMETHODIMP InitServicePointers(IMFTopologyServiceLookup *lookup) override
STDMETHODIMP GetService(REFGUID guidService, REFIID riid, LPVOID *ppvObject) override
void supportedFormatsChanged()
void setCropRect(QRect cropRect)
STDMETHODIMP IsRateSupported(BOOL thin, float rate, float *nearestSupportedRate) override
STDMETHODIMP OnClockSetRate(MFTIME systemTime, float rate) override
STDMETHODIMP GetCurrentMediaType(IMFVideoMediaType **mediaType) override
void setSink(QVideoSink *sink)
STDMETHODIMP GetFastestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate) override
void presentSample(const ComPtr< IMFSample > &sample)
STDMETHODIMP OnClockStart(MFTIME systemTime, LONGLONG clockStartOffset) override
STDMETHODIMP OnClockPause(MFTIME systemTime) override
STDMETHODIMP ProcessMessage(MFVP_MESSAGE_TYPE message, ULONG_PTR param) override
~EVRCustomPresenter() override
EVRCustomPresenter(QVideoSink *sink=0)
ComPtr< IMFSample > sample() const
PresentSampleEvent(const ComPtr< IMFSample > &sample)
ComPtr< IMFSample > takeSample()
void returnSample(const ComPtr< IMFSample > &sample)
HRESULT initialize(QList< ComPtr< IMFSample > > &&samples)
Scheduler(EVRCustomPresenter *presenter)
HRESULT startScheduler(ComPtr< IMFClock > clock)
void setFrameRate(const MFRatio &fps)
HRESULT processSamplesInQueue(LONG *nextSleep)
bool areSamplesScheduled()
HRESULT scheduleSample(const ComPtr< IMFSample > &sample, bool presentNow)
static LONG MFTimeToMsec(const LONGLONG &time)
static QVideoFrameFormat::PixelFormat pixelFormatFromMediaType(IMFMediaType *type)
static HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect &nrcSource)
#define QMM_PRESENTATION_CURRENT_POSITION
static const DWORD SCHEDULER_TIMEOUT
bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter)
static const LONG ONE_MSEC
static const MFTIME ONE_SECOND
static const MFRatio g_DefaultFrameRate
#define qCWarning(category,...)
#define qCDebug(category,...)
#define Q_STATIC_LOGGING_CATEGORY(name,...)