Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
qwasmvideooutput.cpp
Go to the documentation of this file.
1// Copyright (C) 2022 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
4#include <QDebug>
5#include <QUrl>
6#include <QPoint>
7#include <QRect>
8#include <QMediaPlayer>
9#include <QVideoFrame>
10#include <QFile>
11#include <QBuffer>
12#include <QMimeDatabase>
14
15#include <qvideosink.h>
16#include <private/qplatformvideosink_p.h>
17#include <private/qmemoryvideobuffer_p.h>
18#include <private/qvideotexturehelper_p.h>
19#include <private/qvideoframe_p.h>
20#include <private/qstdweb_p.h>
21#include <QTimer>
22
23#include <emscripten/bind.h>
24#include <emscripten/html5.h>
25#include <emscripten/val.h>
26
27
29
30
31using namespace emscripten;
32using namespace Qt::Literals;
33
34Q_LOGGING_CATEGORY(qWasmMediaVideoOutput, "qt.multimedia.wasm.videooutput")
35
36
37static bool checkForVideoFrame()
38{
39 emscripten::val videoFrame = emscripten::val::global("VideoFrame");
40 return (!videoFrame.isNull() && !videoFrame.isUndefined());
41}
42
43QWasmVideoOutput::QWasmVideoOutput(QObject *parent) : QObject{ parent }
44{
46}
47
49
50void QWasmVideoOutput::setVideoSize(const QSize &newSize)
51{
52 if (m_pendingVideoSize == newSize)
53 return;
54
55 m_pendingVideoSize = newSize;
56 updateVideoElementGeometry(QRect(0, 0, m_pendingVideoSize.width(), m_pendingVideoSize.height()));
57}
58
60{
61 m_currentVideoMode = mode;
62}
63
65{
66 if (m_video.isUndefined() || m_video.isNull()
67 || !m_wasmSink) {
68 // error
69 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
70 return;
71 }
72 switch (m_currentVideoMode) {
74 emscripten::val sourceObj = m_video["src"];
75 if ((sourceObj.isUndefined() || sourceObj.isNull()) && !m_source.isEmpty()) {
76 m_video.set("src", m_source);
77 }
78 if (!isReady())
79 m_video.call<void>("load");
80 } break;
83 if (!m_cameraIsReady) {
84 m_shouldBeStarted = true;
85 }
86
87 if (!m_connection)
88 m_connection = connect(m_mediaInputStream, &JsMediaInputStream::mediaVideoStreamReady, this,
89 [=]( ) {
90 m_video.set("srcObject", m_mediaInputStream->getMediaStream());
91
92 emscripten::val stream = m_video["srcObject"];
93 if (stream.isNull() || stream.isUndefined()) { // camera device
94 qCDebug(qWasmMediaVideoOutput) << "srcObject ERROR";
95 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
96 return;
97 } else {
98 emscripten::val videoTracks = stream.call<emscripten::val>("getVideoTracks");
99 if (videoTracks.isNull() || videoTracks.isUndefined()) {
100 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << "videoTracks is null";
101 emit errorOccured(QMediaPlayer::ResourceError,
102 QStringLiteral("video surface error"));
103 return;
104 }
105 if (videoTracks["length"].as<int>() == 0) {
106 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << "videoTracks count is 0";
107 emit errorOccured(QMediaPlayer::ResourceError,
108 QStringLiteral("video surface error"));
109 return;
110 }
111 emscripten::val videoSettings = videoTracks[0].call<emscripten::val>("getSettings");
112 if (!videoSettings.isNull() || !videoSettings.isUndefined()) {
113 const int width = videoSettings["width"].as<int>();
114 const int height = videoSettings["height"].as<int>();
115 updateVideoElementGeometry(QRect(0, 0, width, height));
116 }
117 }
118
119 m_video.call<void>("play");
120
121 if (m_currentVideoMode == QWasmVideoOutput::Camera
122 || m_currentVideoMode == QWasmVideoOutput::SurfaceCapture) {
123 emit readyChanged(true);
124 if (m_hasVideoFrame)
125 videoFrameTimerCallback();
126 }
127
128 });
129 m_mediaInputStream->setUseAudio(false);
130 m_shouldBeStarted = true;
131 m_mediaInputStream->setStreamDevice(m_cameraId);
132
133 } break;
134 };
135
136 m_shouldStop = false;
137 m_toBePaused = false;
138
139 if (m_currentVideoMode != QWasmVideoOutput::Camera
140 && m_currentVideoMode != QWasmVideoOutput::SurfaceCapture) {
141 m_video.call<void>("play");
142 }
143}
144
146{
147 if (m_shouldStop)
148 return;
149 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;
150 if (m_video.isUndefined() || m_video.isNull()) {
151 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("Resource error"));
152 return;
153 }
154 m_shouldStop = true;
155 if (!m_toBePaused) {
156 if (m_mediaInputStream && m_mediaInputStream->isActive())
157 m_mediaInputStream->stopMediaStream(m_mediaInputStream->getMediaStream());
158
159 m_video.set("srcObject", emscripten::val::null());
160 disconnect(m_connection);
161 m_video.call<void>("remove");
162 } else {
163 m_video.call<void>("pause");
164 }
165}
166
168{
169 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;
170
171 if (m_video.isUndefined() || m_video.isNull()) {
172 // error
173 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
174 return;
175 }
176 m_shouldStop = false;
177 m_toBePaused = true;
178 m_video.call<void>("pause");
179}
180
182{
183 // flush pending frame
184 if (m_wasmSink)
185 m_wasmSink->platformVideoSink()->setVideoFrame(QVideoFrame());
186
187 m_source.clear();
188 m_video.set("currentTime", emscripten::val(0));
189 m_video.call<void>("load");
190}
191
193{
194 return m_video;
195}
196
197void QWasmVideoOutput::setSurface(QVideoSink *surface)
198{
199 if (!surface || surface == m_wasmSink) {
200 return;
201 }
202
203 m_wasmSink = surface;
204}
205
207{
208 if (m_video.isUndefined() || m_video.isNull()) {
209 // error
210 return false;
211 }
212
213 return m_currentMediaStatus == MediaStatus::LoadedMedia;
214 }
215
216void QWasmVideoOutput::setSource(const QUrl &url)
217{
218 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << url;
219
220 m_source = url.toString();
221
222 if (m_video.isUndefined() || m_video.isNull()) {
223 return;
224 }
225
226 if (url.isEmpty()) {
227 stop();
228 return;
229 }
230 if (url.isLocalFile()) {
231 QFile localFile(url.toLocalFile());
232 if (localFile.open(QIODevice::ReadOnly)) {
233 QDataStream buffer(&localFile); // we will serialize the data into the file
234 setSource(buffer.device());
235 } else {
236 qWarning() << "Failed to open file";
237 }
238 return;
239 }
240
241 updateVideoElementSource(m_source);
242}
243
245{
246 m_video.set("src", src.toStdString());
247 m_video.call<void>("load");
248}
249
250void QWasmVideoOutput::addCameraSourceElement(const std::string &id)
251{
252 m_cameraIsReady = false;
253 m_mediaInputStream = JsMediaInputStream::instance();
254
255 m_mediaInputStream->setUseAudio(m_hasAudio);
256 m_mediaInputStream->setUseVideo(true);
257
258 connect(m_mediaInputStream, &JsMediaInputStream::mediaVideoStreamReady, this,
259 [this]() {
260 qCDebug(qWasmMediaVideoOutput) << "mediaVideoStreamReady" << m_shouldBeStarted;
261
262 m_cameraIsReady = true;
263 if (m_shouldBeStarted) {
264 start();
265 m_shouldBeStarted = false;
266 }
267 });
268
269 m_cameraId = id;
270}
271
272void QWasmVideoOutput::setSource(QIODevice *stream)
273{
274 if (stream->bytesAvailable() == 0) {
275 qWarning() << "data not available";
276 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("data not available"));
277 return;
278 }
279 if (m_video.isUndefined() || m_video.isNull()) {
280 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
281 return;
282 }
283
284 QMimeDatabase db;
285 QMimeType mime = db.mimeTypeForData(stream);
286
287 QByteArray buffer = stream->readAll();
288
289 qstdweb::Blob contentBlob = qstdweb::Blob::copyFrom(buffer.data(), buffer.size(), mime.name().toStdString());
290
291 emscripten::val window = qstdweb::window();
292
293 if (window["safari"].isUndefined()) {
294 emscripten::val contentUrl = window["URL"].call<emscripten::val>("createObjectURL", contentBlob.val());
295 m_video.set("src", contentUrl);
296 m_source = QString::fromStdString(contentUrl.as<std::string>());
297 } else {
298 // only Safari currently supports Blob with srcObject
299 m_video.set("srcObject", contentBlob.val());
300 }
301}
302
303void QWasmVideoOutput::setVolume(qreal volume)
304{ // between 0 - 1
305 volume = qBound(qreal(0.0), volume, qreal(1.0));
306 m_video.set("volume", volume);
307}
308
309void QWasmVideoOutput::setMuted(bool muted)
310{
311 if (m_video.isUndefined() || m_video.isNull()) {
312 // error
313 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
314 return;
315 }
316 m_video.set("muted", muted);
317}
318
320{
321 return (!m_video.isUndefined() || !m_video.isNull())
322 ? (m_video["currentTime"].as<double>() * 1000)
323 : 0;
324}
325
326void QWasmVideoOutput::seekTo(qint64 positionMSecs)
327{
328 if (isVideoSeekable()) {
329 float positionToSetInSeconds = float(positionMSecs) / 1000;
330 emscripten::val seekableTimeRange = m_video["seekable"];
331 if (!seekableTimeRange.isNull() || !seekableTimeRange.isUndefined()) {
332 // range user can seek
333 if (seekableTimeRange["length"].as<int>() < 1)
334 return;
335 if (positionToSetInSeconds
336 >= seekableTimeRange.call<emscripten::val>("start", 0).as<double>()
337 && positionToSetInSeconds
338 <= seekableTimeRange.call<emscripten::val>("end", 0).as<double>()) {
339 m_requestedPosition = positionToSetInSeconds;
340
341 m_video.set("currentTime", m_requestedPosition);
342 }
343 }
344 }
345 qCDebug(qWasmMediaVideoOutput) << "m_requestedPosition" << m_requestedPosition;
346}
347
349{
350 if (m_video.isUndefined() || m_video.isNull()) {
351 // error
352 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
353 return false;
354 }
355
356 emscripten::val seekableTimeRange = m_video["seekable"];
357 if (seekableTimeRange["length"].as<int>() < 1)
358 return false;
359 if (!seekableTimeRange.isNull() || !seekableTimeRange.isUndefined()) {
360 bool isit = !QtPrivate::fuzzyCompare(
361 seekableTimeRange.call<emscripten::val>("start", 0).as<double>(),
362 seekableTimeRange.call<emscripten::val>("end", 0).as<double>());
363 return isit;
364 }
365 return false;
366}
367
368void QWasmVideoOutput::createVideoElement(const std::string &id)
369{
370 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << this << id;
371 // Create <video> element and add it to the page body
372
373 emscripten::val document = emscripten::val::global("document");
374 emscripten::val body = document["body"];
375
376 emscripten::val oldVideo = document.call<emscripten::val>("getElementById", id);
377
378 // need to remove stale element
379 if (!oldVideo.isUndefined() && !oldVideo.isNull())
380 oldVideo.call<void>("remove");
381
382 m_videoSurfaceId = id;
383 m_video = document.call<emscripten::val>("createElement", std::string("video"));
384
385 m_video.set("id", m_videoSurfaceId.c_str());
386 m_video.call<void>("setAttribute", std::string("class"),
387 (m_currentVideoMode == QWasmVideoOutput::Camera ? std::string("Camera")
388 : std::string("Video")));
389 m_video.set("data-qvideocontext",
390 emscripten::val(quintptr(reinterpret_cast<void *>(this))));
391
392 m_video.set("preload", "metadata");
393
394 // Uncaught DOMException: Failed to execute 'getImageData' on
395 // 'OffscreenCanvasRenderingContext2D': The canvas has been tainted by
396 // cross-origin data.
397 // TODO figure out somehow to let user choose between these
398 std::string originString = "anonymous"; // requires server Access-Control-Allow-Origin *
399 // std::string originString = "use-credentials"; // must not
400 // Access-Control-Allow-Origin *
401
402 m_video.call<void>("setAttribute", std::string("crossorigin"), originString);
403 body.call<void>("appendChild", m_video);
404
405 // Create/add video source
406 document.call<emscripten::val>("createElement",
407 std::string("source")).set("src", m_source.toStdString());
408
409 // Set position:absolute, which makes it possible to position the video
410 // element using x,y. coordinates, relative to its parent (the page's <body>
411 // element)
412 emscripten::val style = m_video["style"];
413 style.set("position", "absolute");
414 style.set("display", "none"); // hide
415
416 if (!m_source.isEmpty())
417 updateVideoElementSource(m_source);
418}
419
420void QWasmVideoOutput::createOffscreenElement(const QSize &offscreenSize)
421{
422 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;
423
424 if (m_hasVideoFrame) // VideoFrame does not require offscreen canvas/context
425 return;
426
427 // create offscreen element for grabbing frames
428 // OffscreenCanvas - no safari :(
429 // https://developer.mozilla.org/en-US/docs/Web/API/OffscreenCanvas
430
431 emscripten::val document = emscripten::val::global("document");
432
433 // TODO use correct frameBytesAllocationSize?
434 // offscreen render buffer
435 m_offscreen = emscripten::val::global("OffscreenCanvas");
436
437 if (m_offscreen.isUndefined()) {
438 // Safari OffscreenCanvas not supported, try old skool way
439 m_offscreen = document.call<emscripten::val>("createElement", std::string("canvas"));
440
441 m_offscreen.set("style",
442 "position:absolute;left:-1000px;top:-1000px"); // offscreen
443 m_offscreen.set("width", offscreenSize.width());
444 m_offscreen.set("height", offscreenSize.height());
445 m_offscreenContext = m_offscreen.call<emscripten::val>("getContext", std::string("2d"));
446 } else {
447 m_offscreen = emscripten::val::global("OffscreenCanvas")
448 .new_(offscreenSize.width(), offscreenSize.height());
449 emscripten::val offscreenAttributes = emscripten::val::array();
450 offscreenAttributes.set("willReadFrequently", true);
451 m_offscreenContext = m_offscreen.call<emscripten::val>("getContext", std::string("2d"),
452 offscreenAttributes);
453 }
454 std::string offscreenId = m_videoSurfaceId + "_offscreenOutputSurface";
455 m_offscreen.set("id", offscreenId.c_str());
456}
457
// NOTE(review): the signature line of this member (original line 458) was
// lost in extraction and its name does not appear in the member index.
// The body removes the <video> element from the DOM when it exists —
// presumably a destructor or teardown helper; confirm against the header.
459{
460 if (!m_video.isUndefined() && !m_video.isNull())
461 m_video.call<void>("remove");
462}
463
465{
466 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;
467
468 // event callbacks
469 // timupdate
470 auto timeUpdateCallback = [=](emscripten::val event) {
471 qCDebug(qWasmMediaVideoOutput) << "timeupdate";
472
473 // qt progress is ms
474 emit progressChanged(event["target"]["currentTime"].as<double>() * 1000);
475 };
476 m_timeUpdateEvent.reset(new QWasmEventHandler(m_video, "timeupdate", timeUpdateCallback));
477
478 // play
479 auto playCallback = [=](emscripten::val event) {
480 Q_UNUSED(event)
481 qCDebug(qWasmMediaVideoOutput) << "play" << m_video["src"].as<std::string>();
482 if (!m_isSeeking)
483 emit stateChanged(QWasmMediaPlayer::Preparing);
484 };
485 m_playEvent.reset(new QWasmEventHandler(m_video, "play", playCallback));
486
487 // ended
488 auto endedCallback = [=](emscripten::val event) {
489 Q_UNUSED(event)
490 qCDebug(qWasmMediaVideoOutput) << "ended";
491 m_currentMediaStatus = MediaStatus::EndOfMedia;
492 emit statusChanged(m_currentMediaStatus);
493 };
494 m_endedEvent.reset(new QWasmEventHandler(m_video, "ended", endedCallback));
495
496 // durationchange
497 auto durationChangeCallback = [=](emscripten::val event) {
498 qCDebug(qWasmMediaVideoOutput) << "durationChange";
499
500 // qt duration is in milliseconds.
501 qint64 dur = event["target"]["duration"].as<double>() * 1000;
502 emit durationChanged(dur);
503 };
504 m_durationChangeEvent.reset(
505 new QWasmEventHandler(m_video, "durationchange", durationChangeCallback));
506
507 // loadeddata
508 auto loadedDataCallback = [=](emscripten::val event) {
509 Q_UNUSED(event)
510 qCDebug(qWasmMediaVideoOutput) << "loaded data";
511
512 emit stateChanged(QWasmMediaPlayer::Prepared);
513 if (m_isSeekable != isVideoSeekable()) {
514 m_isSeekable = isVideoSeekable();
515 emit seekableChanged(m_isSeekable);
516 }
517 };
518 m_loadedDataEvent.reset(new QWasmEventHandler(m_video, "loadeddata", loadedDataCallback));
519
520 // error
521 auto errorCallback = [=](emscripten::val event) {
522 qCDebug(qWasmMediaVideoOutput) << "error";
523 if (event.isUndefined() || event.isNull())
524 return;
525 emit errorOccured(m_video["error"]["code"].as<int>(),
526 QString::fromStdString(m_video["error"]["message"].as<std::string>()));
527 };
528 m_errorChangeEvent.reset(new QWasmEventHandler(m_video, "error", errorCallback));
529
530 // resize
531 auto resizeCallback = [=](emscripten::val event) {
532 Q_UNUSED(event)
533 qCDebug(qWasmMediaVideoOutput) << "resize";
534
535 updateVideoElementGeometry(
536 QRect(0, 0, m_video["videoWidth"].as<int>(), m_video["videoHeight"].as<int>()));
537 emit sizeChange(m_video["videoWidth"].as<int>(), m_video["videoHeight"].as<int>());
538
539 };
540 m_resizeChangeEvent.reset(new QWasmEventHandler(m_video, "resize", resizeCallback));
541
542 // loadedmetadata
543 auto loadedMetadataCallback = [=](emscripten::val event) {
544 Q_UNUSED(event)
545 qCDebug(qWasmMediaVideoOutput) << "loaded meta data";
546
547 emit metaDataLoaded();
548 };
549 m_loadedMetadataChangeEvent.reset(
550 new QWasmEventHandler(m_video, "loadedmetadata", loadedMetadataCallback));
551
552 // loadstart
553 auto loadStartCallback = [=](emscripten::val event) {
554 Q_UNUSED(event)
555 qCDebug(qWasmMediaVideoOutput) << "load started";
556 m_currentMediaStatus = MediaStatus::LoadingMedia;
557 emit statusChanged(m_currentMediaStatus);
558 m_shouldStop = false;
559 };
560 m_loadStartChangeEvent.reset(new QWasmEventHandler(m_video, "loadstart", loadStartCallback));
561
562 // canplay
563
564 auto canPlayCallback = [=](emscripten::val event) {
565 if (event.isUndefined() || event.isNull())
566 return;
567 qCDebug(qWasmMediaVideoOutput) << "can play"
568 << "m_requestedPosition" << m_requestedPosition;
569
570 if (!m_shouldStop)
571 emit readyChanged(true); // sets video available
572 };
573 m_canPlayChangeEvent.reset(new QWasmEventHandler(m_video, "canplay", canPlayCallback));
574
575 // canplaythrough
576 auto canPlayThroughCallback = [=](emscripten::val event) {
577 Q_UNUSED(event)
578 qCDebug(qWasmMediaVideoOutput) << "can play through"
579 << "m_shouldStop" << m_shouldStop;
580
581 if (m_currentMediaStatus == MediaStatus::EndOfMedia)
582 return;
583 bool seekable = isVideoSeekable();
584 if (m_isSeekable != seekable) {
585 m_isSeekable = seekable;
586 emit seekableChanged(m_isSeekable);
587 }
588 if (!m_isSeeking && !m_shouldStop) {
589 emscripten::val timeRanges = m_video["buffered"];
590 if ((!timeRanges.isNull() || !timeRanges.isUndefined())
591 && timeRanges["length"].as<int>() == 1) {
592 double buffered = m_video["buffered"].call<emscripten::val>("end", 0).as<double>();
593 const double duration = m_video["duration"].as<double>();
594
595 if (duration == buffered) {
596 m_currentBufferedValue = 100;
597 emit bufferingChanged(m_currentBufferedValue);
598 }
599 }
600 constexpr int hasEnoughData = 4;
601 if (m_video["readyState"].as<int>() == hasEnoughData) {
602 m_currentMediaStatus = MediaStatus::LoadedMedia;
603 emit statusChanged(m_currentMediaStatus);
605 }
606 } else {
607 m_shouldStop = false;
608 }
609 };
610 m_canPlayThroughChangeEvent.reset(
611 new QWasmEventHandler(m_video, "canplaythrough", canPlayThroughCallback));
612
613 // seeking
614 auto seekingCallback = [=](emscripten::val event) {
615 Q_UNUSED(event)
616 qCDebug(qWasmMediaVideoOutput)
617 << "seeking started" << (m_video["currentTime"].as<double>() * 1000);
618 m_isSeeking = true;
619 };
620 m_seekingChangeEvent.reset(new QWasmEventHandler(m_video, "seeking", seekingCallback));
621
622 // seeked
623 auto seekedCallback = [=](emscripten::val event) {
624 Q_UNUSED(event)
625 qCDebug(qWasmMediaVideoOutput) << "seeked" << (m_video["currentTime"].as<double>() * 1000);
626 emit progressChanged(m_video["currentTime"].as<double>() * 1000);
627 m_isSeeking = false;
628 };
629 m_seekedChangeEvent.reset(new QWasmEventHandler(m_video, "seeked", seekedCallback));
630
631 // emptied
632 auto emptiedCallback = [=](emscripten::val event) {
633 Q_UNUSED(event)
634 qCDebug(qWasmMediaVideoOutput) << "emptied";
635 emit readyChanged(false);
636 m_currentMediaStatus = MediaStatus::EndOfMedia;
637 emit statusChanged(m_currentMediaStatus);
638 };
639 m_emptiedChangeEvent.reset(new QWasmEventHandler(m_video, "emptied", emptiedCallback));
640
641 // stalled
642 auto stalledCallback = [=](emscripten::val event) {
643 Q_UNUSED(event)
644 qCDebug(qWasmMediaVideoOutput) << "stalled";
645 m_currentMediaStatus = MediaStatus::StalledMedia;
646 emit statusChanged(m_currentMediaStatus);
647 };
648 m_stalledChangeEvent.reset(new QWasmEventHandler(m_video, "stalled", stalledCallback));
649
650 // waiting
651 auto waitingCallback = [=](emscripten::val event) {
652 Q_UNUSED(event)
653
654 qCDebug(qWasmMediaVideoOutput) << "waiting";
655 // check buffer
656 };
657 m_waitingChangeEvent.reset(new QWasmEventHandler(m_video, "waiting", waitingCallback));
658
659 // suspend
660
661 // playing
662 auto playingCallback = [=](emscripten::val event) {
663 Q_UNUSED(event)
664 qCDebug(qWasmMediaVideoOutput) << "playing";
665 if (m_isSeeking)
666 return;
667 emit stateChanged(QWasmMediaPlayer::Started);
668 if (m_toBePaused || !m_shouldStop) { // paused
669 m_toBePaused = false;
670 QMetaObject::invokeMethod(this, &QWasmVideoOutput::videoFrameTimerCallback, Qt::QueuedConnection);
671 }
672 };
673 m_playingChangeEvent.reset(new QWasmEventHandler(m_video, "playing", playingCallback));
674
675 // progress (buffering progress)
676 auto progesssCallback = [=](emscripten::val event) {
677 if (event.isUndefined() || event.isNull())
678 return;
679
680 const double duration = event["target"]["duration"].as<double>();
681 if (duration < 0) // track not exactly ready yet
682 return;
683
684 emscripten::val timeRanges = event["target"]["buffered"];
685
686 if ((!timeRanges.isNull() || !timeRanges.isUndefined())
687 && timeRanges["length"].as<int>() == 1) {
688 emscripten::val dVal = timeRanges.call<emscripten::val>("end", 0);
689 if (!dVal.isNull() || !dVal.isUndefined()) {
690 double bufferedEnd = dVal.as<double>();
691
692 if (duration > 0 && bufferedEnd > 0) {
693 const double bufferedValue = (bufferedEnd / duration * 100);
694 qCDebug(qWasmMediaVideoOutput) << "progress buffered";
695 m_currentBufferedValue = bufferedValue;
696 emit bufferingChanged(m_currentBufferedValue);
697 if (bufferedEnd == duration)
698 m_currentMediaStatus = MediaStatus::BufferedMedia;
699 else
700 m_currentMediaStatus = MediaStatus::BufferingMedia;
701 emit statusChanged(m_currentMediaStatus);
702 }
703 }
704 }
705 };
706 m_progressChangeEvent.reset(new QWasmEventHandler(m_video, "progress", progesssCallback));
707
708 // pause
709 auto pauseCallback = [=](emscripten::val event) {
710 Q_UNUSED(event)
711 qCDebug(qWasmMediaVideoOutput) << "pause";
712
713 const double currentTime = m_video["currentTime"].as<double>(); // in seconds
714 const double duration = m_video["duration"].as<double>(); // in seconds
715 if ((currentTime > 0 && currentTime < duration) && (!m_shouldStop && m_toBePaused)) {
716 emit stateChanged(QWasmMediaPlayer::Paused);
717 } else {
718 // stop this crazy thing!
719 m_video.set("currentTime", emscripten::val(0));
720 emit stateChanged(QWasmMediaPlayer::Stopped);
721 }
722 };
723 m_pauseChangeEvent.reset(new QWasmEventHandler(m_video, "pause", pauseCallback));
724
725 // onunload
726 // we use lower level events here as to avert a crash on activate using the
727 // qtdweb see _qt_beforeUnload
728 emscripten::val window = emscripten::val::global("window");
729
730 auto beforeUnloadCallback = [=](emscripten::val event) {
731 Q_UNUSED(event)
732 // large videos will leave the unloading window
733 // in a frozen state, so remove the video element src first
734 m_video.call<void>("removeAttribute", emscripten::val("src"));
735 m_video.call<void>("load");
736 };
737 m_beforeUnloadEvent.reset(new QWasmEventHandler(window, "beforeunload", beforeUnloadCallback));
738
739}
740
741void QWasmVideoOutput::updateVideoElementGeometry(const QRect &windowGeometry)
742{
743 QRect m_videoElementSource(windowGeometry.topLeft(), windowGeometry.size());
744
745 emscripten::val style = m_video["style"];
746 style.set("left", QStringLiteral("%1px").arg(m_videoElementSource.left()).toStdString());
747 style.set("top", QStringLiteral("%1px").arg(m_videoElementSource.top()).toStdString());
748 m_video.set("width", m_videoElementSource.width());
749 m_video.set("height", m_videoElementSource.height());
750 style.set("z-index", "999");
751
752 if (!m_hasVideoFrame) {
753 // offscreen
754 m_offscreen.set("width", m_videoElementSource.width());
755 m_offscreen.set("height", m_videoElementSource.height());
756 }
757}
758
760{
761 // qt duration is in ms
762 // js is sec
763
764 if (m_video.isUndefined() || m_video.isNull())
765 return 0;
766 return m_video["duration"].as<double>() * 1000;
767}
768
769void QWasmVideoOutput::newFrame(const QVideoFrame &frame)
770{
771 m_wasmSink->setVideoFrame(frame);
772}
773
775{
776 m_video.set("playbackRate", emscripten::val(rate));
777}
778
780{
781 return (m_video.isUndefined() || m_video.isNull()) ? 0 : m_video["playbackRate"].as<float>();
782}
783
784void QWasmVideoOutput::checkNetworkState()
785{
786 int netState = m_video["networkState"].as<int>();
787
788 qCDebug(qWasmMediaVideoOutput) << netState;
789
790 switch (netState) {
791 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkEmpty: // no data
792 break;
793 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkIdle:
794 break;
795 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkLoading:
796 break;
797 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkNoSource: // no source
798 emit errorOccured(netState, QStringLiteral("No media source found"));
799 break;
800 };
801}
802
803void QWasmVideoOutput::videoComputeFrame(void *context)
804{
805 if (m_offscreenContext.isUndefined() || m_offscreenContext.isNull()) {
806 qCDebug(qWasmMediaVideoOutput) << "offscreen canvas context could not be found";
807 return;
808 }
809 emscripten::val document = emscripten::val::global("document");
810
811 if (m_video.isUndefined() || m_video.isNull()) {
812 qCDebug(qWasmMediaVideoOutput) << "video element could not be found";
813 return;
814 }
815
816 const int videoWidth = m_video["videoWidth"].as<int>();
817 const int videoHeight = m_video["videoHeight"].as<int>();
818
819 if (videoWidth == 0 || videoHeight == 0)
820 return;
821
822 m_offscreenContext.call<void>("drawImage", m_video, 0, 0, videoWidth, videoHeight);
823
824 emscripten::val frame = // one frame, Uint8ClampedArray
825 m_offscreenContext.call<emscripten::val>("getImageData", 0, 0, videoWidth, videoHeight);
826
827 const QSize frameBytesAllocationSize(videoWidth, videoHeight);
828
829 // this seems to work ok, even though getImageData returns a Uint8ClampedArray
830 QByteArray frameBytes = qstdweb::Uint8Array(frame["data"]).copyToQByteArray();
831
832 QVideoFrameFormat frameFormat =
833 QVideoFrameFormat(frameBytesAllocationSize, QVideoFrameFormat::Format_RGBA8888);
834
835 auto *textureDescription = QVideoTextureHelper::textureDescription(frameFormat.pixelFormat());
836
837 QVideoFrame vFrame = QVideoFramePrivate::createFrame(
838 std::make_unique<QMemoryVideoBuffer>(
839 std::move(frameBytes),
840 textureDescription->strideForWidth(frameFormat.frameWidth())), // width of line with padding
841 frameFormat);
842 QWasmVideoOutput *wasmVideoOutput = reinterpret_cast<QWasmVideoOutput *>(context);
843
844 if (!wasmVideoOutput->m_wasmSink) {
845 qWarning() << "ERROR ALERT!! video sink not set";
846 }
847 wasmVideoOutput->m_wasmSink->setVideoFrame(vFrame);
848}
849
850
852{
853 QWasmVideoOutput *videoOutput = reinterpret_cast<QWasmVideoOutput *>(context);
854 if (!videoOutput || !videoOutput->isReady())
855 return;
856 emscripten::val videoElement = videoOutput->currentVideoElement();
857 emscripten::val oneVideoFrame = val::global("VideoFrame").new_(videoElement);
858
859 if (oneVideoFrame.isNull() || oneVideoFrame.isUndefined()) {
860 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO
861 << "ERROR" << "failed to construct VideoFrame";
862 return;
863 }
864
865 emscripten::val options = emscripten::val::object();
866 emscripten::val rectOptions = emscripten::val::object();
867
868 int displayWidth = oneVideoFrame["displayWidth"].as<int>();
869 int displayHeight = oneVideoFrame["displayHeight"].as<int>();
870
871 rectOptions.set("width", displayWidth);
872 rectOptions.set("height", displayHeight);
873 options.set("rect", rectOptions);
874
875 emscripten::val frameBytesAllocationSize = oneVideoFrame.call<emscripten::val>("allocationSize", options);
876 emscripten::val frameBuffer =
877 emscripten::val::global("Uint8Array").new_(frameBytesAllocationSize);
878 QWasmVideoOutput *wasmVideoOutput =
879 reinterpret_cast<QWasmVideoOutput*>(videoElement["data-qvideocontext"].as<quintptr>());
880
881 qstdweb::PromiseCallbacks copyToCallback;
882 copyToCallback.thenFunc = [wasmVideoOutput, oneVideoFrame, frameBuffer,
883 displayWidth, displayHeight]
884 (emscripten::val frameLayout)
885 {
886 if (frameLayout.isNull() || frameLayout.isUndefined()) {
887 qCDebug(qWasmMediaVideoOutput) << "theres no frameLayout";
888 return;
889 }
890
891 // frameBuffer now has a new frame, send to Qt
892 const QSize frameSize(displayWidth,
893 displayHeight);
894
895 QByteArray frameBytes = QByteArray::fromEcmaUint8Array(frameBuffer);
896
897 QVideoFrameFormat::PixelFormat pixelFormat = fromJsPixelFormat(oneVideoFrame["format"].as<std::string>());
898 if (pixelFormat == QVideoFrameFormat::Format_Invalid) {
899 qWarning() << "Invalid pixel format";
900 return;
901 }
902 QVideoFrameFormat frameFormat = QVideoFrameFormat(frameSize, pixelFormat);
903
904 auto buffer = std::make_unique<QMemoryVideoBuffer>(
905 std::move(frameBytes),
906 oneVideoFrame["codedWidth"].as<int>());
907
908 QVideoFrame vFrame =
909 QVideoFramePrivate::createFrame(std::move(buffer), std::move(frameFormat));
910
911 if (!wasmVideoOutput) {
912 qCDebug(qWasmMediaVideoOutput) << "ERROR:"
913 << "data-qvideocontext not found";
914 return;
915 }
916 if (!wasmVideoOutput->m_wasmSink) {
917 qWarning() << "ERROR ALERT!! video sink not set";
918 return;
919 }
920 wasmVideoOutput->m_wasmSink->setVideoFrame(vFrame);
921 oneVideoFrame.call<emscripten::val>("close");
922 };
923 copyToCallback.catchFunc = [&, wasmVideoOutput, oneVideoFrame](emscripten::val error)
924 {
925 qCDebug(qWasmMediaVideoOutput) << "Error"
926 << QString::fromStdString(error["name"].as<std::string>())
927 << QString::fromStdString(error["message"].as<std::string>()) ;
928
929 oneVideoFrame.call<emscripten::val>("close");
930 wasmVideoOutput->stop();
931 return;
932 };
933
934 qstdweb::Promise::make(oneVideoFrame, u"copyTo"_s, std::move(copyToCallback), frameBuffer);
935}
936
938{
939 static auto frame = [](double frameTime, void *context) -> EM_BOOL {
940 Q_UNUSED(frameTime);
941
942 QWasmVideoOutput *videoOutput = reinterpret_cast<QWasmVideoOutput *>(context);
943 if (!videoOutput || videoOutput->m_currentMediaStatus != MediaStatus::LoadedMedia)
944 return false;
945 if (videoOutput->m_shouldStop)
946 return false;
947 emscripten::val videoElement = videoOutput->currentVideoElement();
948
949 if (videoElement.isNull() || videoElement.isUndefined()) {
950 qWarning() << "no video element";
951 }
952
953 if (videoElement["paused"].as<bool>() || videoElement["ended"].as<bool>()
954 || videoElement["readyState"].as<int>() != 4)
955 return false;
956
957 if (videoOutput->m_hasVideoFrame) {
958 videoOutput->videoFrameCallback(context);
959 } else {
960 videoOutput->videoComputeFrame(context);
961 }
962 return true;
963 };
964
965 if ((!m_shouldStop && m_video["className"].as<std::string>() == "Camera" && m_cameraIsReady)
966 || isReady())
967 emscripten_request_animation_frame_loop(frame, this);
968 // about 60 fps
969}
970
971QVideoFrameFormat::PixelFormat QWasmVideoOutput::fromJsPixelFormat(std::string_view videoFormat)
972{
973 if (videoFormat == "I420")
974 return QVideoFrameFormat::Format_YUV420P;
975 // no equivalent pixel format
976 // else if (videoFormat == "I420A") // AYUV ?
977 else if (videoFormat == "I422")
978 return QVideoFrameFormat::Format_YUV422P;
979 // no equivalent pixel format
980 // else if (videoFormat == "I444")
981 else if (videoFormat == "NV12")
982 return QVideoFrameFormat::Format_NV12;
983 else if (videoFormat == "RGBA")
984 return QVideoFrameFormat::Format_RGBA8888;
985 else if (videoFormat == "RGBX")
986 return QVideoFrameFormat::Format_RGBX8888;
987 else if (videoFormat == "BGRA")
988 return QVideoFrameFormat::Format_BGRA8888;
989 else if (videoFormat == "BGRX")
990 return QVideoFrameFormat::Format_BGRX8888;
991
992 return QVideoFrameFormat::Format_Invalid;
993}
994
996{
997 emscripten::val stream = m_video["srcObject"];
998 if ((!stream.isNull() && !stream.isUndefined()) && stream["active"].as<bool>()) {
999 emscripten::val tracks = stream.call<emscripten::val>("getVideoTracks");
1000 if (!tracks.isUndefined()) {
1001 if (tracks["length"].as<int>() == 0)
1002 return emscripten::val::undefined();
1003
1004 emscripten::val track = tracks[0];
1005 if (!track.isUndefined()) {
1006 emscripten::val trackCaps = emscripten::val::undefined();
1007 if (!track["getCapabilities"].isUndefined())
1008 trackCaps = track.call<emscripten::val>("getCapabilities");
1009 else // firefox does not support getCapabilities
1010 trackCaps = track.call<emscripten::val>("getSettings");
1011
1012 if (!trackCaps.isUndefined())
1013 return trackCaps;
1014 }
1015 }
1016 } else {
1017 // camera not started track capabilities not available
1018 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("capabilities not available"));
1019 }
1020
1021 return emscripten::val::undefined();
1022}
1023
1024bool QWasmVideoOutput::setDeviceSetting(const std::string &key, emscripten::val value)
1025{
1026 emscripten::val stream = m_video["srcObject"];
1027 if (stream.isNull() || stream.isUndefined()
1028 || stream["getVideoTracks"].isUndefined())
1029 return false;
1030
1031 emscripten::val tracks = stream.call<emscripten::val>("getVideoTracks");
1032 if (!tracks.isNull() || !tracks.isUndefined()) {
1033 if (tracks["length"].as<int>() == 0)
1034 return false;
1035
1036 emscripten::val track = tracks[0];
1037 emscripten::val contraint = emscripten::val::object();
1038 contraint.set(std::move(key), value);
1039 track.call<emscripten::val>("applyConstraints", contraint);
1040 return true;
1041 }
1042
1043 return false;
1044}
1045
1046QT_END_NAMESPACE
1047
1048#include "moc_qwasmvideooutput_p.cpp"
void addCameraSourceElement(const std::string &id)
void updateVideoElementGeometry(const QRect &windowGeometry)
bool setDeviceSetting(const std::string &key, emscripten::val value)
emscripten::val surfaceElement()
emscripten::val getDeviceCapabilities()
void videoFrameCallback(void *context)
void setVideoSize(const QSize &)
void setMuted(bool muted)
void setSource(const QUrl &url)
void setVideoMode(QWasmVideoOutput::WasmVideoMode mode)
void seekTo(qint64 position)
void setVolume(qreal volume)
void createVideoElement(const std::string &id)
void updateVideoElementSource(const QString &src)
void setSource(QIODevice *stream)
void setPlaybackRate(qreal rate)
void createOffscreenElement(const QSize &offscreenSize)
Combined button and popup list for selecting options.
Q_LOGGING_CATEGORY(lcEventDispatcher, "qt.eventdispatcher")
static bool checkForVideoFrame()