Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
qwasmvideooutput.cpp
Go to the documentation of this file.
1// Copyright (C) 2022 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
4#include <QDebug>
5#include <QUrl>
6#include <QPoint>
7#include <QRect>
8#include <QMediaPlayer>
9#include <QVideoFrame>
10#include <QFile>
11#include <QBuffer>
12#include <QMimeDatabase>
13#include "qwasmvideooutput_p.h"
14
15#include <qvideosink.h>
16#include <private/qplatformvideosink_p.h>
17#include <private/qmemoryvideobuffer_p.h>
18#include <private/qvideotexturehelper_p.h>
19#include <private/qvideoframe_p.h>
20#include <private/qstdweb_p.h>
21#include <QTimer>
22
23#include <emscripten/bind.h>
24#include <emscripten/html5.h>
25#include <emscripten/val.h>
26
27
28QT_BEGIN_NAMESPACE
29
30
31using namespace emscripten;
32using namespace Qt::Literals;
33
34Q_LOGGING_CATEGORY(qWasmMediaVideoOutput, "qt.multimedia.wasm.videooutput")
35
36
37static bool checkForVideoFrame()
38{
39 emscripten::val videoFrame = emscripten::val::global("VideoFrame");
40 return (!videoFrame.isNull() && !videoFrame.isUndefined());
41}
42
43QWasmVideoOutput::QWasmVideoOutput(QObject *parent) : QObject{ parent }
44{
45 m_hasVideoFrame = checkForVideoFrame();
46}
47
49
50void QWasmVideoOutput::setVideoSize(const QSize &newSize)
51{
52 if (m_pendingVideoSize == newSize)
53 return;
54
55 m_pendingVideoSize = newSize;
56 updateVideoElementGeometry(QRect(0, 0, m_pendingVideoSize.width(), m_pendingVideoSize.height()));
57}
58
59void QWasmVideoOutput::setVideoMode(QWasmVideoOutput::WasmVideoMode mode)
60{
61 m_currentVideoMode = mode;
62}
63
64void QWasmVideoOutput::start()
65{
66 if (m_video.isUndefined() || m_video.isNull()
67 || !m_wasmSink) {
68 // error
69 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
70 return;
71 }
72 switch (m_currentVideoMode) {
73 case QWasmVideoOutput::VideoOutput: {
74 emscripten::val sourceObj = m_video["src"];
75 if ((sourceObj.isUndefined() || sourceObj.isNull()) && !m_source.isEmpty()) {
76 m_video.set("src", m_source);
77 }
78 if (!isReady())
79 m_video.call<void>("load");
80 } break;
81 case QWasmVideoOutput::Camera:
82 case QWasmVideoOutput::SurfaceCapture: {
83 if (!m_cameraIsReady) {
84 m_shouldBeStarted = true;
85 }
86
87 emscripten::val stream = m_video["srcObject"];
88 if (stream.isNull() || stream.isUndefined()) { // camera device
89 qCDebug(qWasmMediaVideoOutput) << "srcObject ERROR";
90 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
91 return;
92 } else {
93 emscripten::val videoTracks = stream.call<emscripten::val>("getVideoTracks");
94 if (videoTracks.isNull() || videoTracks.isUndefined()) {
95 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << "videoTracks is null";
96 emit errorOccured(QMediaPlayer::ResourceError,
97 QStringLiteral("video surface error"));
98 return;
99 }
100 if (videoTracks["length"].as<int>() == 0) {
101 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << "videoTracks count is 0";
102 emit errorOccured(QMediaPlayer::ResourceError,
103 QStringLiteral("video surface error"));
104 return;
105 }
106 emscripten::val videoSettings = videoTracks[0].call<emscripten::val>("getSettings");
107 if (!videoSettings.isNull() && !videoSettings.isUndefined()) {
108 const int width = videoSettings["width"].as<int>();
109 const int height = videoSettings["height"].as<int>();
110 updateVideoElementGeometry(QRect(0, 0, width, height));
111 }
112 }
113 } break;
114 };
115
116 m_shouldStop = false;
117 m_toBePaused = false;
118 m_video.call<void>("play");
119
120 if (m_currentVideoMode == QWasmVideoOutput::Camera
121 || m_currentVideoMode == QWasmVideoOutput::SurfaceCapture) {
122 emit readyChanged(true);
123 if (m_hasVideoFrame)
124 videoFrameTimerCallback();
125 }
126}
127
128void QWasmVideoOutput::stop()
129{
130 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;
131
132 if (m_video.isUndefined() || m_video.isNull()) {
133 // error
134 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("Resource error"));
135 return;
136 }
137 m_shouldStop = true;
138 if (!m_toBePaused) {
139 // we are stopped , need to reset
140 m_video.call<void>("pause");
141 emscripten::val stream = m_video["srcObject"];
142 if (!stream.isNull() && !stream.isUndefined() && !stream["getTracks"].isUndefined()) {
143 emscripten::val tracks = stream.call<emscripten::val>("getTracks");
144 if (!tracks.isUndefined() && tracks["length"].as<int>() > 0) {
145 for (int i = 0; i < tracks["length"].as<int>(); i++) {
146 tracks[i].call<void>("stop");
147 }
148 }
149 }
150 m_video.set("srcObject", emscripten::val::null());
151 m_video.call<void>("load");
152 } else {
153 m_video.call<void>("pause");
154 }
155}
156
157void QWasmVideoOutput::pause()
158{
159 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;
160
161 if (m_video.isUndefined() || m_video.isNull()) {
162 // error
163 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
164 return;
165 }
166 m_shouldStop = false;
167 m_toBePaused = true;
168 m_video.call<void>("pause");
169}
170
171void QWasmVideoOutput::reset()
172{
173 // flush pending frame
174 if (m_wasmSink)
175 m_wasmSink->platformVideoSink()->setVideoFrame(QVideoFrame());
176
177 m_source = QStringLiteral("") ;
178 m_video.set("currentTime", emscripten::val(0));
179 m_video.call<void>("load");
180}
181
182emscripten::val QWasmVideoOutput::surfaceElement()
183{
184 return m_video;
185}
186
187void QWasmVideoOutput::setSurface(QVideoSink *surface)
188{
189 if (!surface || surface == m_wasmSink) {
190 return;
191 }
192
193 m_wasmSink = surface;
194}
195
196bool QWasmVideoOutput::isReady()
197{
198 if (m_video.isUndefined() || m_video.isNull()) {
199 // error
200 return false;
201 }
202
203 return m_currentMediaStatus == MediaStatus::LoadedMedia;
204}
205
206void QWasmVideoOutput::setSource(const QUrl &url)
207{
208 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << url;
209
210 m_source = url.toString();
211
212 if (m_video.isUndefined() || m_video.isNull()) {
213 return;
214 }
215
216 if (url.isEmpty()) {
217 stop();
218 return;
219 }
220 if (url.isLocalFile()) {
221 QFile localFile(url.toLocalFile());
222 if (localFile.open(QIODevice::ReadOnly)) {
223 QDataStream buffer(&localFile); // wrap the local file as the source device
224 setSource(buffer.device());
225 } else {
226 qWarning() << "Failed to open file";
227 }
228 return;
229 }
230
231 updateVideoElementSource(m_source);
232}
233
234void QWasmVideoOutput::updateVideoElementSource(const QString &src)
235{
236 m_video.set("src", src.toStdString());
237 m_video.call<void>("load");
238}
239
240void QWasmVideoOutput::addCameraSourceElement(const std::string &id)
241{
242 m_cameraIsReady = false;
243
244 m_mediaInputStream.reset(new JsMediaInputStream());
245
246 m_mediaInputStream->setUseAudio(m_hasAudio);
247 m_mediaInputStream->setUseVideo(true);
248
249 connect(m_mediaInputStream.get(), &JsMediaInputStream::mediaStreamReady, this,
250 [this]() {
251 qCDebug(qWasmMediaVideoOutput) << "mediaStreamReady";
252
253 m_video.set("srcObject", m_mediaInputStream->getMediaStream());
254 m_video.call<void>("load");
255
256 m_cameraIsReady = true;
257 if (m_shouldBeStarted) {
258 start();
259 m_shouldBeStarted = false;
260 }
261 });
262
263 m_mediaInputStream->setStreamDevice(id);
264}
265
266void QWasmVideoOutput::setSource(QIODevice *stream)
267{
268 if (stream->bytesAvailable() == 0) {
269 qWarning() << "data not available";
270 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("data not available"));
271 return;
272 }
273 if (m_video.isUndefined() || m_video.isNull()) {
274 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
275 return;
276 }
277
278 QMimeDatabase db;
279 QMimeType mime = db.mimeTypeForData(stream);
280
281 QByteArray buffer = stream->readAll();
282
283 qstdweb::Blob contentBlob = qstdweb::Blob::copyFrom(buffer.data(), buffer.size(), mime.name().toStdString());
284
285 emscripten::val window = qstdweb::window();
286
287 if (window["safari"].isUndefined()) {
288 emscripten::val contentUrl = window["URL"].call<emscripten::val>("createObjectURL", contentBlob.val());
289 m_video.set("src", contentUrl);
290 m_source = QString::fromStdString(contentUrl.as<std::string>());
291 } else {
292 // only Safari currently supports Blob with srcObject
293 m_video.set("srcObject", contentBlob.val());
294 }
295}
296
297void QWasmVideoOutput::setVolume(qreal volume)
298{ // between 0 - 1
299 volume = qBound(qreal(0.0), volume, qreal(1.0));
300 m_video.set("volume", volume);
301}
302
303void QWasmVideoOutput::setMuted(bool muted)
304{
305 if (m_video.isUndefined() || m_video.isNull()) {
306 // error
307 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
308 return;
309 }
310 m_video.set("muted", muted);
311}
312
313qint64 QWasmVideoOutput::getCurrentPosition()
314{
315 return (!m_video.isUndefined() && !m_video.isNull())
316 ? (m_video["currentTime"].as<double>() * 1000)
317 : 0;
318}
319
320void QWasmVideoOutput::seekTo(qint64 positionMSecs)
321{
322 if (isVideoSeekable()) {
323 float positionToSetInSeconds = float(positionMSecs) / 1000;
324 emscripten::val seekableTimeRange = m_video["seekable"];
325 if (!seekableTimeRange.isNull() && !seekableTimeRange.isUndefined()) {
326 // range user can seek
327 if (seekableTimeRange["length"].as<int>() < 1)
328 return;
329 if (positionToSetInSeconds
330 >= seekableTimeRange.call<emscripten::val>("start", 0).as<double>()
331 && positionToSetInSeconds
332 <= seekableTimeRange.call<emscripten::val>("end", 0).as<double>()) {
333 m_requestedPosition = positionToSetInSeconds;
334
335 m_video.set("currentTime", m_requestedPosition);
336 }
337 }
338 }
339 qCDebug(qWasmMediaVideoOutput) << "m_requestedPosition" << m_requestedPosition;
340}
341
342bool QWasmVideoOutput::isVideoSeekable()
343{
344 if (m_video.isUndefined() || m_video.isNull()) {
345 // error
346 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
347 return false;
348 }
349
350 emscripten::val seekableTimeRange = m_video["seekable"];
351 if (seekableTimeRange["length"].as<int>() < 1)
352 return false;
353 if (!seekableTimeRange.isNull() && !seekableTimeRange.isUndefined()) {
354 bool isit = !qFuzzyCompare(seekableTimeRange.call<emscripten::val>("start", 0).as<double>(),
355 seekableTimeRange.call<emscripten::val>("end", 0).as<double>());
356 return isit;
357 }
358 return false;
359}
360
361void QWasmVideoOutput::createVideoElement(const std::string &id)
362{
363 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << this << id;
364 // Create <video> element and add it to the page body
365
366 emscripten::val document = emscripten::val::global("document");
367 emscripten::val body = document["body"];
368
369 emscripten::val oldVideo = document.call<emscripten::val>("getElementById", id);
370
371 // need to remove stale element
372 if (!oldVideo.isUndefined() && !oldVideo.isNull())
373 oldVideo.call<void>("remove");
374
375 m_videoSurfaceId = id;
376 m_video = document.call<emscripten::val>("createElement", std::string("video"));
377
378 m_video.set("id", m_videoSurfaceId.c_str());
379 m_video.call<void>("setAttribute", std::string("class"),
380 (m_currentVideoMode == QWasmVideoOutput::Camera ? std::string("Camera")
381 : std::string("Video")));
382 m_video.set("data-qvideocontext",
383 emscripten::val(quintptr(reinterpret_cast<void *>(this))));
384
385 m_video.set("preload", "metadata");
386
387 // Uncaught DOMException: Failed to execute 'getImageData' on
388 // 'OffscreenCanvasRenderingContext2D': The canvas has been tainted by
389 // cross-origin data.
390 // TODO figure out somehow to let user choose between these
391 std::string originString = "anonymous"; // requires server Access-Control-Allow-Origin *
392 // std::string originString = "use-credentials"; // must not
393 // Access-Control-Allow-Origin *
394
395 m_video.call<void>("setAttribute", std::string("crossorigin"), originString);
396 body.call<void>("appendChild", m_video);
397
398 // Create/add video source
399 document.call<emscripten::val>("createElement",
400 std::string("source")).set("src", m_source.toStdString());
401
402 // Set position:absolute, which makes it possible to position the video
403 // element using x,y. coordinates, relative to its parent (the page's <body>
404 // element)
405 emscripten::val style = m_video["style"];
406 style.set("position", "absolute");
407 style.set("display", "none"); // hide
408
409 if (!m_source.isEmpty())
410 updateVideoElementSource(m_source);
411}
412
413void QWasmVideoOutput::createOffscreenElement(const QSize &offscreenSize)
414{
415 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;
416
417 if (m_hasVideoFrame) // VideoFrame does not require offscreen canvas/context
418 return;
419
420 // create offscreen element for grabbing frames
421 // OffscreenCanvas - no safari :(
422 // https://developer.mozilla.org/en-US/docs/Web/API/OffscreenCanvas
423
424 emscripten::val document = emscripten::val::global("document");
425
426 // TODO use correct frameBytesAllocationSize?
427 // offscreen render buffer
428 m_offscreen = emscripten::val::global("OffscreenCanvas");
429
430 if (m_offscreen.isUndefined()) {
431 // Safari OffscreenCanvas not supported, try old skool way
432 m_offscreen = document.call<emscripten::val>("createElement", std::string("canvas"));
433
434 m_offscreen.set("style",
435 "position:absolute;left:-1000px;top:-1000px"); // offscreen
436 m_offscreen.set("width", offscreenSize.width());
437 m_offscreen.set("height", offscreenSize.height());
438 m_offscreenContext = m_offscreen.call<emscripten::val>("getContext", std::string("2d"));
439 } else {
440 m_offscreen = emscripten::val::global("OffscreenCanvas")
441 .new_(offscreenSize.width(), offscreenSize.height());
442 emscripten::val offscreenAttributes = emscripten::val::array();
443 offscreenAttributes.set("willReadFrequently", true);
444 m_offscreenContext = m_offscreen.call<emscripten::val>("getContext", std::string("2d"),
445 offscreenAttributes);
446 }
447 std::string offscreenId = m_videoSurfaceId + "_offscreenOutputSurface";
448 m_offscreen.set("id", offscreenId.c_str());
449}
450
452{
453 if (!m_video.isUndefined() && !m_video.isNull())
454 m_video.call<void>("remove");
455}
456
457void QWasmVideoOutput::doElementCallbacks()
458{
459 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;
460
461 // event callbacks
462 // timeupdate
463 auto timeUpdateCallback = [=](emscripten::val event) {
464 qCDebug(qWasmMediaVideoOutput) << "timeupdate";
465
466 // qt progress is ms
467 emit progressChanged(event["target"]["currentTime"].as<double>() * 1000);
468 };
469 m_timeUpdateEvent.reset(new QWasmEventHandler(m_video, "timeupdate", timeUpdateCallback));
470
471 // play
472 auto playCallback = [=](emscripten::val event) {
473 Q_UNUSED(event)
474 qCDebug(qWasmMediaVideoOutput) << "play" << m_video["src"].as<std::string>();
475 if (!m_isSeeking)
476 emit stateChanged(QWasmMediaPlayer::Preparing);
477 };
478 m_playEvent.reset(new QWasmEventHandler(m_video, "play", playCallback));
479
480 // ended
481 auto endedCallback = [=](emscripten::val event) {
482 Q_UNUSED(event)
483 qCDebug(qWasmMediaVideoOutput) << "ended";
484 m_currentMediaStatus = MediaStatus::EndOfMedia;
485 emit statusChanged(m_currentMediaStatus);
486 m_shouldStop = true;
487 stop();
488 };
489 m_endedEvent.reset(new QWasmEventHandler(m_video, "ended", endedCallback));
490
491 // durationchange
492 auto durationChangeCallback = [=](emscripten::val event) {
493 qCDebug(qWasmMediaVideoOutput) << "durationChange";
494
495 // qt duration is in milliseconds.
496 qint64 dur = event["target"]["duration"].as<double>() * 1000;
497 emit durationChanged(dur);
498 };
499 m_durationChangeEvent.reset(
500 new QWasmEventHandler(m_video, "durationchange", durationChangeCallback));
501
502 // loadeddata
503 auto loadedDataCallback = [=](emscripten::val event) {
504 Q_UNUSED(event)
505 qCDebug(qWasmMediaVideoOutput) << "loaded data";
506
507 emit stateChanged(QWasmMediaPlayer::Prepared);
508 if (m_isSeekable != isVideoSeekable()) {
509 m_isSeekable = isVideoSeekable();
510 emit seekableChanged(m_isSeekable);
511 }
512 };
513 m_loadedDataEvent.reset(new QWasmEventHandler(m_video, "loadeddata", loadedDataCallback));
514
515 // error
516 auto errorCallback = [=](emscripten::val event) {
517 qCDebug(qWasmMediaVideoOutput) << "error";
518 if (event.isUndefined() || event.isNull())
519 return;
520 emit errorOccured(m_video["error"]["code"].as<int>(),
521 QString::fromStdString(m_video["error"]["message"].as<std::string>()));
522 };
523 m_errorChangeEvent.reset(new QWasmEventHandler(m_video, "error", errorCallback));
524
525 // resize
526 auto resizeCallback = [=](emscripten::val event) {
527 Q_UNUSED(event)
528 qCDebug(qWasmMediaVideoOutput) << "resize";
529
530 updateVideoElementGeometry(
531 QRect(0, 0, m_video["videoWidth"].as<int>(), m_video["videoHeight"].as<int>()));
532 emit sizeChange(m_video["videoWidth"].as<int>(), m_video["videoHeight"].as<int>());
533
534 };
535 m_resizeChangeEvent.reset(new QWasmEventHandler(m_video, "resize", resizeCallback));
536
537 // loadedmetadata
538 auto loadedMetadataCallback = [=](emscripten::val event) {
539 Q_UNUSED(event)
540 qCDebug(qWasmMediaVideoOutput) << "loaded meta data";
541
542 emit metaDataLoaded();
543 };
544 m_loadedMetadataChangeEvent.reset(
545 new QWasmEventHandler(m_video, "loadedmetadata", loadedMetadataCallback));
546
547 // loadstart
548 auto loadStartCallback = [=](emscripten::val event) {
549 Q_UNUSED(event)
550 qCDebug(qWasmMediaVideoOutput) << "load started";
551 m_currentMediaStatus = MediaStatus::LoadingMedia;
552 emit statusChanged(m_currentMediaStatus);
553 m_shouldStop = false;
554 };
555 m_loadStartChangeEvent.reset(new QWasmEventHandler(m_video, "loadstart", loadStartCallback));
556
557 // canplay
558
559 auto canPlayCallback = [=](emscripten::val event) {
560 if (event.isUndefined() || event.isNull())
561 return;
562 qCDebug(qWasmMediaVideoOutput) << "can play"
563 << "m_requestedPosition" << m_requestedPosition;
564
565 if (!m_shouldStop)
566 emit readyChanged(true); // sets video available
567 };
568 m_canPlayChangeEvent.reset(new QWasmEventHandler(m_video, "canplay", canPlayCallback));
569
570 // canplaythrough
571 auto canPlayThroughCallback = [=](emscripten::val event) {
572 Q_UNUSED(event)
573 qCDebug(qWasmMediaVideoOutput) << "can play through"
574 << "m_shouldStop" << m_shouldStop;
575
576 if (m_currentMediaStatus == MediaStatus::EndOfMedia)
577 return;
578 if (m_isSeekable != isVideoSeekable()) {
579 m_isSeekable = isVideoSeekable();
580 emit seekableChanged(m_isSeekable);
581 }
582 if (!m_isSeeking && !m_shouldStop) {
583 emscripten::val timeRanges = m_video["buffered"];
584 if ((!timeRanges.isNull() && !timeRanges.isUndefined())
585 && timeRanges["length"].as<int>() == 1) {
586 double buffered = m_video["buffered"].call<emscripten::val>("end", 0).as<double>();
587 const double duration = m_video["duration"].as<double>();
588
589 if (duration == buffered) {
590 m_currentBufferedValue = 100;
591 emit bufferingChanged(m_currentBufferedValue);
592 }
593 }
594 constexpr int hasCurrentData = 2;
595 if (m_video["readyState"].as<int>() >= hasCurrentData) {
596 m_currentMediaStatus = MediaStatus::LoadedMedia;
597 emit statusChanged(m_currentMediaStatus);
599 }
600 } else {
601 m_shouldStop = false;
602 }
603 };
604 m_canPlayThroughChangeEvent.reset(
605 new QWasmEventHandler(m_video, "canplaythrough", canPlayThroughCallback));
606
607 // seeking
608 auto seekingCallback = [=](emscripten::val event) {
609 Q_UNUSED(event)
610 qCDebug(qWasmMediaVideoOutput)
611 << "seeking started" << (m_video["currentTime"].as<double>() * 1000);
612 m_isSeeking = true;
613 };
614 m_seekingChangeEvent.reset(new QWasmEventHandler(m_video, "seeking", seekingCallback));
615
616 // seeked
617 auto seekedCallback = [=](emscripten::val event) {
618 Q_UNUSED(event)
619 qCDebug(qWasmMediaVideoOutput) << "seeked" << (m_video["currentTime"].as<double>() * 1000);
620 emit progressChanged(m_video["currentTime"].as<double>() * 1000);
621 m_isSeeking = false;
622 };
623 m_seekedChangeEvent.reset(new QWasmEventHandler(m_video, "seeked", seekedCallback));
624
625 // emptied
626 auto emptiedCallback = [=](emscripten::val event) {
627 Q_UNUSED(event)
628 qCDebug(qWasmMediaVideoOutput) << "emptied";
629 emit readyChanged(false);
630 m_currentMediaStatus = MediaStatus::EndOfMedia;
631 emit statusChanged(m_currentMediaStatus);
632 };
633 m_emptiedChangeEvent.reset(new QWasmEventHandler(m_video, "emptied", emptiedCallback));
634
635 // stalled
636 auto stalledCallback = [=](emscripten::val event) {
637 Q_UNUSED(event)
638 qCDebug(qWasmMediaVideoOutput) << "stalled";
639 m_currentMediaStatus = MediaStatus::StalledMedia;
640 emit statusChanged(m_currentMediaStatus);
641 };
642 m_stalledChangeEvent.reset(new QWasmEventHandler(m_video, "stalled", stalledCallback));
643
644 // waiting
645 auto waitingCallback = [=](emscripten::val event) {
646 Q_UNUSED(event)
647
648 qCDebug(qWasmMediaVideoOutput) << "waiting";
649 // check buffer
650 };
651 m_waitingChangeEvent.reset(new QWasmEventHandler(m_video, "waiting", waitingCallback));
652
653 // suspend
654
655 // playing
656 auto playingCallback = [=](emscripten::val event) {
657 Q_UNUSED(event)
658 qCDebug(qWasmMediaVideoOutput) << "playing";
659 if (m_isSeeking)
660 return;
661 emit stateChanged(QWasmMediaPlayer::Started);
662 if (m_toBePaused || !m_shouldStop) { // paused
663 m_toBePaused = false;
664
665 videoFrameTimerCallback(); // get the ball rolling
666 }
667 };
668 m_playingChangeEvent.reset(new QWasmEventHandler(m_video, "playing", playingCallback));
669
670 // progress (buffering progress)
671 auto progressCallback = [=](emscripten::val event) {
672 if (event.isUndefined() || event.isNull())
673 return;
674
675 const double duration = event["target"]["duration"].as<double>();
676 if (duration < 0) // track not exactly ready yet
677 return;
678
679 emscripten::val timeRanges = event["target"]["buffered"];
680
681 if ((!timeRanges.isNull() && !timeRanges.isUndefined())
682 && timeRanges["length"].as<int>() == 1) {
683 emscripten::val dVal = timeRanges.call<emscripten::val>("end", 0);
684 if (!dVal.isNull() && !dVal.isUndefined()) {
685 double bufferedEnd = dVal.as<double>();
686
687 if (duration > 0 && bufferedEnd > 0) {
688 const double bufferedValue = (bufferedEnd / duration * 100);
689 qCDebug(qWasmMediaVideoOutput) << "progress buffered";
690 m_currentBufferedValue = bufferedValue;
691 emit bufferingChanged(m_currentBufferedValue);
692 if (bufferedEnd == duration)
693 m_currentMediaStatus = MediaStatus::BufferedMedia;
694 else
695 m_currentMediaStatus = MediaStatus::BufferingMedia;
696 emit statusChanged(m_currentMediaStatus);
697 }
698 }
699 }
700 };
701 m_progressChangeEvent.reset(new QWasmEventHandler(m_video, "progress", progressCallback));
702
703 // pause
704 auto pauseCallback = [=](emscripten::val event) {
705 Q_UNUSED(event)
706 qCDebug(qWasmMediaVideoOutput) << "pause";
707
708 const double currentTime = m_video["currentTime"].as<double>(); // in seconds
709 const double duration = m_video["duration"].as<double>(); // in seconds
710 if ((currentTime > 0 && currentTime < duration) && (!m_shouldStop && m_toBePaused)) {
711 emit stateChanged(QWasmMediaPlayer::Paused);
712 } else {
713 // stop this crazy thing!
714 m_video.set("currentTime", emscripten::val(0));
715 emit stateChanged(QWasmMediaPlayer::Stopped);
716 }
717 };
718 m_pauseChangeEvent.reset(new QWasmEventHandler(m_video, "pause", pauseCallback));
719
720 // onunload
721 // we use lower-level events here so as to avert a crash on activate when
722 // using qstdweb; see _qt_beforeUnload
723 emscripten::val window = emscripten::val::global("window");
724
725 auto beforeUnloadCallback = [=](emscripten::val event) {
726 Q_UNUSED(event)
727 // large videos will leave the unloading window
728 // in a frozen state, so remove the video element src first
729 m_video.call<void>("removeAttribute", emscripten::val("src"));
730 m_video.call<void>("load");
731 };
732 m_beforeUnloadEvent.reset(new QWasmEventHandler(window, "beforeunload", beforeUnloadCallback));
733
734}
735
736void QWasmVideoOutput::updateVideoElementGeometry(const QRect &windowGeometry)
737{
738 QRect m_videoElementSource(windowGeometry.topLeft(), windowGeometry.size());
739
740 emscripten::val style = m_video["style"];
741 style.set("left", QStringLiteral("%1px").arg(m_videoElementSource.left()).toStdString());
742 style.set("top", QStringLiteral("%1px").arg(m_videoElementSource.top()).toStdString());
743 m_video.set("width", m_videoElementSource.width());
744 m_video.set("height", m_videoElementSource.height());
745 style.set("z-index", "999");
746
747 if (!m_hasVideoFrame) {
748 // offscreen
749 m_offscreen.set("width", m_videoElementSource.width());
750 m_offscreen.set("height", m_videoElementSource.height());
751 }
752}
753
754qint64 QWasmVideoOutput::getDuration()
755{
756 // qt duration is in ms
757 // js is sec
758
759 if (m_video.isUndefined() || m_video.isNull())
760 return 0;
761 return m_video["duration"].as<double>() * 1000;
762}
763
764void QWasmVideoOutput::newFrame(const QVideoFrame &frame)
765{
766 m_wasmSink->setVideoFrame(frame);
767}
768
769void QWasmVideoOutput::setPlaybackRate(qreal rate)
770{
771 m_video.set("playbackRate", emscripten::val(rate));
772}
773
774qreal QWasmVideoOutput::playbackRate()
775{
776 return (m_video.isUndefined() || m_video.isNull()) ? 0 : m_video["playbackRate"].as<float>();
777}
778
779void QWasmVideoOutput::checkNetworkState()
780{
781 int netState = m_video["networkState"].as<int>();
782
783 qCDebug(qWasmMediaVideoOutput) << netState;
784
785 switch (netState) {
786 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkEmpty: // no data
787 break;
788 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkIdle:
789 break;
790 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkLoading:
791 break;
792 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkNoSource: // no source
793 emit errorOccured(netState, QStringLiteral("No media source found"));
794 break;
795 };
796}
797
798void QWasmVideoOutput::videoComputeFrame(void *context)
799{
800 if (m_offscreenContext.isUndefined() || m_offscreenContext.isNull()) {
801 qCDebug(qWasmMediaVideoOutput) << "offscreen canvas context could not be found";
802 return;
803 }
804 emscripten::val document = emscripten::val::global("document");
805
806 emscripten::val videoElement =
807 document.call<emscripten::val>("getElementById", std::string(m_videoSurfaceId));
808
809 if (videoElement.isUndefined() || videoElement.isNull()) {
810 qCDebug(qWasmMediaVideoOutput) << "video element could not be found";
811 return;
812 }
813
814 const int videoWidth = videoElement["videoWidth"].as<int>();
815 const int videoHeight = videoElement["videoHeight"].as<int>();
816
817 if (videoWidth == 0 || videoHeight == 0)
818 return;
819
820 m_offscreenContext.call<void>("drawImage", videoElement, 0, 0, videoWidth, videoHeight);
821
822 emscripten::val frame = // one frame, Uint8ClampedArray
823 m_offscreenContext.call<emscripten::val>("getImageData", 0, 0, videoWidth, videoHeight);
824
825 const QSize frameBytesAllocationSize(videoWidth, videoHeight);
826
827 // this seems to work ok, even though getImageData returns a Uint8ClampedArray
828 QByteArray frameBytes = qstdweb::Uint8Array(frame["data"]).copyToQByteArray();
829
830 QVideoFrameFormat frameFormat =
831 QVideoFrameFormat(frameBytesAllocationSize, QVideoFrameFormat::Format_RGBA8888);
832
833 auto *textureDescription = QVideoTextureHelper::textureDescription(frameFormat.pixelFormat());
834
835 QVideoFrame vFrame = QVideoFramePrivate::createFrame(
836 std::make_unique<QMemoryVideoBuffer>(
837 std::move(frameBytes),
838 textureDescription->strideForWidth(frameFormat.frameWidth())), // width of line with padding
839 frameFormat);
840 QWasmVideoOutput *wasmVideoOutput = reinterpret_cast<QWasmVideoOutput *>(context);
841
842 if (!wasmVideoOutput->m_wasmSink) {
843 qWarning() << "ERROR ALERT!! video sink not set";
844 return;
845 }
846 wasmVideoOutput->m_wasmSink->setVideoFrame(vFrame);
847}
848
849void QWasmVideoOutput::videoFrameCallback(void *context)
850{
851 QWasmVideoOutput *videoOutput = reinterpret_cast<QWasmVideoOutput *>(context);
852 if (!videoOutput || !videoOutput->isReady())
853 return;
854 emscripten::val videoElement = videoOutput->currentVideoElement();
855 emscripten::val oneVideoFrame = val::global("VideoFrame").new_(videoElement);
856
857 if (oneVideoFrame.isNull() || oneVideoFrame.isUndefined()) {
858 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO
859 << "ERROR" << "failed to construct VideoFrame";
860 return;
861 }
862
863 emscripten::val options = emscripten::val::object();
864 emscripten::val rectOptions = emscripten::val::object();
865
866 rectOptions.set("width",oneVideoFrame["displayWidth"].as<int>());
867 rectOptions.set("height", oneVideoFrame["displayHeight"].as<int>());
868 options.set("rect", rectOptions);
869
870 emscripten::val frameBytesAllocationSize = oneVideoFrame.call<emscripten::val>("allocationSize", options);
871 emscripten::val frameBuffer =
872 emscripten::val::global("Uint8Array").new_(frameBytesAllocationSize);
873 QWasmVideoOutput *wasmVideoOutput =
874 reinterpret_cast<QWasmVideoOutput*>(videoElement["data-qvideocontext"].as<quintptr>());
875
876 qstdweb::PromiseCallbacks copyToCallback;
877 copyToCallback.thenFunc = [wasmVideoOutput, oneVideoFrame, frameBuffer, videoElement]
878 (emscripten::val frameLayout)
879 {
880 if (frameLayout.isNull() || frameLayout.isUndefined()) {
881 qCDebug(qWasmMediaVideoOutput) << "there is no frameLayout";
882 return;
883 }
884
885 // frameBuffer now has a new frame, send to Qt
886 const QSize frameSize(oneVideoFrame["displayWidth"].as<int>(),
887 oneVideoFrame["displayHeight"].as<int>());
888
889 QByteArray frameBytes = QByteArray::fromEcmaUint8Array(frameBuffer);
890
891 QVideoFrameFormat::PixelFormat pixelFormat = fromJsPixelFormat(oneVideoFrame["format"].as<std::string>());
892 if (pixelFormat == QVideoFrameFormat::Format_Invalid) {
893 qWarning() << "Invalid pixel format";
894 return;
895 }
896 QVideoFrameFormat frameFormat = QVideoFrameFormat(frameSize, pixelFormat);
897
898 auto buffer = std::make_unique<QMemoryVideoBuffer>(
899 std::move(frameBytes),
900 oneVideoFrame["codedWidth"].as<int>());
901
902 QVideoFrame vFrame =
903 QVideoFramePrivate::createFrame(std::move(buffer), std::move(frameFormat));
904
905 if (!wasmVideoOutput) {
906 qCDebug(qWasmMediaVideoOutput) << "ERROR:"
907 << "data-qvideocontext not found";
908 return;
909 }
910 if (!wasmVideoOutput->m_wasmSink) {
911 qWarning() << "ERROR ALERT!! video sink not set";
912 return;
913 }
914 wasmVideoOutput->m_wasmSink->setVideoFrame(vFrame);
915 oneVideoFrame.call<emscripten::val>("close");
916 };
917 copyToCallback.catchFunc = [&, wasmVideoOutput, oneVideoFrame, videoElement](emscripten::val error)
918 {
919 qCDebug(qWasmMediaVideoOutput) << "Error"
920 << QString::fromStdString(error["name"].as<std::string>())
921 << QString::fromStdString(error["message"].as<std::string>()) ;
922
923 oneVideoFrame.call<emscripten::val>("close");
924 wasmVideoOutput->stop();
925 return;
926 };
927
928 qstdweb::Promise::make(oneVideoFrame, u"copyTo"_s, std::move(copyToCallback), frameBuffer);
929}
930
931void QWasmVideoOutput::videoFrameTimerCallback()
932{
933 static auto frame = [](double frameTime, void *context) -> EM_BOOL {
934 Q_UNUSED(frameTime);
935
936 QWasmVideoOutput *videoOutput = reinterpret_cast<QWasmVideoOutput *>(context);
937 if (!videoOutput || videoOutput->m_currentMediaStatus != MediaStatus::LoadedMedia)
938 return false;
939 if (videoOutput->m_shouldStop)
940 return false;
941 emscripten::val videoElement = videoOutput->currentVideoElement();
942
943 if (videoElement.isNull() || videoElement.isUndefined()) {
944 qWarning() << "no video element";
945 return false;
946 }
947 if (videoElement["paused"].as<bool>() || videoElement["ended"].as<bool>())
948 return false;
949
950 if (checkForVideoFrame()) {
951 videoOutput->videoFrameCallback(context);
952 } else {
953 videoOutput->videoComputeFrame(context);
954 }
955 return true;
956 };
957
958 if ((!m_shouldStop && m_video["className"].as<std::string>() == "Camera" && m_cameraIsReady)
959 || isReady())
960 emscripten_request_animation_frame_loop(frame, this);
961 // about 60 fps
962}
963
964QVideoFrameFormat::PixelFormat QWasmVideoOutput::fromJsPixelFormat(std::string videoFormat)
965{
966 if (videoFormat == "I420")
967 return QVideoFrameFormat::Format_YUV420P;
968 // no equivalent pixel format
969 // else if (videoFormat == "I420A") // AYUV ?
970 else if (videoFormat == "I422")
971 return QVideoFrameFormat::Format_YUV422P;
972 // no equivalent pixel format
973 // else if (videoFormat == "I444")
974 else if (videoFormat == "NV12")
975 return QVideoFrameFormat::Format_NV12;
976 else if (videoFormat == "RGBA")
977 return QVideoFrameFormat::Format_RGBA8888;
978 else if (videoFormat == "RGBX")
979 return QVideoFrameFormat::Format_RGBX8888;
980 else if (videoFormat == "BGRA")
981 return QVideoFrameFormat::Format_BGRA8888;
982 else if (videoFormat == "BGRX")
983 return QVideoFrameFormat::Format_BGRX8888;
984
985 return QVideoFrameFormat::Format_Invalid;
986}
987
988emscripten::val QWasmVideoOutput::getDeviceCapabilities()
989{
990 emscripten::val stream = m_video["srcObject"];
991 if ((!stream.isNull() && !stream.isUndefined()) && stream["active"].as<bool>()) {
992 emscripten::val tracks = stream.call<emscripten::val>("getVideoTracks");
993 if (!tracks.isUndefined()) {
994 if (tracks["length"].as<int>() == 0)
995 return emscripten::val::undefined();
996
997 emscripten::val track = tracks[0];
998 if (!track.isUndefined()) {
999 emscripten::val trackCaps = emscripten::val::undefined();
1000 if (!track["getCapabilities"].isUndefined())
1001 trackCaps = track.call<emscripten::val>("getCapabilities");
1002 else // firefox does not support getCapabilities
1003 trackCaps = track.call<emscripten::val>("getSettings");
1004
1005 if (!trackCaps.isUndefined())
1006 return trackCaps;
1007 }
1008 }
1009 } else {
1010 // camera not started track capabilities not available
1011 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("capabilities not available"));
1012 }
1013
1014 return emscripten::val::undefined();
1015}
1016
1017bool QWasmVideoOutput::setDeviceSetting(const std::string &key, emscripten::val value)
1018{
1019 emscripten::val stream = m_video["srcObject"];
1020 if (stream.isNull() || stream.isUndefined()
1021 || stream["getVideoTracks"].isUndefined())
1022 return false;
1023
1024 emscripten::val tracks = stream.call<emscripten::val>("getVideoTracks");
1025 if (!tracks.isNull() && !tracks.isUndefined()) {
1026 if (tracks["length"].as<int>() == 0)
1027 return false;
1028
1029 emscripten::val track = tracks[0];
1030 emscripten::val constraint = emscripten::val::object();
1031 constraint.set(std::move(key), value);
1032 track.call<emscripten::val>("applyConstraints", constraint);
1033 return true;
1034 }
1035
1036 return false;
1037}
1038
1039QT_END_NAMESPACE
1040
1041#include "moc_qwasmvideooutput_p.cpp"