Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
qwasmvideooutput.cpp
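This file implements the video output used by the WebAssembly multimedia backend. It creates and positions an HTML &lt;video&gt; element, forwards the element's DOM events to Qt signals, and delivers frames to the QVideoSink either through the WebCodecs VideoFrame API or, where that API is unavailable, by drawing the element onto a (possibly offscreen) canvas and reading the pixels back.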
1// Copyright (C) 2022 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
4#include <QDebug>
5#include <QUrl>
6#include <QPoint>
7#include <QRect>
8#include <QMediaPlayer>
9#include <QVideoFrame>
10#include <QFile>
11#include <QBuffer>
12#include <QMimeDatabase>
14
15#include <qvideosink.h>
16#include <private/qplatformvideosink_p.h>
17#include <private/qmemoryvideobuffer_p.h>
18#include <private/qvideotexturehelper_p.h>
19#include <private/qvideoframe_p.h>
20#include <private/qstdweb_p.h>
21#include <QTimer>
22
23#include <emscripten/bind.h>
24#include <emscripten/html5.h>
25#include <emscripten/val.h>
26
27
28 QT_BEGIN_NAMESPACE
29
30
31using namespace emscripten;
32using namespace Qt::Literals;
33
34Q_LOGGING_CATEGORY(qWasmMediaVideoOutput, "qt.multimedia.wasm.videooutput")
35
36
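// Probe for the WebCodecs VideoFrame API. When it is available, frames are grabbed
// with VideoFrame/copyTo() (see videoFrameCallback); otherwise the 2D-canvas
// getImageData() fallback in videoComputeFrame() is used.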
37static bool checkForVideoFrame()
38{
39 emscripten::val videoFrame = emscripten::val::global("VideoFrame");
40 return (!videoFrame.isNull() && !videoFrame.isUndefined());
41}
42
43QWasmVideoOutput::QWasmVideoOutput(QObject *parent) : QObject{ parent }
44{
45 m_hasVideoFrame = checkForVideoFrame();
46 }
47
49
50void QWasmVideoOutput::setVideoSize(const QSize &newSize)
51{
52 if (m_pendingVideoSize == newSize)
53 return;
54
55 m_pendingVideoSize = newSize;
56 updateVideoElementGeometry(QRect(0, 0, m_pendingVideoSize.width(), m_pendingVideoSize.height()));
57}
58
59 void QWasmVideoOutput::setVideoMode(QWasmVideoOutput::WasmVideoMode mode)
60 {
61 m_currentVideoMode = mode;
62}
63
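// Start playback. In video (file/URL) mode the element's src is (re)applied and load()
// is called if needed; in camera/surface-capture mode a MediaStream must already be
// attached as srcObject, and the first video track's settings are used to size the
// element. Playback is then started with play().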
64 void QWasmVideoOutput::start()
65 {
66 if (m_video.isUndefined() || m_video.isNull()
67 || !m_wasmSink) {
68 // error
69 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
70 return;
71 }
72 switch (m_currentVideoMode) {
73 case QWasmVideoOutput::VideoOutput: {
74 emscripten::val sourceObj = m_video["src"];
75 if ((sourceObj.isUndefined() || sourceObj.isNull()) && !m_source.isEmpty()) {
76 m_video.set("src", m_source);
77 }
78 if (!isReady())
79 m_video.call<void>("load");
80 } break;
81 case QWasmVideoOutput::Camera:
82 case QWasmVideoOutput::SurfaceCapture: {
83 if (!m_cameraIsReady) {
84 m_shouldBeStarted = true;
85 }
86
87 emscripten::val stream = m_video["srcObject"];
88 if (stream.isNull() || stream.isUndefined()) { // camera device
89 qCDebug(qWasmMediaVideoOutput) << "srcObject ERROR";
90 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
91 return;
92 } else {
93 emscripten::val videoTracks = stream.call<emscripten::val>("getVideoTracks");
94 if (videoTracks.isNull() || videoTracks.isUndefined()) {
95 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << "videoTracks is null";
96 emit errorOccured(QMediaPlayer::ResourceError,
97 QStringLiteral("video surface error"));
98 return;
99 }
100 if (videoTracks["length"].as<int>() == 0) {
101 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << "videoTracks count is 0";
102 emit errorOccured(QMediaPlayer::ResourceError,
103 QStringLiteral("video surface error"));
104 return;
105 }
106 emscripten::val videoSettings = videoTracks[0].call<emscripten::val>("getSettings");
107 if (!videoSettings.isNull() && !videoSettings.isUndefined()) {
108 const int width = videoSettings["width"].as<int>();
109 const int height = videoSettings["height"].as<int>();
110 updateVideoElementGeometry(QRect(0, 0, width, height));
111 }
112 }
113 } break;
114 };
115
116 m_shouldStop = false;
117 m_toBePaused = false;
118 m_video.call<void>("play");
119
120 if (m_currentVideoMode == QWasmVideoOutput::Camera
121 || m_currentVideoMode == QWasmVideoOutput::SurfaceCapture) {
122 emit readyChanged(true);
123 if (m_hasVideoFrame)
124 videoFrameTimerCallback();
125 }
126}
127
128 void QWasmVideoOutput::stop()
129 {
130 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;
131
132 if (m_video.isUndefined() || m_video.isNull()) {
133 // error
134 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("Resource error"));
135 return;
136 }
137 m_shouldStop = true;
138 if (!m_toBePaused) {
139 // we are stopped, need to reset
140 m_video.call<void>("pause");
141 emscripten::val stream = m_video["srcObject"];
142 if (!stream.isNull() && !stream.isUndefined() && !stream["getTracks"].isUndefined()) {
143 emscripten::val tracks = stream.call<emscripten::val>("getTracks");
144 if (!tracks.isUndefined() && tracks["length"].as<int>() > 0) {
145 for (int i = 0; i < tracks["length"].as<int>(); i++) {
146 tracks[i].call<void>("stop");
147 }
148 }
149 }
150 m_video.set("srcObject", emscripten::val::null());
151 m_video.call<void>("load");
152 } else {
153 m_video.call<void>("pause");
154 }
155}
156
157 void QWasmVideoOutput::pause()
158 {
159 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;
160
161 if (m_video.isUndefined() || m_video.isNull()) {
162 // error
163 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
164 return;
165 }
166 m_shouldStop = false;
167 m_toBePaused = true;
168 m_video.call<void>("pause");
169}
170
171 void QWasmVideoOutput::reset()
172 {
173 // flush pending frame
174 if (m_wasmSink)
175 m_wasmSink->platformVideoSink()->setVideoFrame(QVideoFrame());
176
177 m_source.clear();
178 m_video.set("currentTime", emscripten::val(0));
179 m_video.call<void>("load");
180}
181
182 emscripten::val QWasmVideoOutput::surfaceElement()
183 {
184 return m_video;
185}
186
187void QWasmVideoOutput::setSurface(QVideoSink *surface)
188{
189 if (!surface || surface == m_wasmSink) {
190 return;
191 }
192
193 m_wasmSink = surface;
194}
195
196 bool QWasmVideoOutput::isReady()
197 {
198 if (m_video.isUndefined() || m_video.isNull()) {
199 // error
200 return false;
201 }
202
203 return m_currentMediaStatus == MediaStatus::LoadedMedia;
204 }
205
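// Set the media source from a URL. An empty URL stops playback; local files are read
// through the QIODevice overload below; other URLs are assigned directly to the
// <video> element's src.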
206void QWasmVideoOutput::setSource(const QUrl &url)
207{
208 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << url;
209
210 m_source = url.toString();
211
212 if (m_video.isUndefined() || m_video.isNull()) {
213 return;
214 }
215
216 if (url.isEmpty()) {
217 stop();
218 return;
219 }
220 if (url.isLocalFile()) {
221 QFile localFile(url.toLocalFile());
222 if (localFile.open(QIODevice::ReadOnly)) {
223 QDataStream buffer(&localFile); // hand the file's QIODevice to the overload below
224 setSource(buffer.device());
225 } else {
226 qWarning() << "Failed to open file";
227 }
228 return;
229 }
230
231 updateVideoElementSource(m_source);
232}
233
234 void QWasmVideoOutput::updateVideoElementSource(const QString &src)
235 {
236 m_video.set("src", src.toStdString());
237 m_video.call<void>("load");
238}
239
240void QWasmVideoOutput::addCameraSourceElement(const std::string &id)
241{
242 m_cameraIsReady = false;
243 m_mediaInputStream.reset(new JsMediaInputStream(this));
244
245 m_mediaInputStream->setUseAudio(m_hasAudio);
246 m_mediaInputStream->setUseVideo(true);
247
248 connect(m_mediaInputStream.get(), &JsMediaInputStream::mediaStreamReady, this,
249 [this]() {
250 qCDebug(qWasmMediaVideoOutput) << "mediaStreamReady";
251
252 m_video.set("srcObject", m_mediaInputStream->getMediaStream());
253 m_video.call<void>("load");
254
255 m_cameraIsReady = true;
256 if (m_shouldBeStarted) {
257 start();
258 m_shouldBeStarted = false;
259 }
260 });
261
262 m_mediaInputStream->setStreamDevice(id);
263}
264
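// Set the media source from a QIODevice. The device's contents are copied into a Blob;
// on most browsers the Blob is exposed through an object URL assigned to src, while on
// Safari the Blob is attached directly as srcObject.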
265void QWasmVideoOutput::setSource(QIODevice *stream)
266{
267 if (stream->bytesAvailable() == 0) {
268 qWarning() << "data not available";
269 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("data not available"));
270 return;
271 }
272 if (m_video.isUndefined() || m_video.isNull()) {
273 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
274 return;
275 }
276
277 QMimeDatabase db;
278 QMimeType mime = db.mimeTypeForData(stream);
279
280 QByteArray buffer = stream->readAll();
281
282 qstdweb::Blob contentBlob = qstdweb::Blob::copyFrom(buffer.data(), buffer.size(), mime.name().toStdString());
283
284 emscripten::val window = qstdweb::window();
285
286 if (window["safari"].isUndefined()) {
287 emscripten::val contentUrl = window["URL"].call<emscripten::val>("createObjectURL", contentBlob.val());
288 m_video.set("src", contentUrl);
289 m_source = QString::fromStdString(contentUrl.as<std::string>());
290 } else {
291 // only Safari currently supports Blob with srcObject
292 m_video.set("srcObject", contentBlob.val());
293 }
294}
295
296void QWasmVideoOutput::setVolume(qreal volume)
297{ // between 0 - 1
298 volume = qBound(qreal(0.0), volume, qreal(1.0));
299 m_video.set("volume", volume);
300}
301
302void QWasmVideoOutput::setMuted(bool muted)
303{
304 if (m_video.isUndefined() || m_video.isNull()) {
305 // error
306 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
307 return;
308 }
309 m_video.set("muted", muted);
310}
311
312 qint64 QWasmVideoOutput::getCurrentPosition()
313 {
314 return (!m_video.isUndefined() && !m_video.isNull())
315 ? (m_video["currentTime"].as<double>() * 1000)
316 : 0;
317}
318
319void QWasmVideoOutput::seekTo(qint64 positionMSecs)
320{
321 if (isVideoSeekable()) {
322 float positionToSetInSeconds = float(positionMSecs) / 1000;
323 emscripten::val seekableTimeRange = m_video["seekable"];
324 if (!seekableTimeRange.isNull() && !seekableTimeRange.isUndefined()) {
325 // range user can seek
326 if (seekableTimeRange["length"].as<int>() < 1)
327 return;
328 if (positionToSetInSeconds
329 >= seekableTimeRange.call<emscripten::val>("start", 0).as<double>()
330 && positionToSetInSeconds
331 <= seekableTimeRange.call<emscripten::val>("end", 0).as<double>()) {
332 m_requestedPosition = positionToSetInSeconds;
333
334 m_video.set("currentTime", m_requestedPosition);
335 }
336 }
337 }
338 qCDebug(qWasmMediaVideoOutput) << "m_requestedPosition" << m_requestedPosition;
339}
340
341 bool QWasmVideoOutput::isVideoSeekable()
342 {
343 if (m_video.isUndefined() || m_video.isNull()) {
344 // error
345 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
346 return false;
347 }
348
349 emscripten::val seekableTimeRange = m_video["seekable"];
350 if (seekableTimeRange.isNull() || seekableTimeRange.isUndefined())
351 return false;
352 if (seekableTimeRange["length"].as<int>() >= 1) {
353 bool isit = !QtPrivate::fuzzyCompare(
354 seekableTimeRange.call<emscripten::val>("start", 0).as<double>(),
355 seekableTimeRange.call<emscripten::val>("end", 0).as<double>());
356 return isit;
357 }
358 return false;
359}
360
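// Create the <video> element for this surface and append it to the document body.
// The element is absolutely positioned (and initially hidden), carries the surface id
// and a pointer back to this QWasmVideoOutput, and sets crossorigin=anonymous so that
// canvas readback of remote media is not blocked by cross-origin tainting.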
361void QWasmVideoOutput::createVideoElement(const std::string &id)
362{
363 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << this << id;
364 // Create <video> element and add it to the page body
365
366 emscripten::val document = emscripten::val::global("document");
367 emscripten::val body = document["body"];
368
369 emscripten::val oldVideo = document.call<emscripten::val>("getElementById", id);
370
371 // need to remove stale element
372 if (!oldVideo.isUndefined() && !oldVideo.isNull())
373 oldVideo.call<void>("remove");
374
375 m_videoSurfaceId = id;
376 m_video = document.call<emscripten::val>("createElement", std::string("video"));
377
378 m_video.set("id", m_videoSurfaceId.c_str());
379 m_video.call<void>("setAttribute", std::string("class"),
380 (m_currentVideoMode == QWasmVideoOutput::Camera ? std::string("Camera")
381 : std::string("Video")));
382 m_video.set("data-qvideocontext",
383 emscripten::val(quintptr(reinterpret_cast<void *>(this))));
384
385 m_video.set("preload", "metadata");
386
387 // Uncaught DOMException: Failed to execute 'getImageData' on
388 // 'OffscreenCanvasRenderingContext2D': The canvas has been tainted by
389 // cross-origin data.
390 // TODO figure out somehow to let user choose between these
391 std::string originString = "anonymous"; // requires server Access-Control-Allow-Origin *
392 // std::string originString = "use-credentials"; // requires that the server does
393 // NOT send Access-Control-Allow-Origin *
394
395 m_video.call<void>("setAttribute", std::string("crossorigin"), originString);
396 body.call<void>("appendChild", m_video);
397
398 // Create/add video source
399 document.call<emscripten::val>("createElement",
400 std::string("source")).set("src", m_source.toStdString());
401
402 // Set position:absolute, which makes it possible to position the video
403 // element using x,y. coordinates, relative to its parent (the page's <body>
404 // element)
405 emscripten::val style = m_video["style"];
406 style.set("position", "absolute");
407 style.set("display", "none"); // hide
408
409 if (!m_source.isEmpty())
410 updateVideoElementSource(m_source);
411}
412
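// Create the canvas used to grab frames when the VideoFrame API is not available.
// An OffscreenCanvas is preferred; on browsers without it (Safari) a regular <canvas>
// parked outside the viewport is used instead.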
413void QWasmVideoOutput::createOffscreenElement(const QSize &offscreenSize)
414{
415 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;
416
417 if (m_hasVideoFrame) // VideoFrame does not require offscreen canvas/context
418 return;
419
420 // create offscreen element for grabbing frames
421 // OffscreenCanvas - no safari :(
422 // https://developer.mozilla.org/en-US/docs/Web/API/OffscreenCanvas
423
424 emscripten::val document = emscripten::val::global("document");
425
426 // TODO use correct frameBytesAllocationSize?
427 // offscreen render buffer
428 m_offscreen = emscripten::val::global("OffscreenCanvas");
429
430 if (m_offscreen.isUndefined()) {
431 // Safari does not support OffscreenCanvas; fall back to a regular hidden <canvas>
432 m_offscreen = document.call<emscripten::val>("createElement", std::string("canvas"));
433
434 m_offscreen.set("style",
435 "position:absolute;left:-1000px;top:-1000px"); // offscreen
436 m_offscreen.set("width", offscreenSize.width());
437 m_offscreen.set("height", offscreenSize.height());
438 m_offscreenContext = m_offscreen.call<emscripten::val>("getContext", std::string("2d"));
439 } else {
440 m_offscreen = emscripten::val::global("OffscreenCanvas")
441 .new_(offscreenSize.width(), offscreenSize.height());
442 emscripten::val offscreenAttributes = emscripten::val::object();
443 offscreenAttributes.set("willReadFrequently", true);
444 m_offscreenContext = m_offscreen.call<emscripten::val>("getContext", std::string("2d"),
445 offscreenAttributes);
446 }
447 std::string offscreenId = m_videoSurfaceId + "_offscreenOutputSurface";
448 m_offscreen.set("id", offscreenId.c_str());
449}
450
452{
453 if (!m_video.isUndefined() && !m_video.isNull())
454 m_video.call<void>("remove");
455}
456
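// Wire the <video> element's DOM events to the Qt side: timeupdate and seeked drive
// progressChanged(), durationchange drives durationChanged(), loadstart/canplaythrough/
// progress update the media status and buffering level, play/playing/pause/ended map to
// player state changes, and beforeunload detaches the source so large videos do not
// freeze the unloading window.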
457 void QWasmVideoOutput::doElementCallbacks()
458 {
459 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;
460
461 // event callbacks
462 // timeupdate
463 auto timeUpdateCallback = [=](emscripten::val event) {
464 qCDebug(qWasmMediaVideoOutput) << "timeupdate";
465
466 // qt progress is ms
467 emit progressChanged(event["target"]["currentTime"].as<double>() * 1000);
468 };
469 m_timeUpdateEvent.reset(new QWasmEventHandler(m_video, "timeupdate", timeUpdateCallback));
470
471 // play
472 auto playCallback = [=](emscripten::val event) {
473 Q_UNUSED(event)
474 qCDebug(qWasmMediaVideoOutput) << "play" << m_video["src"].as<std::string>();
475 if (!m_isSeeking)
476 emit stateChanged(QWasmMediaPlayer::Preparing);
477 };
478 m_playEvent.reset(new QWasmEventHandler(m_video, "play", playCallback));
479
480 // ended
481 auto endedCallback = [=](emscripten::val event) {
482 Q_UNUSED(event)
483 qCDebug(qWasmMediaVideoOutput) << "ended";
484 m_currentMediaStatus = MediaStatus::EndOfMedia;
485 emit statusChanged(m_currentMediaStatus);
486 m_shouldStop = true;
487 stop();
488 };
489 m_endedEvent.reset(new QWasmEventHandler(m_video, "ended", endedCallback));
490
491 // durationchange
492 auto durationChangeCallback = [=](emscripten::val event) {
493 qCDebug(qWasmMediaVideoOutput) << "durationChange";
494
495 // qt duration is in milliseconds.
496 qint64 dur = event["target"]["duration"].as<double>() * 1000;
497 emit durationChanged(dur);
498 };
499 m_durationChangeEvent.reset(
500 new QWasmEventHandler(m_video, "durationchange", durationChangeCallback));
501
502 // loadeddata
503 auto loadedDataCallback = [=](emscripten::val event) {
504 Q_UNUSED(event)
505 qCDebug(qWasmMediaVideoOutput) << "loaded data";
506
507 emit stateChanged(QWasmMediaPlayer::Prepared);
508 if (m_isSeekable != isVideoSeekable()) {
509 m_isSeekable = isVideoSeekable();
510 emit seekableChanged(m_isSeekable);
511 }
512 };
513 m_loadedDataEvent.reset(new QWasmEventHandler(m_video, "loadeddata", loadedDataCallback));
514
515 // error
516 auto errorCallback = [=](emscripten::val event) {
517 qCDebug(qWasmMediaVideoOutput) << "error";
518 if (event.isUndefined() || event.isNull())
519 return;
520 emit errorOccured(m_video["error"]["code"].as<int>(),
521 QString::fromStdString(m_video["error"]["message"].as<std::string>()));
522 };
523 m_errorChangeEvent.reset(new QWasmEventHandler(m_video, "error", errorCallback));
524
525 // resize
526 auto resizeCallback = [=](emscripten::val event) {
527 Q_UNUSED(event)
528 qCDebug(qWasmMediaVideoOutput) << "resize";
529
530 updateVideoElementGeometry(
531 QRect(0, 0, m_video["videoWidth"].as<int>(), m_video["videoHeight"].as<int>()));
532 emit sizeChange(m_video["videoWidth"].as<int>(), m_video["videoHeight"].as<int>());
533
534 };
535 m_resizeChangeEvent.reset(new QWasmEventHandler(m_video, "resize", resizeCallback));
536
537 // loadedmetadata
538 auto loadedMetadataCallback = [=](emscripten::val event) {
539 Q_UNUSED(event)
540 qCDebug(qWasmMediaVideoOutput) << "loaded meta data";
541
542 emit metaDataLoaded();
543 };
544 m_loadedMetadataChangeEvent.reset(
545 new QWasmEventHandler(m_video, "loadedmetadata", loadedMetadataCallback));
546
547 // loadstart
548 auto loadStartCallback = [=](emscripten::val event) {
549 Q_UNUSED(event)
550 qCDebug(qWasmMediaVideoOutput) << "load started";
551 m_currentMediaStatus = MediaStatus::LoadingMedia;
552 emit statusChanged(m_currentMediaStatus);
553 m_shouldStop = false;
554 };
555 m_loadStartChangeEvent.reset(new QWasmEventHandler(m_video, "loadstart", loadStartCallback));
556
557 // canplay
558
559 auto canPlayCallback = [=](emscripten::val event) {
560 if (event.isUndefined() || event.isNull())
561 return;
562 qCDebug(qWasmMediaVideoOutput) << "can play"
563 << "m_requestedPosition" << m_requestedPosition;
564
565 if (!m_shouldStop)
566 emit readyChanged(true); // sets video available
567 };
568 m_canPlayChangeEvent.reset(new QWasmEventHandler(m_video, "canplay", canPlayCallback));
569
570 // canplaythrough
571 auto canPlayThroughCallback = [=](emscripten::val event) {
572 Q_UNUSED(event)
573 qCDebug(qWasmMediaVideoOutput) << "can play through"
574 << "m_shouldStop" << m_shouldStop;
575
576 if (m_currentMediaStatus == MediaStatus::EndOfMedia)
577 return;
578 if (m_isSeekable != isVideoSeekable()) {
579 m_isSeekable = isVideoSeekable();
580 emit seekableChanged(m_isSeekable);
581 }
582 if (!m_isSeeking && !m_shouldStop) {
583 emscripten::val timeRanges = m_video["buffered"];
584 if ((!timeRanges.isNull() && !timeRanges.isUndefined())
585 && timeRanges["length"].as<int>() == 1) {
586 double buffered = m_video["buffered"].call<emscripten::val>("end", 0).as<double>();
587 const double duration = m_video["duration"].as<double>();
588
589 if (duration == buffered) {
590 m_currentBufferedValue = 100;
591 emit bufferingChanged(m_currentBufferedValue);
592 }
593 }
594 constexpr int hasCurrentData = 2;
595 if (m_video["readyState"].as<int>() >= hasCurrentData) {
596 m_currentMediaStatus = MediaStatus::LoadedMedia;
597 emit statusChanged(m_currentMediaStatus);
599 }
600 } else {
601 m_shouldStop = false;
602 }
603 };
604 m_canPlayThroughChangeEvent.reset(
605 new QWasmEventHandler(m_video, "canplaythrough", canPlayThroughCallback));
606
607 // seeking
608 auto seekingCallback = [=](emscripten::val event) {
609 Q_UNUSED(event)
610 qCDebug(qWasmMediaVideoOutput)
611 << "seeking started" << (m_video["currentTime"].as<double>() * 1000);
612 m_isSeeking = true;
613 };
614 m_seekingChangeEvent.reset(new QWasmEventHandler(m_video, "seeking", seekingCallback));
615
616 // seeked
617 auto seekedCallback = [=](emscripten::val event) {
618 Q_UNUSED(event)
619 qCDebug(qWasmMediaVideoOutput) << "seeked" << (m_video["currentTime"].as<double>() * 1000);
620 emit progressChanged(m_video["currentTime"].as<double>() * 1000);
621 m_isSeeking = false;
622 };
623 m_seekedChangeEvent.reset(new QWasmEventHandler(m_video, "seeked", seekedCallback));
624
625 // emptied
626 auto emptiedCallback = [=](emscripten::val event) {
627 Q_UNUSED(event)
628 qCDebug(qWasmMediaVideoOutput) << "emptied";
629 emit readyChanged(false);
630 m_currentMediaStatus = MediaStatus::EndOfMedia;
631 emit statusChanged(m_currentMediaStatus);
632 };
633 m_emptiedChangeEvent.reset(new QWasmEventHandler(m_video, "emptied", emptiedCallback));
634
635 // stalled
636 auto stalledCallback = [=](emscripten::val event) {
637 Q_UNUSED(event)
638 qCDebug(qWasmMediaVideoOutput) << "stalled";
639 m_currentMediaStatus = MediaStatus::StalledMedia;
640 emit statusChanged(m_currentMediaStatus);
641 };
642 m_stalledChangeEvent.reset(new QWasmEventHandler(m_video, "stalled", stalledCallback));
643
644 // waiting
645 auto waitingCallback = [=](emscripten::val event) {
646 Q_UNUSED(event)
647
648 qCDebug(qWasmMediaVideoOutput) << "waiting";
649 // check buffer
650 };
651 m_waitingChangeEvent.reset(new QWasmEventHandler(m_video, "waiting", waitingCallback));
652
653 // suspend
654
655 // playing
656 auto playingCallback = [=](emscripten::val event) {
657 Q_UNUSED(event)
658 qCDebug(qWasmMediaVideoOutput) << "playing";
659 if (m_isSeeking)
660 return;
661 emit stateChanged(QWasmMediaPlayer::Started);
662 if (m_toBePaused || !m_shouldStop) { // paused
663 m_toBePaused = false;
664 QMetaObject::invokeMethod(this, &QWasmVideoOutput::videoFrameTimerCallback, Qt::QueuedConnection);
665 }
666 };
667 m_playingChangeEvent.reset(new QWasmEventHandler(m_video, "playing", playingCallback));
668
669 // progress (buffering progress)
670 auto progressCallback = [=](emscripten::val event) {
671 if (event.isUndefined() || event.isNull())
672 return;
673
674 const double duration = event["target"]["duration"].as<double>();
675 if (duration < 0) // track not exactly ready yet
676 return;
677
678 emscripten::val timeRanges = event["target"]["buffered"];
679
680 if ((!timeRanges.isNull() && !timeRanges.isUndefined())
681 && timeRanges["length"].as<int>() == 1) {
682 emscripten::val dVal = timeRanges.call<emscripten::val>("end", 0);
683 if (!dVal.isNull() && !dVal.isUndefined()) {
684 double bufferedEnd = dVal.as<double>();
685
686 if (duration > 0 && bufferedEnd > 0) {
687 const double bufferedValue = (bufferedEnd / duration * 100);
688 qCDebug(qWasmMediaVideoOutput) << "progress buffered";
689 m_currentBufferedValue = bufferedValue;
690 emit bufferingChanged(m_currentBufferedValue);
691 if (bufferedEnd == duration)
692 m_currentMediaStatus = MediaStatus::BufferedMedia;
693 else
694 m_currentMediaStatus = MediaStatus::BufferingMedia;
695 emit statusChanged(m_currentMediaStatus);
696 }
697 }
698 }
699 };
700 m_progressChangeEvent.reset(new QWasmEventHandler(m_video, "progress", progressCallback));
701
702 // pause
703 auto pauseCallback = [=](emscripten::val event) {
704 Q_UNUSED(event)
705 qCDebug(qWasmMediaVideoOutput) << "pause";
706
707 const double currentTime = m_video["currentTime"].as<double>(); // in seconds
708 const double duration = m_video["duration"].as<double>(); // in seconds
709 if ((currentTime > 0 && currentTime < duration) && (!m_shouldStop && m_toBePaused)) {
710 emit stateChanged(QWasmMediaPlayer::Paused);
711 } else {
712 // stop this crazy thing!
713 m_video.set("currentTime", emscripten::val(0));
714 emit stateChanged(QWasmMediaPlayer::Stopped);
715 }
716 };
717 m_pauseChangeEvent.reset(new QWasmEventHandler(m_video, "pause", pauseCallback));
718
719 // onunload
720 // we use lower-level events here to avert a crash on activate when using
721 // qstdweb; see _qt_beforeUnload
722 emscripten::val window = emscripten::val::global("window");
723
724 auto beforeUnloadCallback = [=](emscripten::val event) {
725 Q_UNUSED(event)
726 // large videos will leave the unloading window
727 // in a frozen state, so remove the video element src first
728 m_video.call<void>("removeAttribute", emscripten::val("src"));
729 m_video.call<void>("load");
730 };
731 m_beforeUnloadEvent.reset(new QWasmEventHandler(window, "beforeunload", beforeUnloadCallback));
732
733}
734
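// Position and size the <video> element via CSS (absolute left/top plus width and
// height attributes). When the canvas fallback is in use, the grab canvas is resized
// to match.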
735void QWasmVideoOutput::updateVideoElementGeometry(const QRect &windowGeometry)
736{
737 QRect videoElementSource(windowGeometry.topLeft(), windowGeometry.size());
738
739 emscripten::val style = m_video["style"];
740 style.set("left", QStringLiteral("%1px").arg(videoElementSource.left()).toStdString());
741 style.set("top", QStringLiteral("%1px").arg(videoElementSource.top()).toStdString());
742 m_video.set("width", videoElementSource.width());
743 m_video.set("height", videoElementSource.height());
744 style.set("z-index", "999");
745
746 if (!m_hasVideoFrame) {
747 // offscreen
748 m_offscreen.set("width", videoElementSource.width());
749 m_offscreen.set("height", videoElementSource.height());
750 }
751}
752
753 qint64 QWasmVideoOutput::getDuration()
754 {
755 // qt duration is in ms
756 // js is sec
757
758 if (m_video.isUndefined() || m_video.isNull())
759 return 0;
760 return m_video["duration"].as<double>() * 1000;
761}
762
763void QWasmVideoOutput::newFrame(const QVideoFrame &frame)
764{
765 m_wasmSink->setVideoFrame(frame);
766}
767
768 void QWasmVideoOutput::setPlaybackRate(qreal rate)
769 {
770 m_video.set("playbackRate", emscripten::val(rate));
771}
772
773 qreal QWasmVideoOutput::playbackRate()
774 {
775 return (m_video.isUndefined() || m_video.isNull()) ? 0 : m_video["playbackRate"].as<float>();
776}
777
778void QWasmVideoOutput::checkNetworkState()
779{
780 int netState = m_video["networkState"].as<int>();
781
782 qCDebug(qWasmMediaVideoOutput) << netState;
783
784 switch (netState) {
785 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkEmpty: // no data
786 break;
787 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkIdle:
788 break;
789 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkLoading:
790 break;
791 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkNoSource: // no source
792 emit errorOccured(netState, QStringLiteral("No media source found"));
793 break;
794 };
795}
796
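// Canvas fallback frame grab: draw the current video image onto the 2D context, read it
// back as RGBA with getImageData(), wrap the bytes in a QMemoryVideoBuffer and hand the
// resulting QVideoFrame to the sink.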
797void QWasmVideoOutput::videoComputeFrame(void *context)
798{
799 if (m_offscreenContext.isUndefined() || m_offscreenContext.isNull()) {
800 qCDebug(qWasmMediaVideoOutput) << "offscreen canvas context could not be found";
801 return;
802 }
803 emscripten::val document = emscripten::val::global("document");
804
805 emscripten::val videoElement =
806 document.call<emscripten::val>("getElementById", std::string(m_videoSurfaceId));
807
808 if (videoElement.isUndefined() || videoElement.isNull()) {
809 qCDebug(qWasmMediaVideoOutput) << "video element could not be found";
810 return;
811 }
812
813 const int videoWidth = videoElement["videoWidth"].as<int>();
814 const int videoHeight = videoElement["videoHeight"].as<int>();
815
816 if (videoWidth == 0 || videoHeight == 0)
817 return;
818
819 m_offscreenContext.call<void>("drawImage", videoElement, 0, 0, videoWidth, videoHeight);
820
821 emscripten::val frame = // one frame, Uint8ClampedArray
822 m_offscreenContext.call<emscripten::val>("getImageData", 0, 0, videoWidth, videoHeight);
823
824 const QSize frameBytesAllocationSize(videoWidth, videoHeight);
825
826 // this seems to work ok, even though getImageData returns a Uint8ClampedArray
827 QByteArray frameBytes = qstdweb::Uint8Array(frame["data"]).copyToQByteArray();
828
829 QVideoFrameFormat frameFormat =
830 QVideoFrameFormat(frameBytesAllocationSize, QVideoFrameFormat::Format_RGBA8888);
831
832 auto *textureDescription = QVideoTextureHelper::textureDescription(frameFormat.pixelFormat());
833
834 QVideoFrame vFrame = QVideoFramePrivate::createFrame(
835 std::make_unique<QMemoryVideoBuffer>(
836 std::move(frameBytes),
837 textureDescription->strideForWidth(frameFormat.frameWidth())), // width of line with padding
838 frameFormat);
839 QWasmVideoOutput *wasmVideoOutput = reinterpret_cast<QWasmVideoOutput *>(context);
840
841 if (!wasmVideoOutput->m_wasmSink) {
842 qWarning() << "ERROR ALERT!! video sink not set";
    return;
843 }
844 wasmVideoOutput->m_wasmSink->setVideoFrame(vFrame);
845}
846
847
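// WebCodecs frame grab: construct a VideoFrame from the <video> element, copy its pixels
// into a Uint8Array via allocationSize()/copyTo(), translate the reported pixel format
// with fromJsPixelFormat(), and deliver the QVideoFrame to the sink. The VideoFrame is
// closed in both the success and error paths.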
848 void QWasmVideoOutput::videoFrameCallback(void *context)
849 {
850 QWasmVideoOutput *videoOutput = reinterpret_cast<QWasmVideoOutput *>(context);
851 if (!videoOutput || !videoOutput->isReady())
852 return;
853 emscripten::val videoElement = videoOutput->currentVideoElement();
854 emscripten::val oneVideoFrame = val::global("VideoFrame").new_(videoElement);
855
856 if (oneVideoFrame.isNull() || oneVideoFrame.isUndefined()) {
857 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO
858 << "ERROR" << "failed to construct VideoFrame";
859 return;
860 }
861
862 emscripten::val options = emscripten::val::object();
863 emscripten::val rectOptions = emscripten::val::object();
864
865 rectOptions.set("width", oneVideoFrame["displayWidth"].as<int>());
866 rectOptions.set("height", oneVideoFrame["displayHeight"].as<int>());
867 options.set("rect", rectOptions);
868
869 emscripten::val frameBytesAllocationSize = oneVideoFrame.call<emscripten::val>("allocationSize", options);
870 emscripten::val frameBuffer =
871 emscripten::val::global("Uint8Array").new_(frameBytesAllocationSize);
872 QWasmVideoOutput *wasmVideoOutput =
873 reinterpret_cast<QWasmVideoOutput*>(videoElement["data-qvideocontext"].as<quintptr>());
874
875 qstdweb::PromiseCallbacks copyToCallback;
876 copyToCallback.thenFunc = [wasmVideoOutput, oneVideoFrame, frameBuffer, videoElement]
877 (emscripten::val frameLayout)
878 {
879 if (frameLayout.isNull() || frameLayout.isUndefined()) {
880 qCDebug(qWasmMediaVideoOutput) << "there is no frameLayout";
881 return;
882 }
883
884 // frameBuffer now has a new frame, send to Qt
885 const QSize frameSize(oneVideoFrame["displayWidth"].as<int>(),
886 oneVideoFrame["displayHeight"].as<int>());
887
888 QByteArray frameBytes = QByteArray::fromEcmaUint8Array(frameBuffer);
889
890 QVideoFrameFormat::PixelFormat pixelFormat = fromJsPixelFormat(oneVideoFrame["format"].as<std::string>());
891 if (pixelFormat == QVideoFrameFormat::Format_Invalid) {
892 qWarning() << "Invalid pixel format";
893 return;
894 }
895 QVideoFrameFormat frameFormat = QVideoFrameFormat(frameSize, pixelFormat);
896
897 auto buffer = std::make_unique<QMemoryVideoBuffer>(
898 std::move(frameBytes),
899 oneVideoFrame["codedWidth"].as<int>());
900
901 QVideoFrame vFrame =
902 QVideoFramePrivate::createFrame(std::move(buffer), std::move(frameFormat));
903
904 if (!wasmVideoOutput) {
905 qCDebug(qWasmMediaVideoOutput) << "ERROR:"
906 << "data-qvideocontext not found";
907 return;
908 }
909 if (!wasmVideoOutput->m_wasmSink) {
910 qWarning() << "ERROR ALERT!! video sink not set";
911 return;
912 }
913 wasmVideoOutput->m_wasmSink->setVideoFrame(vFrame);
914 oneVideoFrame.call<emscripten::val>("close");
915 };
916 copyToCallback.catchFunc = [&, wasmVideoOutput, oneVideoFrame, videoElement](emscripten::val error)
917 {
918 qCDebug(qWasmMediaVideoOutput) << "Error"
919 << QString::fromStdString(error["name"].as<std::string>())
920 << QString::fromStdString(error["message"].as<std::string>()) ;
921
922 oneVideoFrame.call<emscripten::val>("close");
923 wasmVideoOutput->stop();
924 return;
925 };
926
927 qstdweb::Promise::make(oneVideoFrame, u"copyTo"_s, std::move(copyToCallback), frameBuffer);
928}
929
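// Drive frame grabbing from an emscripten animation-frame loop. Returning true keeps the
// loop alive; it stops once the media is no longer loaded, the element is paused or
// ended, or a stop was requested.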
930 void QWasmVideoOutput::videoFrameTimerCallback()
931 {
932 static auto frame = [](double frameTime, void *context) -> EM_BOOL {
933 Q_UNUSED(frameTime);
934
935 QWasmVideoOutput *videoOutput = reinterpret_cast<QWasmVideoOutput *>(context);
936 if (!videoOutput || videoOutput->m_currentMediaStatus != MediaStatus::LoadedMedia)
937 return false;
938 if (videoOutput->m_shouldStop)
939 return false;
940 emscripten::val videoElement = videoOutput->currentVideoElement();
941
942 if (videoElement.isNull() || videoElement.isUndefined()) {
943 qWarning() << "no video element";
    return false;
944 }
945
946 if (videoElement["paused"].as<bool>() || videoElement["ended"].as<bool>())
947 return false;
948
949 if (checkForVideoFrame()) {
950 videoOutput->videoFrameCallback(context);
951 } else {
952 videoOutput->videoComputeFrame(context);
953 }
954 return true;
955 };
956
957 if ((!m_shouldStop && m_video["className"].as<std::string>() == "Camera" && m_cameraIsReady)
958 || isReady())
959 emscripten_request_animation_frame_loop(frame, this);
960 // about 60 fps
961}
962
963QVideoFrameFormat::PixelFormat QWasmVideoOutput::fromJsPixelFormat(std::string videoFormat)
964{
965 if (videoFormat == "I420")
966 return QVideoFrameFormat::Format_YUV420P;
967 // no equivalent pixel format
968 // else if (videoFormat == "I420A") // AYUV ?
969 else if (videoFormat == "I422")
970 return QVideoFrameFormat::Format_YUV422P;
971 // no equivalent pixel format
972 // else if (videoFormat == "I444")
973 else if (videoFormat == "NV12")
974 return QVideoFrameFormat::Format_NV12;
975 else if (videoFormat == "RGBA")
976 return QVideoFrameFormat::Format_RGBA8888;
977 else if (videoFormat == "RGBX")
978 return QVideoFrameFormat::Format_RGBX8888;
979 else if (videoFormat == "BGRA")
980 return QVideoFrameFormat::Format_BGRA8888;
981 else if (videoFormat == "BGRX")
982 return QVideoFrameFormat::Format_BGRX8888;
983
984 return QVideoFrameFormat::Format_Invalid;
985}
986
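// Return the capabilities of the active camera track (MediaTrackCapabilities), or its
// current settings on browsers such as Firefox that lack getCapabilities(). Returns
// val::undefined() when no active stream or video track is available.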
987 emscripten::val QWasmVideoOutput::getDeviceCapabilities()
988 {
989 emscripten::val stream = m_video["srcObject"];
990 if ((!stream.isNull() && !stream.isUndefined()) && stream["active"].as<bool>()) {
991 emscripten::val tracks = stream.call<emscripten::val>("getVideoTracks");
992 if (!tracks.isUndefined()) {
993 if (tracks["length"].as<int>() == 0)
994 return emscripten::val::undefined();
995
996 emscripten::val track = tracks[0];
997 if (!track.isUndefined()) {
998 emscripten::val trackCaps = emscripten::val::undefined();
999 if (!track["getCapabilities"].isUndefined())
1000 trackCaps = track.call<emscripten::val>("getCapabilities");
1001 else // firefox does not support getCapabilities
1002 trackCaps = track.call<emscripten::val>("getSettings");
1003
1004 if (!trackCaps.isUndefined())
1005 return trackCaps;
1006 }
1007 }
1008 } else {
1009 // camera not started; track capabilities are not available
1010 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("capabilities not available"));
1011 }
1012
1013 return emscripten::val::undefined();
1014}
1015
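// Apply a single key/value constraint to the active video track via applyConstraints().
// As a hypothetical example, a caller could request a different frame rate with
// setDeviceSetting("frameRate", emscripten::val(30)), assuming the track's capabilities
// allow it.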
1016bool QWasmVideoOutput::setDeviceSetting(const std::string &key, emscripten::val value)
1017{
1018 emscripten::val stream = m_video["srcObject"];
1019 if (stream.isNull() || stream.isUndefined()
1020 || stream["getVideoTracks"].isUndefined())
1021 return false;
1022
1023 emscripten::val tracks = stream.call<emscripten::val>("getVideoTracks");
1024 if (!tracks.isNull() && !tracks.isUndefined()) {
1025 if (tracks["length"].as<int>() == 0)
1026 return false;
1027
1028 emscripten::val track = tracks[0];
1029 emscripten::val constraint = emscripten::val::object();
1030 constraint.set(key, value);
1031 track.call<emscripten::val>("applyConstraints", constraint);
1032 return true;
1033 }
1034
1035 return false;
1036}
1037
1038QT_END_NAMESPACE
1039
1040#include "moc_qwasmvideooutput_p.cpp"