Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
qwasmvideooutput.cpp
Go to the documentation of this file.
1// Copyright (C) 2022 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
4#include <QDebug>
5#include <QUrl>
6#include <QPoint>
7#include <QRect>
8#include <QMediaPlayer>
9#include <QVideoFrame>
10#include <QFile>
11#include <QBuffer>
12#include <QMimeDatabase>
14
15#include <qvideosink.h>
16#include <private/qplatformvideosink_p.h>
17#include <private/qmemoryvideobuffer_p.h>
18#include <private/qvideotexturehelper_p.h>
19#include <private/qvideoframe_p.h>
20#include <private/qstdweb_p.h>
21#include <QTimer>
22
23#include <emscripten/bind.h>
24#include <emscripten/html5.h>
25#include <emscripten/val.h>
26
27
29
30
31using namespace emscripten;
32using namespace Qt::Literals;
33
34Q_LOGGING_CATEGORY(qWasmMediaVideoOutput, "qt.multimedia.wasm.videooutput")
35
36
37static bool checkForVideoFrame()
38{
39 emscripten::val videoFrame = emscripten::val::global("VideoFrame");
40 return (!videoFrame.isNull() && !videoFrame.isUndefined());
41}
42
// Constructs the video output. The heavy lifting (creating the <video>
// DOM element, wiring callbacks) happens later in createVideoElement().
// NOTE(review): one interior line of this constructor is missing from
// this extract (likely a member initialization) — verify against the
// full source.
QWasmVideoOutput::QWasmVideoOutput(QObject *parent) : QObject{ parent }
{
}
47
49
50void QWasmVideoOutput::setVideoSize(const QSize &newSize)
51{
52 if (m_pendingVideoSize == newSize)
53 return;
54
55 m_pendingVideoSize = newSize;
56 updateVideoElementGeometry(QRect(0, 0, m_pendingVideoSize.width(), m_pendingVideoSize.height()));
57}
58
// Selects where frames come from (media source vs. camera / surface
// capture). Takes effect on the next start().
{
    m_currentVideoMode = mode;
}
63
65{
66 if (m_video.isUndefined() || m_video.isNull()
67 || !m_wasmSink) {
68 // error
69 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
70 return;
71 }
72 switch (m_currentVideoMode) {
74 emscripten::val sourceObj = m_video["src"];
75 if ((sourceObj.isUndefined() || sourceObj.isNull()) && !m_source.isEmpty()) {
76 m_video.set("src", m_source);
77 }
78 if (!isReady())
79 m_video.call<void>("load");
80 } break;
83 if (!m_cameraIsReady) {
84 m_shouldBeStarted = true;
85 }
86
87 if (!m_connection)
88 m_connection = connect(m_mediaInputStream.get(), &JsMediaInputStream::mediaStreamReady, this,
89 [=]( ) {
90 m_video.set("srcObject", m_mediaInputStream->getMediaStream());
91
92 emscripten::val stream = m_video["srcObject"];
93 if (stream.isNull() || stream.isUndefined()) { // camera device
94 qCDebug(qWasmMediaVideoOutput) << "srcObject ERROR";
95 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
96 return;
97 } else {
98 emscripten::val videoTracks = stream.call<emscripten::val>("getVideoTracks");
99 if (videoTracks.isNull() || videoTracks.isUndefined()) {
100 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << "videoTracks is null";
101 emit errorOccured(QMediaPlayer::ResourceError,
102 QStringLiteral("video surface error"));
103 return;
104 }
105 if (videoTracks["length"].as<int>() == 0) {
106 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << "videoTracks count is 0";
107 emit errorOccured(QMediaPlayer::ResourceError,
108 QStringLiteral("video surface error"));
109 return;
110 }
111 emscripten::val videoSettings = videoTracks[0].call<emscripten::val>("getSettings");
112 if (!videoSettings.isNull() || !videoSettings.isUndefined()) {
113 const int width = videoSettings["width"].as<int>();
114 const int height = videoSettings["height"].as<int>();
115 updateVideoElementGeometry(QRect(0, 0, width, height));
116 }
117 }
118
119 m_shouldBeStarted = false;
120 m_video.call<void>("play");
121
122 if (m_currentVideoMode == QWasmVideoOutput::Camera
123 || m_currentVideoMode == QWasmVideoOutput::SurfaceCapture) {
124 emit readyChanged(true);
125 if (m_hasVideoFrame)
126 videoFrameTimerCallback();
127 }
128
129 });
130
131 m_mediaInputStream->setStreamDevice(m_cameraId);
132
133 } break;
134 };
135
136 m_shouldStop = false;
137 m_toBePaused = false;
138
139 if (m_currentVideoMode != QWasmVideoOutput::Camera
140 && m_currentVideoMode != QWasmVideoOutput::SurfaceCapture) {
141 m_video.call<void>("play");
142 }
143}
144
146{
147 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;
148
149 if (m_video.isUndefined() || m_video.isNull()) {
150 // error
151 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("Resource error"));
152 return;
153 }
154 m_shouldStop = true;
155 if (!m_toBePaused) {
156 // we are stopped , need to reset
157 m_mediaInputStream->stopMediaStream();
158
159 m_video.set("srcObject", emscripten::val::null());
160 disconnect(m_connection);
161 } else {
162 m_video.call<void>("pause");
163 }
164}
165
{
    // Pauses playback without tearing down the source: flags the pending
    // pause (so stop() and the "pause" DOM event handler can distinguish
    // pause from stop) and pauses the <video> element.
    qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;

    if (m_video.isUndefined() || m_video.isNull()) {
        // error
        emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
        return;
    }
    m_shouldStop = false;
    m_toBePaused = true;
    m_video.call<void>("pause");
}
179
{
    // Resets playback state: pushes an empty QVideoFrame to flush the
    // sink, clears the source string, rewinds and reloads the element.
    // flush pending frame
    if (m_wasmSink)
        m_wasmSink->platformVideoSink()->setVideoFrame(QVideoFrame());

    m_source = QStringLiteral("") ;
    m_video.set("currentTime", emscripten::val(0));
    m_video.call<void>("load");
}
190
{
    // Accessor for the underlying HTML <video> element handle.
    return m_video;
}
195
196void QWasmVideoOutput::setSurface(QVideoSink *surface)
197{
198 if (!surface || surface == m_wasmSink) {
199 return;
200 }
201
202 m_wasmSink = surface;
203}
204
{
    // Returns true once a valid <video> element exists and the media is
    // fully loaded (MediaStatus::LoadedMedia).
    if (m_video.isUndefined() || m_video.isNull()) {
        // error
        return false;
    }

    return m_currentMediaStatus == MediaStatus::LoadedMedia;
}
214
215void QWasmVideoOutput::setSource(const QUrl &url)
216{
217 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << url;
218
219 m_source = url.toString();
220
221 if (m_video.isUndefined() || m_video.isNull()) {
222 return;
223 }
224
225 if (url.isEmpty()) {
226 stop();
227 return;
228 }
229 if (url.isLocalFile()) {
230 QFile localFile(url.toLocalFile());
231 if (localFile.open(QIODevice::ReadOnly)) {
232 QDataStream buffer(&localFile); // we will serialize the data into the file
233 setSource(buffer.device());
234 } else {
235 qWarning() << "Failed to open file";
236 }
237 return;
238 }
239
240 updateVideoElementSource(m_source);
241}
242
244{
245 m_video.set("src", src.toStdString());
246 m_video.call<void>("load");
247}
248
249void QWasmVideoOutput::addCameraSourceElement(const std::string &id)
250{
251 m_cameraIsReady = false;
252 m_mediaInputStream.reset(new JsMediaInputStream(this));
253
254 m_mediaInputStream->setUseAudio(m_hasAudio);
255 m_mediaInputStream->setUseVideo(true);
256
257 connect(m_mediaInputStream.get(), &JsMediaInputStream::mediaStreamReady, this,
258 [this]() {
259 qCDebug(qWasmMediaVideoOutput) << "mediaStreamReady" << m_shouldBeStarted;
260
261 m_cameraIsReady = true;
262 if (m_shouldBeStarted) {
263 start();
264 m_shouldBeStarted = false;
265 }
266 });
267
268 m_cameraId = id;
269}
270
// Sets the media source from an in-memory device: the device's entire
// contents are copied into a JS Blob, which is then attached to the
// <video> element — via an object URL on most browsers, or directly as
// srcObject on Safari.
void QWasmVideoOutput::setSource(QIODevice *stream)
{
    if (stream->bytesAvailable() == 0) {
        qWarning() << "data not available";
        emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("data not available"));
        return;
    }
    if (m_video.isUndefined() || m_video.isNull()) {
        emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
        return;
    }

    // sniff the MIME type so the Blob is tagged correctly
    QMimeDatabase db;
    QMimeType mime = db.mimeTypeForData(stream);

    QByteArray buffer = stream->readAll();

    qstdweb::Blob contentBlob = qstdweb::Blob::copyFrom(buffer.data(), buffer.size(), mime.name().toStdString());

    emscripten::val window = qstdweb::window();

    // presumably "safari" is a window property only present on Safari —
    // TODO(review) confirm this detection still holds
    if (window["safari"].isUndefined()) {
        // NOTE(review): the object URL created here is never passed to
        // URL.revokeObjectURL(), so each call leaks the Blob for the
        // lifetime of the page — confirm and fix upstream.
        emscripten::val contentUrl = window["URL"].call<emscripten::val>("createObjectURL", contentBlob.val());
        m_video.set("src", contentUrl);
        m_source = QString::fromStdString(contentUrl.as<std::string>());
    } else {
        // only Safari currently supports Blob with srcObject
        m_video.set("srcObject", contentBlob.val());
    }
}
301
302void QWasmVideoOutput::setVolume(qreal volume)
303{ // between 0 - 1
304 volume = qBound(qreal(0.0), volume, qreal(1.0));
305 m_video.set("volume", volume);
306}
307
308void QWasmVideoOutput::setMuted(bool muted)
309{
310 if (m_video.isUndefined() || m_video.isNull()) {
311 // error
312 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
313 return;
314 }
315 m_video.set("muted", muted);
316}
317
319{
320 return (!m_video.isUndefined() || !m_video.isNull())
321 ? (m_video["currentTime"].as<double>() * 1000)
322 : 0;
323}
324
325void QWasmVideoOutput::seekTo(qint64 positionMSecs)
326{
327 if (isVideoSeekable()) {
328 float positionToSetInSeconds = float(positionMSecs) / 1000;
329 emscripten::val seekableTimeRange = m_video["seekable"];
330 if (!seekableTimeRange.isNull() || !seekableTimeRange.isUndefined()) {
331 // range user can seek
332 if (seekableTimeRange["length"].as<int>() < 1)
333 return;
334 if (positionToSetInSeconds
335 >= seekableTimeRange.call<emscripten::val>("start", 0).as<double>()
336 && positionToSetInSeconds
337 <= seekableTimeRange.call<emscripten::val>("end", 0).as<double>()) {
338 m_requestedPosition = positionToSetInSeconds;
339
340 m_video.set("currentTime", m_requestedPosition);
341 }
342 }
343 }
344 qCDebug(qWasmMediaVideoOutput) << "m_requestedPosition" << m_requestedPosition;
345}
346
348{
349 if (m_video.isUndefined() || m_video.isNull()) {
350 // error
351 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
352 return false;
353 }
354
355 emscripten::val seekableTimeRange = m_video["seekable"];
356 if (seekableTimeRange["length"].as<int>() < 1)
357 return false;
358 if (!seekableTimeRange.isNull() || !seekableTimeRange.isUndefined()) {
359 bool isit = !QtPrivate::fuzzyCompare(
360 seekableTimeRange.call<emscripten::val>("start", 0).as<double>(),
361 seekableTimeRange.call<emscripten::val>("end", 0).as<double>());
362 return isit;
363 }
364 return false;
365}
366
// Creates the hidden <video> element with the given DOM id, replacing any
// stale element of the same id, and appends it to the page body. The
// element starts hidden (display:none) and absolutely positioned so
// updateVideoElementGeometry() can place it with x/y coordinates.
void QWasmVideoOutput::createVideoElement(const std::string &id)
{
    qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << this << id;
    // Create <video> element and add it to the page body

    emscripten::val document = emscripten::val::global("document");
    emscripten::val body = document["body"];

    emscripten::val oldVideo = document.call<emscripten::val>("getElementById", id);

    // need to remove stale element
    if (!oldVideo.isUndefined() && !oldVideo.isNull())
        oldVideo.call<void>("remove");

    m_videoSurfaceId = id;
    m_video = document.call<emscripten::val>("createElement", std::string("video"));

    m_video.set("id", m_videoSurfaceId.c_str());
    // class name "Camera"/"Video" is read back in videoFrameTimerCallback()
    m_video.call<void>("setAttribute", std::string("class"),
                       (m_currentVideoMode == QWasmVideoOutput::Camera ? std::string("Camera")
                                                                       : std::string("Video")));
    // stash a pointer back to this object as a JS property; the frame
    // callback recovers it via videoElement["data-qvideocontext"]
    m_video.set("data-qvideocontext",
                emscripten::val(quintptr(reinterpret_cast<void *>(this))));

    m_video.set("preload", "metadata");

    // Uncaught DOMException: Failed to execute 'getImageData' on
    // 'OffscreenCanvasRenderingContext2D': The canvas has been tainted by
    // cross-origin data.
    // TODO figure out somehow to let user choose between these
    std::string originString = "anonymous"; // requires server Access-Control-Allow-Origin *
    // std::string originString = "use-credentials"; // must not
    // Access-Control-Allow-Origin *

    m_video.call<void>("setAttribute", std::string("crossorigin"), originString);
    body.call<void>("appendChild", m_video);

    // Create/add video source
    // NOTE(review): this <source> element is created and given a src but
    // never appended to m_video, so it has no effect — likely dead code;
    // the actual source is set via updateVideoElementSource() below.
    document.call<emscripten::val>("createElement",
                                   std::string("source")).set("src", m_source.toStdString());

    // Set position:absolute, which makes it possible to position the video
    // element using x,y. coordinates, relative to its parent (the page's <body>
    // element)
    emscripten::val style = m_video["style"];
    style.set("position", "absolute");
    style.set("display", "none"); // hide

    if (!m_source.isEmpty())
        updateVideoElementSource(m_source);
}
418
// Creates the offscreen canvas (and 2d context) used to grab frames when
// the WebCodecs VideoFrame path is unavailable. Prefers OffscreenCanvas;
// falls back to a hidden regular <canvas> where OffscreenCanvas is not
// supported (e.g. older Safari).
void QWasmVideoOutput::createOffscreenElement(const QSize &offscreenSize)
{
    qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;

    if (m_hasVideoFrame) // VideoFrame does not require offscreen canvas/context
        return;

    // create offscreen element for grabbing frames
    // OffscreenCanvas - no safari :(
    // https://developer.mozilla.org/en-US/docs/Web/API/OffscreenCanvas
    emscripten::val document = emscripten::val::global("document");

    // TODO use correct frameBytesAllocationSize?
    // offscreen render buffer
    // probe for the constructor first; replaced below by either a real
    // OffscreenCanvas instance or a fallback <canvas> element
    m_offscreen = emscripten::val::global("OffscreenCanvas");

    if (m_offscreen.isUndefined()) {
        // Safari OffscreenCanvas not supported, try old skool way
        m_offscreen = document.call<emscripten::val>("createElement", std::string("canvas"));

        m_offscreen.set("style",
                        "position:absolute;left:-1000px;top:-1000px"); // offscreen
        m_offscreen.set("width", offscreenSize.width());
        m_offscreen.set("height", offscreenSize.height());
        m_offscreenContext = m_offscreen.call<emscripten::val>("getContext", std::string("2d"));
    } else {
        m_offscreen = emscripten::val::global("OffscreenCanvas")
                              .new_(offscreenSize.width(), offscreenSize.height());
        // NOTE(review): emscripten::val::array() is used where getContext
        // expects an options *object*; it works because JS arrays are
        // objects, but val::object() would be conventional.
        emscripten::val offscreenAttributes = emscripten::val::array();
        offscreenAttributes.set("willReadFrequently", true);
        m_offscreenContext = m_offscreen.call<emscripten::val>("getContext", std::string("2d"),
                                                               offscreenAttributes);
    }
    std::string offscreenId = m_videoSurfaceId + "_offscreenOutputSurface";
    m_offscreen.set("id", offscreenId.c_str());
}
456
{
    // Removes the <video> element from the DOM, if one exists.
    if (!m_video.isUndefined() && !m_video.isNull())
        m_video.call<void>("remove");
}
462
464{
465 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;
466
467 // event callbacks
468 // timupdate
469 auto timeUpdateCallback = [=](emscripten::val event) {
470 qCDebug(qWasmMediaVideoOutput) << "timeupdate";
471
472 // qt progress is ms
473 emit progressChanged(event["target"]["currentTime"].as<double>() * 1000);
474 };
475 m_timeUpdateEvent.reset(new QWasmEventHandler(m_video, "timeupdate", timeUpdateCallback));
476
477 // play
478 auto playCallback = [=](emscripten::val event) {
479 Q_UNUSED(event)
480 qCDebug(qWasmMediaVideoOutput) << "play" << m_video["src"].as<std::string>();
481 if (!m_isSeeking)
482 emit stateChanged(QWasmMediaPlayer::Preparing);
483 };
484 m_playEvent.reset(new QWasmEventHandler(m_video, "play", playCallback));
485
486 // ended
487 auto endedCallback = [=](emscripten::val event) {
488 Q_UNUSED(event)
489 qCDebug(qWasmMediaVideoOutput) << "ended";
490 m_currentMediaStatus = MediaStatus::EndOfMedia;
491 emit statusChanged(m_currentMediaStatus);
492 m_shouldStop = true;
493 stop();
494 };
495 m_endedEvent.reset(new QWasmEventHandler(m_video, "ended", endedCallback));
496
497 // durationchange
498 auto durationChangeCallback = [=](emscripten::val event) {
499 qCDebug(qWasmMediaVideoOutput) << "durationChange";
500
501 // qt duration is in milliseconds.
502 qint64 dur = event["target"]["duration"].as<double>() * 1000;
503 emit durationChanged(dur);
504 };
505 m_durationChangeEvent.reset(
506 new QWasmEventHandler(m_video, "durationchange", durationChangeCallback));
507
508 // loadeddata
509 auto loadedDataCallback = [=](emscripten::val event) {
510 Q_UNUSED(event)
511 qCDebug(qWasmMediaVideoOutput) << "loaded data";
512
513 emit stateChanged(QWasmMediaPlayer::Prepared);
514 if (m_isSeekable != isVideoSeekable()) {
515 m_isSeekable = isVideoSeekable();
516 emit seekableChanged(m_isSeekable);
517 }
518 };
519 m_loadedDataEvent.reset(new QWasmEventHandler(m_video, "loadeddata", loadedDataCallback));
520
521 // error
522 auto errorCallback = [=](emscripten::val event) {
523 qCDebug(qWasmMediaVideoOutput) << "error";
524 if (event.isUndefined() || event.isNull())
525 return;
526 emit errorOccured(m_video["error"]["code"].as<int>(),
527 QString::fromStdString(m_video["error"]["message"].as<std::string>()));
528 };
529 m_errorChangeEvent.reset(new QWasmEventHandler(m_video, "error", errorCallback));
530
531 // resize
532 auto resizeCallback = [=](emscripten::val event) {
533 Q_UNUSED(event)
534 qCDebug(qWasmMediaVideoOutput) << "resize";
535
536 updateVideoElementGeometry(
537 QRect(0, 0, m_video["videoWidth"].as<int>(), m_video["videoHeight"].as<int>()));
538 emit sizeChange(m_video["videoWidth"].as<int>(), m_video["videoHeight"].as<int>());
539
540 };
541 m_resizeChangeEvent.reset(new QWasmEventHandler(m_video, "resize", resizeCallback));
542
543 // loadedmetadata
544 auto loadedMetadataCallback = [=](emscripten::val event) {
545 Q_UNUSED(event)
546 qCDebug(qWasmMediaVideoOutput) << "loaded meta data";
547
548 emit metaDataLoaded();
549 };
550 m_loadedMetadataChangeEvent.reset(
551 new QWasmEventHandler(m_video, "loadedmetadata", loadedMetadataCallback));
552
553 // loadstart
554 auto loadStartCallback = [=](emscripten::val event) {
555 Q_UNUSED(event)
556 qCDebug(qWasmMediaVideoOutput) << "load started";
557 m_currentMediaStatus = MediaStatus::LoadingMedia;
558 emit statusChanged(m_currentMediaStatus);
559 m_shouldStop = false;
560 };
561 m_loadStartChangeEvent.reset(new QWasmEventHandler(m_video, "loadstart", loadStartCallback));
562
563 // canplay
564
565 auto canPlayCallback = [=](emscripten::val event) {
566 if (event.isUndefined() || event.isNull())
567 return;
568 qCDebug(qWasmMediaVideoOutput) << "can play"
569 << "m_requestedPosition" << m_requestedPosition;
570
571 if (!m_shouldStop)
572 emit readyChanged(true); // sets video available
573 };
574 m_canPlayChangeEvent.reset(new QWasmEventHandler(m_video, "canplay", canPlayCallback));
575
576 // canplaythrough
577 auto canPlayThroughCallback = [=](emscripten::val event) {
578 Q_UNUSED(event)
579 qCDebug(qWasmMediaVideoOutput) << "can play through"
580 << "m_shouldStop" << m_shouldStop;
581
582 if (m_currentMediaStatus == MediaStatus::EndOfMedia)
583 return;
584 if (m_isSeekable != isVideoSeekable()) {
585 m_isSeekable = isVideoSeekable();
586 emit seekableChanged(m_isSeekable);
587 }
588 if (!m_isSeeking && !m_shouldStop) {
589 emscripten::val timeRanges = m_video["buffered"];
590 if ((!timeRanges.isNull() || !timeRanges.isUndefined())
591 && timeRanges["length"].as<int>() == 1) {
592 double buffered = m_video["buffered"].call<emscripten::val>("end", 0).as<double>();
593 const double duration = m_video["duration"].as<double>();
594
595 if (duration == buffered) {
596 m_currentBufferedValue = 100;
597 emit bufferingChanged(m_currentBufferedValue);
598 }
599 }
600 constexpr int hasCurrentData = 2;
601 if (m_video["readyState"].as<int>() >= hasCurrentData) {
602 m_currentMediaStatus = MediaStatus::LoadedMedia;
603 emit statusChanged(m_currentMediaStatus);
605 }
606 } else {
607 m_shouldStop = false;
608 }
609 };
610 m_canPlayThroughChangeEvent.reset(
611 new QWasmEventHandler(m_video, "canplaythrough", canPlayThroughCallback));
612
613 // seeking
614 auto seekingCallback = [=](emscripten::val event) {
615 Q_UNUSED(event)
616 qCDebug(qWasmMediaVideoOutput)
617 << "seeking started" << (m_video["currentTime"].as<double>() * 1000);
618 m_isSeeking = true;
619 };
620 m_seekingChangeEvent.reset(new QWasmEventHandler(m_video, "seeking", seekingCallback));
621
622 // seeked
623 auto seekedCallback = [=](emscripten::val event) {
624 Q_UNUSED(event)
625 qCDebug(qWasmMediaVideoOutput) << "seeked" << (m_video["currentTime"].as<double>() * 1000);
626 emit progressChanged(m_video["currentTime"].as<double>() * 1000);
627 m_isSeeking = false;
628 };
629 m_seekedChangeEvent.reset(new QWasmEventHandler(m_video, "seeked", seekedCallback));
630
631 // emptied
632 auto emptiedCallback = [=](emscripten::val event) {
633 Q_UNUSED(event)
634 qCDebug(qWasmMediaVideoOutput) << "emptied";
635 emit readyChanged(false);
636 m_currentMediaStatus = MediaStatus::EndOfMedia;
637 emit statusChanged(m_currentMediaStatus);
638 };
639 m_emptiedChangeEvent.reset(new QWasmEventHandler(m_video, "emptied", emptiedCallback));
640
641 // stalled
642 auto stalledCallback = [=](emscripten::val event) {
643 Q_UNUSED(event)
644 qCDebug(qWasmMediaVideoOutput) << "stalled";
645 m_currentMediaStatus = MediaStatus::StalledMedia;
646 emit statusChanged(m_currentMediaStatus);
647 };
648 m_stalledChangeEvent.reset(new QWasmEventHandler(m_video, "stalled", stalledCallback));
649
650 // waiting
651 auto waitingCallback = [=](emscripten::val event) {
652 Q_UNUSED(event)
653
654 qCDebug(qWasmMediaVideoOutput) << "waiting";
655 // check buffer
656 };
657 m_waitingChangeEvent.reset(new QWasmEventHandler(m_video, "waiting", waitingCallback));
658
659 // suspend
660
661 // playing
662 auto playingCallback = [=](emscripten::val event) {
663 Q_UNUSED(event)
664 qCDebug(qWasmMediaVideoOutput) << "playing";
665 if (m_isSeeking)
666 return;
667 emit stateChanged(QWasmMediaPlayer::Started);
668 if (m_toBePaused || !m_shouldStop) { // paused
669 m_toBePaused = false;
670 QMetaObject::invokeMethod(this, &QWasmVideoOutput::videoFrameTimerCallback, Qt::QueuedConnection);
671 }
672 };
673 m_playingChangeEvent.reset(new QWasmEventHandler(m_video, "playing", playingCallback));
674
675 // progress (buffering progress)
676 auto progesssCallback = [=](emscripten::val event) {
677 if (event.isUndefined() || event.isNull())
678 return;
679
680 const double duration = event["target"]["duration"].as<double>();
681 if (duration < 0) // track not exactly ready yet
682 return;
683
684 emscripten::val timeRanges = event["target"]["buffered"];
685
686 if ((!timeRanges.isNull() || !timeRanges.isUndefined())
687 && timeRanges["length"].as<int>() == 1) {
688 emscripten::val dVal = timeRanges.call<emscripten::val>("end", 0);
689 if (!dVal.isNull() || !dVal.isUndefined()) {
690 double bufferedEnd = dVal.as<double>();
691
692 if (duration > 0 && bufferedEnd > 0) {
693 const double bufferedValue = (bufferedEnd / duration * 100);
694 qCDebug(qWasmMediaVideoOutput) << "progress buffered";
695 m_currentBufferedValue = bufferedValue;
696 emit bufferingChanged(m_currentBufferedValue);
697 if (bufferedEnd == duration)
698 m_currentMediaStatus = MediaStatus::BufferedMedia;
699 else
700 m_currentMediaStatus = MediaStatus::BufferingMedia;
701 emit statusChanged(m_currentMediaStatus);
702 }
703 }
704 }
705 };
706 m_progressChangeEvent.reset(new QWasmEventHandler(m_video, "progress", progesssCallback));
707
708 // pause
709 auto pauseCallback = [=](emscripten::val event) {
710 Q_UNUSED(event)
711 qCDebug(qWasmMediaVideoOutput) << "pause";
712
713 const double currentTime = m_video["currentTime"].as<double>(); // in seconds
714 const double duration = m_video["duration"].as<double>(); // in seconds
715 if ((currentTime > 0 && currentTime < duration) && (!m_shouldStop && m_toBePaused)) {
716 emit stateChanged(QWasmMediaPlayer::Paused);
717 } else {
718 // stop this crazy thing!
719 m_video.set("currentTime", emscripten::val(0));
720 emit stateChanged(QWasmMediaPlayer::Stopped);
721 }
722 };
723 m_pauseChangeEvent.reset(new QWasmEventHandler(m_video, "pause", pauseCallback));
724
725 // onunload
726 // we use lower level events here as to avert a crash on activate using the
727 // qtdweb see _qt_beforeUnload
728 emscripten::val window = emscripten::val::global("window");
729
730 auto beforeUnloadCallback = [=](emscripten::val event) {
731 Q_UNUSED(event)
732 // large videos will leave the unloading window
733 // in a frozen state, so remove the video element src first
734 m_video.call<void>("removeAttribute", emscripten::val("src"));
735 m_video.call<void>("load");
736 };
737 m_beforeUnloadEvent.reset(new QWasmEventHandler(window, "beforeunload", beforeUnloadCallback));
738
739}
740
741void QWasmVideoOutput::updateVideoElementGeometry(const QRect &windowGeometry)
742{
743 QRect m_videoElementSource(windowGeometry.topLeft(), windowGeometry.size());
744
745 emscripten::val style = m_video["style"];
746 style.set("left", QStringLiteral("%1px").arg(m_videoElementSource.left()).toStdString());
747 style.set("top", QStringLiteral("%1px").arg(m_videoElementSource.top()).toStdString());
748 m_video.set("width", m_videoElementSource.width());
749 m_video.set("height", m_videoElementSource.height());
750 style.set("z-index", "999");
751
752 if (!m_hasVideoFrame) {
753 // offscreen
754 m_offscreen.set("width", m_videoElementSource.width());
755 m_offscreen.set("height", m_videoElementSource.height());
756 }
757}
758
{
    // Returns the media duration in milliseconds; 0 if no element exists.
    // qt duration is in ms
    // js is sec

    if (m_video.isUndefined() || m_video.isNull())
        return 0;
    // NOTE(review): the element's "duration" is NaN before metadata loads
    // and Infinity for live streams — confirm callers tolerate this.
    return m_video["duration"].as<double>() * 1000;
}
768
769void QWasmVideoOutput::newFrame(const QVideoFrame &frame)
770{
771 m_wasmSink->setVideoFrame(frame);
772}
773
775{
776 m_video.set("playbackRate", emscripten::val(rate));
777}
778
{
    // Current playback rate (1.0 = normal); 0 when no element exists.
    return (m_video.isUndefined() || m_video.isNull()) ? 0 : m_video["playbackRate"].as<float>();
}
783
784void QWasmVideoOutput::checkNetworkState()
785{
786 int netState = m_video["networkState"].as<int>();
787
788 qCDebug(qWasmMediaVideoOutput) << netState;
789
790 switch (netState) {
791 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkEmpty: // no data
792 break;
793 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkIdle:
794 break;
795 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkLoading:
796 break;
797 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkNoSource: // no source
798 emit errorOccured(netState, QStringLiteral("No media source found"));
799 break;
800 };
801}
802
803void QWasmVideoOutput::videoComputeFrame(void *context)
804{
805 if (m_offscreenContext.isUndefined() || m_offscreenContext.isNull()) {
806 qCDebug(qWasmMediaVideoOutput) << "offscreen canvas context could not be found";
807 return;
808 }
809 emscripten::val document = emscripten::val::global("document");
810
811 emscripten::val videoElement =
812 document.call<emscripten::val>("getElementById", std::string(m_videoSurfaceId));
813
814 if (videoElement.isUndefined() || videoElement.isNull()) {
815 qCDebug(qWasmMediaVideoOutput) << "video element could not be found";
816 return;
817 }
818
819 const int videoWidth = videoElement["videoWidth"].as<int>();
820 const int videoHeight = videoElement["videoHeight"].as<int>();
821
822 if (videoWidth == 0 || videoHeight == 0)
823 return;
824
825 m_offscreenContext.call<void>("drawImage", videoElement, 0, 0, videoWidth, videoHeight);
826
827 emscripten::val frame = // one frame, Uint8ClampedArray
828 m_offscreenContext.call<emscripten::val>("getImageData", 0, 0, videoWidth, videoHeight);
829
830 const QSize frameBytesAllocationSize(videoWidth, videoHeight);
831
832 // this seems to work ok, even though getImageData returns a Uint8ClampedArray
833 QByteArray frameBytes = qstdweb::Uint8Array(frame["data"]).copyToQByteArray();
834
835 QVideoFrameFormat frameFormat =
836 QVideoFrameFormat(frameBytesAllocationSize, QVideoFrameFormat::Format_RGBA8888);
837
838 auto *textureDescription = QVideoTextureHelper::textureDescription(frameFormat.pixelFormat());
839
840 QVideoFrame vFrame = QVideoFramePrivate::createFrame(
841 std::make_unique<QMemoryVideoBuffer>(
842 std::move(frameBytes),
843 textureDescription->strideForWidth(frameFormat.frameWidth())), // width of line with padding
844 frameFormat);
845 QWasmVideoOutput *wasmVideoOutput = reinterpret_cast<QWasmVideoOutput *>(context);
846
847 if (!wasmVideoOutput->m_wasmSink) {
848 qWarning() << "ERROR ALERT!! video sink not set";
849 }
850 wasmVideoOutput->m_wasmSink->setVideoFrame(vFrame);
851}
852
853
855{
856 QWasmVideoOutput *videoOutput = reinterpret_cast<QWasmVideoOutput *>(context);
857 if (!videoOutput || !videoOutput->isReady())
858 return;
859 emscripten::val videoElement = videoOutput->currentVideoElement();
860 emscripten::val oneVideoFrame = val::global("VideoFrame").new_(videoElement);
861
862 if (oneVideoFrame.isNull() || oneVideoFrame.isUndefined()) {
863 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO
864 << "ERROR" << "failed to construct VideoFrame";
865 return;
866 }
867
868 emscripten::val options = emscripten::val::object();
869 emscripten::val rectOptions = emscripten::val::object();
870
871 rectOptions.set("width",oneVideoFrame["displayWidth"].as<int>());
872 rectOptions.set("height", oneVideoFrame["displayHeight"].as<int>());
873 options.set("rect", rectOptions);
874
875 emscripten::val frameBytesAllocationSize = oneVideoFrame.call<emscripten::val>("allocationSize", options);
876 emscripten::val frameBuffer =
877 emscripten::val::global("Uint8Array").new_(frameBytesAllocationSize);
878 QWasmVideoOutput *wasmVideoOutput =
879 reinterpret_cast<QWasmVideoOutput*>(videoElement["data-qvideocontext"].as<quintptr>());
880
881 qstdweb::PromiseCallbacks copyToCallback;
882 copyToCallback.thenFunc = [wasmVideoOutput, oneVideoFrame, frameBuffer, videoElement]
883 (emscripten::val frameLayout)
884 {
885 if (frameLayout.isNull() || frameLayout.isUndefined()) {
886 qCDebug(qWasmMediaVideoOutput) << "theres no frameLayout";
887 return;
888 }
889
890 // frameBuffer now has a new frame, send to Qt
891 const QSize frameSize(oneVideoFrame["displayWidth"].as<int>(),
892 oneVideoFrame["displayHeight"].as<int>());
893
894 QByteArray frameBytes = QByteArray::fromEcmaUint8Array(frameBuffer);
895
896 QVideoFrameFormat::PixelFormat pixelFormat = fromJsPixelFormat(oneVideoFrame["format"].as<std::string>());
897 if (pixelFormat == QVideoFrameFormat::Format_Invalid) {
898 qWarning() << "Invalid pixel format";
899 return;
900 }
901 QVideoFrameFormat frameFormat = QVideoFrameFormat(frameSize, pixelFormat);
902
903 auto buffer = std::make_unique<QMemoryVideoBuffer>(
904 std::move(frameBytes),
905 oneVideoFrame["codedWidth"].as<int>());
906
907 QVideoFrame vFrame =
908 QVideoFramePrivate::createFrame(std::move(buffer), std::move(frameFormat));
909
910 if (!wasmVideoOutput) {
911 qCDebug(qWasmMediaVideoOutput) << "ERROR:"
912 << "data-qvideocontext not found";
913 return;
914 }
915 if (!wasmVideoOutput->m_wasmSink) {
916 qWarning() << "ERROR ALERT!! video sink not set";
917 return;
918 }
919 wasmVideoOutput->m_wasmSink->setVideoFrame(vFrame);
920 oneVideoFrame.call<emscripten::val>("close");
921 };
922 copyToCallback.catchFunc = [&, wasmVideoOutput, oneVideoFrame, videoElement](emscripten::val error)
923 {
924 qCDebug(qWasmMediaVideoOutput) << "Error"
925 << QString::fromStdString(error["name"].as<std::string>())
926 << QString::fromStdString(error["message"].as<std::string>()) ;
927
928 oneVideoFrame.call<emscripten::val>("close");
929 wasmVideoOutput->stop();
930 return;
931 };
932
933 qstdweb::Promise::make(oneVideoFrame, u"copyTo"_s, std::move(copyToCallback), frameBuffer);
934}
935
937{
938 static auto frame = [](double frameTime, void *context) -> EM_BOOL {
939 Q_UNUSED(frameTime);
940
941 QWasmVideoOutput *videoOutput = reinterpret_cast<QWasmVideoOutput *>(context);
942 if (!videoOutput || videoOutput->m_currentMediaStatus != MediaStatus::LoadedMedia)
943 return false;
944 if (videoOutput->m_shouldStop)
945 return false;
946 emscripten::val videoElement = videoOutput->currentVideoElement();
947
948 if (videoElement.isNull() || videoElement.isUndefined()) {
949 qWarning() << "no video element";
950 }
951
952 if (videoElement["paused"].as<bool>() || videoElement["ended"].as<bool>())
953 return false;
954
955 if (checkForVideoFrame()) {
956 videoOutput->videoFrameCallback(context);
957 } else {
958 videoOutput->videoComputeFrame(context);
959 }
960 return true;
961 };
962
963 if ((!m_shouldStop && m_video["className"].as<std::string>() == "Camera" && m_cameraIsReady)
964 || isReady())
965 emscripten_request_animation_frame_loop(frame, this);
966 // about 60 fps
967}
968
969QVideoFrameFormat::PixelFormat QWasmVideoOutput::fromJsPixelFormat(std::string videoFormat)
970{
971 if (videoFormat == "I420")
972 return QVideoFrameFormat::Format_YUV420P;
973 // no equivalent pixel format
974 // else if (videoFormat == "I420A") // AYUV ?
975 else if (videoFormat == "I422")
976 return QVideoFrameFormat::Format_YUV422P;
977 // no equivalent pixel format
978 // else if (videoFormat == "I444")
979 else if (videoFormat == "NV12")
980 return QVideoFrameFormat::Format_NV12;
981 else if (videoFormat == "RGBA")
982 return QVideoFrameFormat::Format_RGBA8888;
983 else if (videoFormat == "RGBX")
984 return QVideoFrameFormat::Format_RGBX8888;
985 else if (videoFormat == "BGRA")
986 return QVideoFrameFormat::Format_BGRA8888;
987 else if (videoFormat == "BGRX")
988 return QVideoFrameFormat::Format_BGRX8888;
989
990 return QVideoFrameFormat::Format_Invalid;
991}
992
{
    // Returns the capabilities (or, on Firefox, the settings) of the
    // first video track of the active capture stream, or
    // val::undefined() when no usable track is available.
    emscripten::val stream = m_video["srcObject"];
    if ((!stream.isNull() && !stream.isUndefined()) && stream["active"].as<bool>()) {
        emscripten::val tracks = stream.call<emscripten::val>("getVideoTracks");
        if (!tracks.isUndefined()) {
            if (tracks["length"].as<int>() == 0)
                return emscripten::val::undefined();

            emscripten::val track = tracks[0];
            if (!track.isUndefined()) {
                emscripten::val trackCaps = emscripten::val::undefined();
                if (!track["getCapabilities"].isUndefined())
                    trackCaps = track.call<emscripten::val>("getCapabilities");
                else // firefox does not support getCapabilities
                    trackCaps = track.call<emscripten::val>("getSettings");

                if (!trackCaps.isUndefined())
                    return trackCaps;
            }
        }
    } else {
        // camera not started track capabilities not available
        emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("capabilities not available"));
    }

    return emscripten::val::undefined();
}
1021
1022bool QWasmVideoOutput::setDeviceSetting(const std::string &key, emscripten::val value)
1023{
1024 emscripten::val stream = m_video["srcObject"];
1025 if (stream.isNull() || stream.isUndefined()
1026 || stream["getVideoTracks"].isUndefined())
1027 return false;
1028
1029 emscripten::val tracks = stream.call<emscripten::val>("getVideoTracks");
1030 if (!tracks.isNull() || !tracks.isUndefined()) {
1031 if (tracks["length"].as<int>() == 0)
1032 return false;
1033
1034 emscripten::val track = tracks[0];
1035 emscripten::val contraint = emscripten::val::object();
1036 contraint.set(std::move(key), value);
1037 track.call<emscripten::val>("applyConstraints", contraint);
1038 return true;
1039 }
1040
1041 return false;
1042}
1043
1044QT_END_NAMESPACE
1045
1046#include "moc_qwasmvideooutput_p.cpp"
void addCameraSourceElement(const std::string &id)
void updateVideoElementGeometry(const QRect &windowGeometry)
bool setDeviceSetting(const std::string &key, emscripten::val value)
emscripten::val surfaceElement()
emscripten::val getDeviceCapabilities()
void videoFrameCallback(void *context)
void setVideoSize(const QSize &)
void setMuted(bool muted)
void setSource(const QUrl &url)
void setVideoMode(QWasmVideoOutput::WasmVideoMode mode)
void seekTo(qint64 position)
void setVolume(qreal volume)
void createVideoElement(const std::string &id)
void updateVideoElementSource(const QString &src)
void setSource(QIODevice *stream)
void setPlaybackRate(qreal rate)
void createOffscreenElement(const QSize &offscreenSize)
Combined button and popup list for selecting options.
Q_LOGGING_CATEGORY(lcEventDispatcher, "qt.eventdispatcher")
static bool checkForVideoFrame()