Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
qwasmvideooutput.cpp
Go to the documentation of this file.
1// Copyright (C) 2022 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
4#include <QDebug>
5#include <QUrl>
6#include <QPoint>
7#include <QRect>
8#include <QMediaPlayer>
9#include <QVideoFrame>
10#include <QFile>
11#include <QBuffer>
12#include <QMimeDatabase>
14
15#include <qvideosink.h>
16#include <private/qplatformvideosink_p.h>
17#include <private/qmemoryvideobuffer_p.h>
18#include <private/qvideotexturehelper_p.h>
19#include <private/qvideoframe_p.h>
20#include <private/qstdweb_p.h>
21#include <QTimer>
22
23#include <emscripten/bind.h>
24#include <emscripten/html5.h>
25#include <emscripten/val.h>
26
27
29
30
31using namespace emscripten;
32using namespace Qt::Literals;
33
34Q_LOGGING_CATEGORY(qWasmMediaVideoOutput, "qt.multimedia.wasm.videooutput")
35
36
37static bool checkForVideoFrame()
38{
39 emscripten::val videoFrame = emscripten::val::global("VideoFrame");
40 return (!videoFrame.isNull() && !videoFrame.isUndefined());
41}
42
43QWasmVideoOutput::QWasmVideoOutput(QObject *parent) : QObject{ parent }
44{
46}
47
49
50void QWasmVideoOutput::setVideoSize(const QSize &newSize)
51{
52 if (m_pendingVideoSize == newSize)
53 return;
54
55 m_pendingVideoSize = newSize;
56 updateVideoElementGeometry(QRect(0, 0, m_pendingVideoSize.width(), m_pendingVideoSize.height()));
57}
58
60{
61 m_currentVideoMode = mode;
62}
63
65{
66 if (m_video.isUndefined() || m_video.isNull()
67 || !m_wasmSink) {
68 // error
69 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
70 return;
71 }
72 switch (m_currentVideoMode) {
74 emscripten::val sourceObj = m_video["src"];
75 if ((sourceObj.isUndefined() || sourceObj.isNull()) && !m_source.isEmpty()) {
76 m_video.set("src", m_source);
77 }
78 if (!isReady())
79 m_video.call<void>("load");
80 } break;
83 if (!m_cameraIsReady) {
84 m_shouldBeStarted = true;
85 }
86
87 if (!m_connection)
88 m_connection = connect(m_mediaInputStream, &JsMediaInputStream::mediaVideoStreamReady, this,
89 [=]( ) {
90 m_video.set("srcObject", m_mediaInputStream->getMediaStream());
91
92 emscripten::val stream = m_video["srcObject"];
93 if (stream.isNull() || stream.isUndefined()) { // camera device
94 qCDebug(qWasmMediaVideoOutput) << "srcObject ERROR";
95 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
96 return;
97 } else {
98 emscripten::val videoTracks = stream.call<emscripten::val>("getVideoTracks");
99 if (videoTracks.isNull() || videoTracks.isUndefined()) {
100 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << "videoTracks is null";
101 emit errorOccured(QMediaPlayer::ResourceError,
102 QStringLiteral("video surface error"));
103 return;
104 }
105 if (videoTracks["length"].as<int>() == 0) {
106 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << "videoTracks count is 0";
107 emit errorOccured(QMediaPlayer::ResourceError,
108 QStringLiteral("video surface error"));
109 return;
110 }
111 emscripten::val videoSettings = videoTracks[0].call<emscripten::val>("getSettings");
112 if (!videoSettings.isNull() || !videoSettings.isUndefined()) {
113 const int width = videoSettings["width"].as<int>();
114 const int height = videoSettings["height"].as<int>();
115 updateVideoElementGeometry(QRect(0, 0, width, height));
116 }
117 }
118
119 m_video.call<void>("play");
120
121 if (m_currentVideoMode == QWasmVideoOutput::Camera
122 || m_currentVideoMode == QWasmVideoOutput::SurfaceCapture) {
123 emit readyChanged(true);
124 if (m_hasVideoFrame)
125 videoFrameTimerCallback();
126 }
127
128 });
129 m_mediaInputStream->setUseAudio(false);
130 m_shouldBeStarted = true;
131 m_mediaInputStream->setStreamDevice(m_cameraId);
132
133 } break;
134 };
135
136 m_shouldStop = false;
137 m_toBePaused = false;
138
139 if (m_currentVideoMode != QWasmVideoOutput::Camera
140 && m_currentVideoMode != QWasmVideoOutput::SurfaceCapture) {
141 m_video.call<void>("play");
142 }
143}
144
146{
147 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;
148
149 if (m_video.isUndefined() || m_video.isNull()) {
150 // error
151 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("Resource error"));
152 return;
153 }
154 m_shouldStop = true;
155 if (!m_toBePaused) {
156 // we are stopped , need to reset
157 m_mediaInputStream->stopMediaStream(m_mediaInputStream->getMediaStream());
158
159 m_video.set("srcObject", emscripten::val::null());
160 disconnect(m_connection);
161
162 m_video.call<void>("remove");
163
164 } else {
165 m_video.call<void>("pause");
166 }
167}
168
170{
171 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;
172
173 if (m_video.isUndefined() || m_video.isNull()) {
174 // error
175 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
176 return;
177 }
178 m_shouldStop = false;
179 m_toBePaused = true;
180 m_video.call<void>("pause");
181}
182
184{
185 // flush pending frame
186 if (m_wasmSink)
187 m_wasmSink->platformVideoSink()->setVideoFrame(QVideoFrame());
188
189 m_source.clear();
190 m_video.set("currentTime", emscripten::val(0));
191 m_video.call<void>("load");
192}
193
195{
196 return m_video;
197}
198
199void QWasmVideoOutput::setSurface(QVideoSink *surface)
200{
201 if (!surface || surface == m_wasmSink) {
202 return;
203 }
204
205 m_wasmSink = surface;
206}
207
209{
210 if (m_video.isUndefined() || m_video.isNull()) {
211 // error
212 return false;
213 }
214
215 return m_currentMediaStatus == MediaStatus::LoadedMedia;
216 }
217
218void QWasmVideoOutput::setSource(const QUrl &url)
219{
220 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << url;
221
222 m_source = url.toString();
223
224 if (m_video.isUndefined() || m_video.isNull()) {
225 return;
226 }
227
228 if (url.isEmpty()) {
229 stop();
230 return;
231 }
232 if (url.isLocalFile()) {
233 QFile localFile(url.toLocalFile());
234 if (localFile.open(QIODevice::ReadOnly)) {
235 QDataStream buffer(&localFile); // we will serialize the data into the file
236 setSource(buffer.device());
237 } else {
238 qWarning() << "Failed to open file";
239 }
240 return;
241 }
242
243 updateVideoElementSource(m_source);
244}
245
247{
248 m_video.set("src", src.toStdString());
249 m_video.call<void>("load");
250}
251
// Configures the shared JsMediaInputStream for camera capture and records
// the chosen device id. The stream becomes usable asynchronously; a queued
// start() request is honored once mediaVideoStreamReady fires.
void QWasmVideoOutput::addCameraSourceElement(const std::string &id)
{
    // Not usable until JsMediaInputStream reports the stream ready.
    m_cameraIsReady = false;

    m_mediaInputStream = JsMediaInputStream::instance();
    m_mediaInputStream->setUseAudio(m_hasAudio);
    m_mediaInputStream->setUseVideo(true);

    connect(m_mediaInputStream, &JsMediaInputStream::mediaVideoStreamReady, this, [this]() {
        qCDebug(qWasmMediaVideoOutput) << "mediaVideoStreamReady" << m_shouldBeStarted;

        m_cameraIsReady = true;
        if (!m_shouldBeStarted)
            return;
        start();
        m_shouldBeStarted = false;
    });

    m_cameraId = id;
}
273
// Sets the media source from an in-memory stream: the device's contents are
// copied into a JS Blob and attached to the <video> element.
// Emits errorOccured(ResourceError, ...) when the stream is empty or no
// video element exists yet.
void QWasmVideoOutput::setSource(QIODevice *stream)
{
    if (stream->bytesAvailable() == 0) {
        qWarning() << "data not available";
        emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("data not available"));
        return;
    }
    if (m_video.isUndefined() || m_video.isNull()) {
        emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
        return;
    }

    // Sniff the mime type first (peeks without consuming), then read the
    // whole payload; order matters here.
    QMimeDatabase db;
    QMimeType mime = db.mimeTypeForData(stream);

    QByteArray buffer = stream->readAll();

    // Copy the bytes into a JS Blob tagged with the detected mime type.
    qstdweb::Blob contentBlob = qstdweb::Blob::copyFrom(buffer.data(), buffer.size(), mime.name().toStdString());

    emscripten::val window = qstdweb::window();

    if (window["safari"].isUndefined()) {
        // NOTE(review): the object URL created here is never passed to
        // URL.revokeObjectURL, so the Blob stays referenced for the page's
        // lifetime — confirm whether that leak is acceptable.
        emscripten::val contentUrl = window["URL"].call<emscripten::val>("createObjectURL", contentBlob.val());
        m_video.set("src", contentUrl);
        m_source = QString::fromStdString(contentUrl.as<std::string>());
    } else {
        // only Safari currently supports Blob with srcObject
        m_video.set("srcObject", contentBlob.val());
    }
}
304
305void QWasmVideoOutput::setVolume(qreal volume)
306{ // between 0 - 1
307 volume = qBound(qreal(0.0), volume, qreal(1.0));
308 m_video.set("volume", volume);
309}
310
311void QWasmVideoOutput::setMuted(bool muted)
312{
313 if (m_video.isUndefined() || m_video.isNull()) {
314 // error
315 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
316 return;
317 }
318 m_video.set("muted", muted);
319}
320
322{
323 return (!m_video.isUndefined() || !m_video.isNull())
324 ? (m_video["currentTime"].as<double>() * 1000)
325 : 0;
326}
327
328void QWasmVideoOutput::seekTo(qint64 positionMSecs)
329{
330 if (isVideoSeekable()) {
331 float positionToSetInSeconds = float(positionMSecs) / 1000;
332 emscripten::val seekableTimeRange = m_video["seekable"];
333 if (!seekableTimeRange.isNull() || !seekableTimeRange.isUndefined()) {
334 // range user can seek
335 if (seekableTimeRange["length"].as<int>() < 1)
336 return;
337 if (positionToSetInSeconds
338 >= seekableTimeRange.call<emscripten::val>("start", 0).as<double>()
339 && positionToSetInSeconds
340 <= seekableTimeRange.call<emscripten::val>("end", 0).as<double>()) {
341 m_requestedPosition = positionToSetInSeconds;
342
343 m_video.set("currentTime", m_requestedPosition);
344 }
345 }
346 }
347 qCDebug(qWasmMediaVideoOutput) << "m_requestedPosition" << m_requestedPosition;
348}
349
351{
352 if (m_video.isUndefined() || m_video.isNull()) {
353 // error
354 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
355 return false;
356 }
357
358 emscripten::val seekableTimeRange = m_video["seekable"];
359 if (seekableTimeRange["length"].as<int>() < 1)
360 return false;
361 if (!seekableTimeRange.isNull() || !seekableTimeRange.isUndefined()) {
362 bool isit = !QtPrivate::fuzzyCompare(
363 seekableTimeRange.call<emscripten::val>("start", 0).as<double>(),
364 seekableTimeRange.call<emscripten::val>("end", 0).as<double>());
365 return isit;
366 }
367 return false;
368}
369
// Creates the <video> DOM element (replacing any stale element with the
// same id), tags it with this object's pointer for the frame callbacks,
// and appends it hidden to the page body.
void QWasmVideoOutput::createVideoElement(const std::string &id)
{
    qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << this << id;
    // Create <video> element and add it to the page body

    emscripten::val document = emscripten::val::global("document");
    emscripten::val body = document["body"];

    emscripten::val oldVideo = document.call<emscripten::val>("getElementById", id);

    // need to remove stale element
    if (!oldVideo.isUndefined() && !oldVideo.isNull())
        oldVideo.call<void>("remove");

    m_videoSurfaceId = id;
    m_video = document.call<emscripten::val>("createElement", std::string("video"));

    m_video.set("id", m_videoSurfaceId.c_str());
    // class name doubles as a mode marker; videoFrameTimerCallback()
    // checks for "Camera" via the className property.
    m_video.call<void>("setAttribute", std::string("class"),
                       (m_currentVideoMode == QWasmVideoOutput::Camera ? std::string("Camera")
                                                                       : std::string("Video")));
    // Stash this object's address on the element so the static frame
    // callback can recover the QWasmVideoOutput instance.
    m_video.set("data-qvideocontext",
                emscripten::val(quintptr(reinterpret_cast<void *>(this))));

    m_video.set("preload", "metadata");

    // Uncaught DOMException: Failed to execute 'getImageData' on
    // 'OffscreenCanvasRenderingContext2D': The canvas has been tainted by
    // cross-origin data.
    // TODO figure out somehow to let user choose between these
    std::string originString = "anonymous"; // requires server Access-Control-Allow-Origin *
    // std::string originString = "use-credentials"; // must not
    // Access-Control-Allow-Origin *

    m_video.call<void>("setAttribute", std::string("crossorigin"), originString);
    body.call<void>("appendChild", m_video);

    // Create/add video source
    // NOTE(review): this <source> element is created and given a src but
    // never appended to m_video, so it is garbage-collected unused —
    // confirm whether it should be appendChild'ed or removed.
    document.call<emscripten::val>("createElement",
                                   std::string("source")).set("src", m_source.toStdString());

    // Set position:absolute, which makes it possible to position the video
    // element using x,y. coordinates, relative to its parent (the page's <body>
    // element)
    emscripten::val style = m_video["style"];
    style.set("position", "absolute");
    style.set("display", "none"); // hide

    if (!m_source.isEmpty())
        updateVideoElementSource(m_source);
}
421
// Creates the offscreen canvas used for grabbing frames when the
// WebCodecs VideoFrame path is unavailable. Falls back to a hidden
// on-page <canvas> on browsers without OffscreenCanvas (Safari).
void QWasmVideoOutput::createOffscreenElement(const QSize &offscreenSize)
{
    qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;

    if (m_hasVideoFrame) // VideoFrame does not require offscreen canvas/context
        return;

    // create offscreen element for grabbing frames
    // OffscreenCanvas - no safari :(
    // https://developer.mozilla.org/en-US/docs/Web/API/OffscreenCanvas

    emscripten::val document = emscripten::val::global("document");

    // TODO use correct frameBytesAllocationSize?
    // offscreen render buffer
    m_offscreen = emscripten::val::global("OffscreenCanvas");

    if (m_offscreen.isUndefined()) {
        // Safari OffscreenCanvas not supported, try old skool way
        m_offscreen = document.call<emscripten::val>("createElement", std::string("canvas"));

        m_offscreen.set("style",
                        "position:absolute;left:-1000px;top:-1000px"); // offscreen
        m_offscreen.set("width", offscreenSize.width());
        m_offscreen.set("height", offscreenSize.height());
        m_offscreenContext = m_offscreen.call<emscripten::val>("getContext", std::string("2d"));
    } else {
        m_offscreen = emscripten::val::global("OffscreenCanvas")
                              .new_(offscreenSize.width(), offscreenSize.height());
        // FIX: getContext's second argument is a plain options object, not
        // an array; the original built it with val::array(). val::object()
        // produces the {} dictionary the 2d context settings expect.
        emscripten::val offscreenAttributes = emscripten::val::object();
        offscreenAttributes.set("willReadFrequently", true);
        m_offscreenContext = m_offscreen.call<emscripten::val>("getContext", std::string("2d"),
                                                               offscreenAttributes);
    }
    std::string offscreenId = m_videoSurfaceId + "_offscreenOutputSurface";
    m_offscreen.set("id", offscreenId.c_str());
}
459
461{
462 if (!m_video.isUndefined() && !m_video.isNull())
463 m_video.call<void>("remove");
464}
465
467{
468 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;
469
470 // event callbacks
471 // timupdate
472 auto timeUpdateCallback = [=](emscripten::val event) {
473 qCDebug(qWasmMediaVideoOutput) << "timeupdate";
474
475 // qt progress is ms
476 emit progressChanged(event["target"]["currentTime"].as<double>() * 1000);
477 };
478 m_timeUpdateEvent.reset(new QWasmEventHandler(m_video, "timeupdate", timeUpdateCallback));
479
480 // play
481 auto playCallback = [=](emscripten::val event) {
482 Q_UNUSED(event)
483 qCDebug(qWasmMediaVideoOutput) << "play" << m_video["src"].as<std::string>();
484 if (!m_isSeeking)
485 emit stateChanged(QWasmMediaPlayer::Preparing);
486 };
487 m_playEvent.reset(new QWasmEventHandler(m_video, "play", playCallback));
488
489 // ended
490 auto endedCallback = [=](emscripten::val event) {
491 Q_UNUSED(event)
492 qCDebug(qWasmMediaVideoOutput) << "ended";
493 m_currentMediaStatus = MediaStatus::EndOfMedia;
494 emit statusChanged(m_currentMediaStatus);
495 m_shouldStop = true;
496 stop();
497 };
498 m_endedEvent.reset(new QWasmEventHandler(m_video, "ended", endedCallback));
499
500 // durationchange
501 auto durationChangeCallback = [=](emscripten::val event) {
502 qCDebug(qWasmMediaVideoOutput) << "durationChange";
503
504 // qt duration is in milliseconds.
505 qint64 dur = event["target"]["duration"].as<double>() * 1000;
506 emit durationChanged(dur);
507 };
508 m_durationChangeEvent.reset(
509 new QWasmEventHandler(m_video, "durationchange", durationChangeCallback));
510
511 // loadeddata
512 auto loadedDataCallback = [=](emscripten::val event) {
513 Q_UNUSED(event)
514 qCDebug(qWasmMediaVideoOutput) << "loaded data";
515
516 emit stateChanged(QWasmMediaPlayer::Prepared);
517 if (m_isSeekable != isVideoSeekable()) {
518 m_isSeekable = isVideoSeekable();
519 emit seekableChanged(m_isSeekable);
520 }
521 };
522 m_loadedDataEvent.reset(new QWasmEventHandler(m_video, "loadeddata", loadedDataCallback));
523
524 // error
525 auto errorCallback = [=](emscripten::val event) {
526 qCDebug(qWasmMediaVideoOutput) << "error";
527 if (event.isUndefined() || event.isNull())
528 return;
529 emit errorOccured(m_video["error"]["code"].as<int>(),
530 QString::fromStdString(m_video["error"]["message"].as<std::string>()));
531 };
532 m_errorChangeEvent.reset(new QWasmEventHandler(m_video, "error", errorCallback));
533
534 // resize
535 auto resizeCallback = [=](emscripten::val event) {
536 Q_UNUSED(event)
537 qCDebug(qWasmMediaVideoOutput) << "resize";
538
539 updateVideoElementGeometry(
540 QRect(0, 0, m_video["videoWidth"].as<int>(), m_video["videoHeight"].as<int>()));
541 emit sizeChange(m_video["videoWidth"].as<int>(), m_video["videoHeight"].as<int>());
542
543 };
544 m_resizeChangeEvent.reset(new QWasmEventHandler(m_video, "resize", resizeCallback));
545
546 // loadedmetadata
547 auto loadedMetadataCallback = [=](emscripten::val event) {
548 Q_UNUSED(event)
549 qCDebug(qWasmMediaVideoOutput) << "loaded meta data";
550
551 emit metaDataLoaded();
552 };
553 m_loadedMetadataChangeEvent.reset(
554 new QWasmEventHandler(m_video, "loadedmetadata", loadedMetadataCallback));
555
556 // loadstart
557 auto loadStartCallback = [=](emscripten::val event) {
558 Q_UNUSED(event)
559 qCDebug(qWasmMediaVideoOutput) << "load started";
560 m_currentMediaStatus = MediaStatus::LoadingMedia;
561 emit statusChanged(m_currentMediaStatus);
562 m_shouldStop = false;
563 };
564 m_loadStartChangeEvent.reset(new QWasmEventHandler(m_video, "loadstart", loadStartCallback));
565
566 // canplay
567
568 auto canPlayCallback = [=](emscripten::val event) {
569 if (event.isUndefined() || event.isNull())
570 return;
571 qCDebug(qWasmMediaVideoOutput) << "can play"
572 << "m_requestedPosition" << m_requestedPosition;
573
574 if (!m_shouldStop)
575 emit readyChanged(true); // sets video available
576 };
577 m_canPlayChangeEvent.reset(new QWasmEventHandler(m_video, "canplay", canPlayCallback));
578
579 // canplaythrough
580 auto canPlayThroughCallback = [=](emscripten::val event) {
581 Q_UNUSED(event)
582 qCDebug(qWasmMediaVideoOutput) << "can play through"
583 << "m_shouldStop" << m_shouldStop;
584
585 if (m_currentMediaStatus == MediaStatus::EndOfMedia)
586 return;
587 bool seekable = isVideoSeekable();
588 if (m_isSeekable != seekable) {
589 m_isSeekable = seekable;
590 emit seekableChanged(m_isSeekable);
591 }
592 if (!m_isSeeking && !m_shouldStop) {
593 emscripten::val timeRanges = m_video["buffered"];
594 if ((!timeRanges.isNull() || !timeRanges.isUndefined())
595 && timeRanges["length"].as<int>() == 1) {
596 double buffered = m_video["buffered"].call<emscripten::val>("end", 0).as<double>();
597 const double duration = m_video["duration"].as<double>();
598
599 if (duration == buffered) {
600 m_currentBufferedValue = 100;
601 emit bufferingChanged(m_currentBufferedValue);
602 }
603 }
604 constexpr int hasEnoughData = 4;
605 if (m_video["readyState"].as<int>() == hasEnoughData) {
606 m_currentMediaStatus = MediaStatus::LoadedMedia;
607 emit statusChanged(m_currentMediaStatus);
609 }
610 } else {
611 m_shouldStop = false;
612 }
613 };
614 m_canPlayThroughChangeEvent.reset(
615 new QWasmEventHandler(m_video, "canplaythrough", canPlayThroughCallback));
616
617 // seeking
618 auto seekingCallback = [=](emscripten::val event) {
619 Q_UNUSED(event)
620 qCDebug(qWasmMediaVideoOutput)
621 << "seeking started" << (m_video["currentTime"].as<double>() * 1000);
622 m_isSeeking = true;
623 };
624 m_seekingChangeEvent.reset(new QWasmEventHandler(m_video, "seeking", seekingCallback));
625
626 // seeked
627 auto seekedCallback = [=](emscripten::val event) {
628 Q_UNUSED(event)
629 qCDebug(qWasmMediaVideoOutput) << "seeked" << (m_video["currentTime"].as<double>() * 1000);
630 emit progressChanged(m_video["currentTime"].as<double>() * 1000);
631 m_isSeeking = false;
632 };
633 m_seekedChangeEvent.reset(new QWasmEventHandler(m_video, "seeked", seekedCallback));
634
635 // emptied
636 auto emptiedCallback = [=](emscripten::val event) {
637 Q_UNUSED(event)
638 qCDebug(qWasmMediaVideoOutput) << "emptied";
639 emit readyChanged(false);
640 m_currentMediaStatus = MediaStatus::EndOfMedia;
641 emit statusChanged(m_currentMediaStatus);
642 };
643 m_emptiedChangeEvent.reset(new QWasmEventHandler(m_video, "emptied", emptiedCallback));
644
645 // stalled
646 auto stalledCallback = [=](emscripten::val event) {
647 Q_UNUSED(event)
648 qCDebug(qWasmMediaVideoOutput) << "stalled";
649 m_currentMediaStatus = MediaStatus::StalledMedia;
650 emit statusChanged(m_currentMediaStatus);
651 };
652 m_stalledChangeEvent.reset(new QWasmEventHandler(m_video, "stalled", stalledCallback));
653
654 // waiting
655 auto waitingCallback = [=](emscripten::val event) {
656 Q_UNUSED(event)
657
658 qCDebug(qWasmMediaVideoOutput) << "waiting";
659 // check buffer
660 };
661 m_waitingChangeEvent.reset(new QWasmEventHandler(m_video, "waiting", waitingCallback));
662
663 // suspend
664
665 // playing
666 auto playingCallback = [=](emscripten::val event) {
667 Q_UNUSED(event)
668 qCDebug(qWasmMediaVideoOutput) << "playing";
669 if (m_isSeeking)
670 return;
671 emit stateChanged(QWasmMediaPlayer::Started);
672 if (m_toBePaused || !m_shouldStop) { // paused
673 m_toBePaused = false;
674 QMetaObject::invokeMethod(this, &QWasmVideoOutput::videoFrameTimerCallback, Qt::QueuedConnection);
675 }
676 };
677 m_playingChangeEvent.reset(new QWasmEventHandler(m_video, "playing", playingCallback));
678
679 // progress (buffering progress)
680 auto progesssCallback = [=](emscripten::val event) {
681 if (event.isUndefined() || event.isNull())
682 return;
683
684 const double duration = event["target"]["duration"].as<double>();
685 if (duration < 0) // track not exactly ready yet
686 return;
687
688 emscripten::val timeRanges = event["target"]["buffered"];
689
690 if ((!timeRanges.isNull() || !timeRanges.isUndefined())
691 && timeRanges["length"].as<int>() == 1) {
692 emscripten::val dVal = timeRanges.call<emscripten::val>("end", 0);
693 if (!dVal.isNull() || !dVal.isUndefined()) {
694 double bufferedEnd = dVal.as<double>();
695
696 if (duration > 0 && bufferedEnd > 0) {
697 const double bufferedValue = (bufferedEnd / duration * 100);
698 qCDebug(qWasmMediaVideoOutput) << "progress buffered";
699 m_currentBufferedValue = bufferedValue;
700 emit bufferingChanged(m_currentBufferedValue);
701 if (bufferedEnd == duration)
702 m_currentMediaStatus = MediaStatus::BufferedMedia;
703 else
704 m_currentMediaStatus = MediaStatus::BufferingMedia;
705 emit statusChanged(m_currentMediaStatus);
706 }
707 }
708 }
709 };
710 m_progressChangeEvent.reset(new QWasmEventHandler(m_video, "progress", progesssCallback));
711
712 // pause
713 auto pauseCallback = [=](emscripten::val event) {
714 Q_UNUSED(event)
715 qCDebug(qWasmMediaVideoOutput) << "pause";
716
717 const double currentTime = m_video["currentTime"].as<double>(); // in seconds
718 const double duration = m_video["duration"].as<double>(); // in seconds
719 if ((currentTime > 0 && currentTime < duration) && (!m_shouldStop && m_toBePaused)) {
720 emit stateChanged(QWasmMediaPlayer::Paused);
721 } else {
722 // stop this crazy thing!
723 m_video.set("currentTime", emscripten::val(0));
724 emit stateChanged(QWasmMediaPlayer::Stopped);
725 }
726 };
727 m_pauseChangeEvent.reset(new QWasmEventHandler(m_video, "pause", pauseCallback));
728
729 // onunload
730 // we use lower level events here as to avert a crash on activate using the
731 // qtdweb see _qt_beforeUnload
732 emscripten::val window = emscripten::val::global("window");
733
734 auto beforeUnloadCallback = [=](emscripten::val event) {
735 Q_UNUSED(event)
736 // large videos will leave the unloading window
737 // in a frozen state, so remove the video element src first
738 m_video.call<void>("removeAttribute", emscripten::val("src"));
739 m_video.call<void>("load");
740 };
741 m_beforeUnloadEvent.reset(new QWasmEventHandler(window, "beforeunload", beforeUnloadCallback));
742
743}
744
745void QWasmVideoOutput::updateVideoElementGeometry(const QRect &windowGeometry)
746{
747 QRect m_videoElementSource(windowGeometry.topLeft(), windowGeometry.size());
748
749 emscripten::val style = m_video["style"];
750 style.set("left", QStringLiteral("%1px").arg(m_videoElementSource.left()).toStdString());
751 style.set("top", QStringLiteral("%1px").arg(m_videoElementSource.top()).toStdString());
752 m_video.set("width", m_videoElementSource.width());
753 m_video.set("height", m_videoElementSource.height());
754 style.set("z-index", "999");
755
756 if (!m_hasVideoFrame) {
757 // offscreen
758 m_offscreen.set("width", m_videoElementSource.width());
759 m_offscreen.set("height", m_videoElementSource.height());
760 }
761}
762
764{
765 // qt duration is in ms
766 // js is sec
767
768 if (m_video.isUndefined() || m_video.isNull())
769 return 0;
770 return m_video["duration"].as<double>() * 1000;
771}
772
773void QWasmVideoOutput::newFrame(const QVideoFrame &frame)
774{
775 m_wasmSink->setVideoFrame(frame);
776}
777
779{
780 m_video.set("playbackRate", emscripten::val(rate));
781}
782
784{
785 return (m_video.isUndefined() || m_video.isNull()) ? 0 : m_video["playbackRate"].as<float>();
786}
787
788void QWasmVideoOutput::checkNetworkState()
789{
790 int netState = m_video["networkState"].as<int>();
791
792 qCDebug(qWasmMediaVideoOutput) << netState;
793
794 switch (netState) {
795 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkEmpty: // no data
796 break;
797 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkIdle:
798 break;
799 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkLoading:
800 break;
801 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkNoSource: // no source
802 emit errorOccured(netState, QStringLiteral("No media source found"));
803 break;
804 };
805}
806
// Canvas fallback frame grab: draws the current <video> frame onto the
// offscreen 2d context, copies the RGBA pixels out via getImageData, wraps
// them in a QVideoFrame and pushes it to the sink.
// context is the QWasmVideoOutput* registered with the animation-frame loop.
void QWasmVideoOutput::videoComputeFrame(void *context)
{
    if (m_offscreenContext.isUndefined() || m_offscreenContext.isNull()) {
        qCDebug(qWasmMediaVideoOutput) << "offscreen canvas context could not be found";
        return;
    }

    if (m_video.isUndefined() || m_video.isNull()) {
        qCDebug(qWasmMediaVideoOutput) << "video element could not be found";
        return;
    }

    const int videoWidth = m_video["videoWidth"].as<int>();
    const int videoHeight = m_video["videoHeight"].as<int>();

    // Nothing to grab until the element reports real dimensions.
    if (videoWidth == 0 || videoHeight == 0)
        return;

    m_offscreenContext.call<void>("drawImage", m_video, 0, 0, videoWidth, videoHeight);

    emscripten::val frame = // one frame, Uint8ClampedArray
            m_offscreenContext.call<emscripten::val>("getImageData", 0, 0, videoWidth, videoHeight);

    const QSize frameBytesAllocationSize(videoWidth, videoHeight);

    // this seems to work ok, even though getImageData returns a Uint8ClampedArray
    QByteArray frameBytes = qstdweb::Uint8Array(frame["data"]).copyToQByteArray();

    QVideoFrameFormat frameFormat =
            QVideoFrameFormat(frameBytesAllocationSize, QVideoFrameFormat::Format_RGBA8888);

    auto *textureDescription = QVideoTextureHelper::textureDescription(frameFormat.pixelFormat());

    QVideoFrame vFrame = QVideoFramePrivate::createFrame(
            std::make_unique<QMemoryVideoBuffer>(
                    std::move(frameBytes),
                    textureDescription->strideForWidth(frameFormat.frameWidth())), // width of line with padding
            frameFormat);
    QWasmVideoOutput *wasmVideoOutput = reinterpret_cast<QWasmVideoOutput *>(context);

    // BUG FIX: the original only warned on a missing sink and then
    // dereferenced it anyway (null-pointer crash); bail out instead.
    if (!wasmVideoOutput->m_wasmSink) {
        qWarning() << "ERROR ALERT!! video sink not set";
        return;
    }
    wasmVideoOutput->m_wasmSink->setVideoFrame(vFrame);
}
853
854
856{
857 QWasmVideoOutput *videoOutput = reinterpret_cast<QWasmVideoOutput *>(context);
858 if (!videoOutput || !videoOutput->isReady())
859 return;
860 emscripten::val videoElement = videoOutput->currentVideoElement();
861 emscripten::val oneVideoFrame = val::global("VideoFrame").new_(videoElement);
862
863 if (oneVideoFrame.isNull() || oneVideoFrame.isUndefined()) {
864 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO
865 << "ERROR" << "failed to construct VideoFrame";
866 return;
867 }
868
869 emscripten::val options = emscripten::val::object();
870 emscripten::val rectOptions = emscripten::val::object();
871
872 int displayWidth = oneVideoFrame["displayWidth"].as<int>();
873 int displayHeight = oneVideoFrame["displayHeight"].as<int>();
874
875 rectOptions.set("width", displayWidth);
876 rectOptions.set("height", displayHeight);
877 options.set("rect", rectOptions);
878
879 emscripten::val frameBytesAllocationSize = oneVideoFrame.call<emscripten::val>("allocationSize", options);
880 emscripten::val frameBuffer =
881 emscripten::val::global("Uint8Array").new_(frameBytesAllocationSize);
882 QWasmVideoOutput *wasmVideoOutput =
883 reinterpret_cast<QWasmVideoOutput*>(videoElement["data-qvideocontext"].as<quintptr>());
884
885 qstdweb::PromiseCallbacks copyToCallback;
886 copyToCallback.thenFunc = [wasmVideoOutput, oneVideoFrame, frameBuffer,
887 displayWidth, displayHeight]
888 (emscripten::val frameLayout)
889 {
890 if (frameLayout.isNull() || frameLayout.isUndefined()) {
891 qCDebug(qWasmMediaVideoOutput) << "theres no frameLayout";
892 return;
893 }
894
895 // frameBuffer now has a new frame, send to Qt
896 const QSize frameSize(displayWidth,
897 displayHeight);
898
899 QByteArray frameBytes = QByteArray::fromEcmaUint8Array(frameBuffer);
900
901 QVideoFrameFormat::PixelFormat pixelFormat = fromJsPixelFormat(oneVideoFrame["format"].as<std::string>());
902 if (pixelFormat == QVideoFrameFormat::Format_Invalid) {
903 qWarning() << "Invalid pixel format";
904 return;
905 }
906 QVideoFrameFormat frameFormat = QVideoFrameFormat(frameSize, pixelFormat);
907
908 auto buffer = std::make_unique<QMemoryVideoBuffer>(
909 std::move(frameBytes),
910 oneVideoFrame["codedWidth"].as<int>());
911
912 QVideoFrame vFrame =
913 QVideoFramePrivate::createFrame(std::move(buffer), std::move(frameFormat));
914
915 if (!wasmVideoOutput) {
916 qCDebug(qWasmMediaVideoOutput) << "ERROR:"
917 << "data-qvideocontext not found";
918 return;
919 }
920 if (!wasmVideoOutput->m_wasmSink) {
921 qWarning() << "ERROR ALERT!! video sink not set";
922 return;
923 }
924 wasmVideoOutput->m_wasmSink->setVideoFrame(vFrame);
925 oneVideoFrame.call<emscripten::val>("close");
926 };
927 copyToCallback.catchFunc = [&, wasmVideoOutput, oneVideoFrame](emscripten::val error)
928 {
929 qCDebug(qWasmMediaVideoOutput) << "Error"
930 << QString::fromStdString(error["name"].as<std::string>())
931 << QString::fromStdString(error["message"].as<std::string>()) ;
932
933 oneVideoFrame.call<emscripten::val>("close");
934 wasmVideoOutput->stop();
935 return;
936 };
937
938 qstdweb::Promise::make(oneVideoFrame, u"copyTo"_s, std::move(copyToCallback), frameBuffer);
939}
940
942{
943 static auto frame = [](double frameTime, void *context) -> EM_BOOL {
944 Q_UNUSED(frameTime);
945
946 QWasmVideoOutput *videoOutput = reinterpret_cast<QWasmVideoOutput *>(context);
947 if (!videoOutput || videoOutput->m_currentMediaStatus != MediaStatus::LoadedMedia)
948 return false;
949 if (videoOutput->m_shouldStop)
950 return false;
951 emscripten::val videoElement = videoOutput->currentVideoElement();
952
953 if (videoElement.isNull() || videoElement.isUndefined()) {
954 qWarning() << "no video element";
955 }
956
957 if (videoElement["paused"].as<bool>() || videoElement["ended"].as<bool>()
958 || videoElement["readyState"].as<int>() != 4)
959 return false;
960
961 if (videoOutput->m_hasVideoFrame) {
962 videoOutput->videoFrameCallback(context);
963 } else {
964 videoOutput->videoComputeFrame(context);
965 }
966 return true;
967 };
968
969 if ((!m_shouldStop && m_video["className"].as<std::string>() == "Camera" && m_cameraIsReady)
970 || isReady())
971 emscripten_request_animation_frame_loop(frame, this);
972 // about 60 fps
973}
974
// Maps a WebCodecs VideoFrame pixel-format string onto the closest
// QVideoFrameFormat pixel format; returns Format_Invalid when there is
// no Qt equivalent.
QVideoFrameFormat::PixelFormat QWasmVideoOutput::fromJsPixelFormat(std::string_view videoFormat)
{
    struct FormatMapping
    {
        std::string_view jsName;
        QVideoFrameFormat::PixelFormat qtFormat;
    };
    // "I420A" (AYUV-like) and "I444" have no matching Qt pixel format and
    // are intentionally absent; they fall through to Format_Invalid.
    static constexpr FormatMapping mappings[] = {
        { "I420", QVideoFrameFormat::Format_YUV420P },
        { "I422", QVideoFrameFormat::Format_YUV422P },
        { "NV12", QVideoFrameFormat::Format_NV12 },
        { "RGBA", QVideoFrameFormat::Format_RGBA8888 },
        { "RGBX", QVideoFrameFormat::Format_RGBX8888 },
        { "BGRA", QVideoFrameFormat::Format_BGRA8888 },
        { "BGRX", QVideoFrameFormat::Format_BGRX8888 },
    };

    for (const auto &mapping : mappings) {
        if (mapping.jsName == videoFormat)
            return mapping.qtFormat;
    }
    return QVideoFrameFormat::Format_Invalid;
}
998
1000{
1001 emscripten::val stream = m_video["srcObject"];
1002 if ((!stream.isNull() && !stream.isUndefined()) && stream["active"].as<bool>()) {
1003 emscripten::val tracks = stream.call<emscripten::val>("getVideoTracks");
1004 if (!tracks.isUndefined()) {
1005 if (tracks["length"].as<int>() == 0)
1006 return emscripten::val::undefined();
1007
1008 emscripten::val track = tracks[0];
1009 if (!track.isUndefined()) {
1010 emscripten::val trackCaps = emscripten::val::undefined();
1011 if (!track["getCapabilities"].isUndefined())
1012 trackCaps = track.call<emscripten::val>("getCapabilities");
1013 else // firefox does not support getCapabilities
1014 trackCaps = track.call<emscripten::val>("getSettings");
1015
1016 if (!trackCaps.isUndefined())
1017 return trackCaps;
1018 }
1019 }
1020 } else {
1021 // camera not started track capabilities not available
1022 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("capabilities not available"));
1023 }
1024
1025 return emscripten::val::undefined();
1026}
1027
// Applies a single MediaTrackConstraints entry {key: value} to the first
// video track of the active camera stream.
// Returns true when the constraint was dispatched, false when no usable
// stream or track exists. Note: applyConstraints returns a Promise; any
// asynchronous rejection is not observed here.
bool QWasmVideoOutput::setDeviceSetting(const std::string &key, emscripten::val value)
{
    emscripten::val stream = m_video["srcObject"];
    if (stream.isNull() || stream.isUndefined()
        || stream["getVideoTracks"].isUndefined())
        return false;

    emscripten::val tracks = stream.call<emscripten::val>("getVideoTracks");
    // BUG FIX: the original tested (!isNull() || !isUndefined()), which is
    // always true; both must hold before indexing the track list.
    if (!tracks.isNull() && !tracks.isUndefined()) {
        if (tracks["length"].as<int>() == 0)
            return false;

        emscripten::val track = tracks[0];
        emscripten::val constraint = emscripten::val::object();
        // (The original applied std::move to the const& key — a silent copy;
        // pass it directly. Also fixes the "contraint" typo.)
        constraint.set(key, value);
        track.call<emscripten::val>("applyConstraints", constraint);
        return true;
    }

    return false;
}
1049
1050QT_END_NAMESPACE
1051
1052#include "moc_qwasmvideooutput_p.cpp"
void addCameraSourceElement(const std::string &id)
void updateVideoElementGeometry(const QRect &windowGeometry)
bool setDeviceSetting(const std::string &key, emscripten::val value)
emscripten::val surfaceElement()
emscripten::val getDeviceCapabilities()
void videoFrameCallback(void *context)
void setVideoSize(const QSize &)
void setMuted(bool muted)
void setSource(const QUrl &url)
void setVideoMode(QWasmVideoOutput::WasmVideoMode mode)
void seekTo(qint64 position)
void setVolume(qreal volume)
void createVideoElement(const std::string &id)
void updateVideoElementSource(const QString &src)
void setSource(QIODevice *stream)
void setPlaybackRate(qreal rate)
void createOffscreenElement(const QSize &offscreenSize)
Combined button and popup list for selecting options.
Q_LOGGING_CATEGORY(lcEventDispatcher, "qt.eventdispatcher")
static bool checkForVideoFrame()