Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
qwasmvideooutput.cpp
Go to the documentation of this file.
1// Copyright (C) 2022 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
4#include <QDebug>
5#include <QUrl>
6#include <QPoint>
7#include <QRect>
8#include <QMediaPlayer>
9#include <QVideoFrame>
10#include <QFile>
11#include <QBuffer>
12#include <QMimeDatabase>
14
15#include <qvideosink.h>
16#include <private/qplatformvideosink_p.h>
17#include <private/qmemoryvideobuffer_p.h>
18#include <private/qvideotexturehelper_p.h>
19#include <private/qvideoframe_p.h>
20#include <private/qstdweb_p.h>
21#include <QTimer>
22
23#include <emscripten/bind.h>
24#include <emscripten/html5.h>
25#include <emscripten/val.h>
26
27
29
30
31using namespace emscripten;
32using namespace Qt::Literals;
33
34Q_LOGGING_CATEGORY(qWasmMediaVideoOutput, "qt.multimedia.wasm.videooutput")
35
36
37static bool checkForVideoFrame()
38{
39 emscripten::val videoFrame = emscripten::val::global("VideoFrame");
40 return (!videoFrame.isNull() && !videoFrame.isUndefined());
41}
42
// Constructs the video output. All real setup happens later via
// createVideoElement() / createOffscreenElement().
QWasmVideoOutput::QWasmVideoOutput(QObject *parent) : QObject{ parent }
{
}
47
49
50void QWasmVideoOutput::setVideoSize(const QSize &newSize)
51{
52 if (m_pendingVideoSize == newSize)
53 return;
54
55 m_pendingVideoSize = newSize;
56 updateVideoElementGeometry(QRect(0, 0, m_pendingVideoSize.width(), m_pendingVideoSize.height()));
57}
58
// QWasmVideoOutput::setVideoMode(QWasmVideoOutput::WasmVideoMode mode)
// (signature line missing from this excerpt; declaration visible below).
// Records which pipeline -- media file vs. camera/surface capture --
// subsequent start()/stop() calls operate on.
{
    m_currentVideoMode = mode;
}
63
65{
66 if (m_video.isUndefined() || m_video.isNull()
67 || !m_wasmSink) {
68 // error
69 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
70 return;
71 }
72 switch (m_currentVideoMode) {
74 emscripten::val sourceObj = m_video["src"];
75 if ((sourceObj.isUndefined() || sourceObj.isNull()) && !m_source.isEmpty()) {
76 m_video.set("src", m_source);
77 }
78 if (!isReady())
79 m_video.call<void>("load");
80 } break;
83 if (!m_cameraIsReady) {
84 m_shouldBeStarted = true;
85 }
86
87 if (!m_connection)
88 m_connection = connect(m_mediaInputStream.get(), &JsMediaInputStream::mediaStreamReady, this,
89 [=]( ) {
90 m_video.set("srcObject", m_mediaInputStream->getMediaStream());
91
92 emscripten::val stream = m_video["srcObject"];
93 if (stream.isNull() || stream.isUndefined()) { // camera device
94 qCDebug(qWasmMediaVideoOutput) << "srcObject ERROR";
95 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
96 return;
97 } else {
98 emscripten::val videoTracks = stream.call<emscripten::val>("getVideoTracks");
99 if (videoTracks.isNull() || videoTracks.isUndefined()) {
100 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << "videoTracks is null";
101 emit errorOccured(QMediaPlayer::ResourceError,
102 QStringLiteral("video surface error"));
103 return;
104 }
105 if (videoTracks["length"].as<int>() == 0) {
106 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << "videoTracks count is 0";
107 emit errorOccured(QMediaPlayer::ResourceError,
108 QStringLiteral("video surface error"));
109 return;
110 }
111 emscripten::val videoSettings = videoTracks[0].call<emscripten::val>("getSettings");
112 if (!videoSettings.isNull() || !videoSettings.isUndefined()) {
113 const int width = videoSettings["width"].as<int>();
114 const int height = videoSettings["height"].as<int>();
115 updateVideoElementGeometry(QRect(0, 0, width, height));
116 }
117 }
118
119 m_shouldBeStarted = false;
120 m_video.call<void>("play");
121
122 if (m_currentVideoMode == QWasmVideoOutput::Camera
123 || m_currentVideoMode == QWasmVideoOutput::SurfaceCapture) {
124 emit readyChanged(true);
125 if (m_hasVideoFrame)
126 videoFrameTimerCallback();
127 }
128
129 });
130
131 m_mediaInputStream->setStreamDevice(m_cameraId);
132
133 } break;
134 };
135
136 m_shouldStop = false;
137 m_toBePaused = false;
138
139 if (m_currentVideoMode != QWasmVideoOutput::Camera
140 && m_currentVideoMode != QWasmVideoOutput::SurfaceCapture) {
141 m_video.call<void>("play");
142 }
143}
144
// QWasmVideoOutput::stop() -- signature line missing from this excerpt.
// Stops playback: for a full stop the media stream is torn down and the
// element's srcObject cleared; if pause() was requested first, only the
// element is paused.
{
    qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;

    if (m_video.isUndefined() || m_video.isNull()) {
        // error
        emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("Resource error"));
        return;
    }
    m_shouldStop = true;
    if (!m_toBePaused) {
        // we are stopped , need to reset
        // NOTE(review): m_mediaInputStream is only created in
        // addCameraSourceElement(); presumably non-null on this path for
        // plain file playback too -- verify against callers.
        m_mediaInputStream->stopMediaStream();

        m_video.set("srcObject", emscripten::val::null());
        disconnect(m_connection);
    } else {
        m_video.call<void>("pause");
    }
}
165
// QWasmVideoOutput::pause() -- signature line missing from this excerpt.
// Pauses the <video> element without tearing down the stream; the
// m_toBePaused flag lets stop() distinguish pause from a full stop.
{
    qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;

    if (m_video.isUndefined() || m_video.isNull()) {
        // error
        emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
        return;
    }
    m_shouldStop = false;
    m_toBePaused = true;
    m_video.call<void>("pause");
}
179
// QWasmVideoOutput::reset() -- signature line missing from this excerpt.
// Flushes the pending frame from the sink, clears the source URL and
// rewinds/reloads the element.
{
    // flush pending frame
    if (m_wasmSink)
        m_wasmSink->platformVideoSink()->setVideoFrame(QVideoFrame());

    m_source.clear();
    m_video.set("currentTime", emscripten::val(0));
    m_video.call<void>("load");
}
190
// QWasmVideoOutput::currentVideoElement() -- signature line missing from
// this excerpt. Returns the underlying HTML <video> element.
{
    return m_video;
}
195
196void QWasmVideoOutput::setSurface(QVideoSink *surface)
197{
198 if (!surface || surface == m_wasmSink) {
199 return;
200 }
201
202 m_wasmSink = surface;
203}
204
// QWasmVideoOutput::isReady() -- signature line missing from this
// excerpt. True once a video element exists and the media status has
// reached LoadedMedia.
{
    if (m_video.isUndefined() || m_video.isNull()) {
        // error
        return false;
    }

    return m_currentMediaStatus == MediaStatus::LoadedMedia;
}
214
215void QWasmVideoOutput::setSource(const QUrl &url)
216{
217 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << url;
218
219 m_source = url.toString();
220
221 if (m_video.isUndefined() || m_video.isNull()) {
222 return;
223 }
224
225 if (url.isEmpty()) {
226 stop();
227 return;
228 }
229 if (url.isLocalFile()) {
230 QFile localFile(url.toLocalFile());
231 if (localFile.open(QIODevice::ReadOnly)) {
232 QDataStream buffer(&localFile); // we will serialize the data into the file
233 setSource(buffer.device());
234 } else {
235 qWarning() << "Failed to open file";
236 }
237 return;
238 }
239
240 updateVideoElementSource(m_source);
241}
242
// QWasmVideoOutput::updateVideoElementSource(const QString &src) --
// signature line missing from this excerpt (declaration visible below).
// Assigns src to the <video> element and (re)starts loading.
{
    m_video.set("src", src.toStdString());
    m_video.call<void>("load");
}
248
// Prepares the camera capture pipeline: creates a JsMediaInputStream for
// the given device and arranges for start() to be re-run once the
// browser grants access to the media stream.
// @param id camera device id, stored for setStreamDevice() in start()
void QWasmVideoOutput::addCameraSourceElement(const std::string &id)
{
    m_cameraIsReady = false;
    m_mediaInputStream.reset(new JsMediaInputStream(this));

    m_mediaInputStream->setUseAudio(m_hasAudio);
    m_mediaInputStream->setUseVideo(true);

    connect(m_mediaInputStream.get(), &JsMediaInputStream::mediaStreamReady, this,
            [this]() {
                qCDebug(qWasmMediaVideoOutput) << "mediaStreamReady" << m_shouldBeStarted;

                m_cameraIsReady = true;
                // start() sets m_shouldBeStarted when it ran before the
                // stream was ready; replay it now.
                if (m_shouldBeStarted) {
                    start();
                    m_shouldBeStarted = false;
                }
            });

    m_cameraId = id;
}
270
// Loads media from an IO source: the stream content is copied into a JS
// Blob and handed to the <video> element, either through an object URL
// or -- on Safari, which supports Blob via srcObject -- directly.
// @param stream open device positioned at the media data; read in full
void QWasmVideoOutput::setSource(QIODevice *stream)
{
    if (stream->bytesAvailable() == 0) {
        qWarning() << "data not available";
        emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("data not available"));
        return;
    }
    if (m_video.isUndefined() || m_video.isNull()) {
        emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
        return;
    }

    // Sniff the container type so the Blob carries a usable MIME type.
    QMimeDatabase db;
    QMimeType mime = db.mimeTypeForData(stream);

    QByteArray buffer = stream->readAll();

    qstdweb::Blob contentBlob = qstdweb::Blob::copyFrom(buffer.data(), buffer.size(), mime.name().toStdString());

    emscripten::val window = qstdweb::window();

    if (window["safari"].isUndefined()) {
        // NOTE(review): the object URL created here is never passed to
        // URL.revokeObjectURL, so each call keeps the blob alive -- verify.
        emscripten::val contentUrl = window["URL"].call<emscripten::val>("createObjectURL", contentBlob.val());
        m_video.set("src", contentUrl);
        m_source = QString::fromStdString(contentUrl.as<std::string>());
    } else {
        // only Safari currently supports Blob with srcObject
        m_video.set("srcObject", contentBlob.val());
    }
}
301
302void QWasmVideoOutput::setVolume(qreal volume)
303{ // between 0 - 1
304 volume = qBound(qreal(0.0), volume, qreal(1.0));
305 m_video.set("volume", volume);
306}
307
308void QWasmVideoOutput::setMuted(bool muted)
309{
310 if (m_video.isUndefined() || m_video.isNull()) {
311 // error
312 emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
313 return;
314 }
315 m_video.set("muted", muted);
316}
317
// QWasmVideoOutput::getCurrentPosition() -- signature line missing from
// this excerpt. Returns the playback position in milliseconds
// (HTML currentTime is in seconds).
// NOTE(review): the '||' makes the condition true even when the element
// is null or undefined -- presumably '&&' was intended; verify.
{
    return (!m_video.isUndefined() || !m_video.isNull())
            ? (m_video["currentTime"].as<double>() * 1000)
            : 0;
}
324
325void QWasmVideoOutput::seekTo(qint64 positionMSecs)
326{
327 if (isVideoSeekable()) {
328 float positionToSetInSeconds = float(positionMSecs) / 1000;
329 emscripten::val seekableTimeRange = m_video["seekable"];
330 if (!seekableTimeRange.isNull() || !seekableTimeRange.isUndefined()) {
331 // range user can seek
332 if (seekableTimeRange["length"].as<int>() < 1)
333 return;
334 if (positionToSetInSeconds
335 >= seekableTimeRange.call<emscripten::val>("start", 0).as<double>()
336 && positionToSetInSeconds
337 <= seekableTimeRange.call<emscripten::val>("end", 0).as<double>()) {
338 m_requestedPosition = positionToSetInSeconds;
339
340 m_video.set("currentTime", m_requestedPosition);
341 }
342 }
343 }
344 qCDebug(qWasmMediaVideoOutput) << "m_requestedPosition" << m_requestedPosition;
345}
346
// QWasmVideoOutput::isVideoSeekable() -- signature line missing from
// this excerpt. True when the element's seekable TimeRanges describe a
// non-empty range (distinct start and end of range 0).
{
    if (m_video.isUndefined() || m_video.isNull()) {
        // error
        emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("video surface error"));
        return false;
    }

    emscripten::val seekableTimeRange = m_video["seekable"];
    // NOTE(review): "length" is read before the null/undefined check
    // below, and that check uses '||' (always true); presumably the
    // check was meant to come first with '&&' -- verify.
    if (seekableTimeRange["length"].as<int>() < 1)
        return false;
    if (!seekableTimeRange.isNull() || !seekableTimeRange.isUndefined()) {
        // Seekable iff the first range has distinct start and end times.
        bool isit = !QtPrivate::fuzzyCompare(
                seekableTimeRange.call<emscripten::val>("start", 0).as<double>(),
                seekableTimeRange.call<emscripten::val>("end", 0).as<double>());
        return isit;
    }
    return false;
}
366
// Creates the page's <video> element (replacing any stale element with
// the same id), tags it with a back-pointer to this object, and appends
// it to <body>, absolutely positioned and hidden.
// @param id DOM id to assign; also stored in m_videoSurfaceId
void QWasmVideoOutput::createVideoElement(const std::string &id)
{
    qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO << this << id;
    // Create <video> element and add it to the page body

    emscripten::val document = emscripten::val::global("document");
    emscripten::val body = document["body"];

    emscripten::val oldVideo = document.call<emscripten::val>("getElementById", id);

    // need to remove stale element
    if (!oldVideo.isUndefined() && !oldVideo.isNull())
        oldVideo.call<void>("remove");

    m_videoSurfaceId = id;
    m_video = document.call<emscripten::val>("createElement", std::string("video"));

    m_video.set("id", m_videoSurfaceId.c_str());
    // The CSS class doubles as a mode marker; videoFrameTimerCallback()
    // compares className against "Camera".
    m_video.call<void>("setAttribute", std::string("class"),
                       (m_currentVideoMode == QWasmVideoOutput::Camera ? std::string("Camera")
                                                                       : std::string("Video")));
    // Back-pointer so static callbacks can recover the C++ object from
    // the DOM element (read back in the VideoFrame callback).
    m_video.set("data-qvideocontext",
                emscripten::val(quintptr(reinterpret_cast<void *>(this))));

    m_video.set("preload", "metadata");

    // Uncaught DOMException: Failed to execute 'getImageData' on
    // 'OffscreenCanvasRenderingContext2D': The canvas has been tainted by
    // cross-origin data.
    // TODO figure out somehow to let user choose between these
    std::string originString = "anonymous"; // requires server Access-Control-Allow-Origin *
    // std::string originString = "use-credentials"; // must not
    // Access-Control-Allow-Origin *

    m_video.call<void>("setAttribute", std::string("crossorigin"), originString);
    body.call<void>("appendChild", m_video);

    // Create/add video source
    // NOTE(review): this <source> element is created and configured but
    // never appended to the <video> element, so it appears to have no
    // effect -- verify whether an appendChild is missing here.
    document.call<emscripten::val>("createElement",
                                   std::string("source")).set("src", m_source.toStdString());

    // Set position:absolute, which makes it possible to position the video
    // element using x,y. coordinates, relative to its parent (the page's <body>
    // element)
    emscripten::val style = m_video["style"];
    style.set("position", "absolute");
    style.set("display", "none"); // hide

    if (!m_source.isEmpty())
        updateVideoElementSource(m_source);
}
418
// Creates the hidden canvas used to grab frames on the 2d-canvas
// fallback path (not needed when WebCodecs VideoFrame is available).
// @param offscreenSize initial pixel size of the grab buffer
void QWasmVideoOutput::createOffscreenElement(const QSize &offscreenSize)
{
    qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;

    if (m_hasVideoFrame) // VideoFrame does not require offscreen canvas/context
        return;

    // create offscreen element for grabbing frames
    // OffscreenCanvas - no safari :(
    // https://developer.mozilla.org/en-US/docs/Web/API/OffscreenCanvas

    emscripten::val document = emscripten::val::global("document");

    // TODO use correct frameBytesAllocationSize?
    // offscreen render buffer
    // First assignment only probes for the OffscreenCanvas constructor;
    // the real instance is created in the else branch below.
    m_offscreen = emscripten::val::global("OffscreenCanvas");

    if (m_offscreen.isUndefined()) {
        // Safari OffscreenCanvas not supported, try old skool way
        m_offscreen = document.call<emscripten::val>("createElement", std::string("canvas"));

        m_offscreen.set("style",
                        "position:absolute;left:-1000px;top:-1000px"); // offscreen
        m_offscreen.set("width", offscreenSize.width());
        m_offscreen.set("height", offscreenSize.height());
        m_offscreenContext = m_offscreen.call<emscripten::val>("getContext", std::string("2d"));
    } else {
        m_offscreen = emscripten::val::global("OffscreenCanvas")
                              .new_(offscreenSize.width(), offscreenSize.height());
        // NOTE(review): the options bag is built with val::array(); a
        // plain val::object() is the conventional shape -- setting
        // properties on an array works in JS, but verify intent.
        emscripten::val offscreenAttributes = emscripten::val::array();
        offscreenAttributes.set("willReadFrequently", true);
        m_offscreenContext = m_offscreen.call<emscripten::val>("getContext", std::string("2d"),
                                                               offscreenAttributes);
    }
    std::string offscreenId = m_videoSurfaceId + "_offscreenOutputSurface";
    m_offscreen.set("id", offscreenId.c_str());
}
456
// Signature line missing from this excerpt. Detaches the <video>
// element from the DOM if one exists.
{
    if (!m_video.isUndefined() && !m_video.isNull())
        m_video.call<void>("remove");
}
462
464{
465 qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO;
466
467 // event callbacks
468 // timupdate
469 auto timeUpdateCallback = [=](emscripten::val event) {
470 qCDebug(qWasmMediaVideoOutput) << "timeupdate";
471
472 // qt progress is ms
473 emit progressChanged(event["target"]["currentTime"].as<double>() * 1000);
474 };
475 m_timeUpdateEvent.reset(new QWasmEventHandler(m_video, "timeupdate", timeUpdateCallback));
476
477 // play
478 auto playCallback = [=](emscripten::val event) {
479 Q_UNUSED(event)
480 qCDebug(qWasmMediaVideoOutput) << "play" << m_video["src"].as<std::string>();
481 if (!m_isSeeking)
482 emit stateChanged(QWasmMediaPlayer::Preparing);
483 };
484 m_playEvent.reset(new QWasmEventHandler(m_video, "play", playCallback));
485
486 // ended
487 auto endedCallback = [=](emscripten::val event) {
488 Q_UNUSED(event)
489 qCDebug(qWasmMediaVideoOutput) << "ended";
490 m_currentMediaStatus = MediaStatus::EndOfMedia;
491 emit statusChanged(m_currentMediaStatus);
492 m_shouldStop = true;
493 stop();
494 };
495 m_endedEvent.reset(new QWasmEventHandler(m_video, "ended", endedCallback));
496
497 // durationchange
498 auto durationChangeCallback = [=](emscripten::val event) {
499 qCDebug(qWasmMediaVideoOutput) << "durationChange";
500
501 // qt duration is in milliseconds.
502 qint64 dur = event["target"]["duration"].as<double>() * 1000;
503 emit durationChanged(dur);
504 };
505 m_durationChangeEvent.reset(
506 new QWasmEventHandler(m_video, "durationchange", durationChangeCallback));
507
508 // loadeddata
509 auto loadedDataCallback = [=](emscripten::val event) {
510 Q_UNUSED(event)
511 qCDebug(qWasmMediaVideoOutput) << "loaded data";
512
513 emit stateChanged(QWasmMediaPlayer::Prepared);
514 if (m_isSeekable != isVideoSeekable()) {
515 m_isSeekable = isVideoSeekable();
516 emit seekableChanged(m_isSeekable);
517 }
518 };
519 m_loadedDataEvent.reset(new QWasmEventHandler(m_video, "loadeddata", loadedDataCallback));
520
521 // error
522 auto errorCallback = [=](emscripten::val event) {
523 qCDebug(qWasmMediaVideoOutput) << "error";
524 if (event.isUndefined() || event.isNull())
525 return;
526 emit errorOccured(m_video["error"]["code"].as<int>(),
527 QString::fromStdString(m_video["error"]["message"].as<std::string>()));
528 };
529 m_errorChangeEvent.reset(new QWasmEventHandler(m_video, "error", errorCallback));
530
531 // resize
532 auto resizeCallback = [=](emscripten::val event) {
533 Q_UNUSED(event)
534 qCDebug(qWasmMediaVideoOutput) << "resize";
535
536 updateVideoElementGeometry(
537 QRect(0, 0, m_video["videoWidth"].as<int>(), m_video["videoHeight"].as<int>()));
538 emit sizeChange(m_video["videoWidth"].as<int>(), m_video["videoHeight"].as<int>());
539
540 };
541 m_resizeChangeEvent.reset(new QWasmEventHandler(m_video, "resize", resizeCallback));
542
543 // loadedmetadata
544 auto loadedMetadataCallback = [=](emscripten::val event) {
545 Q_UNUSED(event)
546 qCDebug(qWasmMediaVideoOutput) << "loaded meta data";
547
548 emit metaDataLoaded();
549 };
550 m_loadedMetadataChangeEvent.reset(
551 new QWasmEventHandler(m_video, "loadedmetadata", loadedMetadataCallback));
552
553 // loadstart
554 auto loadStartCallback = [=](emscripten::val event) {
555 Q_UNUSED(event)
556 qCDebug(qWasmMediaVideoOutput) << "load started";
557 m_currentMediaStatus = MediaStatus::LoadingMedia;
558 emit statusChanged(m_currentMediaStatus);
559 m_shouldStop = false;
560 };
561 m_loadStartChangeEvent.reset(new QWasmEventHandler(m_video, "loadstart", loadStartCallback));
562
563 // canplay
564
565 auto canPlayCallback = [=](emscripten::val event) {
566 if (event.isUndefined() || event.isNull())
567 return;
568 qCDebug(qWasmMediaVideoOutput) << "can play"
569 << "m_requestedPosition" << m_requestedPosition;
570
571 if (!m_shouldStop)
572 emit readyChanged(true); // sets video available
573 };
574 m_canPlayChangeEvent.reset(new QWasmEventHandler(m_video, "canplay", canPlayCallback));
575
576 // canplaythrough
577 auto canPlayThroughCallback = [=](emscripten::val event) {
578 Q_UNUSED(event)
579 qCDebug(qWasmMediaVideoOutput) << "can play through"
580 << "m_shouldStop" << m_shouldStop;
581
582 if (m_currentMediaStatus == MediaStatus::EndOfMedia)
583 return;
584 bool seekable = isVideoSeekable();
585 if (m_isSeekable != seekable) {
586 m_isSeekable = seekable;
587 emit seekableChanged(m_isSeekable);
588 }
589 if (!m_isSeeking && !m_shouldStop) {
590 emscripten::val timeRanges = m_video["buffered"];
591 if ((!timeRanges.isNull() || !timeRanges.isUndefined())
592 && timeRanges["length"].as<int>() == 1) {
593 double buffered = m_video["buffered"].call<emscripten::val>("end", 0).as<double>();
594 const double duration = m_video["duration"].as<double>();
595
596 if (duration == buffered) {
597 m_currentBufferedValue = 100;
598 emit bufferingChanged(m_currentBufferedValue);
599 }
600 }
601 constexpr int hasEnoughData = 4;
602 if (m_video["readyState"].as<int>() == hasEnoughData) {
603 m_currentMediaStatus = MediaStatus::LoadedMedia;
604 emit statusChanged(m_currentMediaStatus);
606 }
607 } else {
608 m_shouldStop = false;
609 }
610 };
611 m_canPlayThroughChangeEvent.reset(
612 new QWasmEventHandler(m_video, "canplaythrough", canPlayThroughCallback));
613
614 // seeking
615 auto seekingCallback = [=](emscripten::val event) {
616 Q_UNUSED(event)
617 qCDebug(qWasmMediaVideoOutput)
618 << "seeking started" << (m_video["currentTime"].as<double>() * 1000);
619 m_isSeeking = true;
620 };
621 m_seekingChangeEvent.reset(new QWasmEventHandler(m_video, "seeking", seekingCallback));
622
623 // seeked
624 auto seekedCallback = [=](emscripten::val event) {
625 Q_UNUSED(event)
626 qCDebug(qWasmMediaVideoOutput) << "seeked" << (m_video["currentTime"].as<double>() * 1000);
627 emit progressChanged(m_video["currentTime"].as<double>() * 1000);
628 m_isSeeking = false;
629 };
630 m_seekedChangeEvent.reset(new QWasmEventHandler(m_video, "seeked", seekedCallback));
631
632 // emptied
633 auto emptiedCallback = [=](emscripten::val event) {
634 Q_UNUSED(event)
635 qCDebug(qWasmMediaVideoOutput) << "emptied";
636 emit readyChanged(false);
637 m_currentMediaStatus = MediaStatus::EndOfMedia;
638 emit statusChanged(m_currentMediaStatus);
639 };
640 m_emptiedChangeEvent.reset(new QWasmEventHandler(m_video, "emptied", emptiedCallback));
641
642 // stalled
643 auto stalledCallback = [=](emscripten::val event) {
644 Q_UNUSED(event)
645 qCDebug(qWasmMediaVideoOutput) << "stalled";
646 m_currentMediaStatus = MediaStatus::StalledMedia;
647 emit statusChanged(m_currentMediaStatus);
648 };
649 m_stalledChangeEvent.reset(new QWasmEventHandler(m_video, "stalled", stalledCallback));
650
651 // waiting
652 auto waitingCallback = [=](emscripten::val event) {
653 Q_UNUSED(event)
654
655 qCDebug(qWasmMediaVideoOutput) << "waiting";
656 // check buffer
657 };
658 m_waitingChangeEvent.reset(new QWasmEventHandler(m_video, "waiting", waitingCallback));
659
660 // suspend
661
662 // playing
663 auto playingCallback = [=](emscripten::val event) {
664 Q_UNUSED(event)
665 qCDebug(qWasmMediaVideoOutput) << "playing";
666 if (m_isSeeking)
667 return;
668 emit stateChanged(QWasmMediaPlayer::Started);
669 if (m_toBePaused || !m_shouldStop) { // paused
670 m_toBePaused = false;
671 QMetaObject::invokeMethod(this, &QWasmVideoOutput::videoFrameTimerCallback, Qt::QueuedConnection);
672 }
673 };
674 m_playingChangeEvent.reset(new QWasmEventHandler(m_video, "playing", playingCallback));
675
676 // progress (buffering progress)
677 auto progesssCallback = [=](emscripten::val event) {
678 if (event.isUndefined() || event.isNull())
679 return;
680
681 const double duration = event["target"]["duration"].as<double>();
682 if (duration < 0) // track not exactly ready yet
683 return;
684
685 emscripten::val timeRanges = event["target"]["buffered"];
686
687 if ((!timeRanges.isNull() || !timeRanges.isUndefined())
688 && timeRanges["length"].as<int>() == 1) {
689 emscripten::val dVal = timeRanges.call<emscripten::val>("end", 0);
690 if (!dVal.isNull() || !dVal.isUndefined()) {
691 double bufferedEnd = dVal.as<double>();
692
693 if (duration > 0 && bufferedEnd > 0) {
694 const double bufferedValue = (bufferedEnd / duration * 100);
695 qCDebug(qWasmMediaVideoOutput) << "progress buffered";
696 m_currentBufferedValue = bufferedValue;
697 emit bufferingChanged(m_currentBufferedValue);
698 if (bufferedEnd == duration)
699 m_currentMediaStatus = MediaStatus::BufferedMedia;
700 else
701 m_currentMediaStatus = MediaStatus::BufferingMedia;
702 emit statusChanged(m_currentMediaStatus);
703 }
704 }
705 }
706 };
707 m_progressChangeEvent.reset(new QWasmEventHandler(m_video, "progress", progesssCallback));
708
709 // pause
710 auto pauseCallback = [=](emscripten::val event) {
711 Q_UNUSED(event)
712 qCDebug(qWasmMediaVideoOutput) << "pause";
713
714 const double currentTime = m_video["currentTime"].as<double>(); // in seconds
715 const double duration = m_video["duration"].as<double>(); // in seconds
716 if ((currentTime > 0 && currentTime < duration) && (!m_shouldStop && m_toBePaused)) {
717 emit stateChanged(QWasmMediaPlayer::Paused);
718 } else {
719 // stop this crazy thing!
720 m_video.set("currentTime", emscripten::val(0));
721 emit stateChanged(QWasmMediaPlayer::Stopped);
722 }
723 };
724 m_pauseChangeEvent.reset(new QWasmEventHandler(m_video, "pause", pauseCallback));
725
726 // onunload
727 // we use lower level events here as to avert a crash on activate using the
728 // qtdweb see _qt_beforeUnload
729 emscripten::val window = emscripten::val::global("window");
730
731 auto beforeUnloadCallback = [=](emscripten::val event) {
732 Q_UNUSED(event)
733 // large videos will leave the unloading window
734 // in a frozen state, so remove the video element src first
735 m_video.call<void>("removeAttribute", emscripten::val("src"));
736 m_video.call<void>("load");
737 };
738 m_beforeUnloadEvent.reset(new QWasmEventHandler(window, "beforeunload", beforeUnloadCallback));
739
740}
741
742void QWasmVideoOutput::updateVideoElementGeometry(const QRect &windowGeometry)
743{
744 QRect m_videoElementSource(windowGeometry.topLeft(), windowGeometry.size());
745
746 emscripten::val style = m_video["style"];
747 style.set("left", QStringLiteral("%1px").arg(m_videoElementSource.left()).toStdString());
748 style.set("top", QStringLiteral("%1px").arg(m_videoElementSource.top()).toStdString());
749 m_video.set("width", m_videoElementSource.width());
750 m_video.set("height", m_videoElementSource.height());
751 style.set("z-index", "999");
752
753 if (!m_hasVideoFrame) {
754 // offscreen
755 m_offscreen.set("width", m_videoElementSource.width());
756 m_offscreen.set("height", m_videoElementSource.height());
757 }
758}
759
// QWasmVideoOutput::getDuration() -- signature line missing from this
// excerpt. Returns the media duration in milliseconds, or 0 when no
// video element exists.
{
    // qt duration is in ms
    // js is sec

    if (m_video.isUndefined() || m_video.isNull())
        return 0;
    return m_video["duration"].as<double>() * 1000;
}
769
770void QWasmVideoOutput::newFrame(const QVideoFrame &frame)
771{
772 m_wasmSink->setVideoFrame(frame);
773}
774
// QWasmVideoOutput::setPlaybackRate(qreal rate) -- signature line
// missing from this excerpt (declaration visible below). Forwards the
// rate to the HTML media element; 1.0 is normal speed.
{
    m_video.set("playbackRate", emscripten::val(rate));
}
779
// QWasmVideoOutput::playbackRate() -- signature line missing from this
// excerpt. Returns the element's current playback rate, or 0 when no
// video element exists.
{
    return (m_video.isUndefined() || m_video.isNull()) ? 0 : m_video["playbackRate"].as<float>();
}
784
785void QWasmVideoOutput::checkNetworkState()
786{
787 int netState = m_video["networkState"].as<int>();
788
789 qCDebug(qWasmMediaVideoOutput) << netState;
790
791 switch (netState) {
792 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkEmpty: // no data
793 break;
794 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkIdle:
795 break;
796 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkLoading:
797 break;
798 case QWasmMediaPlayer::QWasmMediaNetworkState::NetworkNoSource: // no source
799 emit errorOccured(netState, QStringLiteral("No media source found"));
800 break;
801 };
802}
803
804void QWasmVideoOutput::videoComputeFrame(void *context)
805{
806 if (m_offscreenContext.isUndefined() || m_offscreenContext.isNull()) {
807 qCDebug(qWasmMediaVideoOutput) << "offscreen canvas context could not be found";
808 return;
809 }
810 emscripten::val document = emscripten::val::global("document");
811
812 if (m_video.isUndefined() || m_video.isNull()) {
813 qCDebug(qWasmMediaVideoOutput) << "video element could not be found";
814 return;
815 }
816
817 const int videoWidth = m_video["videoWidth"].as<int>();
818 const int videoHeight = m_video["videoHeight"].as<int>();
819
820 if (videoWidth == 0 || videoHeight == 0)
821 return;
822
823 m_offscreenContext.call<void>("drawImage", m_video, 0, 0, videoWidth, videoHeight);
824
825 emscripten::val frame = // one frame, Uint8ClampedArray
826 m_offscreenContext.call<emscripten::val>("getImageData", 0, 0, videoWidth, videoHeight);
827
828 const QSize frameBytesAllocationSize(videoWidth, videoHeight);
829
830 // this seems to work ok, even though getImageData returns a Uint8ClampedArray
831 QByteArray frameBytes = qstdweb::Uint8Array(frame["data"]).copyToQByteArray();
832
833 QVideoFrameFormat frameFormat =
834 QVideoFrameFormat(frameBytesAllocationSize, QVideoFrameFormat::Format_RGBA8888);
835
836 auto *textureDescription = QVideoTextureHelper::textureDescription(frameFormat.pixelFormat());
837
838 QVideoFrame vFrame = QVideoFramePrivate::createFrame(
839 std::make_unique<QMemoryVideoBuffer>(
840 std::move(frameBytes),
841 textureDescription->strideForWidth(frameFormat.frameWidth())), // width of line with padding
842 frameFormat);
843 QWasmVideoOutput *wasmVideoOutput = reinterpret_cast<QWasmVideoOutput *>(context);
844
845 if (!wasmVideoOutput->m_wasmSink) {
846 qWarning() << "ERROR ALERT!! video sink not set";
847 }
848 wasmVideoOutput->m_wasmSink->setVideoFrame(vFrame);
849}
850
851
// QWasmVideoOutput::videoFrameCallback(void *context) -- signature line
// missing from this excerpt (declaration visible below). WebCodecs
// frame-grab path: wraps the current <video> image in a JS VideoFrame,
// asynchronously copies its pixels into a Uint8Array via copyTo(), and
// forwards the result to the Qt video sink.
{
    QWasmVideoOutput *videoOutput = reinterpret_cast<QWasmVideoOutput *>(context);
    if (!videoOutput || !videoOutput->isReady())
        return;
    emscripten::val videoElement = videoOutput->currentVideoElement();
    // Construct a VideoFrame directly from the <video> element.
    emscripten::val oneVideoFrame = val::global("VideoFrame").new_(videoElement);

    if (oneVideoFrame.isNull() || oneVideoFrame.isUndefined()) {
        qCDebug(qWasmMediaVideoOutput) << Q_FUNC_INFO
                << "ERROR" << "failed to construct VideoFrame";
        return;
    }

    emscripten::val options = emscripten::val::object();
    emscripten::val rectOptions = emscripten::val::object();

    int displayWidth = oneVideoFrame["displayWidth"].as<int>();
    int displayHeight = oneVideoFrame["displayHeight"].as<int>();

    // Restrict the copy to the display rect of the frame.
    rectOptions.set("width", displayWidth);
    rectOptions.set("height", displayHeight);
    options.set("rect", rectOptions);

    // Number of bytes the copyTo() destination buffer must hold.
    emscripten::val frameBytesAllocationSize = oneVideoFrame.call<emscripten::val>("allocationSize", options);
    emscripten::val frameBuffer =
            emscripten::val::global("Uint8Array").new_(frameBytesAllocationSize);
    // Recover the owning C++ object from the DOM back-pointer set in
    // createVideoElement().
    QWasmVideoOutput *wasmVideoOutput =
            reinterpret_cast<QWasmVideoOutput*>(videoElement["data-qvideocontext"].as<quintptr>());

    qstdweb::PromiseCallbacks copyToCallback;
    copyToCallback.thenFunc = [wasmVideoOutput, oneVideoFrame, frameBuffer,
                               displayWidth, displayHeight]
            (emscripten::val frameLayout)
    {
        if (frameLayout.isNull() || frameLayout.isUndefined()) {
            qCDebug(qWasmMediaVideoOutput) << "theres no frameLayout";
            return;
        }

        // frameBuffer now has a new frame, send to Qt
        const QSize frameSize(displayWidth,
                              displayHeight);

        QByteArray frameBytes = QByteArray::fromEcmaUint8Array(frameBuffer);

        QVideoFrameFormat::PixelFormat pixelFormat = fromJsPixelFormat(oneVideoFrame["format"].as<std::string>());
        if (pixelFormat == QVideoFrameFormat::Format_Invalid) {
            qWarning() << "Invalid pixel format";
            return;
        }
        QVideoFrameFormat frameFormat = QVideoFrameFormat(frameSize, pixelFormat);

        // codedWidth is used as the buffer's bytes-per-line value here.
        auto buffer = std::make_unique<QMemoryVideoBuffer>(
                std::move(frameBytes),
                oneVideoFrame["codedWidth"].as<int>());

        QVideoFrame vFrame =
                QVideoFramePrivate::createFrame(std::move(buffer), std::move(frameFormat));

        if (!wasmVideoOutput) {
            qCDebug(qWasmMediaVideoOutput) << "ERROR:"
                    << "data-qvideocontext not found";
            return;
        }
        if (!wasmVideoOutput->m_wasmSink) {
            qWarning() << "ERROR ALERT!! video sink not set";
            return;
        }
        wasmVideoOutput->m_wasmSink->setVideoFrame(vFrame);
        // VideoFrames hold decoder resources and must be closed promptly.
        oneVideoFrame.call<emscripten::val>("close");
    };
    copyToCallback.catchFunc = [&, wasmVideoOutput, oneVideoFrame](emscripten::val error)
    {
        qCDebug(qWasmMediaVideoOutput) << "Error"
                << QString::fromStdString(error["name"].as<std::string>())
                << QString::fromStdString(error["message"].as<std::string>()) ;

        oneVideoFrame.call<emscripten::val>("close");
        wasmVideoOutput->stop();
        return;
    };

    qstdweb::Promise::make(oneVideoFrame, u"copyTo"_s, std::move(copyToCallback), frameBuffer);
}
937
// QWasmVideoOutput::videoFrameTimerCallback() -- signature line missing
// from this excerpt. Starts a requestAnimationFrame loop that grabs one
// frame per tick (via the VideoFrame path or the 2d-canvas fallback)
// while playback is active.
{
    static auto frame = [](double frameTime, void *context) -> EM_BOOL {
        Q_UNUSED(frameTime);

        QWasmVideoOutput *videoOutput = reinterpret_cast<QWasmVideoOutput *>(context);
        // Returning false terminates the animation-frame loop.
        if (!videoOutput || videoOutput->m_currentMediaStatus != MediaStatus::LoadedMedia)
            return false;
        if (videoOutput->m_shouldStop)
            return false;
        emscripten::val videoElement = videoOutput->currentVideoElement();

        if (videoElement.isNull() || videoElement.isUndefined()) {
            qWarning() << "no video element";
        }

        // readyState 4 == HAVE_ENOUGH_DATA (see canplaythrough handling)
        if (videoElement["paused"].as<bool>() || videoElement["ended"].as<bool>()
            || videoElement["readyState"].as<int>() != 4)
            return false;

        if (videoOutput->m_hasVideoFrame) {
            videoOutput->videoFrameCallback(context);
        } else {
            videoOutput->videoComputeFrame(context);
        }
        return true;
    };

    // Camera elements (className "Camera") are gated on m_cameraIsReady,
    // presumably because streams do not reach LoadedMedia -- verify.
    if ((!m_shouldStop && m_video["className"].as<std::string>() == "Camera" && m_cameraIsReady)
        || isReady())
        emscripten_request_animation_frame_loop(frame, this);
    // about 60 fps
}
971
972QVideoFrameFormat::PixelFormat QWasmVideoOutput::fromJsPixelFormat(std::string_view videoFormat)
973{
974 if (videoFormat == "I420")
975 return QVideoFrameFormat::Format_YUV420P;
976 // no equivalent pixel format
977 // else if (videoFormat == "I420A") // AYUV ?
978 else if (videoFormat == "I422")
979 return QVideoFrameFormat::Format_YUV422P;
980 // no equivalent pixel format
981 // else if (videoFormat == "I444")
982 else if (videoFormat == "NV12")
983 return QVideoFrameFormat::Format_NV12;
984 else if (videoFormat == "RGBA")
985 return QVideoFrameFormat::Format_RGBA8888;
986 else if (videoFormat == "RGBX")
987 return QVideoFrameFormat::Format_RGBX8888;
988 else if (videoFormat == "BGRA")
989 return QVideoFrameFormat::Format_BGRA8888;
990 else if (videoFormat == "BGRX")
991 return QVideoFrameFormat::Format_BGRX8888;
992
993 return QVideoFrameFormat::Format_Invalid;
994}
995
// QWasmVideoOutput::getDeviceCapabilities() -- signature line missing
// from this excerpt (declared as emscripten::val getDeviceCapabilities()).
// Returns the first video track's capabilities (or settings on Firefox,
// which lacks getCapabilities); emscripten::val::undefined() when no
// active stream or track exists.
{
    emscripten::val stream = m_video["srcObject"];
    if ((!stream.isNull() && !stream.isUndefined()) && stream["active"].as<bool>()) {
        emscripten::val tracks = stream.call<emscripten::val>("getVideoTracks");
        if (!tracks.isUndefined()) {
            if (tracks["length"].as<int>() == 0)
                return emscripten::val::undefined();

            emscripten::val track = tracks[0];
            if (!track.isUndefined()) {
                emscripten::val trackCaps = emscripten::val::undefined();
                if (!track["getCapabilities"].isUndefined())
                    trackCaps = track.call<emscripten::val>("getCapabilities");
                else // firefox does not support getCapabilities
                    trackCaps = track.call<emscripten::val>("getSettings");

                if (!trackCaps.isUndefined())
                    return trackCaps;
            }
        }
    } else {
        // camera not started track capabilities not available
        emit errorOccured(QMediaPlayer::ResourceError, QStringLiteral("capabilities not available"));
    }

    return emscripten::val::undefined();
}
1024
1025bool QWasmVideoOutput::setDeviceSetting(const std::string &key, emscripten::val value)
1026{
1027 emscripten::val stream = m_video["srcObject"];
1028 if (stream.isNull() || stream.isUndefined()
1029 || stream["getVideoTracks"].isUndefined())
1030 return false;
1031
1032 emscripten::val tracks = stream.call<emscripten::val>("getVideoTracks");
1033 if (!tracks.isNull() || !tracks.isUndefined()) {
1034 if (tracks["length"].as<int>() == 0)
1035 return false;
1036
1037 emscripten::val track = tracks[0];
1038 emscripten::val contraint = emscripten::val::object();
1039 contraint.set(std::move(key), value);
1040 track.call<emscripten::val>("applyConstraints", contraint);
1041 return true;
1042 }
1043
1044 return false;
1045}
1046
1047QT_END_NAMESPACE
1048
1049#include "moc_qwasmvideooutput_p.cpp"
void addCameraSourceElement(const std::string &id)
void updateVideoElementGeometry(const QRect &windowGeometry)
bool setDeviceSetting(const std::string &key, emscripten::val value)
emscripten::val surfaceElement()
emscripten::val getDeviceCapabilities()
void videoFrameCallback(void *context)
void setVideoSize(const QSize &)
void setMuted(bool muted)
void setSource(const QUrl &url)
void setVideoMode(QWasmVideoOutput::WasmVideoMode mode)
void seekTo(qint64 position)
void setVolume(qreal volume)
void createVideoElement(const std::string &id)
void updateVideoElementSource(const QString &src)
void setSource(QIODevice *stream)
void setPlaybackRate(qreal rate)
void createOffscreenElement(const QSize &offscreenSize)
Combined button and popup list for selecting options.
Q_LOGGING_CATEGORY(lcEventDispatcher, "qt.eventdispatcher")
static bool checkForVideoFrame()