#include <mediacapture/qgstreamermediacapturesession_p.h>
#include <mediacapture/qgstreamermediarecorder_p.h>
#include <mediacapture/qgstreamerimagecapture_p.h>
#include <mediacapture/qgstreamercamera_p.h>
#include <common/qgstpipeline_p.h>
#include <common/qgstreameraudioinput_p.h>
#include <common/qgstreameraudiooutput_p.h>
#include <common/qgstreamervideooutput_p.h>
#include <common/qgst_debug_p.h>

#include <QtCore/qloggingcategory.h>
#include <QtCore/private/quniquehandle_p.h>
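// The capture session wires camera, audio input, media recorder, image capture
// and the video preview into one GStreamer pipeline.  Each source feeds a "tee"
// element whose request pads fan the stream out to the individual consumers;
// "allow-not-linked" keeps a tee from posting a flow error while one of those
// branches is temporarily disconnected.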
    tee.set("allow-not-linked", true);
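// executeWhilePadsAreIdle() recursively installs an idle probe on every pad in
// the span and runs the functor only once all of them are blocked, so the
// pipeline topology can be modified while no data flows through the affected
// branches.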
template <typename Functor>
        return executeWhilePadsAreIdle(pads.subspan(1), f);

    pads.front().modifyPipelineInIdleProbe(f);

    auto remain = pads.subspan(1);
    pads.front().modifyPipelineInIdleProbe([&] {
        executeWhilePadsAreIdle(remain, f);
    });
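// Helpers used after relinking: setStateOnElements() moves freshly added
// elements to the requested state, finishStateChangeOnElements() waits for the
// resulting asynchronous state changes to complete.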
    for (QGstElement element : elements)
        element.setState(state);

    for (QGstElement element : elements)
        element.finishStateChange();
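// create() checks up front that the GStreamer element factories this session
// relies on are available, so a missing "tee" or "capsfilter" is reported as an
// error instead of failing later at runtime.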
QMaybe<QPlatformMediaCaptureSession *> QGstreamerMediaCaptureSession::create()
{
    auto videoOutput = QGstreamerVideoOutput::create();
    if (!videoOutput)
        return { unexpect, videoOutput.error() };

    static const auto error = qGstErrorMessageIfElementsNotAvailable("tee", "capsfilter");
    if (error)
        return { unexpect, *error };

    return new QGstreamerMediaCaptureSession(videoOutput.value());
}
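// The constructor requests one tee source pad per consumer, forwards bus
// messages to this object, pins the pipeline to the system clock and moves it
// to the PLAYING state.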
QGstreamerMediaCaptureSession::QGstreamerMediaCaptureSession(QGstreamerVideoOutput *videoOutput)
    : capturePipeline{ QGstPipeline::create("mediaCapturePipeline") },
      audioSrcPadForEncoder{ gstAudioTee.getRequestPad("src_%u") },
      audioSrcPadForOutput{ gstAudioTee.getRequestPad("src_%u") },
      videoSrcPadForEncoder{ gstVideoTee.getRequestPad("src_%u") },
      videoSrcPadForOutput{ gstVideoTee.getRequestPad("src_%u") },
      videoSrcPadForImageCapture{ gstVideoTee.getRequestPad("src_%u") },
      gstVideoOutput(videoOutput)
{
    gstVideoOutput->setParent(this);

    capturePipeline.installMessageFilter(static_cast<QGstreamerBusMessageFilter *>(this));
    capturePipeline.set("message-forward", true);

    QGstClockHandle systemClock{
        gst_system_clock_obtain(),
        QGstClockHandle::HasRef,
    };
    gst_pipeline_use_clock(capturePipeline.pipeline(), systemClock.get());

    capturePipeline.setState(GST_STATE_PLAYING);

    capturePipeline.dumpGraph("initial");
}
QGstPad QGstreamerMediaCaptureSession::imageCaptureSink()
{
    return m_imageCapture ? m_imageCapture->gstElement().staticPad("sink") : QGstPad{};
}
QGstPad QGstreamerMediaCaptureSession::videoOutputSink()

QGstPad QGstreamerMediaCaptureSession::audioOutputSink()
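// Teardown: detach the recorder and image capture first, stop filtering bus
// messages, then bring the pipeline down through READY to NULL.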
QGstreamerMediaCaptureSession::~QGstreamerMediaCaptureSession()
{
    setMediaRecorder(nullptr);
    setImageCapture(nullptr);

    capturePipeline.removeMessageFilter(static_cast<QGstreamerBusMessageFilter *>(this));
    capturePipeline.setStateSync(GST_STATE_READY);
    capturePipeline.setStateSync(GST_STATE_NULL);
}
void QGstreamerMediaCaptureSession::setCamera(QPlatformCamera *platformCamera)
{
    auto *camera = static_cast<QGstreamerCamera *>(platformCamera);
    if (gstCamera == camera)
        return;

    if (gstCamera) {
        QObject::disconnect(gstCameraActiveConnection);
        if (gstCamera->isActive())
            setCameraActive(false);
    }

    gstCamera = camera;

    if (gstCamera) {
        gstCameraActiveConnection =
                QObject::connect(camera, &QPlatformCamera::activeChanged, this,
                                 &QGstreamerMediaCaptureSession::setCameraActive);
        if (gstCamera->isActive())
            setCameraActive(true);
    }

    emit cameraChanged();
}
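// setCameraActive() adds or removes the whole camera branch (camera element,
// video tee and the attached consumers) while the three video tee source pads
// are blocked by idle probes.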
void QGstreamerMediaCaptureSession::setCameraActive(bool activate)
{
    std::array padsToSync = {
        videoSrcPadForEncoder,
        videoSrcPadForImageCapture,
        videoSrcPadForOutput,
    };
    // Assumption: the camera and video-output elements used below are obtained
    // from the session's camera and video output objects.
    QGstElement cameraElement = gstCamera->gstElement();
    QGstElement videoOutputElement = gstVideoOutput->gstElement();

    if (activate) {
        gstCamera->setCaptureSession(this);
        capturePipeline.add(gstVideoTee);

        executeWhilePadsAreIdle(padsToSync, [&] {
            capturePipeline.add(cameraElement);
            if (videoOutputElement)
                capturePipeline.add(videoOutputElement);

            if (m_currentRecorderState && m_currentRecorderState->videoSink)
                videoSrcPadForEncoder.link(m_currentRecorderState->videoSink);
            if (videoOutputElement)
                videoSrcPadForOutput.link(videoOutputSink());

            videoSrcPadForImageCapture.link(imageCaptureSink());

            qLinkGstElements(cameraElement, gstVideoTee);

            setStateOnElements({ gstVideoTee, cameraElement, videoOutputElement },
                               GST_STATE_PLAYING);
        });

        finishStateChangeOnElements({ gstVideoTee, cameraElement, videoOutputElement });

        for (QGstElement addedElement : { gstVideoTee, cameraElement, videoOutputElement })
            addedElement.finishStateChange();
    } else {
        executeWhilePadsAreIdle(padsToSync, [&] {
            for (QGstPad &pad : padsToSync)
                pad.unlinkPeer();

            capturePipeline.stopAndRemoveElements(cameraElement, gstVideoTee, videoOutputElement);
        });

        gstCamera->setCaptureSession(nullptr);
    }

    capturePipeline.dumpGraph("camera");
}
    return m_imageCapture;
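// Swapping the image-capture object unlinks and removes the old element and
// links the new one into the video tee's image-capture branch under an idle
// probe.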
void QGstreamerMediaCaptureSession::setImageCapture(QPlatformImageCapture *imageCapture)
{
    QGstreamerImageCapture *control = static_cast<QGstreamerImageCapture *>(imageCapture);
    if (m_imageCapture == control)
        return;

    videoSrcPadForEncoder.modifyPipelineInIdleProbe([&] {
        if (m_imageCapture) {
            qUnlinkGstElements(gstVideoTee, m_imageCapture->gstElement());
            capturePipeline.stopAndRemoveElements(m_imageCapture->gstElement());
            m_imageCapture->setCaptureSession(nullptr);
        }

        m_imageCapture = control;

        if (m_imageCapture) {
            capturePipeline.add(m_imageCapture->gstElement());
            videoSrcPadForImageCapture.link(imageCaptureSink());
            m_imageCapture->setCaptureSession(this);
            m_imageCapture->gstElement().setState(GST_STATE_PLAYING);
        }
    });

    if (m_imageCapture)
        m_imageCapture->gstElement().finishStateChange();

    capturePipeline.dumpGraph("imageCapture");

    emit imageCaptureChanged();
}
void QGstreamerMediaCaptureSession::setMediaRecorder(QPlatformMediaRecorder *recorder)
{
    QGstreamerMediaRecorder *control = static_cast<QGstreamerMediaRecorder *>(recorder);
    if (m_mediaRecorder == control)
        return;

    if (m_mediaRecorder)
        m_mediaRecorder->setCaptureSession(nullptr);
    m_mediaRecorder = control;
    if (m_mediaRecorder)
        m_mediaRecorder->setCaptureSession(this);

    emit encoderChanged();
    capturePipeline.dumpGraph("encoder");
}
    return m_mediaRecorder;
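// Starting a recording: encodebin and file sink are added and linked while the
// encoder tee pads are idle.  A capsfilter locked to the caps currently flowing
// through the corresponding tee is placed in front of each encoder input, so
// the recording branch keeps the already negotiated format.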
                                                        const QMediaMetaData &metadata)
{
    Q_ASSERT(!m_currentRecorderState);

    std::array padsToSync = {
        audioSrcPadForEncoder,
        videoSrcPadForEncoder,
    };

    executeWhilePadsAreIdle(padsToSync, [&] {
        capturePipeline.add(recorder.encodeBin, recorder.fileSink);
        qLinkGstElements(recorder.encodeBin, recorder.fileSink);

        applyMetaDataToTagSetter(metadata, recorder.encodeBin);

        if (recorder.videoSink) {
            QGstCaps capsFromCamera = gstVideoTee.sink().currentCaps();

            encoderVideoCapsFilter =
                    QGstElement::createFromFactory("capsfilter", "encoderVideoCapsFilter");
            encoderVideoCapsFilter.set("caps", capsFromCamera);

            capturePipeline.add(encoderVideoCapsFilter);
            encoderVideoCapsFilter.src().link(recorder.videoSink);
            videoSrcPadForEncoder.link(encoderVideoCapsFilter.sink());
        }

        if (recorder.audioSink) {
            QGstCaps capsFromInput = gstAudioTee.sink().currentCaps();

            encoderAudioCapsFilter =
                    QGstElement::createFromFactory("capsfilter", "encoderAudioCapsFilter");
            encoderAudioCapsFilter.set("caps", capsFromInput);

            capturePipeline.add(encoderAudioCapsFilter);

            encoderAudioCapsFilter.src().link(recorder.audioSink);
            audioSrcPadForEncoder.link(encoderAudioCapsFilter.sink());
        }

        setStateOnElements({ recorder.encodeBin, recorder.fileSink, encoderVideoCapsFilter,
                             encoderAudioCapsFilter },
                           GST_STATE_PLAYING);

        GstEvent *event = gst_event_new_reconfigure();
        gst_element_send_event(recorder.fileSink.element(), event);
    });

    finishStateChangeOnElements({ recorder.encodeBin, recorder.fileSink, encoderVideoCapsFilter,
                                  encoderAudioCapsFilter });

    m_currentRecorderState = std::move(recorder);
}
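// Stopping a recording: the encoder branches are unlinked from the tees under
// idle probes, the helper capsfilters are removed, and EOS is sent into the
// encodebin so the muxer can finalize the file before its elements are taken
// out of the pipeline.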
    std::array padsToSync = {
        audioSrcPadForEncoder,
        videoSrcPadForEncoder,
    };

    executeWhilePadsAreIdle(padsToSync, [&] {
        if (encoderVideoCapsFilter)
            qUnlinkGstElements(gstVideoTee, encoderVideoCapsFilter);

        if (encoderAudioCapsFilter)
            qUnlinkGstElements(gstAudioTee, encoderAudioCapsFilter);
    });

    if (encoderVideoCapsFilter) {
        capturePipeline.stopAndRemoveElements(encoderVideoCapsFilter);
        encoderVideoCapsFilter = {};
    }

    if (encoderAudioCapsFilter) {
        capturePipeline.stopAndRemoveElements(encoderAudioCapsFilter);
        encoderAudioCapsFilter = {};
    }

    m_currentRecorderState->encodeBin.sendEos();

    capturePipeline.stopAndRemoveElements(m_currentRecorderState->encodeBin,
                                          m_currentRecorderState->fileSink);

    m_currentRecorderState = std::nullopt;
    return capturePipeline;
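// Audio input handling distinguishes three cases: the first input is connected
// (audio tee and downstream branches are created), the last input is removed
// (the audio sub-graph is torn down), or one input is swapped for another under
// an idle probe on the audio tee's sink pad.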
void QGstreamerMediaCaptureSession::setAudioInput(QPlatformAudioInput *input)
{
    if (gstAudioInput == input)
        return;

    if (input && !gstAudioInput) {
        // A first audio input is connected to the session.
        gstAudioInput = static_cast<QGstreamerAudioInput *>(input);

        capturePipeline.add(gstAudioTee);

        std::array padsToSync = {
            audioSrcPadForEncoder,
            audioSrcPadForOutput,
        };

        executeWhilePadsAreIdle(padsToSync, [&] {
            if (m_currentRecorderState && m_currentRecorderState->audioSink)
                audioSrcPadForEncoder.link(m_currentRecorderState->audioSink);
            if (gstAudioOutput) {
                capturePipeline.add(gstAudioOutput->gstElement());
                audioSrcPadForOutput.link(audioOutputSink());
            }

            capturePipeline.add(gstAudioInput->gstElement());

            qLinkGstElements(gstAudioInput->gstElement(), gstAudioTee);

            gstAudioTee.setState(GST_STATE_PLAYING);
            if (gstAudioOutput)
                gstAudioOutput->gstElement().setState(GST_STATE_PLAYING);
            gstAudioInput->gstElement().setState(GST_STATE_PLAYING);
        });
    } else if (!input && gstAudioInput) {
        // The last audio input is removed; tear down the audio sub-graph.
        std::array padsToSync = {
            audioSrcPadForEncoder,
            audioSrcPadForOutput,
        };

        executeWhilePadsAreIdle(padsToSync, [&] {
            for (QGstPad &pad : padsToSync)
                pad.unlinkPeer();

            capturePipeline.stopAndRemoveElements(gstAudioTee);
            if (gstAudioOutput)
                capturePipeline.stopAndRemoveElements(gstAudioOutput->gstElement());
            capturePipeline.stopAndRemoveElements(gstAudioInput->gstElement());
        });

        gstAudioInput = nullptr;
    } else {
        // One audio input is replaced by another.
        QGstElement oldInputElement = gstAudioInput->gstElement();

        gstAudioTee.sink().modifyPipelineInIdleProbe([&] {
            oldInputElement.sink().unlinkPeer();
            gstAudioInput = static_cast<QGstreamerAudioInput *>(input);
            capturePipeline.add(gstAudioInput->gstElement());

            qLinkGstElements(gstAudioInput->gstElement(), gstAudioTee);

            gstAudioInput->gstElement().setState(GST_STATE_PLAYING);
        });

        capturePipeline.stopAndRemoveElements(gstAudioInput->gstElement());
    }
}
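// The preview QVideoSink is forwarded to the session's video output; the sink
// is switched to synchronous state changes (setAsync(false)) so the capture
// pipeline does not have to wait for the preview sink to preroll.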
void QGstreamerMediaCaptureSession::setVideoPreview(QVideoSink *sink)
{
    auto *gstSink = sink ? static_cast<QGstreamerVideoSink *>(sink->platformVideoSink()) : nullptr;
    if (gstSink)
        gstSink->setAsync(false);

    gstVideoOutput->setVideoSink(sink);

    capturePipeline.dumpGraph("setVideoPreview");
}
void QGstreamerMediaCaptureSession::setAudioOutput(QPlatformAudioOutput *output)
{
    if (gstAudioOutput == output)
        return;

    auto *gstOutput = static_cast<QGstreamerAudioOutput *>(output);
    if (gstOutput)
        gstOutput->setAsync(false);

    if (!gstAudioInput) {
        gstAudioOutput = gstOutput;
        return;
    }

    QGstElement oldOutputElement = gstAudioOutput ? gstAudioOutput->gstElement() : QGstElement{};
    gstAudioOutput = gstOutput;

    audioSrcPadForOutput.modifyPipelineInIdleProbe([&] {
        if (oldOutputElement)
            oldOutputElement.sink().unlinkPeer();
        if (gstAudioOutput) {
            capturePipeline.add(gstAudioOutput->gstElement());
            audioSrcPadForOutput.link(audioOutputSink());
            gstAudioOutput->gstElement().setState(GST_STATE_PLAYING);
        }
    });

    if (oldOutputElement)
        capturePipeline.stopAndRemoveElements(oldOutputElement);
}
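// Bus handling: errors are logged together with a dump of the pipeline graph,
// and latency messages trigger a latency recalculation.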
bool QGstreamerMediaCaptureSession::processBusMessage(const QGstreamerMessage &msg)
{
    switch (msg.type()) {
    case GST_MESSAGE_ERROR:
        return processBusMessageError(msg);

    case GST_MESSAGE_LATENCY:
        return processBusMessageLatency(msg);

    default:
        return false;
    }
}

bool QGstreamerMediaCaptureSession::processBusMessageError(const QGstreamerMessage &msg)
{
    QUniqueGErrorHandle error;
    QUniqueGStringHandle message;
    gst_message_parse_error(msg.message(), &error, &message);

    qWarning() << "QGstreamerMediaCapture: received error from gstreamer" << error << message;
    capturePipeline.dumpGraph("captureError");

    return false;
}

bool QGstreamerMediaCaptureSession::processBusMessageLatency(const QGstreamerMessage &)
{
    capturePipeline.recalculateLatency();
    return false;
}