#include <mediacapture/qgstreamermediacapturesession_p.h>
#include <mediacapture/qgstreamermediarecorder_p.h>
#include <mediacapture/qgstreamerimagecapture_p.h>
#include <mediacapture/qgstreamercamera_p.h>
#include <common/qgstpipeline_p.h>
#include <common/qgstreameraudioinput_p.h>
#include <common/qgstreameraudiooutput_p.h>
#include <common/qgstreamervideooutput_p.h>
#include <common/qgst_debug_p.h>

#include <QtMultimedia/private/qthreadlocalrhi_p.h>

#include <QtCore/qloggingcategory.h>
#include <QtCore/private/quniquehandle_p.h>
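// Overview: this session assembles a dynamic capture graph. The camera element and the
// audio input element each feed a tee, and the tees' request pads ("src_%u") fan out to
// the individual branches: the video tee towards the recorder, the video output and the
// image capture, the audio tee towards the recorder and the audio output. Branches are
// linked and unlinked at runtime while the affected pads are held in idle probes.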
QGstElement makeTee(const char *name)
{
    QGstElement tee = QGstElement::createFromFactory("tee", name);
    tee.set("allow-not-linked", true);
    return tee;
}
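// "allow-not-linked" makes the tee's sink pad return GST_FLOW_OK even when none of its
// source pads is linked, which happens here whenever all branches of a stream (recorder,
// output, image capture) are detached or still being set up. Without it the tee would
// return GST_FLOW_NOT_LINKED upstream and stop the pipeline.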
template <typename Functor>
void executeWhilePadsAreIdle(QSpan<QGstPad> pads, Functor &&f)
{
    if (pads.empty())
        return f();
    if (!pads.front())
        return executeWhilePadsAreIdle(pads.subspan(1), f);

    if (pads.size() == 1) {
        pads.front().modifyPipelineInIdleProbe(f);
    } else {
        auto remain = pads.subspan(1);
        pads.front().modifyPipelineInIdleProbe([&] {
            executeWhilePadsAreIdle(remain, f);
        });
    }
}
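// modifyPipelineInIdleProbe() presumably wraps gst_pad_add_probe() with
// GST_PAD_PROBE_TYPE_IDLE, so the callback only runs while no buffer is in flight on that
// pad. Nesting one probe per pad, recursively, means that by the time `f` executes all of
// the listed pads are idle and downstream elements can be (un)linked without racing
// against the streaming threads.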
void setStateOnElements(QSpan<const QGstElement> elements, GstState state)
{
    for (QGstElement element : elements)
        if (element)
            element.setState(state);
}

void finishStateChangeOnElements(QSpan<const QGstElement> elements)
{
    for (QGstElement element : elements)
        if (element)
            element.finishStateChange();
}
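// GStreamer state changes may complete asynchronously (GST_STATE_CHANGE_ASYNC). Splitting
// the two helpers lets callers request the state change from inside an idle probe without
// blocking there, and then finish the transition afterwards; finishStateChange()
// presumably blocks until the element has reached the requested state.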
q23::expected<QPlatformMediaCaptureSession *, QString> QGstreamerMediaCaptureSession::create()
{
    auto videoOutput = QGstreamerVideoOutput::create();
    if (!videoOutput)
        return q23::unexpected{ videoOutput.error() };

    static const auto error = qGstErrorMessageIfElementsNotAvailable("tee", "capsfilter");
    if (error)
        return q23::unexpected{ *error };

    return new QGstreamerMediaCaptureSession(videoOutput.value());
}
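// qGstErrorMessageIfElementsNotAvailable() checks once (note the static) that the required
// element factories can be loaded, so a missing GStreamer plugin surfaces as a readable
// error from create() instead of a failure later when the tees and capsfilters are
// instantiated.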
QGstreamerMediaCaptureSession::QGstreamerMediaCaptureSession(QGstreamerVideoOutput *videoOutput)
    : capturePipeline{ QGstPipeline::create("mediaCapturePipeline") },
      audioSrcPadForEncoder{ gstAudioTee.getRequestPad("src_%u") },
      audioSrcPadForOutput{ gstAudioTee.getRequestPad("src_%u") },
      videoSrcPadForEncoder{ gstVideoTee.getRequestPad("src_%u") },
      videoSrcPadForOutput{ gstVideoTee.getRequestPad("src_%u") },
      videoSrcPadForImageCapture{ gstVideoTee.getRequestPad("src_%u") },
      gstVideoOutput(videoOutput)
{
    gstVideoOutput->setParent(this);

    m_gstVideoSink = new QGstreamerRelayVideoSink(this);
    m_gstVideoSink->setRhi(qEnsureThreadLocalRhi());

    capturePipeline.installMessageFilter(static_cast<QGstreamerBusMessageFilter *>(this));
    capturePipeline.set("message-forward", true);

    // Pin the pipeline to the system clock: gst_pipeline_use_clock() disables automatic
    // clock selection, so elements added later cannot change the clock mid-capture.
    QGstClockHandle systemClock{
        gst_system_clock_obtain(),
        QGstClockHandle::HasRef,
    };
    gst_pipeline_use_clock(capturePipeline.pipeline(), systemClock.get());

    capturePipeline.setState(GST_STATE_PLAYING);
    capturePipeline.dumpGraph("initial");
}
QGstPad QGstreamerMediaCaptureSession::imageCaptureSink()
{
    return m_imageCapture ? m_imageCapture->gstElement().staticPad("sink") : QGstPad{};
}

QGstPad QGstreamerMediaCaptureSession::videoOutputSink()
{
    return gstVideoOutput->gstElement().staticPad("sink");
}

QGstPad QGstreamerMediaCaptureSession::audioOutputSink()
{
    return gstAudioOutput ? gstAudioOutput->gstElement().staticPad("sink") : QGstPad{};
}
QGstreamerMediaCaptureSession::~QGstreamerMediaCaptureSession()
{
    setMediaRecorder(nullptr);
    setImageCapture(nullptr);

    capturePipeline.removeMessageFilter(static_cast<QGstreamerBusMessageFilter *>(this));
    capturePipeline.setStateSync(GST_STATE_READY);
    capturePipeline.setStateSync(GST_STATE_NULL);
}
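// Teardown mirrors construction: the recorder and image capture are detached first, the
// bus message filter is removed (presumably so that errors emitted while shutting down are
// no longer processed), and the pipeline is then taken down synchronously; GST_STATE_NULL
// releases the remaining element resources.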
void QGstreamerMediaCaptureSession::setCamera(QPlatformCamera *platformCamera)
{
    auto *camera = static_cast<QGstreamerCamera *>(platformCamera);
    if (gstCamera == camera)
        return;

    if (gstCamera) {
        QObject::disconnect(gstCameraActiveConnection);
        if (gstCamera->isActive())
            setCameraActive(false);
    }

    gstCamera = camera;

    if (gstCamera) {
        gstCameraActiveConnection =
                QObject::connect(camera, &QPlatformCamera::activeChanged, this,
                                 &QGstreamerMediaCaptureSession::setCameraActive);
        if (gstCamera->isActive())
            setCameraActive(true);
    }

    emit cameraChanged();
}
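// The camera branch is not built here; it is built and torn down in setCameraActive(),
// driven by QPlatformCamera::activeChanged, so the pipeline only contains the camera
// elements while the camera is actually started.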
void QGstreamerMediaCaptureSession::setCameraActive(bool activate)
{
    std::array padsToSync = {
        videoSrcPadForEncoder,
        videoSrcPadForImageCapture,
        videoSrcPadForOutput,
    };

    QGstElement cameraElement = gstCamera->gstElement();
    QGstElement videoOutputElement = gstVideoOutput->gstElement();

    if (activate) {
        gstCamera->setCaptureSession(this);
        capturePipeline.add(gstVideoTee);

        executeWhilePadsAreIdle(padsToSync, [&] {
            capturePipeline.add(cameraElement);
            if (videoOutputElement)
                capturePipeline.add(videoOutputElement);

            if (m_currentRecorderState && m_currentRecorderState->videoSink)
                videoSrcPadForEncoder.link(m_currentRecorderState->videoSink);
            if (videoOutputElement)
                videoSrcPadForOutput.link(videoOutputSink());
            videoSrcPadForImageCapture.link(imageCaptureSink());
            qLinkGstElements(cameraElement, gstVideoTee);

            setStateOnElements({ gstVideoTee, cameraElement, videoOutputElement },
                               GST_STATE_PLAYING);
        });

        finishStateChangeOnElements({ gstVideoTee, cameraElement, videoOutputElement });
    } else {
        executeWhilePadsAreIdle(padsToSync, [&] {
            for (QGstPad &pad : padsToSync)
                pad.unlinkPeer();

            capturePipeline.stopAndRemoveElements(cameraElement, gstVideoTee, videoOutputElement);
        });

        gstCamera->setCaptureSession(nullptr);
    }

    capturePipeline.dumpGraph("camera");
}
QPlatformImageCapture *QGstreamerMediaCaptureSession::imageCapture()
{
    return m_imageCapture;
}
void QGstreamerMediaCaptureSession::setImageCapture(QPlatformImageCapture *imageCapture)
{
    QGstreamerImageCapture *control = static_cast<QGstreamerImageCapture *>(imageCapture);
    if (m_imageCapture == control)
        return;

    videoSrcPadForEncoder.modifyPipelineInIdleProbe([&] {
        if (m_imageCapture) {
            qUnlinkGstElements(gstVideoTee, m_imageCapture->gstElement());
            capturePipeline.stopAndRemoveElements(m_imageCapture->gstElement());
            m_imageCapture->setCaptureSession(nullptr);
        }

        m_imageCapture = control;

        if (m_imageCapture) {
            capturePipeline.add(m_imageCapture->gstElement());
            videoSrcPadForImageCapture.link(imageCaptureSink());
            m_imageCapture->setCaptureSession(this);
            m_imageCapture->gstElement().setState(GST_STATE_PLAYING);
        }
    });

    if (m_imageCapture)
        m_imageCapture->gstElement().finishStateChange();

    capturePipeline.dumpGraph("imageCapture");
    emit imageCaptureChanged();
}
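// dumpGraph() presumably forwards to GST_DEBUG_BIN_TO_DOT_FILE(); it is a no-op unless the
// GST_DEBUG_DUMP_DOT_DIR environment variable is set, in which case a .dot snapshot of the
// current pipeline topology is written and can be rendered with graphviz. The tag
// ("initial", "camera", "imageCapture", ...) becomes part of the file name.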
void QGstreamerMediaCaptureSession::setMediaRecorder(QPlatformMediaRecorder *recorder)
{
    QGstreamerMediaRecorder *control = static_cast<QGstreamerMediaRecorder *>(recorder);
    if (m_mediaRecorder == control)
        return;

    if (m_mediaRecorder)
        m_mediaRecorder->setCaptureSession(nullptr);

    m_mediaRecorder = control;
    if (m_mediaRecorder)
        m_mediaRecorder->setCaptureSession(this);

    emit encoderChanged();
    capturePipeline.dumpGraph("encoder");
}

QPlatformMediaRecorder *QGstreamerMediaCaptureSession::mediaRecorder()
{
    return m_mediaRecorder;
}
// Links the recorder's encodebin and file sink into the pipeline and starts them.
                                                        const QMediaMetaData &metadata)
{
    Q_ASSERT(!m_currentRecorderState);

    std::array padsToSync = {
        audioSrcPadForEncoder,
        videoSrcPadForEncoder,
    };

    executeWhilePadsAreIdle(padsToSync, [&] {
        capturePipeline.add(recorder.encodeBin, recorder.fileSink);
        qLinkGstElements(recorder.encodeBin, recorder.fileSink);

        applyMetaDataToTagSetter(metadata, recorder.encodeBin);

        if (recorder.videoSink) {
            QGstCaps capsFromCamera = gstVideoTee.sink().currentCaps();
            encoderVideoCapsFilter =
                    QGstElement::createFromFactory("capsfilter", "encoderVideoCapsFilter");
            encoderVideoCapsFilter.set("caps", capsFromCamera);
            capturePipeline.add(encoderVideoCapsFilter);
            encoderVideoCapsFilter.src().link(recorder.videoSink);
            videoSrcPadForEncoder.link(encoderVideoCapsFilter.sink());
        }

        if (recorder.audioSink) {
            QGstCaps capsFromInput = gstAudioTee.sink().currentCaps();
            encoderAudioCapsFilter =
                    QGstElement::createFromFactory("capsfilter", "encoderAudioCapsFilter");
            encoderAudioCapsFilter.set("caps", capsFromInput);
            capturePipeline.add(encoderAudioCapsFilter);
            encoderAudioCapsFilter.src().link(recorder.audioSink);
            audioSrcPadForEncoder.link(encoderAudioCapsFilter.sink());
        }

        setStateOnElements({ recorder.encodeBin, recorder.fileSink, encoderVideoCapsFilter,
                             encoderAudioCapsFilter },
                           GST_STATE_PLAYING);

        GstEvent *event = gst_event_new_reconfigure();
        gst_element_send_event(recorder.fileSink.element(), event);
    });

    finishStateChangeOnElements({ recorder.encodeBin, recorder.fileSink, encoderVideoCapsFilter,
                                  encoderAudioCapsFilter });

    m_currentRecorderState = std::move(recorder);
}
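// Two details in the linking above: the capsfilters are primed with the caps currently
// flowing on the tee sink pads, so the new encoder branch negotiates to exactly the format
// the camera and audio input already produce instead of forcing a renegotiation upstream;
// and the GST_EVENT_RECONFIGURE sent to the file sink travels upstream, presumably to make
// the freshly linked branch negotiate caps and allocations before the first buffers arrive.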
// Unlinks and removes the encoder branch, sending EOS into the encodebin first.
{
    std::array padsToSync = {
        audioSrcPadForEncoder,
        videoSrcPadForEncoder,
    };

    executeWhilePadsAreIdle(padsToSync, [&] {
        if (encoderVideoCapsFilter)
            qUnlinkGstElements(gstVideoTee, encoderVideoCapsFilter);
        if (encoderAudioCapsFilter)
            qUnlinkGstElements(gstAudioTee, encoderAudioCapsFilter);

        if (encoderVideoCapsFilter) {
            capturePipeline.stopAndRemoveElements(encoderVideoCapsFilter);
            encoderVideoCapsFilter = {};
        }
        if (encoderAudioCapsFilter) {
            capturePipeline.stopAndRemoveElements(encoderAudioCapsFilter);
            encoderAudioCapsFilter = {};
        }

        m_currentRecorderState->encodeBin.sendEos();
    });

    capturePipeline.stopAndRemoveElements(m_currentRecorderState->encodeBin,
                                          m_currentRecorderState->fileSink);

    m_currentRecorderState = std::nullopt;
}
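// EOS is sent into the encodebin before its elements are removed so that the muxer can
// finalize the recording (write headers, indices and duration). Removing the file sink
// without draining it this way would typically leave a truncated, unplayable file.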
QGstPipeline QGstreamerMediaCaptureSession::pipeline() const
{
    return capturePipeline;
}
void QGstreamerMediaCaptureSession::setAudioInput(QPlatformAudioInput *input)
{
    if (gstAudioInput == input)
        return;

    if (input && !gstAudioInput) {
        // A new audio input is added together with the audio tee and its branches.
        gstAudioInput = static_cast<QGstreamerAudioInput *>(input);
        capturePipeline.add(gstAudioTee);

        std::array padsToSync = {
            audioSrcPadForEncoder,
            audioSrcPadForOutput,
        };

        executeWhilePadsAreIdle(padsToSync, [&] {
            if (m_currentRecorderState && m_currentRecorderState->audioSink)
                audioSrcPadForEncoder.link(m_currentRecorderState->audioSink);
            if (gstAudioOutput) {
                capturePipeline.add(gstAudioOutput->gstElement());
                audioSrcPadForOutput.link(audioOutputSink());
            }

            capturePipeline.add(gstAudioInput->gstElement());
            qLinkGstElements(gstAudioInput->gstElement(), gstAudioTee);

            gstAudioTee.setState(GST_STATE_PLAYING);
            if (gstAudioOutput)
                gstAudioOutput->gstElement().setState(GST_STATE_PLAYING);
            gstAudioInput->gstElement().setState(GST_STATE_PLAYING);
        });
    } else if (!input && gstAudioInput) {
        // The existing audio input is removed together with the audio tee and its branches.
        std::array padsToSync = {
            audioSrcPadForEncoder,
            audioSrcPadForOutput,
        };

        executeWhilePadsAreIdle(padsToSync, [&] {
            for (QGstPad &pad : padsToSync)
                pad.unlinkPeer();

            capturePipeline.stopAndRemoveElements(gstAudioTee);
            if (gstAudioOutput)
                capturePipeline.stopAndRemoveElements(gstAudioOutput->gstElement());
            capturePipeline.stopAndRemoveElements(gstAudioInput->gstElement());
        });

        gstAudioInput = nullptr;
    } else {
        // Both an old and a new input exist: swap the element feeding the audio tee.
        QGstElement oldInputElement = gstAudioInput->gstElement();

        gstAudioTee.sink().modifyPipelineInIdleProbe([&] {
            oldInputElement.sink().unlinkPeer();
            gstAudioInput = static_cast<QGstreamerAudioInput *>(input);
            capturePipeline.add(gstAudioInput->gstElement());
            qLinkGstElements(gstAudioInput->gstElement(), gstAudioTee);

            gstAudioInput->gstElement().setState(GST_STATE_PLAYING);
        });

        capturePipeline.stopAndRemoveElements(oldInputElement);
    }
}
void QGstreamerMediaCaptureSession::setVideoPreview(QVideoSink *sink)
{
    Q_ASSERT(pluggableSink);
}
void QGstreamerMediaCaptureSession::setAudioOutput(QPlatformAudioOutput *output)
{
    if (gstAudioOutput == output)
        return;

    auto *gstOutput = static_cast<QGstreamerAudioOutput *>(output);
    if (gstOutput)
        gstOutput->setAsync(false);

    QGstElement oldOutputElement = gstAudioOutput ? gstAudioOutput->gstElement() : QGstElement{};
    gstAudioOutput = gstOutput;

    if (!gstAudioInput) {
        // Without an audio input there is no audio path in the pipeline yet.
        return;
    }

    audioSrcPadForOutput.modifyPipelineInIdleProbe([&] {
        if (oldOutputElement)
            qUnlinkGstElements(gstAudioTee, oldOutputElement);

        if (gstAudioOutput) {
            capturePipeline.add(gstAudioOutput->gstElement());
            audioSrcPadForOutput.link(audioOutputSink());
            gstAudioOutput->gstElement().setState(GST_STATE_PLAYING);
        }
    });

    if (oldOutputElement)
        capturePipeline.stopAndRemoveElements(oldOutputElement);
}
bool QGstreamerMediaCaptureSession::processBusMessage(const QGstreamerMessage &msg)
{
    switch (msg.type()) {
    case GST_MESSAGE_ERROR:
        return processBusMessageError(msg);

    case GST_MESSAGE_LATENCY:
        return processBusMessageLatency(msg);

    default:
        return false;
    }
}
bool QGstreamerMediaCaptureSession::processBusMessageError(const QGstreamerMessage &msg)
{
    QUniqueGErrorHandle error;
    QUniqueGStringHandle message;
    gst_message_parse_error(msg.message(), &error, &message);

    qWarning() << "QGstreamerMediaCapture: received error from gstreamer" << error << message;
    capturePipeline.dumpGraph("captureError");

    return false;
}
bool QGstreamerMediaCaptureSession::processBusMessageLatency(const QGstreamerMessage &)
{
    capturePipeline.recalculateLatency();
    return false;
}
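// GST_MESSAGE_LATENCY is posted when an element's latency changes, for example after a
// branch has been added or removed; recalculateLatency() presumably forwards to
// gst_bin_recalculate_latency() so that the new worst-case latency is redistributed
// across the pipeline.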