Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
avfcamerarenderer.mm
Go to the documentation of this file.
1// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
5#include "private/qcameradevice_p.h"
6#include "private/qvideoframe_p.h"
10#include <QtMultimedia/private/qavfcameradebug_p.h>
11#include "avfcamera_p.h"
12#include <avfvideosink_p.h>
13#include <avfvideobuffer_p.h>
14#include "qvideosink.h"
15#include <QtMultimedia/private/qavfhelpers_p.h>
16
17#include <rhi/qrhi.h>
18
19#import <AVFoundation/AVFoundation.h>
20
21#ifdef Q_OS_IOS
22#include <QtGui/qopengl.h>
23#endif
24
25#include <QtCore/qmetaobject.h>
26#include <QtMultimedia/qvideoframeformat.h>
27
28QT_USE_NAMESPACE
29
// Private Objective-C delegate that receives sample buffers from an
// AVCaptureVideoDataOutput and forwards each one to an AVFCameraRenderer.
@interface AVFCaptureFramesDelegate : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>

// Stores the renderer that will consume the captured frames.
// NOTE(review): the renderer is held as a raw pointer — the renderer's
// lifetime must outlive this delegate (it releases us in its destructor).
- (AVFCaptureFramesDelegate *) initWithRenderer:(AVFCameraRenderer*)renderer;

// AVCaptureVideoDataOutputSampleBufferDelegate callback; invoked on the
// dispatch queue the renderer registers via setSampleBufferDelegate:queue:.
- (void) captureOutput:(AVCaptureOutput *)captureOutput
        didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
        fromConnection:(AVCaptureConnection *)connection;

@end
39
@implementation AVFCaptureFramesDelegate
{
@private
    AVFCameraRenderer *m_renderer;
}

// Remember the renderer that consumes the frames we receive.
- (AVFCaptureFramesDelegate *) initWithRenderer:(AVFCameraRenderer*)renderer
{
    self = [super init];
    if (self)
        m_renderer = renderer;

    return self;
}

// Called by AVFoundation on the delegate queue for every captured sample
// buffer; wraps the image buffer and hands it to the renderer.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
        didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
        fromConnection:(AVCaptureConnection *)connection
{
    Q_UNUSED(captureOutput);
    Q_UNUSED(connection);

    // NB: on iOS captureOutput/connection can be nil (when recording a video -
    // avfmediaassetwriter).

    auto videoBuffer = std::make_unique<AVFVideoBuffer>(
            m_renderer,
            QCFType<CVImageBufferRef>::constructFromGet(
                    CMSampleBufferGetImageBuffer(sampleBuffer)));

    const auto frameFormat = videoBuffer->videoFormat();
    if (!frameFormat.isValid())
        return;

    QVideoFrame videoFrame = QVideoFramePrivate::createFrame(std::move(videoBuffer), frameFormat);
    m_renderer->syncHandleViewfinderFrame(videoFrame);
}

@end
79
// Sets up the Objective-C sample-buffer delegate and reacts to device
// rotation by updating the capture connection's orientation.
AVFCameraRenderer::AVFCameraRenderer(QObject *parent)
    : QObject(parent)
{
    connect(&m_orientationHandler, &QVideoOutputOrientationHandler::orientationChanged,
            this, &AVFCameraRenderer::deviceOrientationChanged);

    // Released in the destructor together with the video data output.
    m_viewfinderFramesDelegate = [[AVFCaptureFramesDelegate alloc] initWithRenderer:this];
}
87
{
    // Detach our output from the capture session before releasing everything.
    // NOTE(review): m_cameraSession is dereferenced unconditionally here —
    // presumably configureAVCaptureSession() always runs first; verify.
    [m_cameraSession->captureSession() removeOutput:m_videoDataOutput];
    [m_viewfinderFramesDelegate release];
    [m_videoDataOutput release];

    // The delegate queue was created with dispatch_queue_create() and must be
    // balanced with dispatch_release() (non-ARC file).
    if (m_delegateQueue)
        dispatch_release(m_delegateQueue);
#ifdef Q_OS_IOS
    // Core Video texture cache is a CF object; release only if it was created.
    if (m_textureCache)
        CFRelease(m_textureCache);
#endif
}
101
{
    // Pushes the preview layer's current bounds to the sink as the native
    // frame size, then refreshes the capture connection's orientation.
    // NOTE(review): the signature line (orig. 102) and orig. line 109 were
    // collapsed in this capture — verify against upstream before editing.
    QMutexLocker lock(&m_vfMutex);

    // ### This is a hack, need to use a reliable way to determine the size and not use the preview layer
    if (m_layer)
        m_sink->setNativeSize(QSize(m_layer.bounds.size.width, m_layer.bounds.size.height));
    deviceOrientationChanged();
}
112
{
    // Applies pixel-format output settings to the video data output, derived
    // from the session's current camera format when one is set.
    if (!m_videoDataOutput)
        return;

    // Prefer the pixel format of the currently selected camera format, if any.
    if (m_cameraSession) {
        const auto format = m_cameraSession->cameraFormat();
        if (format.pixelFormat() != QVideoFrameFormat::Format_Invalid)
            setPixelFormat(format.pixelFormat(), QCameraFormatPrivate::getColorRange(format));
    }

    // If no output settings set from above,
    // it's most likely because the rhi is OpenGL
    // and the pixel format is not BGRA.
    // We force this in the base class implementation
    if (!m_outputSettings)
    // NOTE(review): the statement body of this 'if' (orig. line 129 —
    // presumably a call into the base-class setOutputSettings(), per the
    // comment above) is missing from this capture; restore from upstream.

    if (m_outputSettings)
        m_videoDataOutput.videoSettings = m_outputSettings;
}
134
{
    // Wires this renderer into the given camera session: creates the video
    // data output, its serial delegate queue, and adds the output to the
    // AVCaptureSession. (Signature line collapsed in this capture.)
    m_cameraSession = cameraSession;
    // Re-establish the capture connection whenever the session signals that
    // its connections can be (re)configured.
    connect(m_cameraSession, SIGNAL(readyToConfigureConnections()),
            this, SLOT(updateCaptureConnection()));

    m_needsHorizontalMirroring = false;

    m_videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];

    // Configure video output
    // Sample buffers are delivered on this serial queue, not the main thread.
    m_delegateQueue = dispatch_queue_create("vf_queue", nullptr);
    [m_videoDataOutput
        setSampleBufferDelegate:m_viewfinderFramesDelegate
        queue:m_delegateQueue];

    [m_cameraSession->captureSession() addOutput:m_videoDataOutput];
}
153
// Configures mirroring on the video connection after the session (re)creates
// its connections, and refreshes the orientation.
void AVFCameraRenderer::updateCaptureConnection()
{
    AVCaptureConnection *connection = [m_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
    if (connection == nil || !m_cameraSession->videoCaptureDevice())
        return;

    const bool frontFacing =
            m_cameraSession->videoCaptureDevice().position == AVCaptureDevicePositionFront;

    // Frames of front-facing cameras should be mirrored horizontally (it's the
    // default when using AVCaptureVideoPreviewLayer but not with
    // AVCaptureVideoDataOutput).
    if (connection.isVideoMirroringSupported)
        connection.videoMirrored = frontFacing;

    // If the connection doesn't support mirroring, we'll have to do it ourselves.
    m_needsHorizontalMirroring = !connection.isVideoMirrored && frontFacing;

    deviceOrientationChanged();
}
171
// Translates a UI rotation angle into an AVCaptureVideoOrientation and applies
// it to the video connection. A negative angle means "use the handler's
// current orientation"; 180 degrees is deliberately ignored so the previous
// orientation is kept.
void AVFCameraRenderer::deviceOrientationChanged(int angle)
{
    AVCaptureConnection *connection = [m_videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
    if (connection == nil || !m_cameraSession->videoCaptureDevice())
        return;

    if (!connection.supportsVideoOrientation)
        return;

    if (angle < 0)
        angle = m_orientationHandler.currentOrientation();

    AVCaptureVideoOrientation orientation;
    if (angle == 90)
        orientation = AVCaptureVideoOrientationLandscapeRight;
    else if (angle == 270)
        orientation = AVCaptureVideoOrientationLandscapeLeft;
    else if (angle == 180)
        return; // keep the last orientation, don't do anything
    else
        orientation = AVCaptureVideoOrientationPortrait;

    connection.videoOrientation = orientation;
}
201
//can be called from non main thread
// Receives a freshly captured frame (from the capture delegate's dispatch
// queue), publishes it to signal subscribers immediately, and coalesces
// delivery to the sink: only the most recent frame is kept, and a queued
// invocation of handleViewfinderFrame() is scheduled only when no frame is
// already pending.
void AVFCameraRenderer::syncHandleViewfinderFrame(const QVideoFrame &frame)
{
    // Emitted on the caller's (delegate queue) thread, before taking the lock.
    Q_EMIT newViewfinderFrame(frame);

    QMutexLocker lock(&m_vfMutex);

    if (!m_lastViewfinderFrame.isValid()) {
        // Cached once; resolved by name so it can be invoked with a queued
        // connection onto this object's thread.
        static QMetaMethod handleViewfinderFrameSlot = metaObject()->method(
            metaObject()->indexOfMethod("handleViewfinderFrame()"));

        handleViewfinderFrameSlot.invoke(this, Qt::QueuedConnection);
    }

    // Overwrite any pending frame — intermediate frames are dropped on purpose.
    m_lastViewfinderFrame = frame;
}
218
{
    // Accessor for the AVCaptureVideoDataOutput owned by this renderer.
    return m_videoDataOutput;
}
223
{
    // Accessor for the Objective-C sample-buffer delegate owned by this renderer.
    return m_viewfinderFramesDelegate;
}
228
{
    // Re-attaches our delegate and queue to the video data output —
    // presumably used after another component temporarily replaced the
    // sample-buffer delegate; verify against callers.
    [m_videoDataOutput setSampleBufferDelegate:m_viewfinderFramesDelegate queue:m_delegateQueue];
}
233
// Queued counterpart of syncHandleViewfinderFrame(): runs on this object's
// thread, takes the pending frame under the lock, then delivers it to the
// sink without holding the lock.
void AVFCameraRenderer::handleViewfinderFrame()
{
    QVideoFrame pendingFrame;
    {
        QMutexLocker lock(&m_vfMutex);
        pendingFrame = m_lastViewfinderFrame;
        m_lastViewfinderFrame = QVideoFrame();
    }

    if (m_sink && pendingFrame.isValid()) {
        // frame.setMirroed(m_needsHorizontalMirroring) ?
        m_sink->setVideoFrame(pendingFrame);
    }
}
248
// Builds and stores the Core Video output settings dictionary for the video
// data output from the requested Qt pixel format and color range.
// - On the OpenGL rhi backend only BGRA is usable, so anything else is
//   rejected with a warning and the settings are left untouched.
// - An unconvertible format falls back to kCVPixelFormatType_32BGRA.
// - A convertible but unsupported format (e.g. a format taken from a
//   different camera) is rejected with a warning.
void AVFCameraRenderer::setPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat,
                                       QVideoFrameFormat::ColorRange colorRange)
{
    if (rhi() && rhi()->backend() == QRhi::OpenGLES2) {
        if (pixelFormat != QVideoFrameFormat::Format_BGRA8888)
            qWarning() << "OpenGL rhi backend only supports 32BGRA pixel format.";
        return;
    }

    // Default to 32BGRA pixel formats on the viewfinder, in case the requested
    // format can't be used (shouldn't happen unless the developer sets a wrong
    // camera format on the camera).
    auto cvPixelFormat = QAVFHelpers::toCVPixelFormat(pixelFormat, colorRange);
    if (cvPixelFormat == CvPixelFormatInvalid) {
        cvPixelFormat = kCVPixelFormatType_32BGRA;
        // The warning previously claimed the fallback was ARGB32; the actual
        // fallback format is 32BGRA.
        qWarning() << "QCamera::setCameraFormat: couldn't convert requested pixel format, using 32BGRA";
    }

    // The output only accepts formats it advertises as available.
    bool isSupported = false;
    for (NSNumber *supportedFormat in m_videoDataOutput.availableVideoCVPixelFormatTypes) {
        if ([supportedFormat unsignedIntValue] == cvPixelFormat) {
            isSupported = true;
            break;
        }
    }

    if (!isSupported) {
        qWarning() << "QCamera::setCameraFormat: requested pixel format not supported. Did you use a camera format from another camera?";
        return;
    }

    NSDictionary *outputSettings = @{
        (NSString *)
        kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithUnsignedInt:cvPixelFormat]
#ifndef Q_OS_IOS // On iOS this key generates a warning about 'unsupported key'.
        ,
        (NSString *)kCVPixelBufferMetalCompatibilityKey : @YES
#endif // Q_OS_IOS
    };
    // Replace any previously stored settings (manual retain/release file).
    if (m_outputSettings)
        [m_outputSettings release];
    m_outputSettings = [[NSDictionary alloc] initWithDictionary:outputSettings];
}
293
294#include "moc_avfcamerarenderer_p.cpp"
Q_FORWARD_DECLARE_OBJC_CLASS(AVFCaptureFramesDelegate)
void reconfigure() override
AVCaptureVideoDataOutput * videoDataOutput() const
void setOutputSettings() override
~AVFCameraRenderer() override
AVFCaptureFramesDelegate * captureDelegate() const
void configureAVCaptureSession(AVFCameraSession *cameraSession)
void setPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat, QVideoFrameFormat::ColorRange colorRange)
void resetCaptureDelegate() const
virtual void setOutputSettings()
AVFVideoSink * m_sink