Qt
Internal/contributor docs for the Qt SDK. Note: these are NOT the official API docs; those can be found at https://doc.qt.io/.
avfmediaassetwriter.mm
// Copyright (C) 2016 The Qt Company Ltd.
// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only

#include "avfmediaencoder_p.h"
#include "avfcameradebug_p.h"
#include <avfmetadata_p.h>

#include <QtCore/qmetaobject.h>
#include <QtCore/qatomic.h>

namespace {

bool qt_capture_session_isValid(AVFCameraService *service)
{
    if (!service || !service->session())
        return false;

    AVFCameraSession *session = service->session();
    if (!session->captureSession())
        return false;

    if (!session->videoInput() && !session->audioInput())
        return false;

    return true;
}

enum WriterState
{
    WriterStateIdle,
    WriterStateActive,
    WriterStatePaused,
    WriterStateAborted
};

using AVFAtomicInt64 = QAtomicInteger<qint64>;

} // unnamed namespace

@interface QT_MANGLE_NAMESPACE(AVFMediaAssetWriter) (PrivateAPI)
- (bool)addWriterInputs;
- (void)setQueues;
- (void)updateDuration:(CMTime)newTimeStamp;
- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset;
@end

@implementation QT_MANGLE_NAMESPACE(AVFMediaAssetWriter)
{
@private
    AVFCameraService *m_service;

    AVFScopedPointer<AVAssetWriterInput> m_cameraWriterInput;
    AVFScopedPointer<AVAssetWriterInput> m_audioWriterInput;

    // Queue to write sample buffers:
    AVFScopedPointer<dispatch_queue_t> m_writerQueue;
    // High priority serial queue for video output:
    AVFScopedPointer<dispatch_queue_t> m_videoQueue;
    // Serial queue for audio output:
    AVFScopedPointer<dispatch_queue_t> m_audioQueue;

    AVFScopedPointer<AVAssetWriter> m_assetWriter;

    AVFMediaEncoder *m_delegate;

    bool m_setStartTime;

    QAtomicInt m_state;

    bool m_writeFirstAudioBuffer;

    CMTime m_startTime;
    CMTime m_lastTimeStamp;
    CMTime m_lastVideoTimestamp;
    CMTime m_lastAudioTimestamp;
    CMTime m_timeOffset;
    bool m_adjustTime;

    NSDictionary *m_audioSettings;
    NSDictionary *m_videoSettings;

    AVFAtomicInt64 m_durationInMs;
}

- (id)initWithDelegate:(AVFMediaEncoder *)delegate
{
    Q_ASSERT(delegate);

    if (self = [super init]) {
        m_delegate = delegate;
        m_setStartTime = true;
        m_state.storeRelaxed(WriterStateIdle);
        m_startTime = kCMTimeInvalid;
        m_lastTimeStamp = kCMTimeInvalid;
        m_lastAudioTimestamp = kCMTimeInvalid;
        m_lastVideoTimestamp = kCMTimeInvalid;
        m_timeOffset = kCMTimeInvalid;
        m_adjustTime = false;
        m_durationInMs.storeRelaxed(0);
        m_audioSettings = nil;
        m_videoSettings = nil;
        m_writeFirstAudioBuffer = false;
    }

    return self;
}

- (bool)setupWithFileURL:(NSURL *)fileURL
           cameraService:(AVFCameraService *)service
           audioSettings:(NSDictionary *)audioSettings
           videoSettings:(NSDictionary *)videoSettings
              fileFormat:(QMediaFormat::FileFormat)fileFormat
               transform:(CGAffineTransform)transform
{
    Q_ASSERT(fileURL);

    if (!qt_capture_session_isValid(service)) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid capture session";
        return false;
    }

    m_service = service;
    m_audioSettings = audioSettings;
    m_videoSettings = videoSettings;

    AVFCameraSession *session = m_service->session();

    m_writerQueue.reset(dispatch_queue_create("asset-writer-queue", DISPATCH_QUEUE_SERIAL));
    if (!m_writerQueue) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create an asset writer's queue";
        return false;
    }

    m_videoQueue.reset();
    if (session->videoInput() && session->videoOutput() && session->videoOutput()->videoDataOutput()) {
        m_videoQueue.reset(dispatch_queue_create("video-output-queue", DISPATCH_QUEUE_SERIAL));
        if (!m_videoQueue) {
            qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create video queue";
            return false;
        }
        dispatch_set_target_queue(m_videoQueue, dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0));
    }

    m_audioQueue.reset();
    if (session->audioInput() && session->audioOutput()) {
        m_audioQueue.reset(dispatch_queue_create("audio-output-queue", DISPATCH_QUEUE_SERIAL));
        if (!m_audioQueue) {
            qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create audio queue";
            if (!m_videoQueue)
                return false;
            // But we still can write video!
        }
    }

    NSString *fileType = QDarwinFormatInfo::avFileTypeForContainerFormat(fileFormat);
    m_assetWriter.reset([[AVAssetWriter alloc] initWithURL:fileURL
                                                  fileType:fileType
                                                     error:nil]);
    if (!m_assetWriter) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create asset writer";
        return false;
    }

    if (!m_videoQueue)
        m_writeFirstAudioBuffer = true;

    if (![self addWriterInputs]) {
        m_assetWriter.reset();
        return false;
    }

    if (m_cameraWriterInput)
        m_cameraWriterInput.data().transform = transform;

    [self setMetaData:fileType];

    // Ready to start ...
    return true;
}

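// --- Illustrative sketch (not part of the original file) ---------------------
// Rough outline of how the owning AVFMediaEncoder is expected to drive this
// writer. The real call sites live in avfmediaencoder.mm; 'encoder', 'writer',
// 'url', 'audioDict' and 'videoDict' below are placeholder names, not Qt API:
//
//     QT_MANGLE_NAMESPACE(AVFMediaAssetWriter) *writer =
//         [[QT_MANGLE_NAMESPACE(AVFMediaAssetWriter) alloc] initWithDelegate:encoder];
//     if ([writer setupWithFileURL:url
//                    cameraService:service
//                    audioSettings:audioDict
//                    videoSettings:videoDict
//                       fileFormat:QMediaFormat::MPEG4
//                        transform:CGAffineTransformIdentity]) {
//         [writer start];  // begins writing and starts the capture session if needed
//         // ... recording ...
//         [writer stop];   // flushes the queues, then finishes the file asynchronously
//     }
// -----------------------------------------------------------------------------
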
- (void)setMetaData:(AVFileType)fileType
{
    m_assetWriter.data().metadata = AVFMetaData::toAVMetadataForFormat(m_delegate->metaData(), fileType);
}

- (void)start
{
    [self setQueues];

    m_setStartTime = true;

    m_state.storeRelease(WriterStateActive);

    [m_assetWriter startWriting];
    AVCaptureSession *session = m_service->session()->captureSession();
    if (!session.running)
        [session startRunning];
}

- (void)stop
{
    if (m_state.loadAcquire() != WriterStateActive && m_state.loadAcquire() != WriterStatePaused)
        return;

    if ([m_assetWriter status] != AVAssetWriterStatusWriting
        && [m_assetWriter status] != AVAssetWriterStatusFailed)
        return;

    // Do this here so that -
    // 1. '-abort' should not try calling finishWriting again and
    // 2. async block (see below) will know if recorder control was deleted
    //    before the block's execution:
    m_state.storeRelease(WriterStateIdle);
    // Now, since we have to ensure no sample buffers are
    // appended after a call to finishWriting, we must
    // ensure writer's queue sees this change in m_state
    // _before_ we call finishWriting:
    dispatch_sync(m_writerQueue, ^{});
    // Done, but now we also want to prevent video queue
    // from updating our viewfinder:
    if (m_videoQueue)
        dispatch_sync(m_videoQueue, ^{});

    // Now we're safe to stop:
    [m_assetWriter finishWritingWithCompletionHandler:^{
        // This block is async, so by the time it's executed,
        // it's possible that render control was deleted already ...
        if (m_state.loadAcquire() == WriterStateAborted)
            return;

        AVCaptureSession *session = m_service->session()->captureSession();
        if (session.running)
            [session stopRunning];
        QMetaObject::invokeMethod(m_delegate, "assetWriterFinished", Qt::QueuedConnection);
    }];
}
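// --- Illustrative sketch (not part of the original file) ---------------------
// The empty dispatch_sync blocks above are a drain/barrier idiom: on a *serial*
// queue, dispatch_sync(queue, ^{}) returns only after every block submitted
// before it has finished, so any writer/video block already in flight observes
// the new m_state before finishWriting runs. A minimal standalone illustration:
//
//     dispatch_queue_t q = dispatch_queue_create("example-serial", DISPATCH_QUEUE_SERIAL);
//     dispatch_async(q, ^{ /* work that must complete first */ });
//     dispatch_sync(q, ^{});  // returns only once the block above has run
//     // at this point no earlier block submitted to 'q' is still executing
// -----------------------------------------------------------------------------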

- (void)abort
{
    // -abort is to be called from recorder control's dtor.

    if (m_state.fetchAndStoreRelease(WriterStateAborted) != WriterStateActive) {
        // Not recording, nothing to stop.
        return;
    }

    // From Apple's docs:
    // "To guarantee that all sample buffers are successfully written,
    //  you must ensure that all calls to appendSampleBuffer: and
    //  appendPixelBuffer:withPresentationTime: have returned before
    //  invoking this method."
    //
    // The only way we can ensure this is:
    dispatch_sync(m_writerQueue, ^{});
    // At this point the next block (if any) on the writer's queue
    // will see m_state preventing it from any further processing.
    if (m_videoQueue)
        dispatch_sync(m_videoQueue, ^{});
    // After this point the video queue will not try to modify our
    // viewfinder, so we're safe to delete now.

    [m_assetWriter finishWritingWithCompletionHandler:^{
    }];
}

- (void)pause
{
    if (m_state.loadAcquire() != WriterStateActive)
        return;
    if ([m_assetWriter status] != AVAssetWriterStatusWriting)
        return;

    m_state.storeRelease(WriterStatePaused);
    m_adjustTime = true;
}

- (void)resume
{
    if (m_state.loadAcquire() != WriterStatePaused)
        return;
    if ([m_assetWriter status] != AVAssetWriterStatusWriting)
        return;

    m_state.storeRelease(WriterStateActive);
}

- (void)setStartTimeFrom:(CMSampleBufferRef)sampleBuffer
{
    // Writer's queue only.
    Q_ASSERT(m_setStartTime);
    Q_ASSERT(sampleBuffer);

    if (m_state.loadAcquire() != WriterStateActive)
        return;

    m_durationInMs.storeRelease(0);
    m_startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    m_lastTimeStamp = m_startTime;
    [m_assetWriter startSessionAtSourceTime:m_startTime];
    m_setStartTime = false;
}

- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset
{
    CMItemCount count;
    CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
    CMSampleTimingInfo *timingInfo = (CMSampleTimingInfo *)malloc(sizeof(CMSampleTimingInfo) * count);
    CMSampleBufferGetSampleTimingInfoArray(sample, count, timingInfo, &count);
    for (CMItemCount i = 0; i < count; i++) {
        timingInfo[i].decodeTimeStamp = CMTimeSubtract(timingInfo[i].decodeTimeStamp, offset);
        timingInfo[i].presentationTimeStamp = CMTimeSubtract(timingInfo[i].presentationTimeStamp, offset);
    }
    CMSampleBufferRef updatedBuffer;
    CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, sample, count, timingInfo, &updatedBuffer);
    free(timingInfo);
    return updatedBuffer;
}
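// --- Illustrative sketch (not part of the original file) ---------------------
// What -adjustTime:by: achieves, with concrete numbers: if recording was paused
// for 2 s, m_timeOffset accumulates roughly 2 s, and a captured buffer whose
// original presentation time stamp is 10 s is rewritten to 8 s before it is
// appended, so the output file contains no 2 s gap:
//
//     CMTime pts    = CMTimeMake(10, 1);            // 10 s, as captured
//     CMTime offset = CMTimeMake(2, 1);             // accumulated pause time
//     CMTime fixed  = CMTimeSubtract(pts, offset);  // 8 s, as written to the file
//
// Note the returned buffer is a new copy owned by the caller (Create rule); it
// is released after appending, in the dispatch_async blocks of the capture
// callback below.
// -----------------------------------------------------------------------------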

- (void)writeVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // This code is executed only on a writer's queue.
    Q_ASSERT(sampleBuffer);

    if (m_state.loadAcquire() == WriterStateActive) {
        if (m_setStartTime)
            [self setStartTimeFrom:sampleBuffer];

        if (m_cameraWriterInput.data().readyForMoreMediaData) {
            [self updateDuration:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
            [m_cameraWriterInput appendSampleBuffer:sampleBuffer];
        }
    }
}

- (void)writeAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    Q_ASSERT(sampleBuffer);

    // This code is executed only on a writer's queue.
    if (m_state.loadAcquire() == WriterStateActive) {
        if (m_setStartTime)
            [self setStartTimeFrom:sampleBuffer];

        if (m_audioWriterInput.data().readyForMoreMediaData) {
            [self updateDuration:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
            [m_audioWriterInput appendSampleBuffer:sampleBuffer];
        }
    }
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
        didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
        fromConnection:(AVCaptureConnection *)connection
{
    Q_UNUSED(connection);

    Q_ASSERT(m_service && m_service->session());

    if (m_state.loadAcquire() != WriterStateActive && m_state.loadAcquire() != WriterStatePaused)
        return;

    if ([m_assetWriter status] != AVAssetWriterStatusWriting) {
        if ([m_assetWriter status] == AVAssetWriterStatusFailed) {
            NSError *error = [m_assetWriter error];
            NSString *failureReason = error.localizedFailureReason;
            NSString *suggestion = error.localizedRecoverySuggestion;
            NSString *errorString = suggestion ? [failureReason stringByAppendingString:suggestion] : failureReason;
            QMetaObject::invokeMethod(m_delegate, "assetWriterError",
                                      Qt::QueuedConnection,
                                      Q_ARG(QString, QString::fromNSString(errorString)));
        }
        return;
    }

    if (!CMSampleBufferDataIsReady(sampleBuffer)) {
        qWarning() << Q_FUNC_INFO << "sample buffer is not ready, skipping.";
        return;
    }

    CFRetain(sampleBuffer);

    bool isVideoBuffer = true;
    isVideoBuffer = (captureOutput != m_service->session()->audioOutput());
    if (isVideoBuffer) {
        // Find the renderer control's delegate and invoke its method to
        // show the updated viewfinder frame.
        if (m_service->session()->videoOutput()) {
            NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> *vfDelegate =
                (NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> *)m_service->session()->videoOutput()->captureDelegate();
            if (vfDelegate) {
                AVCaptureOutput *output = nil;
                AVCaptureConnection *connection = nil;
                [vfDelegate captureOutput:output didOutputSampleBuffer:sampleBuffer fromConnection:connection];
            }
        }
    } else {
        if (m_service->session()->audioOutput()) {
            NSObject<AVCaptureAudioDataOutputSampleBufferDelegate> *audioPreviewDelegate =
                (NSObject<AVCaptureAudioDataOutputSampleBufferDelegate> *)m_service->session()->audioPreviewDelegate();
            if (audioPreviewDelegate) {
                AVCaptureOutput *output = nil;
                AVCaptureConnection *connection = nil;
                [audioPreviewDelegate captureOutput:output didOutputSampleBuffer:sampleBuffer fromConnection:connection];
            }
        }
    }

    if (m_state.loadAcquire() != WriterStateActive) {
        CFRelease(sampleBuffer);
        return;
    }

    if (m_adjustTime) {
        CMTime currentTimestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        CMTime lastTimestamp = isVideoBuffer ? m_lastVideoTimestamp : m_lastAudioTimestamp;

        if (!CMTIME_IS_INVALID(lastTimestamp)) {
            if (!CMTIME_IS_INVALID(m_timeOffset))
                currentTimestamp = CMTimeSubtract(currentTimestamp, m_timeOffset);

            CMTime pauseDuration = CMTimeSubtract(currentTimestamp, lastTimestamp);

            if (m_timeOffset.value == 0)
                m_timeOffset = pauseDuration;
            else
                m_timeOffset = CMTimeAdd(m_timeOffset, pauseDuration);
        }
        m_lastVideoTimestamp = kCMTimeInvalid;
        m_adjustTime = false;
    }

    if (m_timeOffset.value > 0) {
        CFRelease(sampleBuffer);
        sampleBuffer = [self adjustTime:sampleBuffer by:m_timeOffset];
    }

    CMTime currentTimestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    CMTime currentDuration = CMSampleBufferGetDuration(sampleBuffer);
    if (currentDuration.value > 0)
        currentTimestamp = CMTimeAdd(currentTimestamp, currentDuration);

    if (isVideoBuffer) {
        m_lastVideoTimestamp = currentTimestamp;
        dispatch_async(m_writerQueue, ^{
            [self writeVideoSampleBuffer:sampleBuffer];
            m_writeFirstAudioBuffer = true;
            CFRelease(sampleBuffer);
        });
    } else if (m_writeFirstAudioBuffer) {
        m_lastAudioTimestamp = currentTimestamp;
        dispatch_async(m_writerQueue, ^{
            [self writeAudioSampleBuffer:sampleBuffer];
            CFRelease(sampleBuffer);
        });
    }
}
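// --- Illustrative sketch (not part of the original file) ---------------------
// How m_timeOffset accumulates across pause/resume, with example numbers
// (ignoring per-buffer durations): the last buffer before -pause has time
// stamp 5 s; after -resume the first buffer arrives at 7 s, so
// pauseDuration = 7 - 5 = 2 s and m_timeOffset becomes 2 s. A second pause
// from an adjusted 7 s until a raw 12 s buffer (adjusted to 10 s) adds another
// 3 s, giving a total offset of 5 s that -adjustTime:by: subtracts from every
// subsequent buffer before it is appended.
// -----------------------------------------------------------------------------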

- (bool)addWriterInputs
{
    Q_ASSERT(m_service && m_service->session());
    Q_ASSERT(m_assetWriter.data());

    AVFCameraSession *session = m_service->session();

    m_cameraWriterInput.reset();
    if (m_videoQueue) {
        Q_ASSERT(session->videoCaptureDevice() && session->videoOutput() && session->videoOutput()->videoDataOutput());
        m_cameraWriterInput.reset([[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo
                                                                 outputSettings:m_videoSettings
                                                               sourceFormatHint:session->videoCaptureDevice().activeFormat.formatDescription]);

        if (m_cameraWriterInput && [m_assetWriter canAddInput:m_cameraWriterInput]) {
            [m_assetWriter addInput:m_cameraWriterInput];
        } else {
            qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to add camera writer input";
            m_cameraWriterInput.reset();
            return false;
        }

        m_cameraWriterInput.data().expectsMediaDataInRealTime = YES;
    }

    m_audioWriterInput.reset();
    if (m_audioQueue) {
        m_audioWriterInput.reset([[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio
                                                                outputSettings:m_audioSettings]);
        if (!m_audioWriterInput) {
            qWarning() << Q_FUNC_INFO << "failed to create audio writer input";
            // But we still can record video.
            if (!m_cameraWriterInput)
                return false;
        } else if ([m_assetWriter canAddInput:m_audioWriterInput]) {
            [m_assetWriter addInput:m_audioWriterInput];
            m_audioWriterInput.data().expectsMediaDataInRealTime = YES;
        } else {
            qWarning() << Q_FUNC_INFO << "failed to add audio writer input";
            m_audioWriterInput.reset();
            if (!m_cameraWriterInput)
                return false;
            // We can (still) write video though ...
        }
    }

    return true;
}
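// --- Illustrative note (not part of the original file) ------------------------
// expectsMediaDataInRealTime = YES tells AVAssetWriterInput that samples come
// from a live capture device, so the writer favors keeping up over batching;
// the sourceFormatHint lets it prepare for the camera's active format up front.
// The guarded-add pattern used above, in its minimal form ('writer' and 'input'
// are placeholder names):
//
//     AVAssetWriterInput *input =
//         [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio
//                                        outputSettings:nil];  // nil = pass samples through unencoded
//     if ([writer canAddInput:input])
//         [writer addInput:input];
// -----------------------------------------------------------------------------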

- (void)setQueues
{
    Q_ASSERT(m_service && m_service->session());
    AVFCameraSession *session = m_service->session();

    if (m_videoQueue) {
        Q_ASSERT(session->videoOutput() && session->videoOutput()->videoDataOutput());
        [session->videoOutput()->videoDataOutput() setSampleBufferDelegate:self queue:m_videoQueue];
    }

    if (m_audioQueue) {
        Q_ASSERT(session->audioOutput());
        [session->audioOutput() setSampleBufferDelegate:self queue:m_audioQueue];
    }
}

- (void)updateDuration:(CMTime)newTimeStamp
{
    Q_ASSERT(CMTimeCompare(m_startTime, kCMTimeInvalid));
    Q_ASSERT(CMTimeCompare(m_lastTimeStamp, kCMTimeInvalid));
    if (CMTimeCompare(newTimeStamp, m_lastTimeStamp) > 0) {

        const CMTime duration = CMTimeSubtract(newTimeStamp, m_startTime);
        if (!CMTimeCompare(duration, kCMTimeInvalid))
            return;

        m_durationInMs.storeRelease(CMTimeGetSeconds(duration) * 1000);
        m_lastTimeStamp = newTimeStamp;

        m_delegate->updateDuration([self durationInMs]);
    }
}

- (qint64)durationInMs
{
    return m_durationInMs.loadAcquire();
}

@end