Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
avfmediaassetwriter.mm
Go to the documentation of this file.
1// Copyright (C) 2016 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
9#include <QtMultimedia/private/qavfcameradebug_p.h>
10#include <qdarwinformatsinfo_p.h>
11#include <avfmetadata_p.h>
12
13#include <QtCore/qmetaobject.h>
14#include <QtCore/qatomic.h>
15#include <QtCore/private/qcore_mac_p.h>
16
17QT_USE_NAMESPACE
18
19namespace {
20
22{
23 if (!service || !service->session())
24 return false;
25
26 AVFCameraSession *session = service->session();
27 if (!session->captureSession())
28 return false;
29
30 if (!session->videoInput() && !session->audioInput())
31 return false;
32
33 return true;
34}
35
43
44using AVFAtomicInt64 = QAtomicInteger<qint64>;
45
46} // unnamed namespace
47
48@interface QT_MANGLE_NAMESPACE(AVFMediaAssetWriter) (PrivateAPI)
49- (bool)addWriterInputs;
50- (void)setQueues;
51- (void)updateDuration:(CMTime)newTimeStamp;
52- (QCFType<CMSampleBufferRef>)adjustTime:(const QCFType<CMSampleBufferRef> &)sample
53 by:(CMTime)offset;
54@end
55
56@implementation QT_MANGLE_NAMESPACE(AVFMediaAssetWriter)
57{
58@private
59 AVFCameraService *m_service;
60
61 AVFScopedPointer<AVAssetWriterInput> m_cameraWriterInput;
62 AVFScopedPointer<AVAssetWriterInput> m_audioWriterInput;
63
64 // Expected audio format description for validating incoming buffers:
65 QCFType<CMFormatDescriptionRef> m_audioFormatDescription;
66
67 // Queue to write sample buffers:
68 AVFScopedPointer<dispatch_queue_t> m_writerQueue;
69 // High priority serial queue for video output:
70 AVFScopedPointer<dispatch_queue_t> m_videoQueue;
71 // Serial queue for audio output:
72 AVFScopedPointer<dispatch_queue_t> m_audioQueue;
73
74 AVFScopedPointer<AVAssetWriter> m_assetWriter;
75
76 AVFMediaEncoder *m_delegate;
77
78 bool m_setStartTime;
79
80 QAtomicInt m_state;
81
82 bool m_writeFirstAudioBuffer;
83
84 CMTime m_startTime;
85 CMTime m_lastTimeStamp;
86 CMTime m_lastVideoTimestamp;
87 CMTime m_lastAudioTimestamp;
88 CMTime m_timeOffset;
89 bool m_adjustTime;
90
91 NSDictionary *m_audioSettings;
92 NSDictionary *m_videoSettings;
93
94 AVFAtomicInt64 m_durationInMs;
95}
96
// Designated initializer. 'delegate' is the encoder that receives
// started/finished/error/duration notifications; it must outlive the writer.
// Returns 'instancetype' (was 'id') per modern Objective-C convention, so the
// compiler and Swift importer know the concrete return type.
- (instancetype)initWithDelegate:(AVFMediaEncoder *)delegate
{
    Q_ASSERT(delegate);

    if (self = [super init]) {
        m_delegate = delegate;
        m_setStartTime = true;
        m_state.storeRelaxed(WriterStateIdle);
        // Remaining ivars are zero-filled by +alloc; a zero-filled CMTime has
        // no valid flag set, so the timestamps start out invalid — presumably
        // relied upon by the pause/resume bookkeeping (TODO confirm).
    }

    return self;
}
109
// Prepares the writer for a new recording: validates the capture session,
// creates the writer/video/audio dispatch queues, creates the AVAssetWriter
// for 'fileURL' in the container matching 'fileFormat', adds the writer
// inputs and attaches metadata. Returns false on any unrecoverable failure
// (a missing audio queue is tolerated when video is available).
- (bool)setupWithFileURL:(NSURL *)fileURL
           cameraService:(AVFCameraService *)service
           audioSettings:(NSDictionary *)audioSettings
           videoSettings:(NSDictionary *)videoSettings
              fileFormat:(QMediaFormat::FileFormat)fileFormat
               transform:(CGAffineTransform)transform
{
    Q_ASSERT(fileURL);

    if (!qt_capture_session_isValid(service)) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid capture session";
        return false;
    }

    m_service = service;
    m_audioSettings = audioSettings;
    m_videoSettings = videoSettings;

    AVFCameraSession *session = m_service->session();

    // Serial queue on which all sample buffers are appended to the writer:
    m_writerQueue.reset(dispatch_queue_create("asset-writer-queue", DISPATCH_QUEUE_SERIAL));
    if (!m_writerQueue) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create an asset writer's queue";
        return false;
    }

    m_videoQueue.reset();
    if (session->videoInput() && session->videoOutput() && session->videoOutput()->videoDataOutput()) {
        m_videoQueue.reset(dispatch_queue_create("video-output-queue", DISPATCH_QUEUE_SERIAL));
        if (!m_videoQueue) {
            qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create video queue";
            return false;
        }
        // Frame delivery is latency-sensitive; target a high-priority global queue.
        dispatch_set_target_queue(m_videoQueue, dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0));
    }

    m_audioQueue.reset();
    if (session->audioInput() && session->audioOutput()) {
        m_audioQueue.reset(dispatch_queue_create("audio-output-queue", DISPATCH_QUEUE_SERIAL));
        if (!m_audioQueue) {
            qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create audio queue";
            if (!m_videoQueue)
                return false;
            // But we still can write video!
        }
    }

    auto fileType = QDarwinFormatInfo::avFileTypeForContainerFormat(fileFormat);
    // Capture the creation error instead of discarding it (was 'error:nil'),
    // so failures (unwritable location, bad URL, ...) are diagnosable.
    NSError *error = nil;
    m_assetWriter.reset([[AVAssetWriter alloc] initWithURL:fileURL
                                                  fileType:fileType
                                                     error:&error]);
    if (!m_assetWriter) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create asset writer:"
                           << QString::fromNSString(error.localizedDescription);
        return false;
    }

    // Without a video track there is no first video frame to gate on, so
    // audio buffers may be written immediately.
    if (!m_videoQueue)
        m_writeFirstAudioBuffer = true;

    if (![self addWriterInputs]) {
        m_assetWriter.reset();
        return false;
    }

    if (m_cameraWriterInput)
        m_cameraWriterInput.data().transform = transform;

    [self setMetaData:fileType];

    // Ready to start ...
    return true;
}
182
// Converts the encoder's Qt metadata into AVFoundation metadata items suited
// to the given container format and attaches them to the asset writer.
- (void)setMetaData:(AVFileType)fileType
{
    const auto avMetadata = AVFMetaData::toAVMetadataForFormat(m_delegate->metaData(), fileType);
    m_assetWriter.data().metadata = avMetadata;
}
187
// Begins the recording: registers self on the capture outputs, marks the
// writer active, starts the AVAssetWriter and (if needed) the capture session.
- (void)start
{
    [self setQueues];

    m_setStartTime = true;

    m_state.storeRelease(WriterStateActive);

    // -startWriting returns NO on failure (the writer then moves to the
    // Failed status, which the capture callback reports to the delegate);
    // previously the result was silently discarded — log it for diagnosis.
    if (![m_assetWriter startWriting]) {
        qCWarning(qLcCamera) << Q_FUNC_INFO << "failed to start writing:"
                             << QString::fromNSString([m_assetWriter error].localizedDescription);
    }

    AVCaptureSession *session = m_service->session()->captureSession();
    if (!session.running)
        [session startRunning];
}
201
// Finishes the recording asynchronously. Flushes the writer and video queues
// before calling -finishWritingWithCompletionHandler: so no sample buffer can
// be appended after the finish request; the statement order below is
// load-bearing — do not reorder.
- (void)stop
{
    // Only an active or paused recording can be stopped.
    if (m_state.loadAcquire() != WriterStateActive && m_state.loadAcquire() != WriterStatePaused)
        return;

    // Failed is accepted too so a broken writer is still torn down.
    if ([m_assetWriter status] != AVAssetWriterStatusWriting
        && [m_assetWriter status] != AVAssetWriterStatusFailed)
        return;

    // Do this here so that -
    // 1. '-abort' should not try calling finishWriting again and
    // 2. async block (see below) will know if recorder control was deleted
    // before the block's execution:
    m_state.storeRelease(WriterStateIdle);
    // Now, since we have to ensure no sample buffers are
    // appended after a call to finishWriting, we must
    // ensure writer's queue sees this change in m_state
    // _before_ we call finishWriting:
    dispatch_sync(m_writerQueue, ^{});
    // Done, but now we also want to prevent video queue
    // from updating our viewfinder:
    if (m_videoQueue)
        dispatch_sync(m_videoQueue, ^{});

    // Now we're safe to stop:
    [m_assetWriter finishWritingWithCompletionHandler:^{
        // This block is async, so by the time it's executed,
        // it's possible that render control was deleted already ...
        if (m_state.loadAcquire() == WriterStateAborted)
            return;

        AVCaptureSession *session = m_service->session()->captureSession();
        if (session.running)
            [session stopRunning];
        QMetaObject::invokeMethod(m_delegate, "assetWriterFinished", Qt::QueuedConnection);
    }];
}
239
// Emergency teardown used when the owning recorder control is being
// destroyed. Unlike -stop, the completion handler deliberately does nothing:
// m_state is already WriterStateAborted, so no delegate callback is safe.
- (void)abort
{
    // -abort is to be called from recorder control's dtor.

    // fetchAndStore both marks the abort and tells us whether a recording
    // was actually in progress.
    if (m_state.fetchAndStoreRelease(WriterStateAborted) != WriterStateActive) {
        // Not recording, nothing to stop.
        return;
    }

    // From Apple's docs:
    // "To guarantee that all sample buffers are successfully written,
    // you must ensure that all calls to appendSampleBuffer: and
    // appendPixelBuffer:withPresentationTime: have returned before
    // invoking this method."
    //
    // The only way we can ensure this is:
    dispatch_sync(m_writerQueue, ^{});
    // At this point next block (if any) on the writer's queue
    // will see m_state preventing it from any further processing.
    if (m_videoQueue)
        dispatch_sync(m_videoQueue, ^{});
    // After this point video queue will not try to modify our
    // viewfider, so we're safe to delete now.

    [m_assetWriter finishWritingWithCompletionHandler:^{
    }];
}
267
// Pauses an active recording. The next captured buffer after -resume triggers
// a timestamp-offset recalculation (m_adjustTime) so the paused interval is
// spliced out of the written timeline.
- (void)pause
{
    const bool isActive = m_state.loadAcquire() == WriterStateActive;
    if (!isActive || [m_assetWriter status] != AVAssetWriterStatusWriting)
        return;

    m_state.storeRelease(WriterStatePaused);
    m_adjustTime = true;
}
278
// Resumes a paused recording; a no-op unless currently paused with the
// asset writer still in the Writing status.
- (void)resume
{
    const bool isPaused = m_state.loadAcquire() == WriterStatePaused;
    if (!isPaused || [m_assetWriter status] != AVAssetWriterStatusWriting)
        return;

    m_state.storeRelease(WriterStateActive);
}
288
// Latches the recording's start time from the first written sample buffer,
// opens the writer session at that time, and notifies the encoder delegate.
- (void)setStartTimeFrom:(CMSampleBufferRef)sampleBuffer
{
    // Writer's queue only.
    Q_ASSERT(m_setStartTime);
    Q_ASSERT(sampleBuffer);

    if (m_state.loadAcquire() != WriterStateActive)
        return;

    QMetaObject::invokeMethod(m_delegate, "assetWriterStarted", Qt::QueuedConnection);

    m_durationInMs.storeRelease(0);
    const CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    m_startTime = presentationTime;
    m_lastTimeStamp = presentationTime;
    [m_assetWriter startSessionAtSourceTime:presentationTime];
    m_setStartTime = false;
}
306
// Returns a copy of 'sample' with every timing entry (presentation and decode
// time) shifted back by 'offset' — used to splice the paused interval out of
// the written timeline. On any failure the original buffer is returned
// unmodified so recording can continue (previously a failing
// CMSampleBufferCreateCopyWithNewTiming left 'updatedBuffer' uninitialized
// and handed garbage to the caller; malloc and the Get/Create statuses were
// also unchecked).
- (QCFType<CMSampleBufferRef>)adjustTime:(const QCFType<CMSampleBufferRef> &)sample
                                      by:(CMTime)offset
{
    // First call with a null array only queries the number of timing entries.
    CMItemCount count = 0;
    if (CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count) != noErr || count <= 0)
        return sample;

    CMSampleTimingInfo *timingInfo = (CMSampleTimingInfo *)malloc(sizeof(CMSampleTimingInfo) * count);
    if (!timingInfo)
        return sample;

    if (CMSampleBufferGetSampleTimingInfoArray(sample, count, timingInfo, &count) != noErr) {
        free(timingInfo);
        return sample;
    }

    for (CMItemCount i = 0; i < count; i++) {
        timingInfo[i].decodeTimeStamp = CMTimeSubtract(timingInfo[i].decodeTimeStamp, offset);
        timingInfo[i].presentationTimeStamp = CMTimeSubtract(timingInfo[i].presentationTimeStamp, offset);
    }

    CMSampleBufferRef updatedBuffer = nil;
    const OSStatus status = CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, sample,
                                                                  count, timingInfo, &updatedBuffer);
    free(timingInfo);
    if (status != noErr)
        return sample;
    // The Create call returns a +1 reference, which the QCFType return value
    // adopts (it takes ownership without retaining).
    return updatedBuffer;
}
324
// Appends one video sample buffer to the camera writer input and updates the
// published duration. Executed only on the writer's queue; buffers arriving
// while the input is not ready are silently dropped.
- (void)writeVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // This code is executed only on a writer's queue.
    Q_ASSERT(sampleBuffer);

    if (m_state.loadAcquire() != WriterStateActive)
        return;

    // The very first buffer establishes the writer session's start time.
    if (m_setStartTime)
        [self setStartTimeFrom:sampleBuffer];

    if (!m_cameraWriterInput.data().readyForMoreMediaData)
        return;

    [self updateDuration:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
    [m_cameraWriterInput appendSampleBuffer:sampleBuffer];
}
340
// Appends one audio sample buffer to the audio writer input, discarding
// buffers whose stream format differs from the expected one (see the
// QTBUG-127444 note below). Executed only on the writer's queue.
- (void)writeAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    Q_ASSERT(sampleBuffer);

    // This code is executed only on a writer's queue.
    if (m_state.loadAcquire() == WriterStateActive) {
        // The very first buffer establishes the writer session's start time.
        if (m_setStartTime)
            [self setStartTimeFrom:sampleBuffer];

        // On macOS, AVCaptureSession may deliver the first audio buffer(s) in a
        // different format (e.g. 24-bit) than subsequent ones (e.g. 32-bit) when
        // using external microphones. Appending a buffer with a mismatched format
        // causes AudioConverter errors (-12737) or noise in the output file.
        // Drop buffers whose format doesn't match the expected one.
        // See: QTBUG-127444, FB16500782.
        if (m_audioFormatDescription) {
            CMFormatDescriptionRef bufferFormat = CMSampleBufferGetFormatDescription(sampleBuffer);
            if (bufferFormat && !CMFormatDescriptionEqual(bufferFormat, m_audioFormatDescription)) {
                // Check if the ASBD (sample rate, channels, bit depth) differs.
                const AudioStreamBasicDescription *expected =
                        CMAudioFormatDescriptionGetStreamBasicDescription(m_audioFormatDescription);
                const AudioStreamBasicDescription *actual =
                        CMAudioFormatDescriptionGetStreamBasicDescription(bufferFormat);
                // Only layout-relevant fields are compared; a mismatch in other
                // ASBD fields (e.g. sample rate alone) does not drop the buffer.
                if (expected && actual
                    && (expected->mBitsPerChannel != actual->mBitsPerChannel
                        || expected->mBytesPerPacket != actual->mBytesPerPacket
                        || expected->mFormatFlags != actual->mFormatFlags)) {
                    qCDebug(qLcCamera) << "Discarding audio buffer with mismatched format:"
                                       << actual->mBitsPerChannel << "bit, expected"
                                       << expected->mBitsPerChannel << "bit";
                    // Update expected format to the new (stable) format so that
                    // subsequent buffers in this format are accepted.
                    m_audioFormatDescription =
                            QCFType<CMFormatDescriptionRef>::constructFromGet(bufferFormat);
                    return;
                }
            }
        }

        // Buffers arriving while the input is not ready are silently dropped.
        if (m_audioWriterInput.data().readyForMoreMediaData) {
            [self updateDuration:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
            [m_audioWriterInput appendSampleBuffer:sampleBuffer];
        }
    }
}
386
// AVCapture{Video,Audio}DataOutput delegate callback, invoked on m_videoQueue
// or m_audioQueue. Forwards the buffer to the viewfinder / audio-preview
// delegate, applies the pause/resume timestamp offset, then dispatches the
// buffer to the serial writer queue for appending.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)buffer
    fromConnection:(AVCaptureConnection *)connection
{
    Q_UNUSED(connection);
    Q_ASSERT(m_service && m_service->session());

    if (m_state.loadAcquire() != WriterStateActive && m_state.loadAcquire() != WriterStatePaused)
        return;

    if ([m_assetWriter status] != AVAssetWriterStatusWriting) {
        if ([m_assetWriter status] == AVAssetWriterStatusFailed) {
            // Combine failure reason and recovery suggestion (when present)
            // into a single message for the encoder delegate.
            NSError *error = [m_assetWriter error];
            NSString *failureReason = error.localizedFailureReason;
            NSString *suggestion = error.localizedRecoverySuggestion;
            NSString *errorString = suggestion ? [failureReason stringByAppendingString:suggestion] : failureReason;
            QMetaObject::invokeMethod(m_delegate, "assetWriterError",
                                      Qt::QueuedConnection,
                                      Q_ARG(QString, QString::fromNSString(errorString)));
        }
        return;
    }

    if (!CMSampleBufferDataIsReady(buffer)) {
        qWarning() << Q_FUNC_INFO << "sample buffer is not ready, skipping.";
        return;
    }

    // take ownership
    auto sampleBuffer = QCFType<CMSampleBufferRef>::constructFromGet(buffer);

    // The buffer came from the audio output iff 'captureOutput' is it;
    // anything else is treated as video.
    bool isVideoBuffer = true;
    isVideoBuffer = (captureOutput != m_service->session()->audioOutput());
    if (isVideoBuffer) {
        // Find renderercontrol's delegate and invoke its method to
        // show updated viewfinder's frame.
        if (m_service->session()->videoOutput()) {
            NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> *vfDelegate =
                (NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> *)m_service->session()->videoOutput()->captureDelegate();
            if (vfDelegate) {
                AVCaptureOutput *output = nil;
                AVCaptureConnection *connection = nil;
                [vfDelegate captureOutput:output didOutputSampleBuffer:sampleBuffer fromConnection:connection];
            }
        }
    } else {
        if (m_service->session()->audioOutput()) {
            NSObject<AVCaptureAudioDataOutputSampleBufferDelegate> *audioPreviewDelegate =
                (NSObject<AVCaptureAudioDataOutputSampleBufferDelegate> *)m_service->session()->audioPreviewDelegate();
            if (audioPreviewDelegate) {
                AVCaptureOutput *output = nil;
                AVCaptureConnection *connection = nil;
                [audioPreviewDelegate captureOutput:output didOutputSampleBuffer:sampleBuffer fromConnection:connection];
            }
        }
    }

    // Paused (or just-stopped) recordings still preview above but write nothing.
    if (m_state.loadAcquire() != WriterStateActive)
        return;

    // First buffer after -resume: fold the length of the paused interval into
    // m_timeOffset so that written timestamps stay contiguous.
    if (m_adjustTime) {
        CMTime currentTimestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        CMTime lastTimestamp = isVideoBuffer ? m_lastVideoTimestamp : m_lastAudioTimestamp;

        if (!CMTIME_IS_INVALID(lastTimestamp)) {
            if (!CMTIME_IS_INVALID(m_timeOffset))
                currentTimestamp = CMTimeSubtract(currentTimestamp, m_timeOffset);

            CMTime pauseDuration = CMTimeSubtract(currentTimestamp, lastTimestamp);

            // NOTE(review): comparing '.value == 0' ignores the timescale and
            // the invalid case; CMTIME_IS_VALID + CMTimeCompare would be more
            // robust — confirm intent before changing.
            if (m_timeOffset.value == 0)
                m_timeOffset = pauseDuration;
            else
                m_timeOffset = CMTimeAdd(m_timeOffset, pauseDuration);
        }
        // NOTE(review): only the video timestamp is invalidated here, even
        // when this first-after-resume buffer is audio — looks asymmetric;
        // verify whether m_lastAudioTimestamp should be reset too.
        m_lastVideoTimestamp = kCMTimeInvalid;
        m_adjustTime = false;
    }

    if (m_timeOffset.value > 0) {
        sampleBuffer = [self adjustTime:sampleBuffer by:m_timeOffset];
    }

    // Record the end time of this buffer (start + duration when available)
    // so the next pause/resume cycle can measure the gap.
    CMTime currentTimestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    CMTime currentDuration = CMSampleBufferGetDuration(sampleBuffer);
    if (currentDuration.value > 0)
        currentTimestamp = CMTimeAdd(currentTimestamp, currentDuration);

    if (isVideoBuffer)
    {
        m_lastVideoTimestamp = currentTimestamp;
        // The block captures the QCFType by copy, presumably retaining the
        // buffer until the writer queue has consumed it — TODO confirm.
        dispatch_async(m_writerQueue, ^{
            [self writeVideoSampleBuffer:sampleBuffer];
            // Audio is gated until the first video frame has been written.
            m_writeFirstAudioBuffer = true;
        });
    } else if (m_writeFirstAudioBuffer) {
        m_lastAudioTimestamp = currentTimestamp;
        dispatch_async(m_writerQueue, ^{
            [self writeAudioSampleBuffer:sampleBuffer];
        });
    }
}
489
// Creates and attaches the AVAssetWriterInputs for the configured tracks.
// Video input failure is fatal (returns false); audio input failure is
// tolerated as long as a video input exists — recording then proceeds
// video-only. The @try/@catch blocks guard against AVFoundation throwing on
// invalid output settings.
- (bool)addWriterInputs
{
    Q_ASSERT(m_service && m_service->session());
    Q_ASSERT(m_assetWriter.data());

    AVFCameraSession *session = m_service->session();

    m_cameraWriterInput.reset();
    // m_videoQueue is only non-null when setup found a usable video pipeline.
    if (m_videoQueue)
    {
        Q_ASSERT(session->videoCaptureDevice() && session->videoOutput() && session->videoOutput()->videoDataOutput());
        @try {
            // The device's active format serves as the source format hint so
            // the writer can configure its compressor up front.
            m_cameraWriterInput.reset([[AVAssetWriterInput alloc]
                    initWithMediaType:AVMediaTypeVideo
                       outputSettings:m_videoSettings
                     sourceFormatHint:session->videoCaptureDevice()
                                              .activeFormat.formatDescription]);
        } @catch (NSException *exception) {
            qCWarning(qLcCamera) << Q_FUNC_INFO << "Failed to create video writer input:"
                                 << QString::fromNSString(exception.reason);
            m_cameraWriterInput.reset();
            return false;
        }

        @try {
            if (m_cameraWriterInput && [m_assetWriter canAddInput:m_cameraWriterInput]) {
                [m_assetWriter addInput:m_cameraWriterInput];
            } else {
                qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to add camera writer input";
                m_cameraWriterInput.reset();
                return false;
            }
        } @catch (NSException *exception) {
            qCWarning(qLcCamera) << Q_FUNC_INFO << "Failed to add video input:"
                                 << QString::fromNSString(exception.reason);
            m_cameraWriterInput.reset();
            return false;
        }

        // Live capture: the input must not buffer indefinitely.
        m_cameraWriterInput.data().expectsMediaDataInRealTime = YES;
    }

    m_audioWriterInput.reset();
    m_audioFormatDescription = nullptr;
    if (m_audioQueue) {
        // Get the audio format description from the capture device to use as
        // sourceFormatHint. This prevents AVAssetWriter from misconfiguring its
        // AudioConverter when the first CMSampleBuffer arrives in a different
        // format than subsequent buffers (observed with external/USB/Bluetooth
        // microphones on Intel Macs). See also: QTBUG-127444, FB16500782.
        CMFormatDescriptionRef audioHint = nil;
        AVCaptureDevice *audioDevice = session->audioCaptureDevice();
        if (audioDevice)
            audioHint = audioDevice.activeFormat.formatDescription;

        @try {
            m_audioWriterInput.reset([[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio
                                                                    outputSettings:m_audioSettings
                                                                  sourceFormatHint:audioHint]);
            // Remember the hint so -writeAudioSampleBuffer: can drop buffers
            // in a mismatching format.
            m_audioFormatDescription = QCFType<CMFormatDescriptionRef>::constructFromGet(audioHint);
        } @catch (NSException *exception) {
            qCWarning(qLcCamera) << Q_FUNC_INFO << "Failed to create audio writer input:"
                                 << QString::fromNSString(exception.reason);
            m_audioWriterInput.reset();
            // But we still can record video.
            if (!m_cameraWriterInput)
                return false;
        }
        if (!m_audioWriterInput) {
            qWarning() << Q_FUNC_INFO << "failed to create audio writer input";
            // But we still can record video.
            if (!m_cameraWriterInput)
                return false;
        } else {
            @try {
                if ([m_assetWriter canAddInput:m_audioWriterInput]) {
                    [m_assetWriter addInput:m_audioWriterInput];
                    m_audioWriterInput.data().expectsMediaDataInRealTime = YES;
                } else {
                    qWarning() << Q_FUNC_INFO << "failed to add audio writer input";
                    m_audioWriterInput.reset();
                    if (!m_cameraWriterInput)
                        return false;
                    // We can (still) write video though ...
                }
            } @catch (NSException *exception) {
                qCWarning(qLcCamera)
                        << Q_FUNC_INFO
                        << "Failed to add audio input:" << QString::fromNSString(exception.reason);
                m_audioWriterInput.reset();
                if (!m_cameraWriterInput)
                    return false;
                // We can (still) write video though ...
            }
        }
    }

    return true;
}
589
// Registers self as the sample-buffer delegate on whichever capture outputs
// were configured during setup; buffers then arrive in
// -captureOutput:didOutputSampleBuffer:fromConnection: on the matching queue.
- (void)setQueues
{
    Q_ASSERT(m_service && m_service->session());
    AVFCameraSession * const session = m_service->session();

    if (m_videoQueue) {
        Q_ASSERT(session->videoOutput() && session->videoOutput()->videoDataOutput());
        AVCaptureVideoDataOutput *videoDataOutput = session->videoOutput()->videoDataOutput();
        [videoDataOutput setSampleBufferDelegate:self queue:m_videoQueue];
    }

    if (m_audioQueue) {
        Q_ASSERT(session->audioOutput());
        [session->audioOutput() setSampleBufferDelegate:self queue:m_audioQueue];
    }
}
605
// Writer's queue only: advances the published recording duration when a
// buffer with a later timestamp than anything seen so far is being appended,
// and pushes the new value to the encoder delegate.
- (void)updateDuration:(CMTime)newTimeStamp
{
    Q_ASSERT(CMTIME_IS_VALID(m_startTime));
    Q_ASSERT(CMTIME_IS_VALID(m_lastTimeStamp));

    // Out-of-order or duplicate timestamps leave the duration untouched.
    if (CMTimeCompare(newTimeStamp, m_lastTimeStamp) <= 0)
        return;

    const CMTime elapsed = CMTimeSubtract(newTimeStamp, m_startTime);
    if (CMTIME_IS_INVALID(elapsed))
        return;

    m_durationInMs.storeRelease(CMTimeGetSeconds(elapsed) * 1000);
    m_lastTimeStamp = newTimeStamp;

    m_delegate->updateDuration([self durationInMs]);
}
622
// Thread-safe accessor for the duration last published by -updateDuration:;
// safe to call from any thread (acquire pairs with the writer queue's release).
- (qint64)durationInMs
{
    return m_durationInMs.loadAcquire();
}
627
628@end
AVFCameraSession * session() const
bool qt_capture_session_isValid(AVFCameraService *service)