Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
avfmediaassetwriter.mm
Go to the documentation of this file.
1// Copyright (C) 2016 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
9#include <QtMultimedia/private/qavfcameradebug_p.h>
10#include <qdarwinformatsinfo_p.h>
11#include <avfmetadata_p.h>
12
13#include <QtCore/qmetaobject.h>
14#include <QtCore/qatomic.h>
15#include <QtCore/private/qcore_mac_p.h>
16
17QT_USE_NAMESPACE
18
19namespace {
20
22{
23 if (!service || !service->session())
24 return false;
25
26 AVFCameraSession *session = service->session();
27 if (!session->captureSession())
28 return false;
29
30 if (!session->videoInput() && !session->audioInput())
31 return false;
32
33 return true;
34}
35
43
44using AVFAtomicInt64 = QAtomicInteger<qint64>;
45
46} // unnamed namespace
47
// Private helper API used only inside this file by AVFMediaAssetWriter.
@interface QT_MANGLE_NAMESPACE(AVFMediaAssetWriter) (PrivateAPI)
// Creates and attaches the video/audio AVAssetWriterInputs; false on failure.
- (bool)addWriterInputs;
// Installs self as sample-buffer delegate on the capture data outputs.
- (void)setQueues;
// Recomputes the recorded duration from the latest presentation time stamp.
- (void)updateDuration:(CMTime)newTimeStamp;
// Returns a copy of 'sample' with its timing info shifted back by 'offset'
// (used to splice paused periods out of the output timeline).
- (QCFType<CMSampleBufferRef>)adjustTime:(const QCFType<CMSampleBufferRef> &)sample
                     by:(CMTime)offset;
@end
55
@implementation QT_MANGLE_NAMESPACE(AVFMediaAssetWriter)
{
@private
    // Capture service we record from; not owned by this object.
    AVFCameraService *m_service;

    AVFScopedPointer<AVAssetWriterInput> m_cameraWriterInput;
    AVFScopedPointer<AVAssetWriterInput> m_audioWriterInput;

    // Pending audio buffer waiting for format stabilization:
    QCFType<CMSampleBufferRef> m_pendingAudioBuffer;
    bool m_audioFormatStabilized;

    // Queue to write sample buffers:
    AVFScopedPointer<dispatch_queue_t> m_writerQueue;
    // High priority serial queue for video output:
    AVFScopedPointer<dispatch_queue_t> m_videoQueue;
    // Serial queue for audio output:
    AVFScopedPointer<dispatch_queue_t> m_audioQueue;

    AVFScopedPointer<AVAssetWriter> m_assetWriter;

    // Receives assetWriterStarted/Finished/Error notifications; not owned.
    AVFMediaEncoder *m_delegate;

    // True until the first sample buffer establishes the session start time.
    bool m_setStartTime;

    // Holds a WriterState value; shared between the client thread and the
    // dispatch queues, hence the atomic.
    QAtomicInt m_state;

    // Gate: audio buffers are dropped until the first video buffer has been
    // queued for writing (set immediately when there is no video queue).
    bool m_writeFirstAudioBuffer;

    CMTime m_startTime;
    CMTime m_lastTimeStamp;
    CMTime m_lastVideoTimestamp;
    CMTime m_lastAudioTimestamp;
    // Accumulated pause duration, subtracted from timestamps after resume:
    CMTime m_timeOffset;
    bool m_adjustTime;

    NSDictionary *m_audioSettings;
    NSDictionary *m_videoSettings;

    // Recorded duration in milliseconds, readable from any thread.
    AVFAtomicInt64 m_durationInMs;
}
97
// Designated initializer: stores the (asserted non-null) encoder delegate
// and puts the writer into its idle state.
- (id)initWithDelegate:(AVFMediaEncoder *)delegate
{
    Q_ASSERT(delegate);

    self = [super init];
    if (self) {
        m_delegate = delegate;
        m_setStartTime = true;
        m_state.storeRelaxed(WriterStateIdle);
    }

    return self;
}
110
// Prepares a new recording: validates the capture session, creates the
// writer/video/audio dispatch queues, the AVAssetWriter and its inputs,
// then applies the video transform and meta data.
// Returns false when anything required for recording cannot be set up;
// an audio-only failure is tolerated as long as video can still be written.
- (bool)setupWithFileURL:(NSURL *)fileURL
           cameraService:(AVFCameraService *)service
           audioSettings:(NSDictionary *)audioSettings
           videoSettings:(NSDictionary *)videoSettings
              fileFormat:(QMediaFormat::FileFormat)fileFormat
               transform:(CGAffineTransform)transform
{
    Q_ASSERT(fileURL);

    if (!qt_capture_session_isValid(service)) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid capture session";
        return false;
    }

    m_service = service;
    m_audioSettings = audioSettings;
    m_videoSettings = videoSettings;

    AVFCameraSession *session = m_service->session();

    m_writerQueue.reset(dispatch_queue_create("asset-writer-queue", DISPATCH_QUEUE_SERIAL));
    if (!m_writerQueue) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create an asset writer's queue";
        return false;
    }

    m_videoQueue.reset();
    if (session->videoInput() && session->videoOutput() && session->videoOutput()->videoDataOutput()) {
        m_videoQueue.reset(dispatch_queue_create("video-output-queue", DISPATCH_QUEUE_SERIAL));
        if (!m_videoQueue) {
            qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create video queue";
            return false;
        }
        // Viewfinder updates ride on this queue, so give it a high priority.
        dispatch_set_target_queue(m_videoQueue, dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0));
    }

    m_audioQueue.reset();
    if (session->audioInput() && session->audioOutput()) {
        m_audioQueue.reset(dispatch_queue_create("audio-output-queue", DISPATCH_QUEUE_SERIAL));
        if (!m_audioQueue) {
            qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create audio queue";
            if (!m_videoQueue)
                return false;
            // But we still can write video!
        }
    }

    auto fileType = QDarwinFormatInfo::avFileTypeForContainerFormat(fileFormat);
    // Was: error:nil discarded the failure diagnostic; capture it for the log.
    NSError *writerError = nil;
    m_assetWriter.reset([[AVAssetWriter alloc] initWithURL:fileURL
                                                  fileType:fileType
                                                     error:&writerError]);
    if (!m_assetWriter) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create asset writer"
                           << (writerError ? QString::fromNSString(writerError.localizedDescription)
                                           : QString());
        return false;
    }

    // Audio-only recording: no need to wait for the first video frame.
    if (!m_videoQueue)
        m_writeFirstAudioBuffer = true;

    if (![self addWriterInputs]) {
        m_assetWriter.reset();
        return false;
    }

    if (m_cameraWriterInput)
        m_cameraWriterInput.data().transform = transform;

    [self setMetaData:fileType];

    // Ready to start ...
    return true;
}
183
// Converts the encoder's meta data for the container format and attaches
// it to the asset writer.
- (void)setMetaData:(AVFileType)fileType
{
    const auto metadataItems = AVFMetaData::toAVMetadataForFormat(m_delegate->metaData(), fileType);
    m_assetWriter.data().metadata = metadataItems;
}
188
// Starts a recording: installs the sample-buffer delegates, resets the
// per-recording state, marks the writer active and starts the capture
// session if needed. The writer's session itself is opened lazily by
// -setStartTimeFrom: when the first sample buffer arrives.
- (void)start
{
    [self setQueues];

    m_setStartTime = true;
    m_audioFormatStabilized = false;
    m_pendingAudioBuffer = nullptr;

    // Publish the active state before buffers start flowing on the queues.
    m_state.storeRelease(WriterStateActive);

    [m_assetWriter startWriting];
    AVCaptureSession *session = m_service->session()->captureSession();
    if (!session.running)
        [session startRunning];
}
204
// Finishes the current recording asynchronously: flips the state to idle,
// drains the writer and video queues so no buffer can be appended after
// this point, then lets AVAssetWriter finalize the file and notifies the
// delegate from the completion handler.
- (void)stop
{
    if (m_state.loadAcquire() != WriterStateActive && m_state.loadAcquire() != WriterStatePaused)
        return;

    if ([m_assetWriter status] != AVAssetWriterStatusWriting
        && [m_assetWriter status] != AVAssetWriterStatusFailed)
        return;

    // Do this here so that -
    // 1. '-abort' should not try calling finishWriting again and
    // 2. async block (see below) will know if recorder control was deleted
    // before the block's execution:
    m_state.storeRelease(WriterStateIdle);
    // Now, since we have to ensure no sample buffers are
    // appended after a call to finishWriting, we must
    // ensure writer's queue sees this change in m_state
    // _before_ we call finishWriting:
    dispatch_sync(m_writerQueue, ^{});
    // Done, but now we also want to prevent video queue
    // from updating our viewfinder:
    if (m_videoQueue)
        dispatch_sync(m_videoQueue, ^{});

    // Now we're safe to stop:
    [m_assetWriter finishWritingWithCompletionHandler:^{
        // This block is async, so by the time it's executed,
        // it's possible that render control was deleted already ...
        if (m_state.loadAcquire() == WriterStateAborted)
            return;

        AVCaptureSession *session = m_service->session()->captureSession();
        if (session.running)
            [session stopRunning];
        QMetaObject::invokeMethod(m_delegate, "assetWriterFinished", Qt::QueuedConnection);
    }];
}
242
// Aborts an active recording without notifying the delegate.
- (void)abort
{
    // -abort is to be called from recorder control's dtor.

    if (m_state.fetchAndStoreRelease(WriterStateAborted) != WriterStateActive) {
        // Not recording, nothing to stop.
        // NOTE(review): a WriterStatePaused recording also takes this early
        // return, so a paused file is never finalized by -abort — confirm
        // this is intended.
        return;
    }

    // From Apple's docs:
    // "To guarantee that all sample buffers are successfully written,
    // you must ensure that all calls to appendSampleBuffer: and
    // appendPixelBuffer:withPresentationTime: have returned before
    // invoking this method."
    //
    // The only way we can ensure this is:
    dispatch_sync(m_writerQueue, ^{});
    // At this point next block (if any) on the writer's queue
    // will see m_state preventing it from any further processing.
    if (m_videoQueue)
        dispatch_sync(m_videoQueue, ^{});
    // After this point video queue will not try to modify our
    // viewfinder, so we're safe to delete now.

    [m_assetWriter finishWritingWithCompletionHandler:^{
    }];
}
270
// Suspends writing: buffers are dropped until -resume, and the next
// written buffer triggers the pause-gap timestamp adjustment.
- (void)pause
{
    const bool recording = m_state.loadAcquire() == WriterStateActive
                           && [m_assetWriter status] == AVAssetWriterStatusWriting;
    if (!recording)
        return;

    m_state.storeRelease(WriterStatePaused);
    m_adjustTime = true;
}
281
// Resumes a paused recording by re-publishing the active state.
- (void)resume
{
    const bool paused = m_state.loadAcquire() == WriterStatePaused
                        && [m_assetWriter status] == AVAssetWriterStatusWriting;
    if (!paused)
        return;

    m_state.storeRelease(WriterStateActive);
}
291
// Records the first buffer's presentation time stamp as the session start
// time, opens the writer's session at it, resets the published duration and
// tells the delegate that recording has started.
- (void)setStartTimeFrom:(CMSampleBufferRef)sampleBuffer
{
    // Writer's queue only.
    Q_ASSERT(m_setStartTime);
    Q_ASSERT(sampleBuffer);

    if (m_state.loadAcquire() != WriterStateActive)
        return;

    QMetaObject::invokeMethod(m_delegate, "assetWriterStarted", Qt::QueuedConnection);

    m_durationInMs.storeRelease(0);
    m_startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    m_lastTimeStamp = m_startTime;
    [m_assetWriter startSessionAtSourceTime:m_startTime];
    m_setStartTime = false;
}
309
// Returns a copy of 'sample' whose timing info (decode and presentation
// time stamps) is shifted back by 'offset'; used after -resume to make the
// output timeline continuous across a pause.
// On any Core Media failure the input sample is returned unadjusted, so
// the pipeline keeps running (the original code returned an *uninitialized*
// buffer pointer when CMSampleBufferCreateCopyWithNewTiming failed).
- (QCFType<CMSampleBufferRef>)adjustTime:(const QCFType<CMSampleBufferRef> &)sample
                     by:(CMTime)offset
{
    // First query the number of timing entries, then fetch them all.
    CMItemCount count = 0;
    if (CMSampleBufferGetSampleTimingInfoArray(sample, 0, nullptr, &count) != noErr || count <= 0)
        return sample;

    CMSampleTimingInfo *timingInfo =
        static_cast<CMSampleTimingInfo *>(malloc(sizeof(CMSampleTimingInfo) * count));
    if (!timingInfo)
        return sample;

    if (CMSampleBufferGetSampleTimingInfoArray(sample, count, timingInfo, &count) != noErr) {
        free(timingInfo);
        return sample;
    }

    for (CMItemCount i = 0; i < count; ++i) {
        // CMTimeSubtract on an invalid time stamp yields an invalid time,
        // preserving the original entry's (in)validity.
        timingInfo[i].decodeTimeStamp = CMTimeSubtract(timingInfo[i].decodeTimeStamp, offset);
        timingInfo[i].presentationTimeStamp = CMTimeSubtract(timingInfo[i].presentationTimeStamp, offset);
    }

    CMSampleBufferRef updatedBuffer = nullptr;
    const OSStatus status = CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, sample,
                                                                  count, timingInfo, &updatedBuffer);
    free(timingInfo);
    if (status != noErr || !updatedBuffer)
        return sample;

    // QCFType adopts the +1 reference returned by the Create function.
    return updatedBuffer;
}
327
// Appends one video sample buffer to the camera writer input, establishing
// the session start time from the first buffer if necessary.
// This code is executed only on a writer's queue.
- (void)writeVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    Q_ASSERT(sampleBuffer);

    if (m_state.loadAcquire() != WriterStateActive)
        return;

    if (m_setStartTime)
        [self setStartTimeFrom:sampleBuffer];

    if (!m_cameraWriterInput.data().readyForMoreMediaData)
        return;

    [self updateDuration:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
    [m_cameraWriterInput appendSampleBuffer:sampleBuffer];
}
343
// Appends one audio sample buffer to the audio writer input, holding the
// first buffer back until the device's audio format has stabilized (see
// the detailed comment below).
- (void)writeAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    Q_ASSERT(sampleBuffer);

    // This code is executed only on a writer's queue.
    if (m_state.loadAcquire() == WriterStateActive) {
        if (m_setStartTime)
            [self setStartTimeFrom:sampleBuffer];

        // On macOS, AVCaptureSession may deliver the first audio buffer(s) in a
        // transient format that differs from the stable format the device settles
        // on shortly after. Appending a transient-format buffer configures
        // AVAssetWriterInput's internal AudioConverter for the wrong format,
        // causing error -12737 or audible noise once subsequent buffers arrive in
        // the real (stable) format. This has been observed with built-in, USB,
        // and Bluetooth microphones.
        // To avoid this, we wait for format stabilization: the first buffer whose
        // CMFormatDescription matches the previous one is considered stable.
        // At that point we append both the held buffer and the current one.
        // See: QTBUG-127444, FB16500782.
        if (!m_audioFormatStabilized) {
            if (!m_pendingAudioBuffer) {
                // First buffer ever: hold it and wait for the next one.
                m_pendingAudioBuffer = QCFType<CMSampleBufferRef>::constructFromGet(sampleBuffer);
                return;
            }

            CMFormatDescriptionRef pendingFormat =
                CMSampleBufferGetFormatDescription(m_pendingAudioBuffer);
            CMFormatDescriptionRef currentFormat = CMSampleBufferGetFormatDescription(sampleBuffer);

            if (pendingFormat && currentFormat
                && CMFormatDescriptionEqual(pendingFormat, currentFormat)) {
                m_audioFormatStabilized = true;
                // Append the held buffer first, then fall through to append
                // the current one.
                if (m_audioWriterInput.data().readyForMoreMediaData) {
                    [self updateDuration:CMSampleBufferGetPresentationTimeStamp(
                                             m_pendingAudioBuffer)];
                    [m_audioWriterInput appendSampleBuffer:m_pendingAudioBuffer];
                }
                m_pendingAudioBuffer = nullptr;
            } else {
                // Format still changing: drop the held buffer, hold this one.
                qCDebug(qLcCamera) << "Audio format changed, discarding pending buffer";
                m_pendingAudioBuffer = QCFType<CMSampleBufferRef>::constructFromGet(sampleBuffer);
                return;
            }
        }

        if (m_audioWriterInput.data().readyForMoreMediaData) {
            [self updateDuration:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
            [m_audioWriterInput appendSampleBuffer:sampleBuffer];
        }
    }
}
398
// AVCapture{Video,Audio}DataOutputSampleBufferDelegate entry point, invoked
// on m_videoQueue or m_audioQueue. Reports writer failures to the delegate,
// forwards the buffer to the viewfinder/audio-preview delegates, applies
// the accumulated pause offset, and hands the buffer to the writer's queue.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)buffer
  fromConnection:(AVCaptureConnection *)connection
{
    Q_UNUSED(connection);
    Q_ASSERT(m_service && m_service->session());

    if (m_state.loadAcquire() != WriterStateActive && m_state.loadAcquire() != WriterStatePaused)
        return;

    if ([m_assetWriter status] != AVAssetWriterStatusWriting) {
        if ([m_assetWriter status] == AVAssetWriterStatusFailed) {
            // Writer died: compose a human-readable reason for the delegate.
            NSError *error = [m_assetWriter error];
            NSString *failureReason = error.localizedFailureReason;
            NSString *suggestion = error.localizedRecoverySuggestion;
            NSString *errorString = suggestion ? [failureReason stringByAppendingString:suggestion] : failureReason;
            QMetaObject::invokeMethod(m_delegate, "assetWriterError",
                                      Qt::QueuedConnection,
                                      Q_ARG(QString, QString::fromNSString(errorString)));
        }
        return;
    }

    if (!CMSampleBufferDataIsReady(buffer)) {
        qWarning() << Q_FUNC_INFO << "sample buffer is not ready, skipping.";
        return;
    }

    // take ownership
    auto sampleBuffer = QCFType<CMSampleBufferRef>::constructFromGet(buffer);

    // Audio vs video is decided by which capture output delivered the buffer.
    bool isVideoBuffer = true;
    isVideoBuffer = (captureOutput != m_service->session()->audioOutput());
    if (isVideoBuffer) {
        // Find renderercontrol's delegate and invoke its method to
        // show updated viewfinder's frame.
        if (m_service->session()->videoOutput()) {
            NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> *vfDelegate =
                (NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> *)m_service->session()->videoOutput()->captureDelegate();
            if (vfDelegate) {
                AVCaptureOutput *output = nil;
                AVCaptureConnection *connection = nil;
                [vfDelegate captureOutput:output didOutputSampleBuffer:sampleBuffer fromConnection:connection];
            }
        }
    } else {
        if (m_service->session()->audioOutput()) {
            NSObject<AVCaptureAudioDataOutputSampleBufferDelegate> *audioPreviewDelegate =
                (NSObject<AVCaptureAudioDataOutputSampleBufferDelegate> *)m_service->session()->audioPreviewDelegate();
            if (audioPreviewDelegate) {
                AVCaptureOutput *output = nil;
                AVCaptureConnection *connection = nil;
                [audioPreviewDelegate captureOutput:output didOutputSampleBuffer:sampleBuffer fromConnection:connection];
            }
        }
    }

    // While paused, the preview above still runs but nothing is written.
    if (m_state.loadAcquire() != WriterStateActive)
        return;

    // First buffer after -resume: fold the pause gap into m_timeOffset.
    if (m_adjustTime) {
        CMTime currentTimestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        CMTime lastTimestamp = isVideoBuffer ? m_lastVideoTimestamp : m_lastAudioTimestamp;

        if (!CMTIME_IS_INVALID(lastTimestamp)) {
            if (!CMTIME_IS_INVALID(m_timeOffset))
                currentTimestamp = CMTimeSubtract(currentTimestamp, m_timeOffset);

            CMTime pauseDuration = CMTimeSubtract(currentTimestamp, lastTimestamp);

            // NOTE(review): presumably relies on zero-initialized ivar
            // storage so that .value == 0 means "no offset yet" — confirm.
            if (m_timeOffset.value == 0)
                m_timeOffset = pauseDuration;
            else
                m_timeOffset = CMTimeAdd(m_timeOffset, pauseDuration);
        }
        // NOTE(review): only the video timestamp is invalidated here, while
        // the gap above may have been computed from m_lastAudioTimestamp —
        // confirm the asymmetry is intended.
        m_lastVideoTimestamp = kCMTimeInvalid;
        m_adjustTime = false;
    }

    if (m_timeOffset.value > 0) {
        sampleBuffer = [self adjustTime:sampleBuffer by:m_timeOffset];
    }

    // Track the end time of this buffer (start + duration when available).
    CMTime currentTimestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    CMTime currentDuration = CMSampleBufferGetDuration(sampleBuffer);
    if (currentDuration.value > 0)
        currentTimestamp = CMTimeAdd(currentTimestamp, currentDuration);

    if (isVideoBuffer)
    {
        m_lastVideoTimestamp = currentTimestamp;
        dispatch_async(m_writerQueue, ^{
            [self writeVideoSampleBuffer:sampleBuffer];
            // Audio may be written from now on; video leads the timeline.
            m_writeFirstAudioBuffer = true;
        });
    } else if (m_writeFirstAudioBuffer) {
        m_lastAudioTimestamp = currentTimestamp;
        dispatch_async(m_writerQueue, ^{
            [self writeAudioSampleBuffer:sampleBuffer];
        });
    }
}
501
// Creates the AVAssetWriterInputs matching the queues prepared in
// -setupWithFileURL:... and attaches them to the asset writer.
// Returns false when video input setup fails, or when audio setup fails
// and there is no video input to fall back to.
- (bool)addWriterInputs
{
    Q_ASSERT(m_service && m_service->session());
    Q_ASSERT(m_assetWriter.data());

    AVFCameraSession *session = m_service->session();

    m_cameraWriterInput.reset();
    if (m_videoQueue)
    {
        Q_ASSERT(session->videoCaptureDevice() && session->videoOutput() && session->videoOutput()->videoDataOutput());
        // AVAssetWriterInput may raise NSException on unsupported output
        // settings, hence the @try/@catch around creation and attachment.
        @try {
            m_cameraWriterInput.reset([[AVAssetWriterInput alloc]
                initWithMediaType:AVMediaTypeVideo
                outputSettings:m_videoSettings
                sourceFormatHint:session->videoCaptureDevice()
                    .activeFormat.formatDescription]);
        } @catch (NSException *exception) {
            qCWarning(qLcCamera) << Q_FUNC_INFO << "Failed to create video writer input:"
                                 << QString::fromNSString(exception.reason);
            m_cameraWriterInput.reset();
            return false;
        }

        @try {
            if (m_cameraWriterInput && [m_assetWriter canAddInput:m_cameraWriterInput]) {
                [m_assetWriter addInput:m_cameraWriterInput];
            } else {
                qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to add camera writer input";
                m_cameraWriterInput.reset();
                return false;
            }
        } @catch (NSException *exception) {
            qCWarning(qLcCamera) << Q_FUNC_INFO << "Failed to add video input:"
                                 << QString::fromNSString(exception.reason);
            m_cameraWriterInput.reset();
            return false;
        }

        m_cameraWriterInput.data().expectsMediaDataInRealTime = YES;
    }

    m_audioWriterInput.reset();
    if (m_audioQueue) {
        @try {
            m_audioWriterInput.reset([[AVAssetWriterInput alloc]
                initWithMediaType:AVMediaTypeAudio
                outputSettings:m_audioSettings]);
        } @catch (NSException *exception) {
            qCWarning(qLcCamera) << Q_FUNC_INFO << "Failed to create audio writer input:"
                                 << QString::fromNSString(exception.reason);
            m_audioWriterInput.reset();
            // But we still can record video.
            if (!m_cameraWriterInput)
                return false;
        }
        if (!m_audioWriterInput) {
            qWarning() << Q_FUNC_INFO << "failed to create audio writer input";
            // But we still can record video.
            if (!m_cameraWriterInput)
                return false;
        } else {
            @try {
                if ([m_assetWriter canAddInput:m_audioWriterInput]) {
                    [m_assetWriter addInput:m_audioWriterInput];
                    m_audioWriterInput.data().expectsMediaDataInRealTime = YES;
                } else {
                    qWarning() << Q_FUNC_INFO << "failed to add audio writer input";
                    m_audioWriterInput.reset();
                    if (!m_cameraWriterInput)
                        return false;
                    // We can (still) write video though ...
                }
            } @catch (NSException *exception) {
                qCWarning(qLcCamera)
                    << Q_FUNC_INFO
                    << "Failed to add audio input:" << QString::fromNSString(exception.reason);
                m_audioWriterInput.reset();
                if (!m_cameraWriterInput)
                    return false;
                // We can (still) write video though ...
            }
        }
    }

    return true;
}
589
// Registers self as the sample-buffer delegate on the video/audio capture
// data outputs, dispatching their callbacks onto our serial queues.
- (void)setQueues
{
    Q_ASSERT(m_service && m_service->session());
    AVFCameraSession *session = m_service->session();

    if (m_videoQueue) {
        Q_ASSERT(session->videoOutput() && session->videoOutput()->videoDataOutput());
        auto videoDataOutput = session->videoOutput()->videoDataOutput();
        [videoDataOutput setSampleBufferDelegate:self queue:m_videoQueue];
    }

    if (m_audioQueue) {
        Q_ASSERT(session->audioOutput());
        auto audioDataOutput = session->audioOutput();
        [audioDataOutput setSampleBufferDelegate:self queue:m_audioQueue];
    }
}
605
// Publishes the new recorded duration (in ms) when a later time stamp
// arrives, and forwards it to the delegate. Writer's queue only.
- (void)updateDuration:(CMTime)newTimeStamp
{
    Q_ASSERT(CMTIME_IS_VALID(m_startTime));
    Q_ASSERT(CMTIME_IS_VALID(m_lastTimeStamp));

    // Ignore buffers that do not advance the timeline.
    if (CMTimeCompare(newTimeStamp, m_lastTimeStamp) <= 0)
        return;

    const CMTime duration = CMTimeSubtract(newTimeStamp, m_startTime);
    if (CMTIME_IS_INVALID(duration))
        return;

    m_durationInMs.storeRelease(CMTimeGetSeconds(duration) * 1000);
    m_lastTimeStamp = newTimeStamp;

    m_delegate->updateDuration([self durationInMs]);
}
622
// Thread-safe read of the current recorded duration in milliseconds.
- (qint64)durationInMs
{
    const qint64 milliseconds = m_durationInMs.loadAcquire();
    return milliseconds;
}
627
628@end
AVFCameraSession * session() const
bool qt_capture_session_isValid(AVFCameraService *service)