Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
avfmediaassetwriter.mm
Go to the documentation of this file.
1// Copyright (C) 2016 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
9#include <QtMultimedia/private/qavfcameradebug_p.h>
10#include <qdarwinformatsinfo_p.h>
11#include <avfmetadata_p.h>
12
13#include <QtCore/qmetaobject.h>
14#include <QtCore/qatomic.h>
15#include <QtCore/private/qcore_mac_p.h>
16
17QT_USE_NAMESPACE
18
19namespace {
20
22{
23 if (!service || !service->session())
24 return false;
25
26 AVFCameraSession *session = service->session();
27 if (!session->captureSession())
28 return false;
29
30 if (!session->videoInput() && !session->audioInput())
31 return false;
32
33 return true;
34}
35
43
// 64-bit atomic integer used to publish the recording duration (in ms)
// from the writer's queue to readers on other threads.
using AVFAtomicInt64 = QAtomicInteger<qint64>;
45
46} // unnamed namespace
47
// Private helpers used only inside this file; not part of the public
// interface declared in the header.
@interface QT_MANGLE_NAMESPACE(AVFMediaAssetWriter) (PrivateAPI)
// Creates the video/audio AVAssetWriterInputs and attaches them to the
// asset writer; returns false if no usable input could be added.
- (bool)addWriterInputs;
// Installs self as the sample-buffer delegate on the capture data outputs.
- (void)setQueues;
// Recomputes the published duration from newTimeStamp and notifies the delegate.
- (void)updateDuration:(CMTime)newTimeStamp;
// Returns a copy of 'sample' with its timing info shifted back by 'offset'
// (used to splice paused intervals out of the recording).
- (QCFType<CMSampleBufferRef>)adjustTime:(const QCFType<CMSampleBufferRef> &)sample
        by:(CMTime)offset;
@end
55
@implementation QT_MANGLE_NAMESPACE(AVFMediaAssetWriter)
{
@private
    // Owning capture service; provides the session, inputs and outputs.
    AVFCameraService *m_service;

    AVFScopedPointer<AVAssetWriterInput> m_cameraWriterInput;
    AVFScopedPointer<AVAssetWriterInput> m_audioWriterInput;

    // Queue to write sample buffers:
    AVFScopedPointer<dispatch_queue_t> m_writerQueue;
    // High priority serial queue for video output:
    AVFScopedPointer<dispatch_queue_t> m_videoQueue;
    // Serial queue for audio output:
    AVFScopedPointer<dispatch_queue_t> m_audioQueue;

    AVFScopedPointer<AVAssetWriter> m_assetWriter;

    // Encoder receiving started/finished/error/duration notifications.
    AVFMediaEncoder *m_delegate;

    // True until the first sample buffer establishes the session start time.
    bool m_setStartTime;

    // One of the WriterState* values; read and written across queues.
    QAtomicInt m_state;

    // When no video is recorded, audio buffers are written immediately;
    // otherwise the first audio buffer waits for the first video buffer.
    bool m_writeFirstAudioBuffer;

    CMTime m_startTime;          // session start time (first buffer's PTS)
    CMTime m_lastTimeStamp;      // last timestamp folded into the duration
    CMTime m_lastVideoTimestamp;
    CMTime m_lastAudioTimestamp;
    CMTime m_timeOffset;         // accumulated pause time subtracted from buffers
    bool m_adjustTime;           // set by -pause; triggers offset recomputation

    NSDictionary *m_audioSettings;
    NSDictionary *m_videoSettings;

    // Recording duration in milliseconds, readable from any thread.
    AVFAtomicInt64 m_durationInMs;
}
// Initializer: stores the (mandatory) encoder delegate and puts the
// writer into the idle state.
- (id)initWithDelegate:(AVFMediaEncoder *)delegate
{
    Q_ASSERT(delegate);

    self = [super init];
    if (!self)
        return nil;

    m_delegate = delegate;
    m_setStartTime = true;
    m_state.storeRelaxed(WriterStateIdle);

    return self;
}
106
// Configures the writer for a new recording: creates the dispatch queues,
// the AVAssetWriter targeting 'fileURL'/'fileFormat', the writer inputs
// and the output metadata. Returns false if a mandatory component cannot
// be created. Audio-only and video-only recordings are both supported:
// losing the audio queue is tolerated as long as video is available.
- (bool)setupWithFileURL:(NSURL *)fileURL
           cameraService:(AVFCameraService *)service
           audioSettings:(NSDictionary *)audioSettings
           videoSettings:(NSDictionary *)videoSettings
              fileFormat:(QMediaFormat::FileFormat)fileFormat
               transform:(CGAffineTransform)transform
{
    Q_ASSERT(fileURL);

    if (!qt_capture_session_isValid(service)) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid capture session";
        return false;
    }

    m_service = service;
    m_audioSettings = audioSettings;
    m_videoSettings = videoSettings;

    AVFCameraSession *session = m_service->session();

    m_writerQueue.reset(dispatch_queue_create("asset-writer-queue", DISPATCH_QUEUE_SERIAL));
    if (!m_writerQueue) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create an asset writer's queue";
        return false;
    }

    m_videoQueue.reset();
    if (session->videoInput() && session->videoOutput() && session->videoOutput()->videoDataOutput()) {
        m_videoQueue.reset(dispatch_queue_create("video-output-queue", DISPATCH_QUEUE_SERIAL));
        if (!m_videoQueue) {
            qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create video queue";
            return false;
        }
        // Video frames must be delivered promptly to avoid dropped frames.
        dispatch_set_target_queue(m_videoQueue, dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0));
    }

    m_audioQueue.reset();
    if (session->audioInput() && session->audioOutput()) {
        m_audioQueue.reset(dispatch_queue_create("audio-output-queue", DISPATCH_QUEUE_SERIAL));
        if (!m_audioQueue) {
            qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create audio queue";
            if (!m_videoQueue)
                return false;
            // But we still can write video!
        }
    }

    auto fileType = QDarwinFormatInfo::avFileTypeForContainerFormat(fileFormat);
    // Capture the creation error instead of discarding it (previously
    // error:nil), so a failure is diagnosable from the logs.
    NSError *writerError = nil;
    m_assetWriter.reset([[AVAssetWriter alloc] initWithURL:fileURL
                                                  fileType:fileType
                                                     error:&writerError]);
    if (!m_assetWriter) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to create asset writer"
                           << (writerError ? QString::fromNSString(writerError.localizedDescription)
                                           : QString());
        return false;
    }

    // Without a video stream the first audio buffer may be written
    // immediately. Assign (rather than only set to true) so stale state
    // from a previous setup of this writer cannot leak into this one.
    m_writeFirstAudioBuffer = !m_videoQueue;

    if (![self addWriterInputs]) {
        m_assetWriter.reset();
        return false;
    }

    if (m_cameraWriterInput)
        m_cameraWriterInput.data().transform = transform;

    [self setMetaData:fileType];

    // Ready to start ...
    return true;
}
179
// Converts the delegate's meta data into AVMetadataItems appropriate for
// 'fileType' and attaches them to the asset writer.
- (void)setMetaData:(AVFileType)fileType
{
    m_assetWriter.data().metadata = AVFMetaData::toAVMetadataForFormat(m_delegate->metaData(), fileType);
}
184
// Begins recording: installs the sample-buffer delegates, marks the
// writer active, starts the asset writer and (if not already running)
// the capture session. The writer session's start time is established
// later, by the first sample buffer (see -setStartTimeFrom:).
- (void)start
{
    [self setQueues];

    m_setStartTime = true;

    // storeRelease so the capture queues observe the state change.
    m_state.storeRelease(WriterStateActive);

    [m_assetWriter startWriting];
    AVCaptureSession *session = m_service->session()->captureSession();
    if (!session.running)
        [session startRunning];
}
198
// Stops an active or paused recording. Transitions to idle first, then
// drains the writer and video queues so that no sample buffer can be
// appended after finishWriting, and finally finishes the asset writer
// asynchronously, stopping the capture session and notifying the
// delegate from the completion handler.
- (void)stop
{
    if (m_state.loadAcquire() != WriterStateActive && m_state.loadAcquire() != WriterStatePaused)
        return;

    if ([m_assetWriter status] != AVAssetWriterStatusWriting
        && [m_assetWriter status] != AVAssetWriterStatusFailed)
        return;

    // Do this here so that -
    // 1. '-abort' should not try calling finishWriting again and
    // 2. async block (see below) will know if recorder control was deleted
    //    before the block's execution:
    m_state.storeRelease(WriterStateIdle);
    // Now, since we have to ensure no sample buffers are
    // appended after a call to finishWriting, we must
    // ensure writer's queue sees this change in m_state
    // _before_ we call finishWriting:
    dispatch_sync(m_writerQueue, ^{});
    // Done, but now we also want to prevent video queue
    // from updating our viewfinder:
    if (m_videoQueue)
        dispatch_sync(m_videoQueue, ^{});

    // Now we're safe to stop:
    [m_assetWriter finishWritingWithCompletionHandler:^{
        // This block is async, so by the time it's executed,
        // it's possible that render control was deleted already ...
        if (m_state.loadAcquire() == WriterStateAborted)
            return;

        AVCaptureSession *session = m_service->session()->captureSession();
        if (session.running)
            [session stopRunning];
        QMetaObject::invokeMethod(m_delegate, "assetWriterFinished", Qt::QueuedConnection);
    }];
}
236
// Aborts recording during teardown: marks the state aborted, drains the
// writer/video queues so no buffer is appended afterwards, and finishes
// the writer with an empty completion handler (no delegate callback —
// the delegate is being destroyed).
- (void)abort
{
    // -abort is to be called from recorder control's dtor.

    if (m_state.fetchAndStoreRelease(WriterStateAborted) != WriterStateActive) {
        // Not recording, nothing to stop.
        return;
    }

    // From Apple's docs:
    // "To guarantee that all sample buffers are successfully written,
    //  you must ensure that all calls to appendSampleBuffer: and
    //  appendPixelBuffer:withPresentationTime: have returned before
    //  invoking this method."
    //
    // The only way we can ensure this is:
    dispatch_sync(m_writerQueue, ^{});
    // At this point next block (if any) on the writer's queue
    // will see m_state preventing it from any further processing.
    if (m_videoQueue)
        dispatch_sync(m_videoQueue, ^{});
    // After this point video queue will not try to modify our
    // viewfinder, so we're safe to delete now.

    [m_assetWriter finishWritingWithCompletionHandler:^{
    }];
}
264
// Pauses recording: buffers arriving while paused are skipped, and the
// next written buffer triggers a timestamp adjustment (m_adjustTime) so
// the paused interval is spliced out of the output file.
- (void)pause
{
    const bool recording = m_state.loadAcquire() == WriterStateActive
                           && [m_assetWriter status] == AVAssetWriterStatusWriting;
    if (!recording)
        return;

    m_state.storeRelease(WriterStatePaused);
    m_adjustTime = true;
}
275
// Resumes a previously paused recording.
- (void)resume
{
    const bool paused = m_state.loadAcquire() == WriterStatePaused
                        && [m_assetWriter status] == AVAssetWriterStatusWriting;
    if (!paused)
        return;

    m_state.storeRelease(WriterStateActive);
}
285
// Establishes the writer session's start time from the first sample
// buffer's presentation timestamp, resets the published duration, and
// notifies the delegate that recording has actually started.
- (void)setStartTimeFrom:(CMSampleBufferRef)sampleBuffer
{
    // Writer's queue only.
    Q_ASSERT(m_setStartTime);
    Q_ASSERT(sampleBuffer);

    if (m_state.loadAcquire() != WriterStateActive)
        return;

    QMetaObject::invokeMethod(m_delegate, "assetWriterStarted", Qt::QueuedConnection);

    m_durationInMs.storeRelease(0);
    m_startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    m_lastTimeStamp = m_startTime;
    [m_assetWriter startSessionAtSourceTime:m_startTime];
    m_setStartTime = false;
}
303
// Returns a copy of 'sample' whose decode and presentation timestamps
// are shifted back by 'offset' (the accumulated pause duration), so the
// output file contains no gap for the paused interval. On any failure
// (no timing entries, allocation failure, copy failure) the original
// sample is returned unmodified instead of an uninitialized pointer.
- (QCFType<CMSampleBufferRef>)adjustTime:(const QCFType<CMSampleBufferRef> &)sample
        by:(CMTime)offset
{
    // First call queries the number of timing entries only.
    CMItemCount count = 0;
    if (CMSampleBufferGetSampleTimingInfoArray(sample, 0, nullptr, &count) != noErr || count <= 0)
        return sample;

    CMSampleTimingInfo *timingInfo =
        static_cast<CMSampleTimingInfo *>(malloc(sizeof(CMSampleTimingInfo) * count));
    if (!timingInfo)
        return sample;

    CMSampleBufferGetSampleTimingInfoArray(sample, count, timingInfo, &count);
    for (CMItemCount i = 0; i < count; ++i) {
        timingInfo[i].decodeTimeStamp = CMTimeSubtract(timingInfo[i].decodeTimeStamp, offset);
        timingInfo[i].presentationTimeStamp = CMTimeSubtract(timingInfo[i].presentationTimeStamp, offset);
    }

    CMSampleBufferRef updatedBuffer = nullptr;
    const OSStatus status = CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, sample,
                                                                  count, timingInfo, &updatedBuffer);
    free(timingInfo);

    // The original code returned 'updatedBuffer' uninitialized when the
    // copy failed; fall back to the unadjusted sample instead.
    if (status != noErr || !updatedBuffer)
        return sample;

    return updatedBuffer;
}
321
// Appends a video sample buffer to the camera writer input and folds its
// presentation timestamp into the published duration. Buffers are
// silently dropped while the writer is not active or the input is not
// ready for more data.
- (void)writeVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // This code is executed only on a writer's queue.
    Q_ASSERT(sampleBuffer);

    if (m_state.loadAcquire() == WriterStateActive) {
        // The very first buffer establishes the session start time.
        if (m_setStartTime)
            [self setStartTimeFrom:sampleBuffer];

        if (m_cameraWriterInput.data().readyForMoreMediaData) {
            [self updateDuration:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
            [m_cameraWriterInput appendSampleBuffer:sampleBuffer];
        }
    }
}
337
// Appends an audio sample buffer to the audio writer input and folds its
// presentation timestamp into the published duration. Buffers are
// silently dropped while the writer is not active or the input is not
// ready for more data.
- (void)writeAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    Q_ASSERT(sampleBuffer);

    // This code is executed only on a writer's queue.
    if (m_state.loadAcquire() == WriterStateActive) {
        // The very first buffer establishes the session start time.
        if (m_setStartTime)
            [self setStartTimeFrom:sampleBuffer];

        if (m_audioWriterInput.data().readyForMoreMediaData) {
            [self updateDuration:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
            [m_audioWriterInput appendSampleBuffer:sampleBuffer];
        }
    }
}
353
// AVCapture{Video,Audio}DataOutput delegate callback. Runs on the video
// or audio queue. Forwards the buffer to the preview delegates, applies
// the pause-time offset, and dispatches the buffer to the writer's queue
// for appending. Reports asset writer failures to the delegate.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
        didOutputSampleBuffer:(CMSampleBufferRef)buffer
        fromConnection:(AVCaptureConnection *)connection
{
    Q_UNUSED(connection);
    Q_ASSERT(m_service && m_service->session());

    if (m_state.loadAcquire() != WriterStateActive && m_state.loadAcquire() != WriterStatePaused)
        return;

    if ([m_assetWriter status] != AVAssetWriterStatusWriting) {
        if ([m_assetWriter status] == AVAssetWriterStatusFailed) {
            NSError *error = [m_assetWriter error];
            NSString *failureReason = error.localizedFailureReason;
            NSString *suggestion = error.localizedRecoverySuggestion;
            NSString *errorString = suggestion ? [failureReason stringByAppendingString:suggestion] : failureReason;
            QMetaObject::invokeMethod(m_delegate, "assetWriterError",
                                      Qt::QueuedConnection,
                                      Q_ARG(QString, QString::fromNSString(errorString)));
        }
        return;
    }

    if (!CMSampleBufferDataIsReady(buffer)) {
        qWarning() << Q_FUNC_INFO << "sample buffer is not ready, skipping.";
        return;
    }

    // take ownership
    auto sampleBuffer = QCFType<CMSampleBufferRef>::constructFromGet(buffer);

    // A buffer is audio iff it came from the session's audio output.
    bool isVideoBuffer = true;
    isVideoBuffer = (captureOutput != m_service->session()->audioOutput());
    if (isVideoBuffer) {
        // Find renderercontrol's delegate and invoke its method to
        // show updated viewfinder's frame.
        if (m_service->session()->videoOutput()) {
            NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> *vfDelegate =
                (NSObject<AVCaptureVideoDataOutputSampleBufferDelegate> *)m_service->session()->videoOutput()->captureDelegate();
            if (vfDelegate) {
                AVCaptureOutput *output = nil;
                AVCaptureConnection *connection = nil;
                [vfDelegate captureOutput:output didOutputSampleBuffer:sampleBuffer fromConnection:connection];
            }
        }
    } else {
        // Forward to the audio preview delegate (level metering etc.).
        if (m_service->session()->audioOutput()) {
            NSObject<AVCaptureAudioDataOutputSampleBufferDelegate> *audioPreviewDelegate =
                (NSObject<AVCaptureAudioDataOutputSampleBufferDelegate> *)m_service->session()->audioPreviewDelegate();
            if (audioPreviewDelegate) {
                AVCaptureOutput *output = nil;
                AVCaptureConnection *connection = nil;
                [audioPreviewDelegate captureOutput:output didOutputSampleBuffer:sampleBuffer fromConnection:connection];
            }
        }
    }

    // While paused, preview above still runs but nothing is written.
    if (m_state.loadAcquire() != WriterStateActive)
        return;

    // First buffer after a resume: grow m_timeOffset by the length of
    // the pause, measured against the last timestamp seen for this
    // media type before the pause.
    if (m_adjustTime) {
        CMTime currentTimestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        CMTime lastTimestamp = isVideoBuffer ? m_lastVideoTimestamp : m_lastAudioTimestamp;

        if (!CMTIME_IS_INVALID(lastTimestamp)) {
            if (!CMTIME_IS_INVALID(m_timeOffset))
                currentTimestamp = CMTimeSubtract(currentTimestamp, m_timeOffset);

            CMTime pauseDuration = CMTimeSubtract(currentTimestamp, lastTimestamp);

            if (m_timeOffset.value == 0)
                m_timeOffset = pauseDuration;
            else
                m_timeOffset = CMTimeAdd(m_timeOffset, pauseDuration);
        }
        // NOTE(review): only the video timestamp is invalidated here,
        // while m_lastAudioTimestamp keeps its pre-pause value — confirm
        // this asymmetry is intentional.
        m_lastVideoTimestamp = kCMTimeInvalid;
        m_adjustTime = false;
    }

    if (m_timeOffset.value > 0) {
        sampleBuffer = [self adjustTime:sampleBuffer by:m_timeOffset];
    }

    // Track the end of this buffer (PTS + duration when available) so
    // the next pause can be measured from it.
    CMTime currentTimestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    CMTime currentDuration = CMSampleBufferGetDuration(sampleBuffer);
    if (currentDuration.value > 0)
        currentTimestamp = CMTimeAdd(currentTimestamp, currentDuration);

    if (isVideoBuffer)
    {
        m_lastVideoTimestamp = currentTimestamp;
        dispatch_async(m_writerQueue, ^{
            [self writeVideoSampleBuffer:sampleBuffer];
            // Audio may be written once the first video frame is in.
            m_writeFirstAudioBuffer = true;
        });
    } else if (m_writeFirstAudioBuffer) {
        m_lastAudioTimestamp = currentTimestamp;
        dispatch_async(m_writerQueue, ^{
            [self writeAudioSampleBuffer:sampleBuffer];
        });
    }
}
456
// Creates the AVAssetWriterInputs for the configured media types and
// adds them to the asset writer. A failed video input is fatal when a
// video queue exists; a failed audio input is tolerated as long as a
// video input was added (video-only recording). Returns false when no
// usable recording configuration remains.
- (bool)addWriterInputs
{
    Q_ASSERT(m_service && m_service->session());
    Q_ASSERT(m_assetWriter.data());

    AVFCameraSession *session = m_service->session();

    m_cameraWriterInput.reset();
    if (m_videoQueue)
    {
        Q_ASSERT(session->videoCaptureDevice() && session->videoOutput() && session->videoOutput()->videoDataOutput());
        // AVAssetWriterInput throws on invalid output settings.
        @try {
            m_cameraWriterInput.reset([[AVAssetWriterInput alloc]
                initWithMediaType:AVMediaTypeVideo
                outputSettings:m_videoSettings
                sourceFormatHint:session->videoCaptureDevice()
                                     .activeFormat.formatDescription]);
        } @catch (NSException *exception) {
            qCWarning(qLcCamera) << Q_FUNC_INFO << "Failed to create video writer input:"
                                 << QString::fromNSString(exception.reason);
            m_cameraWriterInput.reset();
            return false;
        }

        @try {
            if (m_cameraWriterInput && [m_assetWriter canAddInput:m_cameraWriterInput]) {
                [m_assetWriter addInput:m_cameraWriterInput];
            } else {
                qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to add camera writer input";
                m_cameraWriterInput.reset();
                return false;
            }
        } @catch (NSException *exception) {
            qCWarning(qLcCamera) << Q_FUNC_INFO << "Failed to add video input:"
                                 << QString::fromNSString(exception.reason);
            m_cameraWriterInput.reset();
            return false;
        }

        // Live capture: the input must not buffer indefinitely.
        m_cameraWriterInput.data().expectsMediaDataInRealTime = YES;
    }

    m_audioWriterInput.reset();
    if (m_audioQueue) {
        @try {
            m_audioWriterInput.reset([[AVAssetWriterInput alloc]
                initWithMediaType:AVMediaTypeAudio
                outputSettings:m_audioSettings]);
        } @catch (NSException *exception) {
            qCWarning(qLcCamera) << Q_FUNC_INFO << "Failed to create audio writer input:"
                                 << QString::fromNSString(exception.reason);
            m_audioWriterInput.reset();
            // But we still can record video.
            if (!m_cameraWriterInput)
                return false;
        }
        if (!m_audioWriterInput) {
            qWarning() << Q_FUNC_INFO << "failed to create audio writer input";
            // But we still can record video.
            if (!m_cameraWriterInput)
                return false;
        } else {
            @try {
                if ([m_assetWriter canAddInput:m_audioWriterInput]) {
                    [m_assetWriter addInput:m_audioWriterInput];
                    m_audioWriterInput.data().expectsMediaDataInRealTime = YES;
                } else {
                    qWarning() << Q_FUNC_INFO << "failed to add audio writer input";
                    m_audioWriterInput.reset();
                    if (!m_cameraWriterInput)
                        return false;
                    // We can (still) write video though ...
                }
            } @catch (NSException *exception) {
                qCWarning(qLcCamera)
                        << Q_FUNC_INFO
                        << "Failed to add audio input:" << QString::fromNSString(exception.reason);
                m_audioWriterInput.reset();
                if (!m_cameraWriterInput)
                    return false;
                // We can (still) write video though ...
            }
        }
    }

    return true;
}
544
// Installs self as the sample-buffer delegate on the video and audio
// data outputs, using the serial queues created in -setupWithFileURL:...,
// so -captureOutput:didOutputSampleBuffer:fromConnection: is called on
// those queues.
- (void)setQueues
{
    Q_ASSERT(m_service && m_service->session());
    AVFCameraSession *session = m_service->session();

    if (m_videoQueue) {
        Q_ASSERT(session->videoOutput() && session->videoOutput()->videoDataOutput());
        [session->videoOutput()->videoDataOutput() setSampleBufferDelegate:self queue:m_videoQueue];
    }

    if (m_audioQueue) {
        Q_ASSERT(session->audioOutput());
        [session->audioOutput() setSampleBufferDelegate:self queue:m_audioQueue];
    }
}
560
// Folds 'newTimeStamp' into the published duration: if it advances past
// the last seen timestamp, recomputes the elapsed time since the session
// start (in ms), publishes it atomically, and pushes it to the delegate.
// Runs on the writer's queue (called from the write*SampleBuffer methods).
- (void)updateDuration:(CMTime)newTimeStamp
{
    Q_ASSERT(CMTIME_IS_VALID(m_startTime));
    Q_ASSERT(CMTIME_IS_VALID(m_lastTimeStamp));
    if (CMTimeCompare(newTimeStamp, m_lastTimeStamp) > 0) {

        const CMTime duration = CMTimeSubtract(newTimeStamp, m_startTime);
        if (CMTIME_IS_INVALID(duration))
            return;

        m_durationInMs.storeRelease(CMTimeGetSeconds(duration) * 1000);
        m_lastTimeStamp = newTimeStamp;

        m_delegate->updateDuration([self durationInMs]);
    }
}
577
// Returns the current recording duration in milliseconds; safe to call
// from any thread (atomic load).
- (qint64)durationInMs
{
    return m_durationInMs.loadAcquire();
}
582
583@end
AVFCameraSession * session() const
bool qt_capture_session_isValid(AVFCameraService *service)