Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
qavfcamerautility.mm
Go to the documentation of this file.
1// Copyright (C) 2016 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
4#include <QtMultimedia/private/qavfcamerautility_p.h>
5#include <QtMultimedia/private/qavfcameradebug_p.h>
6
7#include <QtCore/qvector.h>
8#include <private/qmultimediautils_p.h>
9#include <private/qcameradevice_p.h>
10#include <QtMultimedia/private/qavfhelpers_p.h>
11
12#include <functional>
13#include <algorithm>
14#include <limits>
15#include <tuple>
16
17#include <AudioToolbox/AudioToolbox.h>
18
20
21Q_LOGGING_CATEGORY(qLcCamera, "qt.multimedia.camera")
22
// Reads the frame-rate limits currently configured on a capture connection.
// Per Apple's docs, videoMinFrameDuration is the reciprocal of the maximum
// frame rate and videoMaxFrameDuration the reciprocal of the minimum.
AVFPSRange qt_connection_framerates(AVCaptureConnection *videoConnection)
{
    Q_ASSERT(videoConnection);

    AVFPSRange range;

    if (videoConnection.supportsVideoMinFrameDuration) {
        const CMTime minDuration = videoConnection.videoMinFrameDuration;
        // A valid time means some non-default value was set.
        if (CMTIME_IS_VALID(minDuration)) {
            const Float64 seconds = CMTimeGetSeconds(minDuration);
            if (seconds)
                range.second = 1. / seconds; // max FPS
        }
    }

    if (videoConnection.supportsVideoMaxFrameDuration) {
        const CMTime maxDuration = videoConnection.videoMaxFrameDuration;
        if (CMTIME_IS_VALID(maxDuration)) {
            const Float64 seconds = CMTimeGetSeconds(maxDuration);
            if (seconds)
                range.first = 1. / seconds; // min FPS
        }
    }

    return range;
}
49
namespace {

// True when 'size' is usable and width * height cannot overflow int.
inline bool qt_area_sane(const QSize &size)
{
    if (size.isNull() || !size.isValid())
        return false;
    return std::numeric_limits<int>::max() / size.width() >= size.height();
}

// Comparator ordering formats by resolution. Comp is std::less,
// std::greater (or std::equal_to).
template <template <typename...> class Comp>
struct ByResolution
{
    bool operator()(AVCaptureDeviceFormat *f1, AVCaptureDeviceFormat *f2) const
    {
        Q_ASSERT(f1 && f2);
        const QSize lhs = qt_device_format_resolution(f1);
        const QSize rhs = qt_device_format_resolution(f2);
        // std::tuple provides lexicographical (width, height) ordering.
        const Comp<std::tuple<int, int>> compare = {};
        return compare(std::make_tuple(lhs.width(), lhs.height()),
                       std::make_tuple(rhs.width(), rhs.height()));
    }
};

// Predicate: the format advertises no supported frame-rate ranges.
struct FormatHasNoFPSRange
{
    bool operator()(AVCaptureDeviceFormat *format) const
    {
        Q_ASSERT(format);
        NSArray *ranges = format.videoSupportedFrameRateRanges;
        return !ranges || !ranges.count;
    }
};

// Smallest |maxFrameRate - fps| over all of the format's frame-rate ranges.
// The format must have at least one range.
Float64 qt_find_min_framerate_distance(AVCaptureDeviceFormat *format, Float64 fps)
{
    Q_ASSERT(format && format.videoSupportedFrameRateRanges
             && format.videoSupportedFrameRateRanges.count);

    Float64 best = std::numeric_limits<Float64>::max();
    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges)
        best = qMin(best, qAbs(range.maxFrameRate - fps));

    return best;
}

} // Unnamed namespace.
98
// Maps 'cameraFormat' onto one of captureDevice's native formats. A candidate
// must match the requested CoreVideo pixel format and resolution, pass the
// optional validator, and offer a frame-rate range within the requested one;
// among candidates the highest supported max frame rate wins. Returns nil
// when no format qualifies.
qt_convert_to_capture_device_format(AVCaptureDevice *captureDevice,
                                    const QCameraFormat &cameraFormat,
                                    const std::function<bool(uint32_t)> &cvFormatValidator)
{
    const auto cameraFormatPrivate = QCameraFormatPrivate::handle(cameraFormat);
    if (!cameraFormatPrivate)
        return nil;

    // Translate the Qt pixel format / color range into a CV pixel format.
    const auto requiredCvPixFormat = QAVFHelpers::toCVPixelFormat(cameraFormatPrivate->pixelFormat,
                                                                  cameraFormatPrivate->colorRange);

    if (requiredCvPixFormat == CvPixelFormatInvalid)
        return nil;

    AVCaptureDeviceFormat *newFormat = nil;
    Float64 newFormatMaxFrameRate = {};
    NSArray<AVCaptureDeviceFormat *> *formats = captureDevice.formats;
    for (AVCaptureDeviceFormat *format in formats) {
        CMFormatDescriptionRef formatDesc = format.formatDescription;
        CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(formatDesc);
        FourCharCode cvPixFormat = CMVideoFormatDescriptionGetCodecType(formatDesc);

        if (cvPixFormat != requiredCvPixFormat)
            continue;

        if (cameraFormatPrivate->resolution != QSize(dim.width, dim.height))
            continue;

        if (cvFormatValidator && !cvFormatValidator(cvPixFormat))
            continue;

        // Tolerate tiny floating-point differences in the requested limits.
        const float epsilon = 0.001f;
        for (AVFrameRateRange *frameRateRange in format.videoSupportedFrameRateRanges) {
            if (frameRateRange.minFrameRate >= cameraFormatPrivate->minFrameRate - epsilon
                && frameRateRange.maxFrameRate <= cameraFormatPrivate->maxFrameRate + epsilon
                && newFormatMaxFrameRate < frameRateRange.maxFrameRate) {
                newFormat = format;
                newFormatMaxFrameRate = frameRateRange.maxFrameRate;
            }
        }
    }
    return newFormat;
}
143
// Returns the device's formats with duplicate resolutions removed. Formats
// without a valid resolution are dropped first; when several formats share a
// resolution, one with codec == 'filter' is preferred.
QVector<AVCaptureDeviceFormat *> qt_unique_device_formats(AVCaptureDevice *captureDevice, FourCharCode filter)
{
    // 'filter' is the format we prefer if we have duplicates.
    Q_ASSERT(captureDevice);

    QVector<AVCaptureDeviceFormat *> formats;

    if (!captureDevice.formats || !captureDevice.formats.count)
        return formats;

    formats.reserve(captureDevice.formats.count);
    for (AVCaptureDeviceFormat *format in captureDevice.formats) {
        const QSize resolution(qt_device_format_resolution(format));
        if (resolution.isNull() || !resolution.isValid())
            continue;
        formats << format;
    }

    if (!formats.size())
        return formats;

    // Sort by (width, height) so that duplicate resolutions become adjacent.
    std::sort(formats.begin(), formats.end(), ByResolution<std::less>());

    // In-place dedup; 'last' is the index of the most recently kept element.
    QSize size(qt_device_format_resolution(formats[0]));
    FourCharCode codec = CMVideoFormatDescriptionGetCodecType(formats[0].formatDescription);
    int last = 0;
    for (int i = 1; i < formats.size(); ++i) {
        const QSize nextSize(qt_device_format_resolution(formats[i]));
        if (nextSize == size) {
            // Same resolution: keep the previous entry if it already uses the
            // preferred codec, otherwise overwrite it with this candidate.
            if (codec == filter)
                continue;
            formats[last] = formats[i];
        } else {
            ++last;
            formats[last] = formats[i];
            size = nextSize;
        }
        // Track the codec of the element just examined.
        codec = CMVideoFormatDescriptionGetCodecType(formats[i].formatDescription);
    }
    formats.resize(last + 1);

    return formats;
}
187
// Pixel dimensions of the format's video description, or an invalid QSize
// when the format or its description is missing.
QSize qt_device_format_resolution(AVCaptureDeviceFormat *format)
{
    if (format) {
        if (CMFormatDescriptionRef description = format.formatDescription) {
            const CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(description);
            return QSize(dim.width, dim.height);
        }
    }
    return QSize();
}
196
// High-resolution still-image dimensions of the format. Only populated on
// iOS; on other platforms an empty QSize is returned.
QSize qt_device_format_high_resolution(AVCaptureDeviceFormat *format)
{
    Q_ASSERT(format);
#if defined(Q_OS_IOS)
    const CMVideoDimensions dim = format.highResolutionStillImageDimensions;
    return QSize(dim.width, dim.height);
#else
    return QSize();
#endif
}
208
// Collects the format's supported frame-rate ranges as (min, max) pairs.
QVector<AVFPSRange> qt_device_format_framerates(AVCaptureDeviceFormat *format)
{
    Q_ASSERT(format);

    QVector<AVFPSRange> result;

    NSArray<AVFrameRateRange *> *ranges = format.videoSupportedFrameRateRanges;
    if (!ranges || !ranges.count)
        return result;

    result.reserve(ranges.count);
    for (AVFrameRateRange *range in ranges)
        result.append(AVFPSRange(range.minFrameRate, range.maxFrameRate));

    return result;
}
224
// Pixel aspect ratio of the format as a reduced fraction. Returns 1x1 for
// square pixels and an invalid QSize when it cannot be determined.
QSize qt_device_format_pixel_aspect_ratio(AVCaptureDeviceFormat *format)
{
    Q_ASSERT(format);

    if (!format.formatDescription) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "no format description found";
        return QSize();
    }

    const CMVideoDimensions res = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
    // "Pixel aspect ratio is used to adjust the width, leaving the height alone."
    const CGSize resPAR = CMVideoFormatDescriptionGetPresentationDimensions(format.formatDescription, true, false);

    // Widths equal up to rounding means square pixels.
    if (qAbs(resPAR.width - res.width) < 1.)
        return QSize(1, 1);

    if (!res.width || !resPAR.width)
        return QSize();

    const qreal ratio = resPAR.width > res.width ? res.width / qreal(resPAR.width)
                                                 : resPAR.width / qreal(res.width);
    const auto frac = qRealToFraction(ratio);

    return QSize(frac.numerator, frac.denominator);
}
250
                                                     const QSize &request,
                                                     FourCharCode filter,
                                                     bool stillImage)
{
    Q_ASSERT(captureDevice);
    Q_ASSERT(!request.isNull() && request.isValid());

    if (!captureDevice.formats || !captureDevice.formats.count)
        return nullptr;

    QVector<AVCaptureDeviceFormat *> formats(qt_unique_device_formats(captureDevice, filter));

    // Exact match first (including iOS high-resolution still-image sizes).
    for (int i = 0; i < formats.size(); ++i) {
        AVCaptureDeviceFormat *format = formats[i];
        if (qt_device_format_resolution(format) == request)
            return format;
        // iOS only (still images).
        if (stillImage && qt_device_format_high_resolution(format) == request)
            return format;
    }

    if (!qt_area_sane(request))
        return nullptr;

    typedef std::pair<QSize, AVCaptureDeviceFormat *> FormatPair;

    // Collect every usable (resolution, format) candidate.
    QVector<FormatPair> pairs; // default|HR sizes
    pairs.reserve(formats.size());

    for (int i = 0; i < formats.size(); ++i) {
        AVCaptureDeviceFormat *format = formats[i];
        const QSize res(qt_device_format_resolution(format));
        if (!res.isNull() && res.isValid() && qt_area_sane(res))
            pairs << FormatPair(res, format);
        const QSize highRes(qt_device_format_high_resolution(format));
        if (stillImage && !highRes.isNull() && highRes.isValid() && qt_area_sane(highRes))
            pairs << FormatPair(highRes, format);
    }

    if (!pairs.size())
        return nullptr;

    // Pick the candidate closest to the request, judged by per-axis width and
    // height differences with total pixel-area difference as a tie-breaker.
    AVCaptureDeviceFormat *best = pairs[0].second;
    QSize next(pairs[0].first);
    int wDiff = qAbs(request.width() - next.width());
    int hDiff = qAbs(request.height() - next.height());
    const int area = request.width() * request.height();
    int areaDiff = qAbs(area - next.width() * next.height());
    for (int i = 1; i < pairs.size(); ++i) {
        next = pairs[i].first;
        const int newWDiff = qAbs(next.width() - request.width());
        const int newHDiff = qAbs(next.height() - request.height());
        const int newAreaDiff = qAbs(area - next.width() * next.height());

        if ((newWDiff < wDiff && newHDiff < hDiff)
            || ((newWDiff <= wDiff || newHDiff <= hDiff) && newAreaDiff <= areaDiff)) {
            wDiff = newWDiff;
            hDiff = newHDiff;
            best = pairs[i].second;
            areaDiff = newAreaDiff;
        }
    }

    return best;
}
317
                                                    FourCharCode filter,
                                                    Float64 fps)
{
    Q_ASSERT(captureDevice);
    Q_ASSERT(fps > 0.);

    const qreal epsilon = 0.1;

    QVector<AVCaptureDeviceFormat *>sorted(qt_unique_device_formats(captureDevice, filter));
    // Sort formats by their resolution in decreasing order:
    std::sort(sorted.begin(), sorted.end(), ByResolution<std::greater>());
    // We can use only formats with framerate ranges:
    sorted.erase(std::remove_if(sorted.begin(), sorted.end(), FormatHasNoFPSRange()), sorted.end());

    if (!sorted.size())
        return nil;

    // Prefer the highest-resolution format whose ranges actually contain fps.
    for (int i = 0; i < sorted.size(); ++i) {
        AVCaptureDeviceFormat *format = sorted[i];
        for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
            if (range.maxFrameRate - range.minFrameRate < epsilon) {
                // On OS X ranges are points (built-in camera).
                if (qAbs(fps - range.maxFrameRate) < epsilon)
                    return format;
            }

            if (fps >= range.minFrameRate && fps <= range.maxFrameRate)
                return format;
        }
    }

    // Fallback: the format whose nearest max frame rate is closest to fps.
    Float64 distance = qt_find_min_framerate_distance(sorted[0], fps);
    AVCaptureDeviceFormat *match = sorted[0];
    for (int i = 1; i < sorted.size(); ++i) {
        const Float64 newDistance = qt_find_min_framerate_distance(sorted[i], fps);
        if (newDistance < distance) {
            distance = newDistance;
            match = sorted[i];
        }
    }

    return match;
}
362
// Finds a frame-rate range of 'format' containing fps; if none does, the
// range whose maxFrameRate is closest to fps. The format must have at least
// one range.
AVFrameRateRange *qt_find_supported_framerate_range(AVCaptureDeviceFormat *format, Float64 fps)
{
    Q_ASSERT(format && format.videoSupportedFrameRateRanges
             && format.videoSupportedFrameRateRanges.count);

    const qreal epsilon = 0.1;

    // First pass: a range that actually contains fps.
    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
        if (range.maxFrameRate - range.minFrameRate < epsilon) {
            // On OS X ranges are points (built-in camera).
            if (qAbs(fps - range.maxFrameRate) < epsilon)
                return range;
        }

        if (fps >= range.minFrameRate && fps <= range.maxFrameRate)
            return range;
    }

    // Fallback: minimize |maxFrameRate - fps| (earliest range wins on ties).
    AVFrameRateRange *closest = nil;
    Float64 bestDistance = std::numeric_limits<Float64>::max();
    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
        const Float64 distance = qAbs(range.maxFrameRate - fps);
        if (distance < bestDistance) {
            bestDistance = distance;
            closest = range;
        }
    }

    return closest;
}
394
// True when fps falls (within a small tolerance) into any of the format's
// supported frame-rate ranges.
bool qt_format_supports_framerate(AVCaptureDeviceFormat *format, qreal fps)
{
    if (!format || !(fps > qreal(0)))
        return false;

    const qreal epsilon = 0.1;
    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
        if (fps >= range.minFrameRate - epsilon && fps <= range.maxFrameRate + epsilon)
            return true;
    }

    return false;
}
407
// Two formats are considered equal when they are the same object, or share
// both media type and format description.
bool qt_formats_are_equal(AVCaptureDeviceFormat *f1, AVCaptureDeviceFormat *f2)
{
    if (f1 == f2)
        return true;

    return [f1.mediaType isEqualToString:f2.mediaType]
           && CMFormatDescriptionEqual(f1.formatDescription, f2.formatDescription);
}
418
// Makes 'format' the device's active format, optionally restoring the current
// frame-rate limits afterwards (changing activeFormat resets them).
// Returns true when the format was applied (or deliberately reported as
// applied on the very first call), false otherwise.
bool qt_set_active_format(AVCaptureDevice *captureDevice, AVCaptureDeviceFormat *format, bool preserveFps)
{
    // Process-wide flag: has any format ever been set through this function?
    // NOTE(review): static state is not thread-safe — assumed to be called
    // from a single (capture) thread; confirm against callers.
    static bool firstSet = true;

    if (!captureDevice || !format)
        return false;

    if (qt_formats_are_equal(captureDevice.activeFormat, format)) {
        if (firstSet) {
            // The capture device format is persistent. The first time we set a format, report that
            // it changed even if the formats are the same.
            // This prevents the session from resetting the format to the default value.
            firstSet = false;
            return true;
        }
        return false;
    }

    firstSet = false;

    // Device configuration requires a lock; released when 'lock' goes out of scope.
    const AVFConfigurationLock lock(captureDevice);
    if (!lock) {
        qWarning("Failed to set active format (lock failed)");
        return false;
    }

    // Changing the activeFormat resets the frame rate.
    AVFPSRange fps;
    if (preserveFps)
        fps = qt_current_framerates(captureDevice, nil);

    captureDevice.activeFormat = format;

    if (preserveFps)
        qt_set_framerate_limits(captureDevice, nil, fps.first, fps.second);

    return true;
}
457
// Applies [minFPS, maxFPS] to a capture connection by converting each rate to
// its reciprocal frame duration. A rate of 0 means "no limit" (the duration
// is reset to kCMTimeInvalid).
void qt_set_framerate_limits(AVCaptureConnection *videoConnection, qreal minFPS, qreal maxFPS)
{
    Q_ASSERT(videoConnection);

    // Reject negative rates and max < min (max == 0 means unlimited).
    if (minFPS < 0. || maxFPS < 0. || (maxFPS && maxFPS < minFPS)) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid framerates (min, max):"
                           << minFPS << maxFPS;
        return;
    }

    // The minimum frame duration bounds the maximum frame rate.
    CMTime minDuration = kCMTimeInvalid;
    if (maxFPS > 0.) {
        if (!videoConnection.supportsVideoMinFrameDuration)
            qCDebug(qLcCamera) << Q_FUNC_INFO << "maximum framerate is not supported";
        else
            // NOTE(review): CMTimeMake takes an integer timescale, so a
            // fractional fps (e.g. 29.97) is truncated here — confirm intended.
            minDuration = CMTimeMake(1, maxFPS);
    }
    if (videoConnection.supportsVideoMinFrameDuration)
        videoConnection.videoMinFrameDuration = minDuration;

    // The maximum frame duration bounds the minimum frame rate.
    CMTime maxDuration = kCMTimeInvalid;
    if (minFPS > 0.) {
        if (!videoConnection.supportsVideoMaxFrameDuration)
            qCDebug(qLcCamera) << Q_FUNC_INFO << "minimum framerate is not supported";
        else
            maxDuration = CMTimeMake(1, minFPS);
    }
    if (videoConnection.supportsVideoMaxFrameDuration)
        videoConnection.videoMaxFrameDuration = maxDuration;
}
488
// Converts fps into a frame duration clamped to what 'range' supports.
CMTime qt_adjusted_frame_duration(AVFrameRateRange *range, qreal fps)
{
    Q_ASSERT(range);
    Q_ASSERT(fps > 0.);

    // Degenerate (point) range — can happen on OS X.
    if (range.maxFrameRate - range.minFrameRate < 0.1)
        return range.minFrameDuration;

    // Clamp fps to the range's limits.
    if (fps <= range.minFrameRate)
        return range.maxFrameDuration;
    if (fps >= range.maxFrameRate)
        return range.minFrameDuration;

    // Express 1/fps as a rational frame duration.
    const auto fraction = qRealToFraction(1. / fps);
    return CMTimeMake(fraction.numerator, fraction.denominator);
}
507
// Applies [minFPS, maxFPS] to the device's active format. A rate of 0 means
// "no limit". The requested rates are validated against the active format's
// supported frame-rate ranges before being applied.
void qt_set_framerate_limits(AVCaptureDevice *captureDevice, qreal minFPS, qreal maxFPS)
{
    Q_ASSERT(captureDevice);
    if (!captureDevice.activeFormat) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "no active capture device format";
        return;
    }

    // Reject negative rates and max < min (max == 0 means unlimited).
    if (minFPS < 0. || maxFPS < 0. || (maxFPS && maxFPS < minFPS)) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid framerates (min, max):"
                           << minFPS << maxFPS;
        return;
    }

    CMTime minFrameDuration = kCMTimeInvalid;
    CMTime maxFrameDuration = kCMTimeInvalid;
    if (maxFPS || minFPS) {
        // Find a supported range covering the requested rate (max wins when
        // both are given) and derive the frame durations from it.
        AVFrameRateRange *range = qt_find_supported_framerate_range(captureDevice.activeFormat,
                                                                    maxFPS ? maxFPS : minFPS);
        if (!range) {
            qCDebug(qLcCamera) << Q_FUNC_INFO << "no framerate range found, (min, max):"
                               << minFPS << maxFPS;
            return;
        }

        // The min duration bounds the max frame rate and vice versa.
        if (maxFPS)
            minFrameDuration = qt_adjusted_frame_duration(range, maxFPS);
        if (minFPS)
            maxFrameDuration = qt_adjusted_frame_duration(range, minFPS);
    }

    const AVFConfigurationLock lock(captureDevice);
    if (!lock) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
        return;
    }

    // While Apple's docs say kCMTimeInvalid will end in default
    // settings for this format, kCMTimeInvalid on OS X ends with a runtime
    // exception:
    // "The activeVideoMinFrameDuration passed is not supported by the device."
    // Instead, use the first item in the supported frame rates.
#ifdef Q_OS_IOS
    [captureDevice setActiveVideoMinFrameDuration:minFrameDuration];
    [captureDevice setActiveVideoMaxFrameDuration:maxFrameDuration];
#elif defined(Q_OS_MACOS)
    if (CMTIME_IS_INVALID(minFrameDuration)
        && CMTIME_IS_INVALID(maxFrameDuration)) {
        AVFrameRateRange *range = captureDevice.activeFormat.videoSupportedFrameRateRanges.firstObject;
        minFrameDuration = range.minFrameDuration;
        maxFrameDuration = range.maxFrameDuration;
    }

    if (CMTIME_IS_VALID(minFrameDuration))
        [captureDevice setActiveVideoMinFrameDuration:minFrameDuration];

    if (CMTIME_IS_VALID(maxFrameDuration))
        [captureDevice setActiveVideoMaxFrameDuration:maxFrameDuration];
#endif // Q_OS_MACOS
}
568
// Overload kept for API symmetry: the connection parameter is ignored and
// the limits are applied directly to the device.
void qt_set_framerate_limits(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection,
                             qreal minFPS, qreal maxFPS)
{
    Q_ASSERT(captureDevice);
    Q_UNUSED(videoConnection);

    qt_set_framerate_limits(captureDevice, minFPS, maxFPS);
}
576
// Current (min, max) frame rates of the device, derived from the active
// frame durations. Entries stay at their default when a duration is unset.
AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection)
{
    Q_UNUSED(videoConnection);
    Q_ASSERT(captureDevice);

    AVFPSRange fps;

    const CMTime minDuration = captureDevice.activeVideoMinFrameDuration;
    if (CMTIME_IS_VALID(minDuration)) {
        const Float64 seconds = CMTimeGetSeconds(minDuration);
        if (seconds)
            fps.second = 1. / seconds; // Max FPS = 1 / MinDuration.
    }

    const CMTime maxDuration = captureDevice.activeVideoMaxFrameDuration;
    if (CMTIME_IS_VALID(maxDuration)) {
        const Float64 seconds = CMTimeGetSeconds(maxDuration);
        if (seconds)
            fps.first = 1. / seconds; // Min FPS = 1 / MaxDuration.
    }

    return fps;
}
597
{
    // First query CoreAudio for the size of the property, then fetch the
    // AudioValueRange array itself. An empty list is returned on any error.
    UInt32 format = codecId;
    UInt32 size;
    OSStatus err = AudioFormatGetPropertyInfo(
            kAudioFormatProperty_AvailableEncodeSampleRates,
            sizeof(format),
            &format,
            &size);

    if (err != noErr)
        return {};

    // The property payload is an array of AudioValueRange.
    UInt32 numRanges = size / sizeof(AudioValueRange);
    QList<AudioValueRange> result;
    result.resize(numRanges);

    err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeSampleRates,
                                 sizeof(format),
                                 &format,
                                 &size,
                                 result.data());
    return err == noErr ? result : QList<AudioValueRange>{};
}
622
{
    // Same two-step CoreAudio query as the sample-rate lookup, but for the
    // encoder's available bit rates. An empty list is returned on any error.
    UInt32 format = codecId;
    UInt32 size;
    OSStatus err = AudioFormatGetPropertyInfo(
            kAudioFormatProperty_AvailableEncodeBitRates,
            sizeof(format),
            &format,
            &size);

    if (err != noErr)
        return {};

    // The property payload is an array of AudioValueRange.
    UInt32 numRanges = size / sizeof(AudioValueRange);
    QList<AudioValueRange> result;
    result.resize(numRanges);

    err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeBitRates,
                                 sizeof(format),
                                 &format,
                                 &size,
                                 result.data());
    return err == noErr ? result : QList<AudioValueRange>{};
}
647
{
    // Queries the channel counts the encoder supports for codecId.
    // Returns std::nullopt on error or when any channel count is allowed.
    AudioStreamBasicDescription sf = {};
    sf.mFormatID = codecId;
    UInt32 size;
    OSStatus err = AudioFormatGetPropertyInfo(
            kAudioFormatProperty_AvailableEncodeNumberChannels,
            sizeof(sf),
            &sf,
            &size);

    if (err != noErr)
        return std::nullopt;

    // From Apple's docs:
    // A value of 0xFFFFFFFF indicates that any number of channels may be encoded.
    if (int(size) == -1)
        return std::nullopt;

    // The property payload is an array of UInt32 channel counts.
    UInt32 numCounts = size / sizeof(UInt32);
    QList<UInt32> channelCounts;
    channelCounts.resize(numCounts);

    err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeNumberChannels,
                                 sizeof(sf),
                                 &sf,
                                 &size,
                                 channelCounts.data());
    if (err == noErr)
        return channelCounts;
    else
        return std::nullopt;
}
681
{
    // Queries the channel-layout tags available when encoding codecId with
    // noChannels channels per frame. Returns an empty list on any error.
    AudioStreamBasicDescription sf = {};
    sf.mFormatID = codecId;
    sf.mChannelsPerFrame = noChannels;
    UInt32 size;
    OSStatus err = AudioFormatGetPropertyInfo(
            kAudioFormatProperty_AvailableEncodeChannelLayoutTags,
            sizeof(sf),
            &sf,
            &size);

    if (err != noErr)
        return {};

    // The property payload is an array of AudioChannelLayoutTag (UInt32).
    UInt32 noTags = (UInt32)size / sizeof(UInt32);
    QList<AudioChannelLayoutTag> tagsArr;
    tagsArr.resize(noTags);

    err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeChannelLayoutTags,
                                 sizeof(sf),
                                 &sf,
                                 &size,
                                 tagsArr.data());
    if (err != noErr)
        return {};

    // Re-pack the tags into the UInt32 list the caller expects.
    QList<UInt32> result;
    for (const AudioChannelLayoutTag &item : tagsArr)
        result.push_back(item);

    return result;
}
715
716QT_END_NAMESPACE
QSize qt_device_format_resolution(AVCaptureDeviceFormat *format)
bool qt_formats_are_equal(AVCaptureDeviceFormat *f1, AVCaptureDeviceFormat *f2)
AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection)
std::optional< QList< UInt32 > > qt_supported_channel_counts_for_format(int codecId)
QVector< AVCaptureDeviceFormat * > qt_unique_device_formats(AVCaptureDevice *captureDevice, FourCharCode filter)
void qt_set_framerate_limits(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection, qreal minFPS, qreal maxFPS)
QVector< AVFPSRange > qt_device_format_framerates(AVCaptureDeviceFormat *format)
QSize qt_device_format_high_resolution(AVCaptureDeviceFormat *format)
QList< AudioValueRange > qt_supported_sample_rates_for_format(int codecId)
QList< UInt32 > qt_supported_channel_layout_tags_for_format(int codecId, int noChannels)
AVCaptureDeviceFormat * qt_find_best_framerate_match(AVCaptureDevice *captureDevice, FourCharCode filter, Float64 fps)
QList< AudioValueRange > qt_supported_bit_rates_for_format(int codecId)
bool qt_set_active_format(AVCaptureDevice *captureDevice, AVCaptureDeviceFormat *format, bool preserveFps)
AVFrameRateRange * qt_find_supported_framerate_range(AVCaptureDeviceFormat *format, Float64 fps)
void qt_set_framerate_limits(AVCaptureConnection *videoConnection, qreal minFPS, qreal maxFPS)
QSize qt_device_format_pixel_aspect_ratio(AVCaptureDeviceFormat *format)
AVCaptureDeviceFormat * qt_find_best_resolution_match(AVCaptureDevice *captureDevice, const QSize &request, FourCharCode filter, bool stillImage)
CMTime qt_adjusted_frame_duration(AVFrameRateRange *range, qreal fps)
bool qt_format_supports_framerate(AVCaptureDeviceFormat *format, qreal fps)
AVCaptureDeviceFormat * qt_convert_to_capture_device_format(AVCaptureDevice *captureDevice, const QCameraFormat &cameraFormat, const std::function< bool(uint32_t)> &cvFormatValidator)
Q_LOGGING_CATEGORY(qLcCamera, "qt.multimedia.camera")