Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
qavfcamerautility.mm
Go to the documentation of this file.
1// Copyright (C) 2016 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
4#include <QtMultimedia/private/qavfcamerautility_p.h>
5#include <QtMultimedia/private/qavfcameradebug_p.h>
6
7#include <QtCore/qvector.h>
8#include <private/qmultimediautils_p.h>
9#include <private/qcameradevice_p.h>
10#include <QtMultimedia/private/qavfhelpers_p.h>
11#include <QtMultimedia/private/qmultimedia_ranges_p.h>
12
13#include <functional>
14#include <algorithm>
15#include <limits>
16#include <tuple>
17
18#include <AudioToolbox/AudioToolbox.h>
19
20QT_BEGIN_NAMESPACE
21
22namespace ranges = QtMultimediaPrivate::ranges;
23
24Q_LOGGING_CATEGORY(qLcCamera, "qt.multimedia.camera")
25
// Returns the frame rate limits (min, max FPS) currently configured on a
// capture connection. The connection stores frame *durations*, which are
// the reciprocals of the frame rates: the min duration yields the max FPS
// and the max duration yields the min FPS. Components that are unsupported
// or unset keep the AVFPSRange default values.
AVFPSRange qt_connection_framerates(AVCaptureConnection *videoConnection)
{
    Q_ASSERT(videoConnection);

    AVFPSRange result;

    if (videoConnection.supportsVideoMinFrameDuration) {
        const CMTime minDuration = videoConnection.videoMinFrameDuration;
        // A valid CMTime means a non-default value was set on the connection.
        if (CMTIME_IS_VALID(minDuration)) {
            const Float64 seconds = CMTimeGetSeconds(minDuration);
            if (seconds)
                result.second = 1. / seconds; // max FPS = 1 / min duration
        }
    }

    if (videoConnection.supportsVideoMaxFrameDuration) {
        const CMTime maxDuration = videoConnection.videoMaxFrameDuration;
        if (CMTIME_IS_VALID(maxDuration)) {
            const Float64 seconds = CMTimeGetSeconds(maxDuration);
            if (seconds)
                result.first = 1. / seconds; // min FPS = 1 / max duration
        }
    }

    return result;
}
52
namespace {

// True when 'size' is a usable, non-empty resolution whose area
// (width * height) can be computed without overflowing int.
inline bool qt_area_sane(const QSize &size)
{
    return !size.isNull() && size.isValid()
           && std::numeric_limits<int>::max() / size.width() >= size.height();
}

// Comparator ordering AVCaptureDeviceFormats by resolution, parameterized
// on the comparison template (std::less for ascending, std::greater for
// descending).
template <template <typename...> class Comp> // std::less or std::greater (or std::equal_to)
struct ByResolution
{
    bool operator() (AVCaptureDeviceFormat *f1, AVCaptureDeviceFormat *f2)const
    {
        Q_ASSERT(f1 && f2);
        const QSize r1(qt_device_format_resolution(f1));
        const QSize r2(qt_device_format_resolution(f2));
        // use std::tuple for lexicographical sorting (width first, then height):
        const Comp<std::tuple<int, int>> op = {};
        return op(std::make_tuple(r1.width(), r1.height()),
                  std::make_tuple(r2.width(), r2.height()));
    }
};

// Predicate: true when a format advertises no supported frame rate ranges.
struct FormatHasNoFPSRange
{
    bool operator() (AVCaptureDeviceFormat *format) const
    {
        Q_ASSERT(format);
        return !format.videoSupportedFrameRateRanges || !format.videoSupportedFrameRateRanges.count;
    }
};

// Smallest absolute distance between 'fps' and any of the format's supported
// maximum frame rates. The format must have at least one frame rate range.
Float64 qt_find_min_framerate_distance(AVCaptureDeviceFormat *format, Float64 fps)
{
    Q_ASSERT(format && format.videoSupportedFrameRateRanges
             && format.videoSupportedFrameRateRanges.count);

    AVFrameRateRange *range = [format.videoSupportedFrameRateRanges objectAtIndex:0];
    Float64 distance = qAbs(range.maxFrameRate - fps);
    for (NSUInteger i = 1, e = format.videoSupportedFrameRateRanges.count; i < e; ++i) {
        range = [format.videoSupportedFrameRateRanges objectAtIndex:i];
        distance = qMin(distance, qAbs(range.maxFrameRate - fps));
    }

    return distance;
}

} // Unnamed namespace.
101
// Finds the AVCaptureDeviceFormat on 'captureDevice' matching 'cameraFormat'
// (CoreVideo pixel format, resolution and frame rate interval), optionally
// vetoed by 'cvFormatValidator'. Among candidates whose supported frame rate
// range lies inside the requested [min, max] interval, the one with the
// highest maximum frame rate wins. Returns nil if nothing fits.
// (Return type restored from the file's signature index; the declarator line
// was dropped by the doc extraction.)
AVCaptureDeviceFormat *
qt_convert_to_capture_device_format(AVCaptureDevice *captureDevice,
                                    const QCameraFormat &cameraFormat,
                                    const std::function<bool(uint32_t)> &cvFormatValidator)
{
    const auto cameraFormatPrivate = QCameraFormatPrivate::handle(cameraFormat);
    if (!cameraFormatPrivate)
        return nil;

    // Translate the Qt pixel format / color range into a CoreVideo format.
    const auto requiredCvPixFormat = QAVFHelpers::toCVPixelFormat(cameraFormatPrivate->pixelFormat,
                                                                  cameraFormatPrivate->colorRange);

    if (requiredCvPixFormat == CvPixelFormatInvalid)
        return nil;

    AVCaptureDeviceFormat *newFormat = nil;
    Float64 newFormatMaxFrameRate = {};
    NSArray<AVCaptureDeviceFormat *> *formats = captureDevice.formats;
    for (AVCaptureDeviceFormat *format in formats) {
        CMFormatDescriptionRef formatDesc = format.formatDescription;
        CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(formatDesc);
        FourCharCode cvPixFormat = CMVideoFormatDescriptionGetCodecType(formatDesc);

        if (cvPixFormat != requiredCvPixFormat)
            continue;

        if (cameraFormatPrivate->resolution != QSize(dim.width, dim.height))
            continue;

        // Optional caller-supplied veto on the CoreVideo pixel format.
        if (cvFormatValidator && !cvFormatValidator(cvPixFormat))
            continue;

        // Tolerance for floating-point frame rate comparisons.
        const float epsilon = 0.001f;
        for (AVFrameRateRange *frameRateRange in format.videoSupportedFrameRateRanges) {
            if (frameRateRange.minFrameRate >= cameraFormatPrivate->minFrameRate - epsilon
                && frameRateRange.maxFrameRate <= cameraFormatPrivate->maxFrameRate + epsilon
                && newFormatMaxFrameRate < frameRateRange.maxFrameRate) {
                // Prefer the candidate with the highest supported max FPS.
                newFormat = format;
                newFormatMaxFrameRate = frameRateRange.maxFrameRate;
            }
        }
    }
    return newFormat;
}
146
// Collects the device's formats that report a valid resolution, sorted by
// resolution in increasing order, keeping a single format per resolution.
// When several formats share a resolution, a previously kept format whose
// codec equals 'filter' is preferred over later duplicates.
QVector<AVCaptureDeviceFormat *> qt_unique_device_formats(AVCaptureDevice *captureDevice, FourCharCode filter)
{
    // 'filter' is the format we prefer if we have duplicates.
    Q_ASSERT(captureDevice);

    QVector<AVCaptureDeviceFormat *> formats;

    if (!captureDevice.formats || !captureDevice.formats.count)
        return formats;

    // Keep only formats with a usable resolution.
    formats.reserve(captureDevice.formats.count);
    for (AVCaptureDeviceFormat *format in captureDevice.formats) {
        const QSize resolution(qt_device_format_resolution(format));
        if (resolution.isNull() || !resolution.isValid())
            continue;
        formats << format;
    }

    if (!formats.size())
        return formats;

    ranges::sort(formats, ByResolution<std::less>());

    // Deduplicate in-place: 'last' indexes the last kept entry. For equal
    // resolutions the kept entry is overwritten by the newer format, unless
    // the kept entry's codec already matches 'filter' (then skip the dup).
    QSize size(qt_device_format_resolution(formats[0]));
    FourCharCode codec = CMVideoFormatDescriptionGetCodecType(formats[0].formatDescription);
    int last = 0;
    for (int i = 1; i < formats.size(); ++i) {
        const QSize nextSize(qt_device_format_resolution(formats[i]));
        if (nextSize == size) {
            if (codec == filter)
                continue;
            formats[last] = formats[i];
        } else {
            ++last;
            formats[last] = formats[i];
            size = nextSize;
        }
        codec = CMVideoFormatDescriptionGetCodecType(formats[i].formatDescription);
    }
    formats.resize(last + 1);

    return formats;
}
190
// Pixel dimensions of a device format, or an invalid QSize when the format
// (or its description) is missing.
QSize qt_device_format_resolution(AVCaptureDeviceFormat *format)
{
    if (format) {
        if (CMFormatDescriptionRef desc = format.formatDescription) {
            const CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(desc);
            return QSize(dim.width, dim.height);
        }
    }
    return QSize();
}
199
// On iOS, the high-resolution still image dimensions of 'format'; an empty
// QSize on other platforms (the property is iOS-only).
QSize qt_device_format_high_resolution(AVCaptureDeviceFormat *format)
{
    Q_ASSERT(format);
#if defined(Q_OS_IOS)
    const CMVideoDimensions dim(format.highResolutionStillImageDimensions);
    return QSize(dim.width, dim.height);
#else
    return QSize();
#endif
}
211
// All supported frame rate ranges of 'format', converted to AVFPSRange
// (min, max) pairs; empty when the format reports no ranges.
QVector<AVFPSRange> qt_device_format_framerates(AVCaptureDeviceFormat *format)
{
    Q_ASSERT(format);

    QVector<AVFPSRange> result;

    NSArray<AVFrameRateRange *> *ranges = format.videoSupportedFrameRateRanges;
    if (!ranges || !ranges.count)
        return result;

    result.reserve(ranges.count);
    for (AVFrameRateRange *range in ranges)
        result.append(AVFPSRange(range.minFrameRate, range.maxFrameRate));

    return result;
}
227
// Derives the pixel aspect ratio of 'format' by comparing its encoded
// dimensions with its presentation dimensions. Returns (1, 1) when the
// widths (approximately) agree, an invalid QSize when no format description
// is available or a width is zero, otherwise a reduced fraction.
QSize qt_device_format_pixel_aspect_ratio(AVCaptureDeviceFormat *format)
{
    Q_ASSERT(format);

    if (!format.formatDescription) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "no format description found";
        return QSize();
    }

    const CMVideoDimensions res = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
    const CGSize resPAR = CMVideoFormatDescriptionGetPresentationDimensions(format.formatDescription, true, false);

    if (qAbs(resPAR.width - res.width) < 1.) {
        // "Pixel aspect ratio is used to adjust the width, leaving the height alone."
        return QSize(1, 1);
    }

    if (!res.width || !resPAR.width)
        return QSize();

    // Take the smaller/larger ratio so the value is <= 1 before converting
    // it into a numerator/denominator pair.
    auto frac = qRealToFraction(resPAR.width > res.width ? res.width / qreal(resPAR.width)
                                                         : resPAR.width / qreal(res.width));

    return QSize(frac.numerator, frac.denominator);
}
253
// Picks the device format whose resolution best matches 'request': first an
// exact match (including, with 'stillImage' on iOS, the high-resolution
// still-image size), otherwise the candidate minimizing the width/height
// differences with the area difference as tie-breaker. Duplicate resolutions
// are collapsed via qt_unique_device_formats (preferring codec 'filter').
// Returns nullptr when the device has no formats or the request area would
// overflow int. (Declarator line restored from the file's signature index;
// it was dropped by the doc extraction.)
AVCaptureDeviceFormat *qt_find_best_resolution_match(AVCaptureDevice *captureDevice,
                                                     const QSize &request,
                                                     FourCharCode filter,
                                                     bool stillImage)
{
    Q_ASSERT(captureDevice);
    Q_ASSERT(!request.isNull() && request.isValid());

    if (!captureDevice.formats || !captureDevice.formats.count)
        return nullptr;

    QVector<AVCaptureDeviceFormat *> formats(qt_unique_device_formats(captureDevice, filter));

    // Pass 1: exact resolution match.
    for (int i = 0; i < formats.size(); ++i) {
        AVCaptureDeviceFormat *format = formats[i];
        if (qt_device_format_resolution(format) == request)
            return format;
        // iOS only (still images).
        if (stillImage && qt_device_format_high_resolution(format) == request)
            return format;
    }

    if (!qt_area_sane(request))
        return nullptr;

    typedef std::pair<QSize, AVCaptureDeviceFormat *> FormatPair;

    QVector<FormatPair> pairs; // default|HR sizes
    pairs.reserve(formats.size());

    // Collect candidate (size, format) pairs whose areas are overflow-safe.
    for (int i = 0; i < formats.size(); ++i) {
        AVCaptureDeviceFormat *format = formats[i];
        const QSize res(qt_device_format_resolution(format));
        if (!res.isNull() && res.isValid() && qt_area_sane(res))
            pairs << FormatPair(res, format);
        const QSize highRes(qt_device_format_high_resolution(format));
        if (stillImage && !highRes.isNull() && highRes.isValid() && qt_area_sane(highRes))
            pairs << FormatPair(highRes, format);
    }

    if (!pairs.size())
        return nullptr;

    // Pass 2: nearest match by per-axis difference, falling back to the
    // area difference when only one axis improves.
    AVCaptureDeviceFormat *best = pairs[0].second;
    QSize next(pairs[0].first);
    int wDiff = qAbs(request.width() - next.width());
    int hDiff = qAbs(request.height() - next.height());
    const int area = request.width() * request.height();
    int areaDiff = qAbs(area - next.width() * next.height());
    for (int i = 1; i < pairs.size(); ++i) {
        next = pairs[i].first;
        const int newWDiff = qAbs(next.width() - request.width());
        const int newHDiff = qAbs(next.height() - request.height());
        const int newAreaDiff = qAbs(area - next.width() * next.height());

        if ((newWDiff < wDiff && newHDiff < hDiff)
            || ((newWDiff <= wDiff || newHDiff <= hDiff) && newAreaDiff <= areaDiff)) {
            wDiff = newWDiff;
            hDiff = newHDiff;
            best = pairs[i].second;
            areaDiff = newAreaDiff;
        }
    }

    return best;
}
320
// Finds the device format best matching a requested frame rate 'fps',
// preferring higher resolutions (candidates are sorted by resolution in
// decreasing order). A format wins immediately when one of its ranges
// contains fps (point ranges, as on macOS built-in cameras, match within
// 0.1); otherwise the format with the smallest distance between fps and any
// supported maximum frame rate is chosen. Returns nil when no format has
// frame rate ranges. (Declarator line restored from the file's signature
// index; it was dropped by the doc extraction.)
AVCaptureDeviceFormat *qt_find_best_framerate_match(AVCaptureDevice *captureDevice,
                                                    FourCharCode filter,
                                                    Float64 fps)
{
    Q_ASSERT(captureDevice);
    Q_ASSERT(fps > 0.);

    const qreal epsilon = 0.1;

    QVector<AVCaptureDeviceFormat *> sorted(qt_unique_device_formats(captureDevice, filter));
    // Sort formats by their resolution in decreasing order:
    ranges::sort(sorted, ByResolution<std::greater>());
    // We can use only formats with framerate ranges:
    sorted.removeIf(FormatHasNoFPSRange());

    if (!sorted.size())
        return nil;

    // Pass 1: a range that (approximately) contains the requested rate.
    for (int i = 0; i < sorted.size(); ++i) {
        AVCaptureDeviceFormat *format = sorted[i];
        for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
            if (range.maxFrameRate - range.minFrameRate < epsilon) {
                // On OS X ranges are points (built-in camera).
                if (qAbs(fps - range.maxFrameRate) < epsilon)
                    return format;
            }

            if (fps >= range.minFrameRate && fps <= range.maxFrameRate)
                return format;
        }
    }

    // Pass 2: no containing range — pick the closest max frame rate.
    Float64 distance = qt_find_min_framerate_distance(sorted[0], fps);
    AVCaptureDeviceFormat *match = sorted[0];
    for (int i = 1; i < sorted.size(); ++i) {
        const Float64 newDistance = qt_find_min_framerate_distance(sorted[i], fps);
        if (newDistance < distance) {
            distance = newDistance;
            match = sorted[i];
        }
    }

    return match;
}
365
// Selects the frame rate range of 'format' supporting 'fps': a range that
// contains fps (point ranges, as on macOS built-in cameras, match within
// 0.1), or failing that the range whose max frame rate is closest to fps.
// The format must have at least one supported range.
AVFrameRateRange *qt_find_supported_framerate_range(AVCaptureDeviceFormat *format, Float64 fps)
{
    Q_ASSERT(format && format.videoSupportedFrameRateRanges
             && format.videoSupportedFrameRateRanges.count);

    const qreal epsilon = 0.1;

    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
        if (range.maxFrameRate - range.minFrameRate < epsilon) {
            // On OS X ranges are points (built-in camera).
            if (qAbs(fps - range.maxFrameRate) < epsilon)
                return range;
        }

        if (fps >= range.minFrameRate && fps <= range.maxFrameRate)
            return range;
    }

    // No containing range: fall back to the closest max frame rate.
    AVFrameRateRange *match = [format.videoSupportedFrameRateRanges objectAtIndex:0];
    Float64 distance = qAbs(match.maxFrameRate - fps);
    for (NSUInteger i = 1, e = format.videoSupportedFrameRateRanges.count; i < e; ++i) {
        AVFrameRateRange *range = [format.videoSupportedFrameRateRanges objectAtIndex:i];
        const Float64 newDistance = qAbs(range.maxFrameRate - fps);
        if (newDistance < distance) {
            distance = newDistance;
            match = range;
        }
    }

    return match;
}
397
// True when 'fps' falls (within a 0.1 tolerance) inside any of the format's
// supported frame rate ranges; false for null formats or non-positive rates.
bool qt_format_supports_framerate(AVCaptureDeviceFormat *format, qreal fps)
{
    if (!format || fps <= qreal(0))
        return false;

    const qreal epsilon = 0.1;
    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
        if (fps >= range.minFrameRate - epsilon && fps <= range.maxFrameRate + epsilon)
            return true;
    }

    return false;
}
410
// Two formats are considered equal when they are the same object, or share
// both a media type and an equal CoreMedia format description.
bool qt_formats_are_equal(AVCaptureDeviceFormat *f1, AVCaptureDeviceFormat *f2)
{
    if (f1 == f2)
        return true;

    return [f1.mediaType isEqualToString:f2.mediaType]
           && CMFormatDescriptionEqual(f1.formatDescription, f2.formatDescription);
}
421
// Activates 'format' on 'captureDevice'. When 'preserveFps' is set, the
// current frame rate limits are captured first and re-applied after the
// switch, because changing activeFormat resets them. Returns true when the
// caller should treat the format as (re)applied.
bool qt_set_active_format(AVCaptureDevice *captureDevice, AVCaptureDeviceFormat *format, bool preserveFps)
{
    // NOTE(review): this flag is process-wide, not per-device — presumably
    // only one capture device is configured through this path; confirm.
    static bool firstSet = true;

    if (!captureDevice || !format)
        return false;

    if (qt_formats_are_equal(captureDevice.activeFormat, format)) {
        if (firstSet) {
            // The capture device format is persistent. The first time we set a format, report that
            // it changed even if the formats are the same.
            // This prevents the session from resetting the format to the default value.
            firstSet = false;
            return true;
        }
        return false;
    }

    firstSet = false;

    // Scoped device-configuration lock; failure aborts the change.
    const AVFConfigurationLock lock(captureDevice);
    if (!lock) {
        qWarning("Failed to set active format (lock failed)");
        return false;
    }

    // Changing the activeFormat resets the frame rate.
    AVFPSRange fps;
    if (preserveFps)
        fps = qt_current_framerates(captureDevice, nil);

    captureDevice.activeFormat = format;

    if (preserveFps)
        qt_set_framerate_limits(captureDevice, nil, fps.first, fps.second);

    return true;
}
460
// Applies frame rate limits [minFPS, maxFPS] to a capture connection by
// converting them to frame durations (duration = 1 / fps). A zero rate
// resets the corresponding duration to kCMTimeInvalid (the default).
// Invalid combinations (negative values, max < min) are rejected with a
// debug message.
void qt_set_framerate_limits(AVCaptureConnection *videoConnection, qreal minFPS, qreal maxFPS)
{
    Q_ASSERT(videoConnection);

    if (minFPS < 0. || maxFPS < 0. || (maxFPS && maxFPS < minFPS)) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid framerates (min, max):"
                           << minFPS << maxFPS;
        return;
    }

    CMTime minDuration = kCMTimeInvalid;
    if (maxFPS > 0.) {
        if (!videoConnection.supportsVideoMinFrameDuration) {
            qCDebug(qLcCamera) << Q_FUNC_INFO << "maximum framerate is not supported";
        } else {
            // Build a rational approximation of 1/fps instead of
            // CMTimeMake(1, maxFPS): CMTimeMake takes an integer timescale,
            // so a fractional rate (e.g. 29.97) would silently truncate.
            // This mirrors qt_adjusted_frame_duration below.
            const auto frac = qRealToFraction(1. / maxFPS);
            minDuration = CMTimeMake(frac.numerator, frac.denominator);
        }
    }
    if (videoConnection.supportsVideoMinFrameDuration)
        videoConnection.videoMinFrameDuration = minDuration;

    CMTime maxDuration = kCMTimeInvalid;
    if (minFPS > 0.) {
        if (!videoConnection.supportsVideoMaxFrameDuration) {
            qCDebug(qLcCamera) << Q_FUNC_INFO << "minimum framerate is not supported";
        } else {
            const auto frac = qRealToFraction(1. / minFPS);
            maxDuration = CMTimeMake(frac.numerator, frac.denominator);
        }
    }
    if (videoConnection.supportsVideoMaxFrameDuration)
        videoConnection.videoMaxFrameDuration = maxDuration;
}
491
// Frame duration to use for the requested 'fps', clamped to the supported
// 'range'. Degenerate point-ranges (seen on OS X built-in cameras) always
// yield their fixed duration.
CMTime qt_adjusted_frame_duration(AVFrameRateRange *range, qreal fps)
{
    Q_ASSERT(range);
    Q_ASSERT(fps > 0.);

    // min == max within tolerance: nothing to choose (can happen on OS X).
    if (range.maxFrameRate - range.minFrameRate < 0.1)
        return range.minFrameDuration;

    // Clamp out-of-range requests to the nearest limit ...
    if (fps <= range.minFrameRate)
        return range.maxFrameDuration;
    if (fps >= range.maxFrameRate)
        return range.minFrameDuration;

    // ... otherwise express 1/fps as a rational CMTime.
    const auto frac = qRealToFraction(1. / fps);
    return CMTimeMake(frac.numerator, frac.denominator);
}
510
// Applies [minFPS, maxFPS] to the device's active format. The rates are
// snapped to a supported frame rate range and converted to frame durations
// via qt_adjusted_frame_duration. Zero means "leave unset"; invalid
// combinations are rejected with a debug message.
void qt_set_framerate_limits(AVCaptureDevice *captureDevice, qreal minFPS, qreal maxFPS)
{
    Q_ASSERT(captureDevice);
    if (!captureDevice.activeFormat) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "no active capture device format";
        return;
    }

    if (minFPS < 0. || maxFPS < 0. || (maxFPS && maxFPS < minFPS)) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid framerates (min, max):"
                           << minFPS << maxFPS;
        return;
    }

    CMTime minFrameDuration = kCMTimeInvalid;
    CMTime maxFrameDuration = kCMTimeInvalid;
    if (maxFPS || minFPS) {
        // Pick the range supporting the max FPS (or min FPS if no max given).
        AVFrameRateRange *range = qt_find_supported_framerate_range(captureDevice.activeFormat,
                                                                    maxFPS ? maxFPS : minFPS);
        if (!range) {
            qCDebug(qLcCamera) << Q_FUNC_INFO << "no framerate range found, (min, max):"
                               << minFPS << maxFPS;
            return;
        }

        if (maxFPS)
            minFrameDuration = qt_adjusted_frame_duration(range, maxFPS);
        if (minFPS)
            maxFrameDuration = qt_adjusted_frame_duration(range, minFPS);
    }

    // Scoped device-configuration lock; failure aborts the change.
    const AVFConfigurationLock lock(captureDevice);
    if (!lock) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
        return;
    }

    // While Apple's docs say kCMTimeInvalid will end in default
    // settings for this format, kCMTimeInvalid on OS X ends with a runtime
    // exception:
    // "The activeVideoMinFrameDuration passed is not supported by the device."
    // Instead, use the first item in the supported frame rates.
#ifdef Q_OS_IOS
    [captureDevice setActiveVideoMinFrameDuration:minFrameDuration];
    [captureDevice setActiveVideoMaxFrameDuration:maxFrameDuration];
#elif defined(Q_OS_MACOS)
    if (CMTIME_IS_INVALID(minFrameDuration)
        && CMTIME_IS_INVALID(maxFrameDuration)) {
        AVFrameRateRange *range = captureDevice.activeFormat.videoSupportedFrameRateRanges.firstObject;
        minFrameDuration = range.minFrameDuration;
        maxFrameDuration = range.maxFrameDuration;
    }

    if (CMTIME_IS_VALID(minFrameDuration))
        [captureDevice setActiveVideoMinFrameDuration:minFrameDuration];

    if (CMTIME_IS_VALID(maxFrameDuration))
        [captureDevice setActiveVideoMaxFrameDuration:maxFrameDuration];
#endif // Q_OS_MACOS
}
571
// Convenience overload: the connection argument is accepted for API symmetry
// but ignored; the limits are applied to the device itself.
void qt_set_framerate_limits(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection,
                             qreal minFPS, qreal maxFPS)
{
    Q_ASSERT(captureDevice);
    Q_UNUSED(videoConnection);
    qt_set_framerate_limits(captureDevice, minFPS, maxFPS);
}
579
// Reads the device's active frame durations back as an AVFPSRange
// (min FPS, max FPS). The connection argument is unused; values come from
// the device. Invalid/unset durations leave the range defaults in place.
AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection)
{
    Q_ASSERT(captureDevice);
    Q_UNUSED(videoConnection);

    AVFPSRange range;

    const CMTime minFrameDuration = captureDevice.activeVideoMinFrameDuration;
    if (CMTIME_IS_VALID(minFrameDuration)) {
        const Float64 seconds = CMTimeGetSeconds(minFrameDuration);
        if (seconds)
            range.second = 1. / seconds; // Max FPS = 1 / MinDuration.
    }

    const CMTime maxFrameDuration = captureDevice.activeVideoMaxFrameDuration;
    if (CMTIME_IS_VALID(maxFrameDuration)) {
        const Float64 seconds = CMTimeGetSeconds(maxFrameDuration);
        if (seconds)
            range.first = 1. / seconds; // Min FPS = 1 / MaxDuration.
    }

    return range;
}
600
// Queries AudioToolbox for the sample rate ranges the encoder identified by
// 'codecId' supports. Returns an empty list on any AudioFormat error.
// (Signature line restored from the file's signature index; it was dropped
// by the doc extraction.)
QList<AudioValueRange> qt_supported_sample_rates_for_format(int codecId)
{
    UInt32 format = codecId;
    UInt32 size;
    // First ask for the payload size to dimension the result list.
    OSStatus err = AudioFormatGetPropertyInfo(
            kAudioFormatProperty_AvailableEncodeSampleRates,
            sizeof(format),
            &format,
            &size);

    if (err != noErr)
        return {};

    UInt32 numRanges = size / sizeof(AudioValueRange);
    QList<AudioValueRange> result;
    result.resize(numRanges);

    err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeSampleRates,
                                 sizeof(format),
                                 &format,
                                 &size,
                                 result.data());
    return err == noErr ? result : QList<AudioValueRange>{};
}
625
// Queries AudioToolbox for the bit rate ranges the encoder identified by
// 'codecId' supports. Returns an empty list on any AudioFormat error.
// (Signature line restored from the file's signature index; it was dropped
// by the doc extraction.)
QList<AudioValueRange> qt_supported_bit_rates_for_format(int codecId)
{
    UInt32 format = codecId;
    UInt32 size;
    // First ask for the payload size to dimension the result list.
    OSStatus err = AudioFormatGetPropertyInfo(
            kAudioFormatProperty_AvailableEncodeBitRates,
            sizeof(format),
            &format,
            &size);

    if (err != noErr)
        return {};

    UInt32 numRanges = size / sizeof(AudioValueRange);
    QList<AudioValueRange> result;
    result.resize(numRanges);

    err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeBitRates,
                                 sizeof(format),
                                 &format,
                                 &size,
                                 result.data());
    return err == noErr ? result : QList<AudioValueRange>{};
}
650
// Queries AudioToolbox for the channel counts the encoder identified by
// 'codecId' can produce. Returns std::nullopt on error, or when the API
// reports that any number of channels may be encoded. (Signature line
// restored from the file's signature index; it was dropped by the doc
// extraction.)
std::optional<QList<UInt32>> qt_supported_channel_counts_for_format(int codecId)
{
    AudioStreamBasicDescription sf = {};
    sf.mFormatID = codecId;
    UInt32 size;
    OSStatus err = AudioFormatGetPropertyInfo(
            kAudioFormatProperty_AvailableEncodeNumberChannels,
            sizeof(sf),
            &sf,
            &size);

    if (err != noErr)
        return std::nullopt;

    // From Apple's docs:
    // A value of 0xFFFFFFFF indicates that any number of channels may be encoded.
    // (The check is on the reported byte size, compared via int cast.)
    if (int(size) == -1)
        return std::nullopt;

    UInt32 numCounts = size / sizeof(UInt32);
    QList<UInt32> channelCounts;
    channelCounts.resize(numCounts);

    err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeNumberChannels,
                                 sizeof(sf),
                                 &sf,
                                 &size,
                                 channelCounts.data());
    if (err == noErr)
        return channelCounts;
    else
        return std::nullopt;
}
684
// Queries AudioToolbox for the channel layout tags the encoder identified by
// 'codecId' supports at the given channel count 'noChannels'. Returns an
// empty list on any AudioFormat error. (Signature line restored from the
// file's signature index; it was dropped by the doc extraction.)
QList<UInt32> qt_supported_channel_layout_tags_for_format(int codecId, int noChannels)
{
    AudioStreamBasicDescription sf = {};
    sf.mFormatID = codecId;
    sf.mChannelsPerFrame = noChannels;
    UInt32 size;
    OSStatus err = AudioFormatGetPropertyInfo(
            kAudioFormatProperty_AvailableEncodeChannelLayoutTags,
            sizeof(sf),
            &sf,
            &size);

    if (err != noErr)
        return {};

    UInt32 noTags = (UInt32)size / sizeof(UInt32);
    QList<AudioChannelLayoutTag> tagsArr;
    tagsArr.resize(noTags);

    err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeChannelLayoutTags,
                                 sizeof(sf),
                                 &sf,
                                 &size,
                                 tagsArr.data());
    if (err != noErr)
        return {};

    // Widen AudioChannelLayoutTag entries into the UInt32 result list.
    QList<UInt32> result;
    for (const AudioChannelLayoutTag &item : tagsArr)
        result.push_back(item);

    return result;
}
718
719QT_END_NAMESPACE
QSize qt_device_format_resolution(AVCaptureDeviceFormat *format)
bool qt_formats_are_equal(AVCaptureDeviceFormat *f1, AVCaptureDeviceFormat *f2)
AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection)
std::optional< QList< UInt32 > > qt_supported_channel_counts_for_format(int codecId)
QVector< AVCaptureDeviceFormat * > qt_unique_device_formats(AVCaptureDevice *captureDevice, FourCharCode filter)
void qt_set_framerate_limits(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection, qreal minFPS, qreal maxFPS)
QVector< AVFPSRange > qt_device_format_framerates(AVCaptureDeviceFormat *format)
QSize qt_device_format_high_resolution(AVCaptureDeviceFormat *format)
QList< AudioValueRange > qt_supported_sample_rates_for_format(int codecId)
QList< UInt32 > qt_supported_channel_layout_tags_for_format(int codecId, int noChannels)
AVCaptureDeviceFormat * qt_find_best_framerate_match(AVCaptureDevice *captureDevice, FourCharCode filter, Float64 fps)
QList< AudioValueRange > qt_supported_bit_rates_for_format(int codecId)
bool qt_set_active_format(AVCaptureDevice *captureDevice, AVCaptureDeviceFormat *format, bool preserveFps)
AVFrameRateRange * qt_find_supported_framerate_range(AVCaptureDeviceFormat *format, Float64 fps)
void qt_set_framerate_limits(AVCaptureConnection *videoConnection, qreal minFPS, qreal maxFPS)
QSize qt_device_format_pixel_aspect_ratio(AVCaptureDeviceFormat *format)
AVCaptureDeviceFormat * qt_find_best_resolution_match(AVCaptureDevice *captureDevice, const QSize &request, FourCharCode filter, bool stillImage)
CMTime qt_adjusted_frame_duration(AVFrameRateRange *range, qreal fps)
bool qt_format_supports_framerate(AVCaptureDeviceFormat *format, qreal fps)
AVCaptureDeviceFormat * qt_convert_to_capture_device_format(AVCaptureDevice *captureDevice, const QCameraFormat &cameraFormat, const std::function< bool(uint32_t)> &cvFormatValidator)
Q_LOGGING_CATEGORY(qLcCamera, "qt.multimedia.camera")