Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
qavfcamerautility.mm
Go to the documentation of this file.
1// Copyright (C) 2016 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
4#include <QtMultimedia/private/qavfcamerautility_p.h>
5#include <QtMultimedia/private/qavfcameradebug_p.h>
6
7#include <QtCore/qvector.h>
8#include <private/qmultimediautils_p.h>
9#include <private/qcameradevice_p.h>
10#include <QtMultimedia/private/qavfhelpers_p.h>
11
12#include <functional>
13#include <algorithm>
14#include <limits>
15#include <tuple>
16
18
// Logging category shared by all AVFoundation camera helpers in this file.
Q_LOGGING_CATEGORY(qLcCamera, "qt.multimedia.camera")
20
// Reads the FPS limits currently applied to a capture connection.
// Per Apple's docs, videoMinFrameDuration is the reciprocal of the maximum
// framerate and videoMaxFrameDuration the reciprocal of the minimum one.
// Unset (kCMTimeInvalid) bounds leave the corresponding field at its default.
AVFPSRange qt_connection_framerates(AVCaptureConnection *videoConnection)
{
    Q_ASSERT(videoConnection);

    AVFPSRange range;

    if (videoConnection.supportsVideoMinFrameDuration) {
        const CMTime minDuration = videoConnection.videoMinFrameDuration;
        // A non-zero CMTimeCompare() means the duration differs from
        // kCMTimeInvalid, i.e. it was explicitly set on the connection.
        if (CMTimeCompare(minDuration, kCMTimeInvalid)) {
            const Float64 seconds = CMTimeGetSeconds(minDuration);
            if (seconds)
                range.second = 1. / seconds; // max FPS
        }
    }

    if (videoConnection.supportsVideoMaxFrameDuration) {
        const CMTime maxDuration = videoConnection.videoMaxFrameDuration;
        if (CMTimeCompare(maxDuration, kCMTimeInvalid)) {
            const Float64 seconds = CMTimeGetSeconds(maxDuration);
            if (seconds)
                range.first = 1. / seconds; // min FPS
        }
    }

    return range;
}
47
48namespace {
49
// True for a non-null, valid size whose width * height cannot overflow
// a signed int (callers multiply the two below).
inline bool qt_area_sane(const QSize &size)
{
    if (size.isNull() || !size.isValid())
        return false;
    return size.height() <= std::numeric_limits<int>::max() / size.width();
}
55
// Comparator ordering AVCaptureDeviceFormats by resolution: (width, height)
// compared lexicographically through Comp (std::less, std::greater, ...).
template <template <typename...> class Comp>
struct ByResolution
{
    bool operator() (AVCaptureDeviceFormat *f1, AVCaptureDeviceFormat *f2) const
    {
        Q_ASSERT(f1 && f2);
        const QSize lhs = qt_device_format_resolution(f1);
        const QSize rhs = qt_device_format_resolution(f2);
        // std::tuple gives us lexicographical comparison for free.
        return Comp<std::tuple<int, int>>{}(std::make_tuple(lhs.width(), lhs.height()),
                                            std::make_tuple(rhs.width(), rhs.height()));
    }
};
70
// Predicate: true when a device format exposes no supported frame rate ranges.
struct FormatHasNoFPSRange
{
    bool operator() (AVCaptureDeviceFormat *format) const
    {
        Q_ASSERT(format);
        NSArray<AVFrameRateRange *> *ranges = format.videoSupportedFrameRateRanges;
        return !ranges || !ranges.count;
    }
};
79
// Smallest |maxFrameRate - fps| over all of the format's supported frame
// rate ranges. The format must expose at least one range (asserted).
Float64 qt_find_min_framerate_distance(AVCaptureDeviceFormat *format, Float64 fps)
{
    Q_ASSERT(format && format.videoSupportedFrameRateRanges
             && format.videoSupportedFrameRateRanges.count);

    // With at least one range guaranteed, the sentinel is always replaced.
    Float64 best = std::numeric_limits<Float64>::max();
    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges)
        best = qMin(best, qAbs(range.maxFrameRate - fps));

    return best;
}
94
95} // Unnamed namespace.
96
qt_convert_to_capture_device_format(AVCaptureDevice *captureDevice,
                                    const QCameraFormat &cameraFormat,
                                    const std::function<bool(uint32_t)> &cvFormatValidator)
{
    // Finds the AVCaptureDeviceFormat on captureDevice matching cameraFormat's
    // pixel format and resolution whose frame rate range fits inside
    // [minFrameRate, maxFrameRate]; among candidates the one with the highest
    // max frame rate wins. Returns nil when nothing matches.
    const auto cameraFormatPrivate = QCameraFormatPrivate::handle(cameraFormat);
    if (!cameraFormatPrivate)
        return nil;

    // Map the Qt pixel format + color range to a CoreVideo pixel format code.
    const auto requiredCvPixFormat = QAVFHelpers::toCVPixelFormat(cameraFormatPrivate->pixelFormat,
                                                                  cameraFormatPrivate->colorRange);

    if (requiredCvPixFormat == CvPixelFormatInvalid)
        return nil;

    AVCaptureDeviceFormat *newFormat = nil;
    Float64 newFormatMaxFrameRate = {}; // best max frame rate seen so far
    NSArray<AVCaptureDeviceFormat *> *formats = captureDevice.formats;
    for (AVCaptureDeviceFormat *format in formats) {
        CMFormatDescriptionRef formatDesc = format.formatDescription;
        CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(formatDesc);
        FourCharCode cvPixFormat = CMVideoFormatDescriptionGetCodecType(formatDesc);

        if (cvPixFormat != requiredCvPixFormat)
            continue;

        if (cameraFormatPrivate->resolution != QSize(dim.width, dim.height))
            continue;

        // Optional caller-supplied veto on the CoreVideo pixel format.
        if (cvFormatValidator && !cvFormatValidator(cvPixFormat))
            continue;

        // Tolerate tiny floating-point differences in the reported rates.
        const float epsilon = 0.001f;
        for (AVFrameRateRange *frameRateRange in format.videoSupportedFrameRateRanges) {
            if (frameRateRange.minFrameRate >= cameraFormatPrivate->minFrameRate - epsilon
                && frameRateRange.maxFrameRate <= cameraFormatPrivate->maxFrameRate + epsilon
                && newFormatMaxFrameRate < frameRateRange.maxFrameRate) {
                newFormat = format;
                newFormatMaxFrameRate = frameRateRange.maxFrameRate;
            }
        }
    }
    return newFormat;
}
141
QVector<AVCaptureDeviceFormat *> qt_unique_device_formats(AVCaptureDevice *captureDevice, FourCharCode filter)
{
    // Returns the device's formats with duplicate resolutions removed.
    // 'filter' is the format we prefer if we have duplicates.
    Q_ASSERT(captureDevice);

    QVector<AVCaptureDeviceFormat *> formats;

    if (!captureDevice.formats || !captureDevice.formats.count)
        return formats;

    // Keep only formats with a usable (non-null, valid) resolution.
    formats.reserve(captureDevice.formats.count);
    for (AVCaptureDeviceFormat *format in captureDevice.formats) {
        const QSize resolution(qt_device_format_resolution(format));
        if (resolution.isNull() || !resolution.isValid())
            continue;
        formats << format;
    }

    if (!formats.size())
        return formats;

    // Sort by (width, height) so equal resolutions become adjacent.
    std::sort(formats.begin(), formats.end(), ByResolution<std::less>());

    // In-place unique pass over adjacent runs of equal resolutions:
    // 'last' indexes the kept entry, 'codec' is the codec of the format
    // examined in the previous iteration.
    QSize size(qt_device_format_resolution(formats[0]));
    FourCharCode codec = CMVideoFormatDescriptionGetCodecType(formats[0].formatDescription);
    int last = 0;
    for (int i = 1; i < formats.size(); ++i) {
        const QSize nextSize(qt_device_format_resolution(formats[i]));
        if (nextSize == size) {
            // Duplicate resolution: keep the previous entry only if its
            // codec matched the preferred 'filter', otherwise replace it.
            if (codec == filter)
                continue;
            formats[last] = formats[i];
        } else {
            ++last;
            formats[last] = formats[i];
            size = nextSize;
        }
        codec = CMVideoFormatDescriptionGetCodecType(formats[i].formatDescription);
    }
    // Drop the tail left over after compaction.
    formats.resize(last + 1);

    return formats;
}
185
// The format's encoded video dimensions as a QSize; an invalid QSize when
// the format or its description is missing.
QSize qt_device_format_resolution(AVCaptureDeviceFormat *format)
{
    if (!format)
        return QSize();

    CMFormatDescriptionRef description = format.formatDescription;
    if (!description)
        return QSize();

    const CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(description);
    return QSize(dim.width, dim.height);
}
194
// iOS only: the format's high-resolution still-image dimensions.
// On other platforms a default (invalid) QSize is returned.
QSize qt_device_format_high_resolution(AVCaptureDeviceFormat *format)
{
    Q_ASSERT(format);
#if defined(Q_OS_IOS)
    const CMVideoDimensions dim = format.highResolutionStillImageDimensions;
    return QSize(dim.width, dim.height);
#else
    return QSize();
#endif
}
206
// Collects the format's supported frame rate ranges as (min, max) pairs.
// Empty when the format advertises no ranges.
QVector<AVFPSRange> qt_device_format_framerates(AVCaptureDeviceFormat *format)
{
    Q_ASSERT(format);

    QVector<AVFPSRange> result;

    NSArray<AVFrameRateRange *> *ranges = format.videoSupportedFrameRateRanges;
    if (!ranges || !ranges.count)
        return result;

    result.reserve(ranges.count);
    for (AVFrameRateRange *range in ranges)
        result.append(AVFPSRange(range.minFrameRate, range.maxFrameRate));

    return result;
}
222
// Derives the format's pixel aspect ratio as a reduced fraction.
// Returns QSize(1, 1) for square pixels and an invalid QSize on failure.
QSize qt_device_format_pixel_aspect_ratio(AVCaptureDeviceFormat *format)
{
    Q_ASSERT(format);

    if (!format.formatDescription) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "no format description found";
        return QSize();
    }

    const CMVideoDimensions encoded = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
    // Presentation size with pixel aspect ratio applied, clean aperture
    // ignored. "Pixel aspect ratio is used to adjust the width, leaving the
    // height alone."
    const CGSize presentation = CMVideoFormatDescriptionGetPresentationDimensions(format.formatDescription, true, false);

    if (qAbs(presentation.width - encoded.width) < 1.)
        return QSize(1, 1); // square pixels

    if (!encoded.width || !presentation.width)
        return QSize();

    // Express the smaller/larger width ratio as a fraction.
    const qreal ratio = presentation.width > encoded.width
                            ? encoded.width / qreal(presentation.width)
                            : presentation.width / qreal(encoded.width);
    const auto frac = qRealToFraction(ratio);

    return QSize(frac.numerator, frac.denominator);
}
248
                              const QSize &request,
                              FourCharCode filter,
                              bool stillImage)
{
    // Returns the device format whose resolution best matches 'request'.
    // Exact matches win (including iOS high-resolution still-image sizes
    // when 'stillImage' is set); otherwise the candidate minimizing the
    // per-axis deltas, with area difference as tie-breaker, is chosen.
    Q_ASSERT(captureDevice);
    Q_ASSERT(!request.isNull() && request.isValid());

    if (!captureDevice.formats || !captureDevice.formats.count)
        return nullptr;

    QVector<AVCaptureDeviceFormat *> formats(qt_unique_device_formats(captureDevice, filter));

    // First pass: exact resolution match.
    for (int i = 0; i < formats.size(); ++i) {
        AVCaptureDeviceFormat *format = formats[i];
        if (qt_device_format_resolution(format) == request)
            return format;
        // iOS only (still images).
        if (stillImage && qt_device_format_high_resolution(format) == request)
            return format;
    }

    // Guard against integer overflow in the area computations below.
    if (!qt_area_sane(request))
        return nullptr;

    typedef std::pair<QSize, AVCaptureDeviceFormat *> FormatPair;

    // Collect all candidate (size, format) pairs.
    QVector<FormatPair> pairs; // default|HR sizes
    pairs.reserve(formats.size());

    for (int i = 0; i < formats.size(); ++i) {
        AVCaptureDeviceFormat *format = formats[i];
        const QSize res(qt_device_format_resolution(format));
        if (!res.isNull() && res.isValid() && qt_area_sane(res))
            pairs << FormatPair(res, format);
        const QSize highRes(qt_device_format_high_resolution(format));
        if (stillImage && !highRes.isNull() && highRes.isValid() && qt_area_sane(highRes))
            pairs << FormatPair(highRes, format);
    }

    if (!pairs.size())
        return nullptr;

    // Second pass: closest match. A candidate wins when both axis deltas
    // shrink, or when at least one shrinks and the area delta does not grow.
    AVCaptureDeviceFormat *best = pairs[0].second;
    QSize next(pairs[0].first);
    int wDiff = qAbs(request.width() - next.width());
    int hDiff = qAbs(request.height() - next.height());
    const int area = request.width() * request.height();
    int areaDiff = qAbs(area - next.width() * next.height());
    for (int i = 1; i < pairs.size(); ++i) {
        next = pairs[i].first;
        const int newWDiff = qAbs(next.width() - request.width());
        const int newHDiff = qAbs(next.height() - request.height());
        const int newAreaDiff = qAbs(area - next.width() * next.height());

        if ((newWDiff < wDiff && newHDiff < hDiff)
            || ((newWDiff <= wDiff || newHDiff <= hDiff) && newAreaDiff <= areaDiff)) {
            wDiff = newWDiff;
            hDiff = newHDiff;
            best = pairs[i].second;
            areaDiff = newAreaDiff;
        }
    }

    return best;
}
315
                              FourCharCode filter,
                              Float64 fps)
{
    // Returns the device format best supporting 'fps': a format whose
    // frame rate range contains fps (preferring higher resolutions and
    // 'filter'-codec duplicates); if none does, the format whose ranges
    // come closest to fps.
    Q_ASSERT(captureDevice);
    Q_ASSERT(fps > 0.);

    const qreal epsilon = 0.1;

    QVector<AVCaptureDeviceFormat *>sorted(qt_unique_device_formats(captureDevice, filter));
    // Sort formats by their resolution in decreasing order:
    std::sort(sorted.begin(), sorted.end(), ByResolution<std::greater>());
    // We can use only formats with framerate ranges:
    sorted.erase(std::remove_if(sorted.begin(), sorted.end(), FormatHasNoFPSRange()), sorted.end());

    if (!sorted.size())
        return nil;

    // First pass: a format whose range contains (or nearly equals) fps.
    for (int i = 0; i < sorted.size(); ++i) {
        AVCaptureDeviceFormat *format = sorted[i];
        for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
            if (range.maxFrameRate - range.minFrameRate < epsilon) {
                // On OS X ranges are points (built-in camera).
                if (qAbs(fps - range.maxFrameRate) < epsilon)
                    return format;
            }

            if (fps >= range.minFrameRate && fps <= range.maxFrameRate)
                return format;
        }
    }

    // Fallback: the format minimizing the distance between fps and its
    // ranges' max frame rates.
    Float64 distance = qt_find_min_framerate_distance(sorted[0], fps);
    AVCaptureDeviceFormat *match = sorted[0];
    for (int i = 1; i < sorted.size(); ++i) {
        const Float64 newDistance = qt_find_min_framerate_distance(sorted[i], fps);
        if (newDistance < distance) {
            distance = newDistance;
            match = sorted[i];
        }
    }

    return match;
}
360
// Picks the frame rate range of 'format' that contains fps, treating
// degenerate "point" ranges (min == max, as seen on OS X built-in cameras)
// as a match within a 0.1 tolerance. When no range contains fps, the range
// whose maxFrameRate is closest to fps is returned.
AVFrameRateRange *qt_find_supported_framerate_range(AVCaptureDeviceFormat *format, Float64 fps)
{
    Q_ASSERT(format && format.videoSupportedFrameRateRanges
             && format.videoSupportedFrameRateRanges.count);

    const qreal epsilon = 0.1;

    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
        if (range.maxFrameRate - range.minFrameRate < epsilon) {
            // On OS X ranges are points (built-in camera).
            if (qAbs(fps - range.maxFrameRate) < epsilon)
                return range;
        }

        if (fps >= range.minFrameRate && fps <= range.maxFrameRate)
            return range;
    }

    // No containing range: fall back to the closest one. The sentinel is
    // always replaced since the format has at least one range (asserted).
    AVFrameRateRange *closest = nil;
    Float64 bestDistance = std::numeric_limits<Float64>::max();
    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
        const Float64 newDistance = qAbs(range.maxFrameRate - fps);
        if (newDistance < bestDistance) {
            bestDistance = newDistance;
            closest = range;
        }
    }

    return closest;
}
392
// True when 'format' advertises a frame rate range containing fps,
// with a 0.1 tolerance at either end.
bool qt_format_supports_framerate(AVCaptureDeviceFormat *format, qreal fps)
{
    if (!format || !(fps > qreal(0)))
        return false;

    const qreal epsilon = 0.1;
    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
        if (fps >= range.minFrameRate - epsilon && fps <= range.maxFrameRate + epsilon)
            return true;
    }

    return false;
}
405
// Compares two capture device formats by identity first, then by media
// type and format description. Two identical pointers (including nil)
// compare equal immediately.
bool qt_formats_are_equal(AVCaptureDeviceFormat *f1, AVCaptureDeviceFormat *f2)
{
    if (f1 == f2)
        return true;

    return [f1.mediaType isEqualToString:f2.mediaType]
           && CMFormatDescriptionEqual(f1.formatDescription, f2.formatDescription);
}
416
bool qt_set_active_format(AVCaptureDevice *captureDevice, AVCaptureDeviceFormat *format, bool preserveFps)
{
    // Makes 'format' the device's active format, optionally restoring the
    // frame rate limits afterwards (changing activeFormat resets them).
    // Returns true when the caller should treat the format as changed.
    static bool firstSet = true;

    if (!captureDevice || !format)
        return false;

    if (qt_formats_are_equal(captureDevice.activeFormat, format)) {
        if (firstSet) {
            // The capture device format is persistent. The first time we set a format, report that
            // it changed even if the formats are the same.
            // This prevents the session from resetting the format to the default value.
            firstSet = false;
            return true;
        }
        return false;
    }

    firstSet = false;

    // The device must be locked for configuration before mutating activeFormat.
    const AVFConfigurationLock lock(captureDevice);
    if (!lock) {
        qWarning("Failed to set active format (lock failed)");
        return false;
    }

    // Changing the activeFormat resets the frame rate.
    AVFPSRange fps;
    if (preserveFps)
        fps = qt_current_framerates(captureDevice, nil);

    captureDevice.activeFormat = format;

    if (preserveFps)
        qt_set_framerate_limits(captureDevice, nil, fps.first, fps.second);

    return true;
}
455
// Applies [minFPS, maxFPS] to a capture connection via its min/max frame
// durations. A value of 0 resets the corresponding bound to kCMTimeInvalid
// (the default); negative or inverted limits are rejected with a debug log.
void qt_set_framerate_limits(AVCaptureConnection *videoConnection, qreal minFPS, qreal maxFPS)
{
    Q_ASSERT(videoConnection);

    const bool invalidLimits = minFPS < 0. || maxFPS < 0. || (maxFPS && maxFPS < minFPS);
    if (invalidLimits) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid framerates (min, max):"
                           << minFPS << maxFPS;
        return;
    }

    // Max FPS maps to the minimum frame duration (1 / maxFPS).
    // NOTE(review): CMTimeMake truncates a fractional fps to an integer
    // timescale - confirm whether non-integral rates (e.g. 29.97) matter here.
    CMTime minDuration = kCMTimeInvalid;
    if (maxFPS > 0.) {
        if (!videoConnection.supportsVideoMinFrameDuration)
            qCDebug(qLcCamera) << Q_FUNC_INFO << "maximum framerate is not supported";
        else
            minDuration = CMTimeMake(1, maxFPS);
    }
    if (videoConnection.supportsVideoMinFrameDuration)
        videoConnection.videoMinFrameDuration = minDuration;

    // Min FPS maps to the maximum frame duration (1 / minFPS).
    CMTime maxDuration = kCMTimeInvalid;
    if (minFPS > 0.) {
        if (!videoConnection.supportsVideoMaxFrameDuration)
            qCDebug(qLcCamera) << Q_FUNC_INFO << "minimum framerate is not supported";
        else
            maxDuration = CMTimeMake(1, minFPS);
    }
    if (videoConnection.supportsVideoMaxFrameDuration)
        videoConnection.videoMaxFrameDuration = maxDuration;
}
486
// Converts fps into a frame duration clamped to what 'range' supports.
CMTime qt_adjusted_frame_duration(AVFrameRateRange *range, qreal fps)
{
    Q_ASSERT(range);
    Q_ASSERT(fps > 0.);

    // Degenerate "point" range (seen on OS X): only one duration is valid.
    if (range.maxFrameRate - range.minFrameRate < 0.1)
        return range.minFrameDuration;

    // Out-of-range requests clamp to the nearest bound; note that the
    // max duration corresponds to the min frame rate and vice versa.
    if (fps <= range.minFrameRate)
        return range.maxFrameDuration;
    if (fps >= range.maxFrameRate)
        return range.minFrameDuration;

    // In range: express 1/fps as a rational CMTime.
    const auto frac = qRealToFraction(1. / fps);
    return CMTimeMake(frac.numerator, frac.denominator);
}
505
void qt_set_framerate_limits(AVCaptureDevice *captureDevice, qreal minFPS, qreal maxFPS)
{
    // Applies [minFPS, maxFPS] to the device's active format by setting the
    // active min/max frame durations; a 0 limit leaves that bound unset.
    Q_ASSERT(captureDevice);
    if (!captureDevice.activeFormat) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "no active capture device format";
        return;
    }

    if (minFPS < 0. || maxFPS < 0. || (maxFPS && maxFPS < minFPS)) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "invalid framerates (min, max):"
                           << minFPS << maxFPS;
        return;
    }

    // Translate the requested rates into frame durations supported by the
    // active format (qt_adjusted_frame_duration clamps to the found range).
    CMTime minFrameDuration = kCMTimeInvalid;
    CMTime maxFrameDuration = kCMTimeInvalid;
    if (maxFPS || minFPS) {
        AVFrameRateRange *range = qt_find_supported_framerate_range(captureDevice.activeFormat,
                                                                    maxFPS ? maxFPS : minFPS);
        if (!range) {
            qCDebug(qLcCamera) << Q_FUNC_INFO << "no framerate range found, (min, max):"
                               << minFPS << maxFPS;
            return;
        }

        if (maxFPS)
            minFrameDuration = qt_adjusted_frame_duration(range, maxFPS);
        if (minFPS)
            maxFrameDuration = qt_adjusted_frame_duration(range, minFPS);
    }

    const AVFConfigurationLock lock(captureDevice);
    if (!lock) {
        qCDebug(qLcCamera) << Q_FUNC_INFO << "failed to lock for configuration";
        return;
    }

    // While Apple's docs say kCMTimeInvalid will end in default
    // settings for this format, kCMTimeInvalid on OS X ends with a runtime
    // exception:
    // "The activeVideoMinFrameDuration passed is not supported by the device."
    // Instead, use the first item in the supported frame rates.
#ifdef Q_OS_IOS
    [captureDevice setActiveVideoMinFrameDuration:minFrameDuration];
    [captureDevice setActiveVideoMaxFrameDuration:maxFrameDuration];
#elif defined(Q_OS_MACOS)
    if (CMTimeCompare(minFrameDuration, kCMTimeInvalid) == 0
        && CMTimeCompare(maxFrameDuration, kCMTimeInvalid) == 0) {
        AVFrameRateRange *range = captureDevice.activeFormat.videoSupportedFrameRateRanges.firstObject;
        minFrameDuration = range.minFrameDuration;
        maxFrameDuration = range.maxFrameDuration;
    }

    if (CMTimeCompare(minFrameDuration, kCMTimeInvalid))
        [captureDevice setActiveVideoMinFrameDuration:minFrameDuration];

    if (CMTimeCompare(maxFrameDuration, kCMTimeInvalid))
        [captureDevice setActiveVideoMaxFrameDuration:maxFrameDuration];
#endif // Q_OS_MACOS
}
566
// Convenience overload: the connection is unused on this code path; frame
// rate limits are applied directly to the capture device.
void qt_set_framerate_limits(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection,
                             qreal minFPS, qreal maxFPS)
{
    Q_ASSERT(captureDevice);
    Q_UNUSED(videoConnection);

    qt_set_framerate_limits(captureDevice, minFPS, maxFPS);
}
574
// Reads the device's current FPS limits from its active min/max frame
// durations. Bounds left at their defaults stay 0 in the returned pair;
// the connection argument is unused on this code path.
AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection)
{
    Q_UNUSED(videoConnection);
    Q_ASSERT(captureDevice);

    AVFPSRange fps;

    // Max FPS = 1 / min frame duration.
    const CMTime minDuration = captureDevice.activeVideoMinFrameDuration;
    if (CMTimeCompare(minDuration, kCMTimeInvalid)) {
        const Float64 seconds = CMTimeGetSeconds(minDuration);
        if (seconds)
            fps.second = 1. / seconds;
    }

    // Min FPS = 1 / max frame duration.
    const CMTime maxDuration = captureDevice.activeVideoMaxFrameDuration;
    if (CMTimeCompare(maxDuration, kCMTimeInvalid)) {
        const Float64 seconds = CMTimeGetSeconds(maxDuration);
        if (seconds)
            fps.first = 1. / seconds;
    }

    return fps;
}
595
{
    // Queries Core Audio for the sample rate ranges the encoder identified
    // by 'codecId' supports; returns an empty list on any error.
    UInt32 format = codecId;
    UInt32 size;
    OSStatus err = AudioFormatGetPropertyInfo(
            kAudioFormatProperty_AvailableEncodeSampleRates,
            sizeof(format),
            &format,
            &size);

    if (err != noErr)
        return {};

    // The property payload is an array of AudioValueRange entries.
    UInt32 numRanges = size / sizeof(AudioValueRange);
    QList<AudioValueRange> result;
    result.resize(numRanges);

    err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeSampleRates,
                                 sizeof(format),
                                 &format,
                                 &size,
                                 result.data());
    return err == noErr ? result : QList<AudioValueRange>{};
}
620
{
    // Queries Core Audio for the bit rate ranges the encoder identified
    // by 'codecId' supports; returns an empty list on any error.
    UInt32 format = codecId;
    UInt32 size;
    OSStatus err = AudioFormatGetPropertyInfo(
            kAudioFormatProperty_AvailableEncodeBitRates,
            sizeof(format),
            &format,
            &size);

    if (err != noErr)
        return {};

    // The property payload is an array of AudioValueRange entries.
    UInt32 numRanges = size / sizeof(AudioValueRange);
    QList<AudioValueRange> result;
    result.resize(numRanges);

    err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeBitRates,
                                 sizeof(format),
                                 &format,
                                 &size,
                                 result.data());
    return err == noErr ? result : QList<AudioValueRange>{};
}
645
{
    // Queries the channel counts the encoder 'codecId' can produce.
    // Returns std::nullopt on error or when any channel count is allowed.
    AudioStreamBasicDescription sf = {};
    sf.mFormatID = codecId;
    UInt32 size;
    OSStatus err = AudioFormatGetPropertyInfo(
            kAudioFormatProperty_AvailableEncodeNumberChannels,
            sizeof(sf),
            &sf,
            &size);

    if (err != noErr)
        return std::nullopt;

    // From Apple's docs:
    // A value of 0xFFFFFFFF indicates that any number of channels may be encoded.
    // NOTE(review): this compares the reported *size* (not the property
    // value) against 0xFFFFFFFF - confirm this matches Core Audio's contract.
    if (int(size) == -1)
        return std::nullopt;

    // The property payload is an array of UInt32 channel counts.
    UInt32 numCounts = size / sizeof(UInt32);
    QList<UInt32> channelCounts;
    channelCounts.resize(numCounts);

    err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeNumberChannels,
                                 sizeof(sf),
                                 &sf,
                                 &size,
                                 channelCounts.data());
    if (err == noErr)
        return channelCounts;
    else
        return std::nullopt;
}
679
{
    // Queries the channel layout tags available when encoding 'codecId'
    // with 'noChannels' channels; returns an empty list on any error.
    AudioStreamBasicDescription sf = {};
    sf.mFormatID = codecId;
    sf.mChannelsPerFrame = noChannels;
    UInt32 size;
    OSStatus err = AudioFormatGetPropertyInfo(
            kAudioFormatProperty_AvailableEncodeChannelLayoutTags,
            sizeof(sf),
            &sf,
            &size);

    if (err != noErr)
        return {};

    // The property payload is an array of AudioChannelLayoutTag (UInt32).
    UInt32 noTags = (UInt32)size / sizeof(UInt32);
    QList<AudioChannelLayoutTag> tagsArr;
    tagsArr.resize(noTags);

    err = AudioFormatGetProperty(kAudioFormatProperty_AvailableEncodeChannelLayoutTags,
                                 sizeof(sf),
                                 &sf,
                                 &size,
                                 tagsArr.data());
    if (err != noErr)
        return {};

    // Copy into the QList<UInt32> return type.
    QList<UInt32> result;
    for (const AudioChannelLayoutTag &item : tagsArr)
        result.push_back(item);

    return result;
}
713
714QT_END_NAMESPACE
QSize qt_device_format_resolution(AVCaptureDeviceFormat *format)
bool qt_formats_are_equal(AVCaptureDeviceFormat *f1, AVCaptureDeviceFormat *f2)
AVFPSRange qt_current_framerates(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection)
std::optional< QList< UInt32 > > qt_supported_channel_counts_for_format(int codecId)
QVector< AVCaptureDeviceFormat * > qt_unique_device_formats(AVCaptureDevice *captureDevice, FourCharCode filter)
void qt_set_framerate_limits(AVCaptureDevice *captureDevice, AVCaptureConnection *videoConnection, qreal minFPS, qreal maxFPS)
QVector< AVFPSRange > qt_device_format_framerates(AVCaptureDeviceFormat *format)
QSize qt_device_format_high_resolution(AVCaptureDeviceFormat *format)
QList< AudioValueRange > qt_supported_sample_rates_for_format(int codecId)
QList< UInt32 > qt_supported_channel_layout_tags_for_format(int codecId, int noChannels)
AVCaptureDeviceFormat * qt_find_best_framerate_match(AVCaptureDevice *captureDevice, FourCharCode filter, Float64 fps)
QList< AudioValueRange > qt_supported_bit_rates_for_format(int codecId)
bool qt_set_active_format(AVCaptureDevice *captureDevice, AVCaptureDeviceFormat *format, bool preserveFps)
AVFrameRateRange * qt_find_supported_framerate_range(AVCaptureDeviceFormat *format, Float64 fps)
void qt_set_framerate_limits(AVCaptureConnection *videoConnection, qreal minFPS, qreal maxFPS)
QSize qt_device_format_pixel_aspect_ratio(AVCaptureDeviceFormat *format)
AVCaptureDeviceFormat * qt_find_best_resolution_match(AVCaptureDevice *captureDevice, const QSize &request, FourCharCode filter, bool stillImage)
CMTime qt_adjusted_frame_duration(AVFrameRateRange *range, qreal fps)
bool qt_format_supports_framerate(AVCaptureDeviceFormat *format, qreal fps)
AVCaptureDeviceFormat * qt_convert_to_capture_device_format(AVCaptureDevice *captureDevice, const QCameraFormat &cameraFormat, const std::function< bool(uint32_t)> &cvFormatValidator)
Q_LOGGING_CATEGORY(qLcCamera, "qt.multimedia.camera")