Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
avfmediaplayer.mm
Go to the documentation of this file.
1// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
6#include <avfvideosink_p.h>
7#include <avfmetadata_p.h>
8
9#include "qaudiooutput.h"
10#include "private/qplatformaudiooutput_p.h"
11
12#include <QtCore/qdir.h>
13#include <QtCore/qfileinfo.h>
14#include <QtCore/qmimedatabase.h>
15#include <QtCore/qpointer.h>
16#include <QtCore/qmath.h>
17#include <QtCore/qmutex.h>
18#include <QtCore/qthread.h>
19#include <QtCore/private/qexpected_p.h>
20
21#include <mutex>
22
23#import <AVFoundation/AVFoundation.h>
24
25QT_USE_NAMESPACE
26
27//AVAsset Keys
28static NSString* const AVF_TRACKS_KEY = @"tracks";
29static NSString* const AVF_PLAYABLE_KEY = @"playable";
30
31//AVPlayerItem keys
32static NSString* const AVF_STATUS_KEY = @"status";
33static NSString* const AVF_BUFFER_LIKELY_KEEP_UP_KEY = @"playbackLikelyToKeepUp";
34
35//AVPlayer keys
36static NSString* const AVF_RATE_KEY = @"rate";
37static NSString* const AVF_CURRENT_ITEM_KEY = @"currentItem";
38static NSString* const AVF_CURRENT_ITEM_DURATION_KEY = @"currentItem.duration";
39
48
// Objective-C side of the AVFoundation media-player backend. This observer
// owns the AVPlayer/AVPlayerItem/AVPlayerLayer, registers the KVO and
// notification observers on them, and forwards callbacks to the C++
// AVFMediaPlayer through a mutex-guarded back-pointer (GuardedPlatformPlayer
// below). It also serves as the AVAssetResourceLoader delegate for the
// custom "iodevice" URL scheme used to stream media from a QIODevice.
// NOTE(review): this file uses manual retain/release (MRC), not ARC.
@interface AVFMediaPlayerObserver : NSObject<AVAssetResourceLoaderDelegate>

// Read-only access to the underlying AVFoundation objects (backed by ivars
// of the same name, see @synthesize in the implementation).
@property (readonly, getter=player) AVPlayer* m_player;
@property (readonly, getter=playerItem) AVPlayerItem* m_playerItem;
@property (readonly, getter=playerLayer) AVPlayerLayer* m_playerLayer;
// Currently selected video track; 'retain' semantics (MRC).
@property (retain) AVPlayerItemTrack *videoTrack;

// Designated initializer; 'session' is the C++ platform player to notify.
- (AVFMediaPlayerObserver *) initWithMediaPlayerSession:(AVFMediaPlayer *)session;
// Starts asynchronous loading of the asset at 'url'; 'mimeType' is reported
// to the resource loader for "iodevice" streams.
- (void) setURL:(NSURL *)url mimeType:(NSString *)mimeType;
// Tears down the current item/player and all observers.
- (void) unloadMedia;
// Completion phase of setURL: - validates keys, creates item and player.
- (void) prepareToPlayAsset:(AVURLAsset *)asset withKeys:(NSArray *)requestedKeys;
// Reports a load failure to the C++ player as a QMediaPlayer error.
- (void) assetFailedToPrepareForPlayback:(NSError *)error;
// AVPlayerItemDidPlayToEndTimeNotification handler.
- (void) playerItemDidReachEnd:(NSNotification *)notification;
// Central KVO dispatcher for all observed key paths.
- (void) observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object
        change:(NSDictionary *)change context:(void *)context;
// Detaches the C++ player; safe to call from its destructor.
- (void) clearSession;
// Forwards seek completion to the C++ player.
- (void) notifySeekComplete;
- (void) dealloc;
// AVAssetResourceLoaderDelegate entry point for "iodevice" URLs.
- (BOOL) resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
@end
69
70#ifdef Q_OS_IOS
71// Alas, no such thing as 'class variable', hence globals:
72static unsigned sessionActivationCount;
73static QMutex sessionMutex;
74#endif // Q_OS_IOS
75
namespace {

// Mutex-guarded back-pointer from the Objective-C observer to the C++
// AVFMediaPlayer. AVFoundation delivers KVO/block callbacks on arbitrary
// queues, while the player may be destroyed on the Qt thread at any time;
// clearSession/clear() severs the link before destruction, and all access
// goes through the mutex so a callback never dereferences a dangling pointer.
struct GuardedPlatformPlayer
{
    mutable QMutex mutex;
    AVFMediaPlayer *player{};

    // True while a platform player is still attached.
    explicit operator bool() const
    {
        std::lock_guard guard(mutex);
        return player;
    }

    // Error tag for withPlatformPlayer when the player is already gone.
    struct not_a_platform_player_t
    {
    };

    // Invokes 'f(player)' synchronously while holding the mutex; returns
    // f's result, or not_a_platform_player_t if the player was cleared.
    // Because the lock is held across the call, 'f' must not re-enter this
    // object or block on the Qt thread (deadlock risk).
    template <typename Functor>
    auto withPlatformPlayer(Functor &&f)
            -> q23::expected<std::invoke_result_t<Functor, AVFMediaPlayer *>,
                             not_a_platform_player_t>
    {
        std::unique_lock guard(mutex);
        if (!player)
            return q23::unexpected{ not_a_platform_player_t{} };
        if constexpr (std::is_void_v<std::invoke_result_t<Functor, AVFMediaPlayer *>>) {
            f(player);
            return {};
        } else {
            return f(player);
        }
    }

    // Invokes 'f(player)' on the player's thread: directly when already on
    // it (with the lock released so 'f' may call back into this object),
    // otherwise queued via QMetaObject::invokeMethod.
    // NOTE(review): in both paths the raw 'player' pointer escapes the lock;
    // this relies on clear() being called on the player's own thread before
    // destruction so a queued/unlocked 'f' cannot observe a destroyed
    // player — verify against AVFMediaPlayer's teardown order.
    template <typename Functor>
    void invokeWithPlatformPlayer(Functor f)
    {
        std::unique_lock guard(mutex);
        if (!player)
            return;

        if (player->thread()->isCurrentThread()) {
            guard.unlock();
            f(player);
        } else {
            QMetaObject::invokeMethod(player, [f = std::move(f), player = player]() {
                f(player);
            });
        }
    }

    // Severs the link; subsequent callbacks become no-ops.
    void clear()
    {
        std::lock_guard<QMutex> guard(mutex);
        player = nullptr;
    }
};
} // namespace
133
@implementation AVFMediaPlayerObserver {
@private
    // Guarded back-pointer to the C++ player; cleared via clearSession.
    GuardedPlatformPlayer m_platformPlayer;
    // Retained AVFoundation objects (MRC ownership, released in unloadMedia
    // and dealloc).
    AVPlayer *m_player;
    AVPlayerItem *m_playerItem;
    AVPlayerLayer *m_playerLayer;
    NSURL *m_URL;                   // copied in setURL:mimeType:
    BOOL m_bufferIsLikelyToKeepUp;  // last observed playbackLikelyToKeepUp state
    NSData *m_data;
    NSString *m_mimeType;           // retained; reported to the resource loader
#ifdef Q_OS_IOS
    BOOL m_activated;               // whether this instance holds an audio-session activation
#endif
}

// Back the readonly properties declared in the interface with the ivars.
@synthesize m_player, m_playerItem, m_playerLayer;
150
#ifdef Q_OS_IOS
// Reference-counted (de)activation of the shared AVAudioSession across all
// player instances in the process. The session is only activated on the
// 0 -> 1 transition and deactivated on 1 -> 0; sessionActivationCount and
// sessionMutex are the file-level globals declared above.
- (void)setSessionActive:(BOOL)active
{
    const QMutexLocker lock(&sessionMutex);
    if (active) {
        // Don't count the same player twice if already activated,
        // unless it tried to deactivate first:
        if (m_activated)
            return;
        if (!sessionActivationCount)
            [AVAudioSession.sharedInstance setActive:YES error:nil];
        ++sessionActivationCount;
        m_activated = YES;
    } else {
        // Guard against unbalanced calls (e.g. unloadMedia before any play).
        if (!sessionActivationCount || !m_activated) {
            qWarning("Unbalanced audio session deactivation, ignoring.");
            return;
        }
        --sessionActivationCount;
        m_activated = NO;
        if (!sessionActivationCount)
            [AVAudioSession.sharedInstance setActive:NO error:nil];
    }
}
#endif // Q_OS_IOS
176
// Designated initializer: stores the C++ player back-pointer and creates
// the (initially player-less) AVPlayerLayer used for video output.
- (AVFMediaPlayerObserver *) initWithMediaPlayerSession:(AVFMediaPlayer *)session
{
    if (!(self = [super init]))
        return nil;
    m_platformPlayer.player = session;
    m_bufferIsLikelyToKeepUp = FALSE;

    // playerLayerWithPlayer: returns an autoreleased object; retain it since
    // this file uses MRC (balanced by release in dealloc).
    m_playerLayer = [AVPlayerLayer playerLayerWithPlayer:nil];
    [m_playerLayer retain];
    m_playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    m_playerLayer.anchorPoint = CGPointMake(0.0f, 0.0f);
    return self;
}
190
// Begins loading the media at 'url'. Creates an AVURLAsset, installs self
// as its resource-loader delegate (for "iodevice" URLs), and asynchronously
// loads the "tracks"/"playable" keys; prepareToPlayAsset:withKeys: runs on
// the main queue when loading completes. A repeated call with the same URL
// only updates the stored MIME type.
- (void) setURL:(NSURL *)url mimeType:(NSString *)mimeType
{
    [m_mimeType release];
    m_mimeType = [mimeType retain];

    if (m_URL != url)
    {
        [m_URL release];
        m_URL = [url copy];

        //Create an asset for inspection of a resource referenced by a given URL.
        //Load the values for the asset keys "tracks", "playable".

        // use __block to avoid maintaining strong references on variables captured by the
        // following block callback
#if defined(Q_OS_IOS)
        BOOL isAccessing = [m_URL startAccessingSecurityScopedResource];
#endif
        // Manually retained so the objects survive until the completion
        // block below releases them (MRC).
        __block AVURLAsset *asset = [[AVURLAsset URLAssetWithURL:m_URL options:nil] retain];
        [asset.resourceLoader setDelegate:self queue:dispatch_get_main_queue()];

        __block NSArray *requestedKeys = [[NSArray arrayWithObjects:AVF_TRACKS_KEY, AVF_PLAYABLE_KEY, nil] retain];

        // Keep self alive across the asynchronous load; released in the block.
        __block AVFMediaPlayerObserver *blockSelf = [self retain];

        // Tells the asset to load the values of any of the specified keys that are not already loaded.
        [asset loadValuesAsynchronouslyForKeys:requestedKeys completionHandler:
         ^{
             // Hop to the main queue: prepareToPlayAsset touches KVO
             // registrations and the player layer.
             dispatch_async( dispatch_get_main_queue(),
                            ^{
#if defined(Q_OS_IOS)
                                if (isAccessing)
                                    [m_URL stopAccessingSecurityScopedResource];
#endif
                                [blockSelf prepareToPlayAsset:asset withKeys:requestedKeys];
                                [asset release];
                                [requestedKeys release];
                                [blockSelf release];
                            });
         }];
    }
}
233
// Tears down the current AVPlayerItem and AVPlayer: removes every KVO and
// notification observer registered in prepareToPlayAsset:, detaches the
// player from the layer and (on iOS) releases the audio-session activation.
// Safe to call repeatedly; both branches null their ivar first thing.
- (void) unloadMedia
{
    if (m_playerItem) {
        // Must exactly mirror the addObserver: calls in prepareToPlayAsset:,
        // otherwise KVO throws on an unbalanced remove.
        [m_playerItem removeObserver:self forKeyPath:@"presentationSize"];
        [m_playerItem removeObserver:self forKeyPath:AVF_STATUS_KEY];
        [m_playerItem removeObserver:self forKeyPath:AVF_BUFFER_LIKELY_KEEP_UP_KEY];
        [m_playerItem removeObserver:self forKeyPath:AVF_TRACKS_KEY];

        [[NSNotificationCenter defaultCenter] removeObserver:self
                                                        name:AVPlayerItemDidPlayToEndTimeNotification
                                                      object:m_playerItem];
        m_playerItem = nullptr;
    }
    if (m_player) {
        // Rate 0 == paused; stop playback before dismantling observers.
        [m_player setRate:0.0];
        [m_player removeObserver:self forKeyPath:AVF_CURRENT_ITEM_DURATION_KEY];
        [m_player removeObserver:self forKeyPath:AVF_CURRENT_ITEM_KEY];
        [m_player removeObserver:self forKeyPath:AVF_RATE_KEY];
        [m_player replaceCurrentItemWithPlayerItem:nil];

        // Defer the release of AVPlayer to allow CoreMedia/VideoToolbox
        // dispatch queues to finish pending operations. Releasing the
        // player synchronously can cause sporadic crashes on macOS 14
        // when background threads still reference internal resources.
        AVPlayer *player = m_player;
        m_player = nullptr;
        dispatch_async(dispatch_get_main_queue(), ^{
            [player release];
        });
    }
    if (m_playerLayer)
        m_playerLayer.player = nil;
#if defined(Q_OS_IOS)
    [self setSessionActive:NO];
#endif
}
270
// Second phase of media loading, invoked on the main queue once the
// asynchronously requested asset keys ("tracks", "playable") have loaded.
// Verifies the key load results, replaces any previous item/player, creates
// a fresh AVPlayerItem + AVPlayer, registers all KVO observers and attaches
// the player to the output layer.
// NOTE(review): the AVFMediaPlayerObserver*Context KVO context pointers used
// below are declared earlier in this file (outside this excerpt).
- (void) prepareToPlayAsset:(AVURLAsset *)asset
                   withKeys:(NSArray *)requestedKeys
{
    // The C++ player may already be gone (clearSession during async load).
    if (!m_platformPlayer)
        return;

    //Make sure that the value of each key has loaded successfully.
    for (NSString *thisKey in requestedKeys)
    {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [asset statusOfValueForKey:thisKey error:&error];
#ifdef QT_DEBUG_AVF
        qDebug() << Q_FUNC_INFO << [thisKey UTF8String] << " status: " << keyStatus;
#endif
        if (keyStatus == AVKeyValueStatusFailed)
        {
            [self assetFailedToPrepareForPlayback:error];
            return;
        }
    }

    //Use the AVAsset playable property to detect whether the asset can be played.
#ifdef QT_DEBUG_AVF
    qDebug() << Q_FUNC_INFO << "isPlayable: " << [asset isPlayable];
#endif
    if (!asset.playable)
        qWarning() << "Asset reported to be not playable. Playback of this asset may not be possible.";

    //At this point we're ready to set up for playback of the asset.
    //Stop observing our prior AVPlayerItem, if we have one.
    if (m_playerItem)
    {
        //Remove existing player item key value observers and notifications.
        [self unloadMedia];
    }

    //Create a new instance of AVPlayerItem from the now successfully loaded AVAsset.
    // NOTE(review): the autoreleased item is not retained here; it appears to
    // be kept alive by the AVPlayer created below — confirm against upstream.
    m_playerItem = [AVPlayerItem playerItemWithAsset:asset];
    if (!m_playerItem) {
        qWarning() << "Failed to create player item";
        //Generate an error describing the failure.
        NSString *localizedDescription = NSLocalizedString(@"Item cannot be played", @"Item cannot be played description");
        NSString *localizedFailureReason = NSLocalizedString(@"The assets tracks were loaded, but couldn't create player item.", @"Item cannot be played failure reason");
        NSDictionary *errorDict = [NSDictionary dictionaryWithObjectsAndKeys:
                                   localizedDescription, NSLocalizedDescriptionKey,
                                   localizedFailureReason, NSLocalizedFailureReasonErrorKey,
                                   nil];
        NSError *assetCannotBePlayedError = [NSError errorWithDomain:@"StitchedStreamPlayer" code:0 userInfo:errorDict];

        [self assetFailedToPrepareForPlayback:assetCannotBePlayedError];
        return;
    }

    //Observe the player item "status" key to determine when it is ready to play.
    [m_playerItem addObserver:self
                   forKeyPath:AVF_STATUS_KEY
                      options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                      context:AVFMediaPlayerObserverStatusObservationContext];

    [m_playerItem addObserver:self
                   forKeyPath:@"presentationSize"
                      options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                      context:AVFMediaPlayerObserverPresentationSizeContext];

    [m_playerItem addObserver:self
                   forKeyPath:AVF_BUFFER_LIKELY_KEEP_UP_KEY
                      options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                      context:AVFMediaPlayerObserverBufferLikelyToKeepUpContext];

    [m_playerItem addObserver:self
                   forKeyPath:AVF_TRACKS_KEY
                      options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                      context:AVFMediaPlayerObserverTracksContext];

    //When the player item has played to its end time we'll toggle
    //the movie controller Pause button to be the Play button
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(playerItemDidReachEnd:)
                                                 name:AVPlayerItemDidPlayToEndTimeNotification
                                               object:m_playerItem];

    //Get a new AVPlayer initialized to play the specified player item.
    // Explicitly retained (MRC); released via dispatch_async in unloadMedia.
    m_player = [AVPlayer playerWithPlayerItem:m_playerItem];
    [m_player retain];

    //Set the initial audio ouptut settings on new player object
    {
        m_platformPlayer.withPlatformPlayer([&](AVFMediaPlayer *player) {
            auto *audioOutput = player->m_audioOutput;
            // Without an audio output the player is muted at full volume.
            m_player.volume = (audioOutput ? audioOutput->volume : 1.);
            m_player.muted = (audioOutput ? audioOutput->muted : true);
            player->updateAudioOutputDevice();
        });
    }

    //Assign the output layer to the new player
    m_playerLayer.player = m_player;

    //Observe the AVPlayer "currentItem" property to find out when any
    //AVPlayer replaceCurrentItemWithPlayerItem: replacement will/did
    //occur.
    [m_player addObserver:self
               forKeyPath:AVF_CURRENT_ITEM_KEY
                  options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                  context:AVFMediaPlayerObserverCurrentItemObservationContext];

    //Observe the AVPlayer "rate" property to update the scrubber control.
    [m_player addObserver:self
               forKeyPath:AVF_RATE_KEY
                  options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                  context:AVFMediaPlayerObserverRateObservationContext];

    //Observe the duration for getting the buffer state
    [m_player addObserver:self
               forKeyPath:AVF_CURRENT_ITEM_DURATION_KEY
                  options:0
                  context:AVFMediaPlayerObserverCurrentItemDurationObservationContext];
#if defined(Q_OS_IOS)
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback withOptions:AVAudioSessionCategoryOptionMixWithOthers error:nil];
    [self setSessionActive:YES];
#endif
}
393
// Reports a failed media load to the C++ player. The error maps to
// QMediaPlayer::FormatError unless a non-AVFoundation underlying error is
// attached (e.g. a transport-level failure), which maps to ResourceError.
-(void) assetFailedToPrepareForPlayback:(NSError *)error
{
    NSError *underlyingError = error ? error.userInfo[NSUnderlyingErrorKey] : nil;
    const bool nonAVFoundationCause =
            underlyingError && ![underlyingError.domain isEqualToString:AVFoundationErrorDomain];
    const QMediaPlayer::Error errorCode =
            nonAVFoundationCause ? QMediaPlayer::ResourceError : QMediaPlayer::FormatError;

    // Marshal the error onto the player's thread (no-op if detached).
    m_platformPlayer.invokeWithPlatformPlayer([errorCode](AVFMediaPlayer *platformPlayer) {
        platformPlayer->processMediaLoadError(errorCode);
    });

#ifdef QT_DEBUG_AVF
    qDebug() << Q_FUNC_INFO;
    qDebug() << [[error localizedDescription] UTF8String];
    qDebug() << [[error localizedFailureReason] UTF8String];
    qDebug() << [[error localizedRecoverySuggestion] UTF8String];
#endif
}
413
// AVPlayerItemDidPlayToEndTimeNotification handler: forwards end-of-stream
// to the C++ player on its own thread (looping is handled there).
- (void) playerItemDidReachEnd:(NSNotification *)notification
{
    Q_UNUSED(notification);

    m_platformPlayer.invokeWithPlatformPlayer([](AVFMediaPlayer *platformPlayer) {
        platformPlayer->processEOS();
    });
}
422
// Central KVO dispatcher. Observations are distinguished by their context
// pointer (the AVFMediaPlayerObserver*Context statics declared earlier in
// this file, outside this excerpt), never by key-path string comparison.
// Callbacks into the C++ player are marshalled to its thread via
// invokeWithPlatformPlayer and become no-ops once the session is cleared.
- (void) observeValueForKeyPath:(NSString*) path
                       ofObject:(id)object
                         change:(NSDictionary*)change
                        context:(void*)context
{
    //AVPlayerItem "status" property value observer.
    if (context == AVFMediaPlayerObserverStatusObservationContext)
    {
        AVPlayerStatus status = (AVPlayerStatus)[[change objectForKey:NSKeyValueChangeNewKey] integerValue];
        switch (status)
        {
            //Indicates that the status of the player is not yet known because
            //it has not tried to load new media resources for playback
            case AVPlayerStatusUnknown:
            {
                //QMetaObject::invokeMethod(m_session, "processLoadStateChange", Qt::AutoConnection);
            }
            break;

            case AVPlayerStatusReadyToPlay:
            {
                //Once the AVPlayerItem becomes ready to play, i.e.
                //[playerItem status] == AVPlayerItemStatusReadyToPlay,
                //its duration can be fetched from the item.

                m_platformPlayer.invokeWithPlatformPlayer([](AVFMediaPlayer *platformPlayer) {
                    platformPlayer->processLoadStateChange();
                });
            }
            break;

            case AVPlayerStatusFailed:
            {
                // Report the item's error, then let the player re-evaluate
                // its load state.
                AVPlayerItem *playerItem = static_cast<AVPlayerItem*>(object);
                [self assetFailedToPrepareForPlayback:playerItem.error];

                m_platformPlayer.invokeWithPlatformPlayer([](AVFMediaPlayer *platformPlayer) {
                    platformPlayer->processLoadStateChange();
                });
            }
            break;
        }
    } else if (context == AVFMediaPlayerObserverPresentationSizeContext) {
        // Forward the video's native size (0x0 until known).
        QSize size(m_playerItem.presentationSize.width, m_playerItem.presentationSize.height);
        m_platformPlayer.invokeWithPlatformPlayer([size](AVFMediaPlayer *platformPlayer) {
            platformPlayer->nativeSizeChanged(size);
        });
    } else if (context == AVFMediaPlayerObserverBufferLikelyToKeepUpContext)
    {
        // Map the boolean "likely to keep up" onto a 0/100 buffer progress,
        // reporting only actual transitions.
        const bool isPlaybackLikelyToKeepUp = [m_playerItem isPlaybackLikelyToKeepUp];
        if (isPlaybackLikelyToKeepUp != m_bufferIsLikelyToKeepUp) {
            m_bufferIsLikelyToKeepUp = isPlaybackLikelyToKeepUp;
            int bufferProgress = isPlaybackLikelyToKeepUp ? 100 : 0;

            m_platformPlayer.invokeWithPlatformPlayer(
                    [bufferProgress](AVFMediaPlayer *platformPlayer) {
                        platformPlayer->processBufferStateChange(bufferProgress);
                    });
        }
    }
    else if (context == AVFMediaPlayerObserverTracksContext)
    {
        m_platformPlayer.invokeWithPlatformPlayer([](AVFMediaPlayer *platformPlayer) {
            platformPlayer->updateTracks();
        });
    }
    //AVPlayer "rate" property value observer.
    else if (context == AVFMediaPlayerObserverRateObservationContext) {
        //QMetaObject::invokeMethod(m_session, "setPlaybackRate", Qt::AutoConnection, Q_ARG(qreal, [m_player rate]));
    }
    //AVPlayer "currentItem" property observer.
    //Called when the AVPlayer replaceCurrentItemWithPlayerItem:
    //replacement will/did occur.
    else if (context == AVFMediaPlayerObserverCurrentItemObservationContext) {
        AVPlayerItem *newPlayerItem = [change objectForKey:NSKeyValueChangeNewKey];
        if (m_playerItem != newPlayerItem)
            m_playerItem = newPlayerItem;
    } else if (context == AVFMediaPlayerObserverCurrentItemDurationObservationContext) {
        // Convert CMTime to milliseconds.
        // NOTE(review): an indefinite duration (timescale 0) would divide by
        // zero here — presumably this path only fires with a valid duration;
        // confirm.
        const CMTime time = [m_playerItem duration];
        const qint64 dur = static_cast<qint64>(float(time.value) / float(time.timescale) * 1000.0f);

        m_platformPlayer.invokeWithPlatformPlayer([dur](AVFMediaPlayer *platformPlayer) {
            platformPlayer->processDurationChange(dur);
        });
    } else {
        // Unrecognized context: required forwarding to super per KVO rules.
        [super observeValueForKeyPath:path ofObject:object change:change context:context];
    }
}
511
// Detaches the C++ player: after this, all pending/queued callbacks through
// m_platformPlayer become no-ops. Called from AVFMediaPlayer's destructor.
- (void)clearSession
{
#ifdef QT_DEBUG_AVF
    qDebug() << Q_FUNC_INFO;
#endif
    m_platformPlayer.clear();
}
519
// Invoked from the seekToTime: completion handler; tells the C++ player the
// seek finished (synchronously, under the guard's lock).
- (void)notifySeekComplete
{
    m_platformPlayer.withPlatformPlayer([](AVFMediaPlayer *player) {
        player->seekCompleted();
    });
}
526
// MRC teardown: unload first (removes KVO/notification observers and
// schedules the AVPlayer release), then release every retained ivar.
- (void) dealloc
{
#ifdef QT_DEBUG_AVF
    qDebug() << Q_FUNC_INFO;
#endif
    [self unloadMedia];

    m_platformPlayer.clear();

    if (m_URL) {
        [m_URL release];
    }

    [m_mimeType release];
    [m_playerLayer release];
    // 'videoTrack' is a 'retain' property, but still needs a
    // manual 'release' (i.e. setting to nil):
    self.videoTrack = nil;
    [super dealloc];
}
547
// AVAssetResourceLoaderDelegate: serves data for the custom "iodevice" URL
// scheme from the QIODevice the C++ player streams from. Runs on the main
// queue (the delegate queue set in setURL:mimeType:).
// Returns NO for any other scheme, or when the platform player / device is
// gone, letting AVFoundation handle (or fail) the request itself; YES when
// the request has been satisfied synchronously here.
- (BOOL) resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    Q_UNUSED(resourceLoader);

    if (![loadingRequest.request.URL.scheme isEqualToString:@"iodevice"])
        return NO;

    auto result = m_platformPlayer.withPlatformPlayer([&](AVFMediaPlayer *platformPlayer) {
        QIODevice *device = platformPlayer->mediaStream();
        if (!device)
            return NO;

        device->seek(loadingRequest.dataRequest.requestedOffset);
        if (loadingRequest.contentInformationRequest) {
            loadingRequest.contentInformationRequest.contentType = m_mimeType;
            loadingRequest.contentInformationRequest.contentLength = device->size();
            loadingRequest.contentInformationRequest.byteRangeAccessSupported = YES;
        }

        if (loadingRequest.dataRequest) {
            NSInteger requestedLength = loadingRequest.dataRequest.requestedLength;
            // Stream in chunks of at most 32 KiB.
            // Fixed: was 32 * 1064 (34048 bytes) — a typo for 32 * 1024.
            int maxBytes = qMin(32 * 1024, int(requestedLength));
            QByteArray buffer;
            buffer.resize(maxBytes);

            NSInteger submitted = 0;
            while (submitted < requestedLength) {
                // A short/zero/negative read ends the transfer early.
                qint64 len = device->read(buffer.data(), maxBytes);
                if (len < 1)
                    break;

                [loadingRequest.dataRequest respondWithData:[NSData dataWithBytes:buffer.constData()
                                                                           length:len]];
                submitted += len;
            }

            // Finish loading even if not all bytes submitted.
            [loadingRequest finishLoading];
        }

        return YES;
    });

    // not_a_platform_player_t (session already cleared) degrades to NO.
    return result.value_or(NO);
}
593@end
594
// Constructs the platform player: creates the Objective-C observer that
// owns the AVFoundation objects, hooks up the position-polling timer, and
// installs the default video renderer control.
AVFMediaPlayer::AVFMediaPlayer(QMediaPlayer *player)
    : QObject(player),
      QPlatformMediaPlayer(player),
      m_mediaStream(nullptr),
      m_rate(1.0),
      m_requestedPosition(-1),   // -1 == no seek pending
      m_duration(0),
      m_bufferProgress(0)
{
    // The observer is retained here (MRC) and released in the destructor.
    m_observer = [[AVFMediaPlayerObserver alloc] initWithMediaPlayerSession:this];
    connect(&m_playbackTimer, &QTimer::timeout, this, &AVFMediaPlayer::processPositionChange);
    setVideoOutput(new AVFVideoRendererControl(this));
}
608
610{
611#ifdef QT_DEBUG_AVF
612 qDebug() << Q_FUNC_INFO;
613#endif
614
615 // Unload media before the C++ side is torn down, so that
616 // CoreMedia/VideoToolbox threads don't outlive our objects.
617 [m_observer unloadMedia];
618
619 //Detatch the session from the sessionObserver (which could still be alive trying to communicate with this session).
620 [m_observer clearSession];
621 [m_observer release];
622}
623
// Resolves the platform sink behind the public QVideoSink (nullptr resets)
// and hands it to the current video output control.
void AVFMediaPlayer::setVideoSink(QVideoSink *sink)
{
    m_videoSink = sink ? static_cast<AVFVideoSink *>(sink->platformVideoSink()): nullptr;
    m_videoOutput->setVideoSink(m_videoSink);
}
629
631{
632#ifdef QT_DEBUG_AVF
633 qDebug() << Q_FUNC_INFO << output;
634#endif
635
636 if (m_videoOutput == output)
637 return;
638
639 //Set the current output layer to null to stop rendering
640 if (m_videoOutput) {
641 m_videoOutput->setLayer(nullptr);
642 }
643
644 m_videoOutput = output;
645
646 if (m_videoOutput && state() != QMediaPlayer::StoppedState)
647 m_videoOutput->setLayer([m_observer playerLayer]);
648}
649
651{
652#ifdef QT_DEBUG_AVF
653 qDebug() << Q_FUNC_INFO;
654#endif
655 AVAsset *currentAsset = [[m_observer playerItem] asset];
656 return currentAsset;
657}
658
660{
661 return m_resources;
662}
663
665{
666 return m_mediaStream;
667}
668
// Resolves 'url' — absolutizing relative local-file paths, which
// AVFoundation cannot handle — and forwards it together with the MIME type
// to the observer, which starts the asynchronous asset load.
static void setURL(AVFMediaPlayerObserver *observer, const QUrl &url, const QString &mimeType = QString())
{
    QUrl effectiveUrl = url;
    if (url.isLocalFile() && !QDir::isAbsolutePath(url.path()))
        effectiveUrl = QUrl::fromLocalFile(QFileInfo(url.path()).absoluteFilePath());

    [observer setURL:effectiveUrl.toNSURL()
            mimeType:[NSString stringWithUTF8String:mimeType.toLatin1().constData()]];
}
678
679
680void AVFMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
681{
682#ifdef QT_DEBUG_AVF
683 qDebug() << Q_FUNC_INFO << content.request().url();
684#endif
685
686 [m_observer unloadMedia];
687
688 m_resources = content;
689 resetStream(stream);
690
691 m_requestedPosition = -1;
692 orientationChanged(QtVideo::Rotation::None, false);
693 positionChanged(position());
694 if (m_duration != 0) {
695 m_duration = 0;
696 durationChanged(0);
697 }
698 if (!m_metaData.isEmpty()) {
699 m_metaData.clear();
700 metaDataChanged();
701 }
702 resetBufferProgress();
703 for (int i = 0; i < QPlatformMediaPlayer::NTrackTypes; ++i) {
704 tracks[i].clear();
705 nativeTracks[i].clear();
706 }
707 tracksChanged();
708
709 if (!m_mediaStream && content.isEmpty()) {
710 seekableChanged(false);
711 audioAvailableChanged(false);
712 videoAvailableChanged(false);
713
714 mediaStatusChanged(QMediaPlayer::NoMedia);
715 stateChanged(QMediaPlayer::StoppedState);
716
717 return;
718 }
719
720 mediaStatusChanged(QMediaPlayer::LoadingMedia);
721
722 if (m_mediaStream) {
723 // If there is a data, try to load it,
724 // otherwise wait for readyRead.
725 if (m_mediaStream->size())
727 } else {
728 //Load AVURLAsset
729 //initialize asset using content's URL
730 setURL(m_observer, m_resources);
731 }
732
733 stateChanged(QMediaPlayer::StoppedState);
734}
735
737{
738 AVPlayerItem *playerItem = [m_observer playerItem];
739
740 if (m_requestedPosition != -1)
741 return m_requestedPosition;
742
743 if (!playerItem)
744 return 0;
745
746 CMTime time = [playerItem currentTime];
747 return static_cast<quint64>(float(time.value) / float(time.timescale) * 1000.0f);
748}
749
751{
752#ifdef QT_DEBUG_AVF
753 qDebug() << Q_FUNC_INFO;
754#endif
755 return m_duration;
756}
757
759{
760#ifdef QT_DEBUG_AVF
761 qDebug() << Q_FUNC_INFO;
762#endif
763 return m_bufferProgress/100.;
764}
765
767{
768 AVPlayerItem *playerItem = [m_observer playerItem];
769
770 if (!playerItem)
771 return {};
772
773 if (state() == QMediaPlayer::StoppedState)
774 return {};
775
776 QMediaTimeRange timeRanges;
777
778 NSArray *ranges = [playerItem loadedTimeRanges];
779 for (NSValue *timeRange in ranges) {
780 CMTimeRange currentTimeRange = [timeRange CMTimeRangeValue];
781 qint64 startTime = qint64(float(currentTimeRange.start.value) / currentTimeRange.start.timescale * 1000.0);
782 timeRanges.addInterval(startTime, startTime + qint64(float(currentTimeRange.duration.value) / currentTimeRange.duration.timescale * 1000.0));
783 }
784 return timeRanges;
785}
786
788{
789 return m_rate;
790}
791
// Swaps the audio output: disconnects the old one, connects the new one's
// device/volume/mute change signals, and applies its current state to the
// AVPlayer (muted at full volume when there is no output at all).
void AVFMediaPlayer::setAudioOutput(QPlatformAudioOutput *output)
{
    if (m_audioOutput == output)
        return;
    if (m_audioOutput)
        m_audioOutput->q->disconnect(this);
    m_audioOutput = output;
    if (m_audioOutput) {
        connect(m_audioOutput->q, &QAudioOutput::deviceChanged, this, &AVFMediaPlayer::updateAudioOutputDevice);
        connect(m_audioOutput->q, &QAudioOutput::volumeChanged, this, &AVFMediaPlayer::setVolume);
        connect(m_audioOutput->q, &QAudioOutput::mutedChanged, this, &AVFMediaPlayer::setMuted);
        //connect(m_audioOutput->q, &QAudioOutput::audioRoleChanged, this, &AVFMediaPlayer::setAudioRole);
    }
    setMuted(m_audioOutput ? m_audioOutput->muted : true);
    setVolume(m_audioOutput ? m_audioOutput->volume : 1.);
}
809
811{
812 return m_metaData;
813}
814
815void AVFMediaPlayer::setPlaybackRate(qreal rate)
816{
817#ifdef QT_DEBUG_AVF
818 qDebug() << Q_FUNC_INFO << rate;
819#endif
820
821 if (QtPrivate::fuzzyCompare(m_rate, rate))
822 return;
823
824 m_rate = rate;
825
826 AVPlayer *player = [m_observer player];
827 if (player && state() == QMediaPlayer::PlayingState)
828 [player setRate:m_rate];
829
830 playbackRateChanged(m_rate);
831}
832
834{
835#ifdef QT_DEBUG_AVF
836 qDebug() << Q_FUNC_INFO << pos;
837#endif
838
839 if (pos == position())
840 return;
841
842 AVPlayerItem *playerItem = [m_observer playerItem];
843 if (!playerItem) {
844 m_requestedPosition = pos;
845 positionChanged(m_requestedPosition);
846 return;
847 }
848
849 if (!isSeekable()) {
850 if (m_requestedPosition != -1) {
851 m_requestedPosition = -1;
852 positionChanged(position());
853 }
854 return;
855 }
856
857 pos = qMax(qint64(0), pos);
858 if (duration() > 0)
859 pos = qMin(pos, duration());
860 m_requestedPosition = pos;
861
862 CMTime newTime = [playerItem currentTime];
863 newTime.value = (pos / 1000.0f) * newTime.timescale;
864 AVFMediaPlayerObserver *observer = m_observer;
865 [playerItem seekToTime:newTime toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero
866 completionHandler:^(BOOL finished) {
867 if (finished)
868 [observer notifySeekComplete];
869 }];
870
871 positionChanged(pos);
872
873 // Reset media status if the current status is EndOfMedia
874 if (mediaStatus() == QMediaPlayer::EndOfMedia) {
875 QMediaPlayer::MediaStatus newMediaStatus = (state() == QMediaPlayer::PausedState)
876 ? QMediaPlayer::BufferedMedia
877 : QMediaPlayer::LoadedMedia;
878 mediaStatusChanged(newMediaStatus);
879 }
880}
881
883{
884#ifdef QT_DEBUG_AVF
885 qDebug() << Q_FUNC_INFO << "currently: " << state();
886#endif
887
888 if (mediaStatus() == QMediaPlayer::NoMedia || mediaStatus() == QMediaPlayer::InvalidMedia)
889 return;
890
891 if (state() == QMediaPlayer::PlayingState)
892 return;
893
894 if (state() != QMediaPlayer::PausedState)
895 resetCurrentLoop();
896
897 if (m_videoOutput && m_videoSink)
898 m_videoOutput->setLayer([m_observer playerLayer]);
899
900 // Reset media status if the current status is EndOfMedia
901 if (mediaStatus() == QMediaPlayer::EndOfMedia)
902 setPosition(0);
903
904 if (mediaStatus() == QMediaPlayer::LoadedMedia
905 || mediaStatus() == QMediaPlayer::BufferedMedia) {
906 // Setting the rate starts playback
907 [[m_observer player] setRate:m_rate];
908 }
909
910 processLoadStateChange(QMediaPlayer::PlayingState);
911
912 stateChanged(QMediaPlayer::PlayingState);
913 m_playbackTimer.start(100);
914}
915
917{
918#ifdef QT_DEBUG_AVF
919 qDebug() << Q_FUNC_INFO << "currently: " << state();
920#endif
921
922 if (mediaStatus() == QMediaPlayer::NoMedia || mediaStatus() == QMediaPlayer::InvalidMedia)
923 return;
924
925 if (state() == QMediaPlayer::PausedState)
926 return;
927
928 stateChanged(QMediaPlayer::PausedState);
929
930 if (m_videoOutput && m_videoSink)
931 m_videoOutput->setLayer([m_observer playerLayer]);
932
933 [[m_observer player] pause];
934
935 // Reset media status if the current status is EndOfMedia
936 if (mediaStatus() == QMediaPlayer::EndOfMedia)
937 setPosition(0);
938
939 positionChanged(position());
940 m_playbackTimer.stop();
941}
942
944{
945#ifdef QT_DEBUG_AVF
946 qDebug() << Q_FUNC_INFO << "currently: " << state();
947#endif
948
949 if (state() == QMediaPlayer::StoppedState && mediaStatus() != QMediaPlayer::EndOfMedia)
950 return;
951
952 // AVPlayer doesn't have stop(), only pause() and play().
953 [[m_observer player] pause];
954 setPosition(0);
955
956 if (m_videoOutput)
957 m_videoOutput->setLayer(nullptr);
958
959 resetBufferProgress();
960
961 if (mediaStatus() == QMediaPlayer::BufferedMedia || mediaStatus() == QMediaPlayer::EndOfMedia)
962 mediaStatusChanged(QMediaPlayer::LoadedMedia);
963
964 stateChanged(QMediaPlayer::StoppedState);
965 m_playbackTimer.stop();
966}
967
968void AVFMediaPlayer::setVolume(float volume)
969{
970#ifdef QT_DEBUG_AVF
971 qDebug() << Q_FUNC_INFO << volume;
972#endif
973
974 AVPlayer *player = [m_observer player];
975 if (player)
976 player.volume = volume;
977}
978
979void AVFMediaPlayer::setMuted(bool muted)
980{
981#ifdef QT_DEBUG_AVF
982 qDebug() << Q_FUNC_INFO << muted;
983#endif
984
985 AVPlayer *player = [m_observer player];
986 if (player)
987 player.muted = muted;
988}
989
991{
992#ifdef Q_OS_MACOS
993 AVPlayer *player = [m_observer player];
994 if (!player)
995 return;
996
997 if (!m_audioOutput || m_audioOutput->device.id().isEmpty()) {
998 if (!m_audioOutput)
999 player.muted = true;
1000 player.audioOutputDeviceUniqueID = nil;
1001 } else {
1002 NSString *str = QString::fromUtf8(m_audioOutput->device.id()).toNSString();
1003 player.audioOutputDeviceUniqueID = str;
1004 }
1005#endif
1006}
1007
1009{
1010 if (doLoop()) {
1011 positionChanged(duration());
1012 setPosition(0);
1013 [[m_observer player] setRate:m_rate];
1014 return;
1015 }
1016
1017 //AVPlayerItem has reached end of track/stream
1018#ifdef QT_DEBUG_AVF
1019 qDebug() << Q_FUNC_INFO;
1020#endif
1021 positionChanged(position());
1022
1023 if (m_videoOutput)
1024 m_videoOutput->setLayer(nullptr);
1025
1026 resetBufferProgress();
1027
1028 stateChanged(QMediaPlayer::StoppedState);
1029 mediaStatusChanged(QMediaPlayer::EndOfMedia);
1030}
1031
// Re-evaluates media status after the AVPlayer's load state changed (driven
// by the observer's "status" KVO). When the player is ready: refreshes
// metadata/seekability, sizes the player layer from the native video size,
// applies any seek that was requested before the media was loaded, and emits
// the LoadingMedia -> LoadedMedia -> BufferingMedia -> BufferedMedia signal
// sequence clients expect. Finally, if 'newState' is Playing, starts
// playback by setting a non-zero rate.
void AVFMediaPlayer::processLoadStateChange(QMediaPlayer::PlaybackState newState)
{
    AVPlayerStatus currentStatus = [[m_observer player] status];

#ifdef QT_DEBUG_AVF
    qDebug() << Q_FUNC_INFO << currentStatus << ", " << mediaStatus() << ", " << newState;
#endif

    if (mediaStatus() == QMediaPlayer::NoMedia)
        return;

    if (currentStatus == AVPlayerStatusReadyToPlay) {

        AVPlayerItem *playerItem = [m_observer playerItem];

        applyPitchCompensation(m_pitchCompensationEnabled);

        // get the meta data
        // NOTE(review): runs before the playerItem null check below; a nil
        // playerItem makes '.asset' evaluate to nil (ObjC nil-messaging) —
        // presumably fromAsset tolerates that; confirm.
        m_metaData = AVFMetaData::fromAsset(playerItem.asset);
        metaDataChanged();

        if (playerItem) {
            seekableChanged([[playerItem seekableTimeRanges] count] > 0);

            // Get the native size of the video, and reset the bounds of the player layer
            AVPlayerLayer *playerLayer = [m_observer playerLayer];
            if (m_observer.videoTrack && playerLayer) {
                if (!playerLayer.bounds.size.width || !playerLayer.bounds.size.height) {
                    playerLayer.bounds = CGRectMake(0.0f, 0.0f,
                                                    m_observer.videoTrack.assetTrack.naturalSize.width,
                                                    m_observer.videoTrack.assetTrack.naturalSize.height);
                }
            }

            // Apply a seek issued before the media finished loading.
            if (m_requestedPosition != -1)
                setPosition(m_requestedPosition);
        }

        QMediaPlayer::MediaStatus newStatus = (newState != QMediaPlayer::StoppedState)
                ? QMediaPlayer::BufferedMedia
                : QMediaPlayer::LoadedMedia;

        if (newStatus != mediaStatus()) {
            if (newStatus == QMediaPlayer::BufferedMedia
                && mediaStatus() == QMediaPlayer::LoadingMedia) {
                // Emit intermediate transitions to match expected signal sequence
                mediaStatusChanged(QMediaPlayer::LoadedMedia);
                mediaStatusChanged(QMediaPlayer::BufferingMedia);
            } else if (newStatus == QMediaPlayer::BufferedMedia
                       && mediaStatus() == QMediaPlayer::LoadedMedia) {
                mediaStatusChanged(QMediaPlayer::BufferingMedia);
            }
            mediaStatusChanged(newStatus);
        }
    }

    if (newState == QMediaPlayer::PlayingState && [m_observer player]) {
        // Setting the rate is enough to start playback, no need to call play()
        [[m_observer player] setRate:m_rate];
        m_playbackTimer.start();
    }
}
1095
1096
1101
1102
1104{
1105 stateChanged(QMediaPlayer::StoppedState);
1106}
1107
1109{
1110 if (state() == QMediaPlayer::StoppedState)
1111 return;
1112
1113 if (bufferProgress == m_bufferProgress)
1114 return;
1115
1116 auto status = mediaStatus();
1117 // Buffered -> unbuffered.
1118 if (!bufferProgress) {
1119 status = QMediaPlayer::StalledMedia;
1120 } else if (status == QMediaPlayer::StalledMedia) {
1121 status = QMediaPlayer::BufferedMedia;
1122 // Resume playback.
1123 if (state() == QMediaPlayer::PlayingState) {
1124 [[m_observer player] setRate:m_rate];
1125 m_playbackTimer.start();
1126 }
1127 }
1128
1129 mediaStatusChanged(status);
1130
1131 m_bufferProgress = bufferProgress;
1132 bufferProgressChanged(bufferProgress / 100.);
1133}
1134
1136{
1137 if (duration == m_duration)
1138 return;
1139
1140 m_duration = duration;
1141 durationChanged(duration);
1142}
1143
1145{
1146 if (state() == QMediaPlayer::StoppedState)
1147 return;
1148
1149 positionChanged(position());
1150}
1151
// Handles a failed media load: discards any seek that was pending on the
// load and moves the player into the invalid-media error state with the
// given \a errorCode.
void AVFMediaPlayer::processMediaLoadError(QMediaPlayer::Error errorCode)
{
    const bool hadPendingSeek = (m_requestedPosition != -1);
    if (hadPendingSeek) {
        m_requestedPosition = -1;
        positionChanged(position());
    }

    setInvalidMediaWithError(errorCode, tr("Failed to load media"));
}
1161
1163{
1164 m_requestedPosition = -1;
1165}
1166
1171
1173{
1174 QString suffix;
1175 if (!m_resources.isEmpty())
1176 suffix = QFileInfo(m_resources.path()).suffix();
1177 if (suffix.isEmpty() && m_mediaStream)
1178 suffix = QMimeDatabase().mimeTypeForData(m_mediaStream).preferredSuffix();
1179 const QString url = QStringLiteral("iodevice:///iodevice.") + suffix;
1180 setURL(m_observer, QUrl(url), suffix);
1181}
1182
1184{
1185 resetStream(nullptr);
1186}
1187
1189{
1190 bool firstLoad = true;
1191 for (int i = 0; i < QPlatformMediaPlayer::NTrackTypes; ++i) {
1192 if (tracks[i].count())
1193 firstLoad = false;
1194 tracks[i].clear();
1195 nativeTracks[i].clear();
1196 }
1197 bool hasAudio = false;
1198 bool hasVideo = false;
1199 AVPlayerItem *playerItem = [m_observer playerItem];
1200 if (playerItem) {
1201 // Check each track for audio and video content
1202 NSArray *tracks = playerItem.tracks;
1203 for (AVPlayerItemTrack *track in tracks) {
1204 AVAssetTrack *assetTrack = track.assetTrack;
1205 if (assetTrack) {
1206 int qtTrack = -1;
1207 if ([assetTrack.mediaType isEqualToString:AVMediaTypeAudio]) {
1208 qtTrack = QPlatformMediaPlayer::AudioStream;
1209 hasAudio = true;
1210 } else if ([assetTrack.mediaType isEqualToString:AVMediaTypeVideo]) {
1211 qtTrack = QPlatformMediaPlayer::VideoStream;
1212 hasVideo = true;
1213 if (m_observer.videoTrack != track) {
1214 m_observer.videoTrack = track;
1215 bool isMirrored = false;
1216 QtVideo::Rotation orientation = QtVideo::Rotation::None;
1217 videoOrientationForAssetTrack(assetTrack, orientation, isMirrored);
1218 orientationChanged(orientation, isMirrored);
1219 }
1220 }
1221 else if ([assetTrack.mediaType isEqualToString:AVMediaTypeSubtitle]) {
1222 qtTrack = QPlatformMediaPlayer::SubtitleStream;
1223 }
1224 if (qtTrack != -1) {
1225 QMediaMetaData metaData = AVFMetaData::fromAssetTrack(assetTrack);
1226 this->tracks[qtTrack].append(metaData);
1227 nativeTracks[qtTrack].append(track);
1228 }
1229 }
1230 }
1231 // subtitles are disabled by default
1232 if (firstLoad)
1233 setActiveTrack(SubtitleStream, -1);
1234 }
1235 audioAvailableChanged(hasAudio);
1236 videoAvailableChanged(hasVideo);
1237 tracksChanged();
1238}
1239
// Enables exactly the native track at \a index for the given track \a type
// (every other track of that type is disabled); index -1 disables them all.
void AVFMediaPlayer::setActiveTrack(QPlatformMediaPlayer::TrackType type, int index)
{
    const auto &t = nativeTracks[type];
    if (type == QPlatformMediaPlayer::SubtitleStream) {
        // subtitle streams are not always automatically enabled on macOS/iOS.
        // this hack ensures they get enabled and we actually get the text
        AVPlayerItem *playerItem = m_observer.m_playerItem;
        if (playerItem) {
            AVAsset *asset = playerItem.asset;
            if (!asset)
                return;
#if defined(Q_OS_VISIONOS)
            // visionOS only offers the asynchronous media-selection API.
            [asset loadMediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible
                                  completionHandler:[=](AVMediaSelectionGroup *group, NSError *error) {
                // FIXME: handle error
                if (error)
                    return;
                auto *options = group.options;
                if (options.count)
                    [playerItem selectMediaOption:options.firstObject inMediaSelectionGroup:group];
            }];
#else
            AVMediaSelectionGroup *group = [asset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
            if (!group)
                return;
            auto *options = group.options;
            if (options.count)
                [playerItem selectMediaOption:options.firstObject inMediaSelectionGroup:group];
#endif
        }
    }
    // Toggle enablement on the native tracks; only the requested index stays on.
    for (int i = 0; i < t.count(); ++i)
        t.at(i).enabled = (i == index);
    activeTracksChanged();
}
1275
// Returns the index of the currently enabled native track of the given
// \a type, or -1 when no track of that type is enabled.
int AVFMediaPlayer::activeTrack(QPlatformMediaPlayer::TrackType type)
{
    const auto &trackList = nativeTracks[type];
    const int trackTotal = trackList.count();
    for (int idx = 0; idx != trackTotal; ++idx) {
        if (trackList.at(idx).enabled)
            return idx;
    }
    return -1;
}
1284
// Number of native tracks of the given \a type in the current media.
int AVFMediaPlayer::trackCount(QPlatformMediaPlayer::TrackType type)
{
    const auto &trackList = nativeTracks[type];
    return trackList.count();
}
1289
// Returns the metadata for track \a trackNumber of the given \a type,
// or an empty QMediaMetaData when the index is out of range.
QMediaMetaData AVFMediaPlayer::trackMetaData(QPlatformMediaPlayer::TrackType type, int trackNumber)
{
    const auto &metaDataList = tracks[type];
    const bool inRange = trackNumber >= 0 && trackNumber < metaDataList.count();
    return inRange ? metaDataList.at(trackNumber) : QMediaMetaData();
}
1297
// Swaps the media stream, moving the readyRead/destroyed signal
// connections from the previous QIODevice (if any) to \a stream (if any).
// Passing nullptr simply detaches the current stream.
void AVFMediaPlayer::resetStream(QIODevice *stream)
{
    if (m_mediaStream != nullptr) {
        disconnect(m_mediaStream, &QIODevice::readyRead, this, &AVFMediaPlayer::streamReady);
        disconnect(m_mediaStream, &QIODevice::destroyed, this, &AVFMediaPlayer::streamDestroyed);
    }

    m_mediaStream = stream;
    if (m_mediaStream == nullptr)
        return;

    connect(m_mediaStream, &QIODevice::readyRead, this, &AVFMediaPlayer::streamReady);
    connect(m_mediaStream, &QIODevice::destroyed, this, &AVFMediaPlayer::streamDestroyed);
}
1312
// Selects the AVFoundation time-pitch algorithm on the current player item:
// Spectral preserves the original pitch when the playback rate changes,
// Varispeed lets the pitch follow the rate. No-op without a player item.
void AVFMediaPlayer::applyPitchCompensation(bool enabled)
{
    AVPlayerItem *item = [m_observer playerItem];
    if (!item)
        return;

    item.audioTimePitchAlgorithm = enabled ? AVAudioTimePitchAlgorithmSpectral
                                           : AVAudioTimePitchAlgorithmVarispeed;
}
1323
// Resets the cached buffer progress to zero, notifying listeners only when
// the value actually changes.
void AVFMediaPlayer::resetBufferProgress()
{
    if (m_bufferProgress == 0)
        return;

    m_bufferProgress = 0;
    bufferProgressChanged(0);
}
1331
1333{
1334 if (!m_videoSink)
1335 return;
1336 m_videoSink->setNativeSize(size);
1337}
1338
// Forwards the video track's rotation and mirroring to the video output;
// does nothing when no output is attached.
void AVFMediaPlayer::orientationChanged(QtVideo::Rotation rotation, bool mirrored)
{
    if (m_videoOutput) {
        m_videoOutput->setVideoRotation(rotation);
        m_videoOutput->setVideoMirrored(mirrored);
    }
}
1347
// Derives the display rotation and mirror flag for \a videoTrack from its
// preferredTransform (the affine matrix AVFoundation applies on display).
//
// \a angle receives the clockwise rotation and \a mirrored whether the
// transform contains a reflection. Both out-parameters are reset to their
// defaults first, so they are always well-defined on return, including for
// a null track or an identity transform.
void AVFMediaPlayer::videoOrientationForAssetTrack(AVAssetTrack *videoTrack,
                                                   QtVideo::Rotation &angle,
                                                   bool &mirrored)
{
    angle = QtVideo::Rotation::None;
    mirrored = false;
    if (!videoTrack)
        return;

    CGAffineTransform transform = videoTrack.preferredTransform;
    if (CGAffineTransformIsIdentity(transform))
        return;

    // determinant < 0 means the transform includes a reflection (mirror)
    qreal det = transform.a * transform.d - transform.b * transform.c;
    mirrored = (det < 0.0);

    // Factor out mirror before computing rotation angle.
    // Negating the first column of a mirrored matrix yields a pure rotation.
    qreal ra = mirrored ? -transform.a : transform.a;
    qreal rb = mirrored ? -transform.b : transform.b;

    // qAtan2 yields (-180, 180]; normalize into [0, 360) so each quarter
    // turn maps to exactly one comparison below. The previous checks
    // against -90/-180/-270 were unreachable after this normalization
    // and have been removed.
    qreal degrees = qRadiansToDegrees(qAtan2(rb, ra));
    if (degrees < 0)
        degrees += 360.0;

    if (QtPrivate::fuzzyCompare(degrees, qreal(90)))
        angle = QtVideo::Rotation::Clockwise90;
    else if (QtPrivate::fuzzyCompare(degrees, qreal(270)))
        angle = QtVideo::Rotation::Clockwise270;
    else if (QtPrivate::fuzzyCompare(degrees, qreal(180)))
        angle = QtVideo::Rotation::Clockwise180;
}
1384
1386{
1387 if (m_pitchCompensationEnabled == enabled)
1388 return;
1389
1390 applyPitchCompensation(enabled);
1391
1392 m_pitchCompensationEnabled = enabled;
1393 pitchCompensationChanged(enabled);
1394}
1395
1397{
1398 return m_pitchCompensationEnabled;
1399}
1400
1403{
1404 return QPlatformMediaPlayer::PitchCompensationAvailability::Available;
1405}
1406
1407#include "moc_avfmediaplayer_p.cpp"
static void * AVFMediaPlayerObserverCurrentItemObservationContext
static NSString *const AVF_BUFFER_LIKELY_KEEP_UP_KEY
static void * AVFMediaPlayerObserverPresentationSizeContext
static void * AVFMediaPlayerObserverTracksContext
static NSString *const AVF_STATUS_KEY
static NSString *const AVF_CURRENT_ITEM_DURATION_KEY
static void * AVFMediaPlayerObserverRateObservationContext
static void setURL(AVFMediaPlayerObserver *observer, const QUrl &url, const QString &mimeType=QString())
static NSString *const AVF_CURRENT_ITEM_KEY
static NSString *const AVF_PLAYABLE_KEY
static void * AVFMediaPlayerObserverBufferLikelyToKeepUpContext
static NSString *const AVF_RATE_KEY
static void * AVFMediaPlayerObserverCurrentItemDurationObservationContext
static void * AVFMediaPlayerObserverStatusObservationContext
qint64 duration() const override
void setVolume(float volume)
void processLoadStateChange()
void setPosition(qint64 pos) override
void setVideoSink(QVideoSink *sink) override
QMediaTimeRange availablePlaybackRanges() const override
void nativeSizeChanged(QSize size)
QMediaMetaData metaData() const override
void setVideoOutput(AVFVideoRendererControl *output)
void stop() override
float bufferProgress() const override
void processMediaLoadError(QMediaPlayer::Error errorCode)
void setMedia(const QUrl &content, QIODevice *stream) override
qint64 position() const override
void setPitchCompensation(bool enabled) override
void setMuted(bool muted)
void pause() override
void play() override
void processDurationChange(qint64 duration)
QUrl media() const override
bool pitchCompensation() const override
void processLoadStateFailure()
void updateAudioOutputDevice()
qreal playbackRate() const override
void processLoadStateChange(QMediaPlayer::PlaybackState newState)
QIODevice * mediaStream() const override
PitchCompensationAvailability pitchCompensationAvailability() const override
~AVFMediaPlayer() override
AVAsset * currentAssetHandle()
void processBufferStateChange(int bufferProgress)
void setVideoSink(AVFVideoSink *sink)