Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
avfmediaplayer.mm
Go to the documentation of this file.
1// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
6#include <avfvideosink_p.h>
7#include <avfmetadata_p.h>
8
9#include "qaudiooutput.h"
10#include "private/qplatformaudiooutput_p.h"
11
12#include <QtCore/qdir.h>
13#include <QtCore/qfileinfo.h>
14#include <QtCore/qmimedatabase.h>
15#include <QtCore/qpointer.h>
16#include <QtCore/qmath.h>
17#include <QtCore/qmutex.h>
18#include <QtCore/qthread.h>
19#include <QtCore/private/qexpected_p.h>
20
21#include <mutex>
22
23#import <AVFoundation/AVFoundation.h>
24
QT_USE_NAMESPACE

// Key-path strings used for key-value observing registration/removal below.
// Keeping them in one place guarantees addObserver:/removeObserver: pairs
// always use identical strings.

//AVAsset Keys
static NSString* const AVF_TRACKS_KEY = @"tracks";
static NSString* const AVF_PLAYABLE_KEY = @"playable";

//AVPlayerItem keys
static NSString* const AVF_STATUS_KEY = @"status";
static NSString* const AVF_BUFFER_LIKELY_KEEP_UP_KEY = @"playbackLikelyToKeepUp";

//AVPlayer keys
static NSString* const AVF_RATE_KEY = @"rate";
static NSString* const AVF_CURRENT_ITEM_KEY = @"currentItem";
static NSString* const AVF_CURRENT_ITEM_DURATION_KEY = @"currentItem.duration";
39
48
// Bridges AVFoundation state back to the Qt-side AVFMediaPlayer: KVO on the
// AVPlayer/AVPlayerItem, the end-of-playback notification, and the custom
// resource-loader callback used for QIODevice-backed media.
// NOTE: this file is compiled without ARC (see the explicit retain/release
// and [super dealloc] in the implementation).
@interface AVFMediaPlayerObserver : NSObject<AVAssetResourceLoaderDelegate>

@property (readonly, getter=player) AVPlayer* m_player;
@property (readonly, getter=playerItem) AVPlayerItem* m_playerItem;
@property (readonly, getter=playerLayer) AVPlayerLayer* m_playerLayer;
@property (retain) AVPlayerItemTrack *videoTrack;

- (AVFMediaPlayerObserver *) initWithMediaPlayerSession:(AVFMediaPlayer *)session;
- (void) setURL:(NSURL *)url mimeType:(NSString *)mimeType;
- (void) unloadMedia;
- (void) prepareToPlayAsset:(AVURLAsset *)asset withKeys:(NSArray *)requestedKeys;
- (void) assetFailedToPrepareForPlayback:(NSError *)error;
- (void) playerItemDidReachEnd:(NSNotification *)notification;
- (void) observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object
        change:(NSDictionary *)change context:(void *)context;
- (void)clearSession;
- (void) dealloc;
- (BOOL) resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
@end
68
#ifdef Q_OS_IOS
// Alas, no such thing as 'class variable', hence globals: these count how many
// observers currently keep the shared AVAudioSession active (see
// -setSessionActive: below). Both are guarded by sessionMutex.
static unsigned sessionActivationCount;
static QMutex sessionMutex;
#endif // Q_OS_IOS
74
namespace {

// Pairs the back-pointer to the Qt-side AVFMediaPlayer with a mutex.
// AVFoundation callbacks can arrive after the Qt object has started tearing
// down, so every access goes through this guard; clearSession/dealloc detach
// via clear(), turning later callbacks into no-ops.
struct GuardedPlatformPlayer
{
    mutable QMutex mutex;
    AVFMediaPlayer *player{};

    // True while a platform player is still attached.
    explicit operator bool() const
    {
        std::lock_guard guard(mutex);
        return player;
    }

    // Sentinel error type for withPlatformPlayer() below.
    struct not_a_platform_player_t
    {
    };

    // Invokes f(player) synchronously, under the lock, on the calling thread.
    // Returns unexpected{} when the player has already been cleared.
    template <typename Functor>
    auto withPlatformPlayer(Functor &&f)
    -> q23::expected<std::invoke_result_t<Functor, AVFMediaPlayer *>,
                     not_a_platform_player_t>
    {
        std::unique_lock guard(mutex);
        if (!player)
            return q23::unexpected{ not_a_platform_player_t{} };
        if constexpr (std::is_void_v<std::invoke_result_t<Functor, AVFMediaPlayer *>>) {
            f(player);
            return {};
        } else {
            return f(player);
        }
    }

    // Invokes f(player) on the player's own thread: directly when already on
    // it (the lock is dropped first — presumably so f may re-enter this
    // guard without deadlocking; TODO confirm), otherwise queued through
    // QMetaObject::invokeMethod. NOTE(review): the queued path captures the
    // raw pointer, relying on invokeMethod's context-object cancellation if
    // the player dies before the call runs.
    template <typename Functor>
    void invokeWithPlatformPlayer(Functor f)
    {
        std::unique_lock guard(mutex);
        if (!player)
            return;

        if (player->thread()->isCurrentThread()) {
            guard.unlock();
            f(player);
        } else {
            QMetaObject::invokeMethod(player, [f = std::move(f), player = player]() {
                f(player);
            });
        }
    }

    // Detaches the platform player; subsequent callbacks become no-ops.
    void clear()
    {
        std::lock_guard<QMutex> guard(mutex);
        player = nullptr;
    }
};
} // namespace
132
@implementation AVFMediaPlayerObserver {
@private
    GuardedPlatformPlayer m_platformPlayer; // mutex-guarded back-pointer to the Qt player
    AVPlayer *m_player;                     // retained manually (file is MRC)
    AVPlayerItem *m_playerItem;             // current item being observed
    AVPlayerLayer *m_playerLayer;           // retained video output layer
    NSURL *m_URL;                           // copy of the current media URL
    BOOL m_bufferIsLikelyToKeepUp;          // last value forwarded to Qt
    NSData *m_data;
    NSString *m_mimeType;                   // retained; used by the resource loader
#ifdef Q_OS_IOS
    BOOL m_activated;                       // whether we hold an audio-session activation
#endif
}

// Map the m_-prefixed ivars onto the readonly properties declared above.
@synthesize m_player, m_playerItem, m_playerLayer;
149
#ifdef Q_OS_IOS
// Reference-counted (de)activation of the shared AVAudioSession across all
// player instances; state lives in the sessionMutex-guarded globals above.
- (void)setSessionActive:(BOOL)active
{
    const QMutexLocker lock(&sessionMutex);
    if (!active) {
        if (!m_activated || !sessionActivationCount) {
            qWarning("Unbalanced audio session deactivation, ignoring.");
            return;
        }
        m_activated = NO;
        --sessionActivationCount;
        // Last player out deactivates the shared session.
        if (sessionActivationCount == 0)
            [AVAudioSession.sharedInstance setActive:NO error:nil];
        return;
    }

    // Don't count the same player twice if already activated,
    // unless it tried to deactivate first:
    if (m_activated)
        return;
    // First player in activates the shared session.
    if (sessionActivationCount == 0)
        [AVAudioSession.sharedInstance setActive:YES error:nil];
    ++sessionActivationCount;
    m_activated = YES;
}
#endif // Q_OS_IOS
175
// Designated initializer: stores the Qt-side session in the guarded pointer
// and creates the (initially player-less) AVPlayerLayer used for video
// output. The explicit retain keeps the layer alive until dealloc releases
// it (this file uses manual retain/release, not ARC).
- (AVFMediaPlayerObserver *) initWithMediaPlayerSession:(AVFMediaPlayer *)session
{
    if (!(self = [super init]))
        return nil;
    m_platformPlayer.player = session;
    m_bufferIsLikelyToKeepUp = FALSE;

    m_playerLayer = [AVPlayerLayer playerLayerWithPlayer:nil];
    [m_playerLayer retain];
    m_playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    m_playerLayer.anchorPoint = CGPointMake(0.0f, 0.0f);
    return self;
}
189
// Stores the new URL/MIME type and starts asynchronous loading of the
// asset's "tracks" and "playable" keys. The completion handler bounces onto
// the main queue, where -prepareToPlayAsset:withKeys: finishes setup.
- (void) setURL:(NSURL *)url mimeType:(NSString *)mimeType
{
    [m_mimeType release];
    m_mimeType = [mimeType retain];

    if (m_URL != url)
    {
        [m_URL release];
        m_URL = [url copy];

        //Create an asset for inspection of a resource referenced by a given URL.
        //Load the values for the asset keys "tracks", "playable".

        // use __block to avoid maintaining strong references on variables captured by the
        // following block callback
#if defined(Q_OS_IOS)
        BOOL isAccessing = [m_URL startAccessingSecurityScopedResource];
#endif
        // Manual retains below keep asset/requestedKeys/self alive until the
        // main-queue block has run; each is balanced by a release in it.
        __block AVURLAsset *asset = [[AVURLAsset URLAssetWithURL:m_URL options:nil] retain];
        [asset.resourceLoader setDelegate:self queue:dispatch_get_main_queue()];

        __block NSArray *requestedKeys = [[NSArray arrayWithObjects:AVF_TRACKS_KEY, AVF_PLAYABLE_KEY, nil] retain];

        __block AVFMediaPlayerObserver *blockSelf = [self retain];

        // Tells the asset to load the values of any of the specified keys that are not already loaded.
        [asset loadValuesAsynchronouslyForKeys:requestedKeys completionHandler:
         ^{
             dispatch_async( dispatch_get_main_queue(),
                           ^{
#if defined(Q_OS_IOS)
                                if (isAccessing)
                                    [m_URL stopAccessingSecurityScopedResource];
#endif
                                [blockSelf prepareToPlayAsset:asset withKeys:requestedKeys];
                                [asset release];
                                [requestedKeys release];
                                [blockSelf release];
                            });
         }];
    }
}
232
// Detaches self from the current AVPlayerItem/AVPlayer: removes every KVO
// observation and notification registration added in prepareToPlayAsset:,
// releases the retained player, and (on iOS) drops our audio-session
// activation. NOTE(review): m_playerItem was never retained by us (the
// AVPlayer owns it), so it is only nulled here, not released — confirm.
- (void) unloadMedia
{
    if (m_playerItem) {
        [m_playerItem removeObserver:self forKeyPath:@"presentationSize"];
        [m_playerItem removeObserver:self forKeyPath:AVF_STATUS_KEY];
        [m_playerItem removeObserver:self forKeyPath:AVF_BUFFER_LIKELY_KEEP_UP_KEY];
        [m_playerItem removeObserver:self forKeyPath:AVF_TRACKS_KEY];

        [[NSNotificationCenter defaultCenter] removeObserver:self
                                                        name:AVPlayerItemDidPlayToEndTimeNotification
                                                      object:m_playerItem];
        m_playerItem = nullptr;
    }
    if (m_player) {
        // Rate 0 stops playback before the player is torn down.
        [m_player setRate:0.0];
        [m_player removeObserver:self forKeyPath:AVF_CURRENT_ITEM_DURATION_KEY];
        [m_player removeObserver:self forKeyPath:AVF_CURRENT_ITEM_KEY];
        [m_player removeObserver:self forKeyPath:AVF_RATE_KEY];
        [m_player release];
        m_player = nullptr;
    }
    if (m_playerLayer)
        m_playerLayer.player = nil;
#if defined(Q_OS_IOS)
    [self setSessionActive:NO];
#endif
}
260
// Runs on the main queue once the asset's requested keys have loaded (see
// -setURL:mimeType:). Validates key loading, tears down any previous
// item/player via -unloadMedia, then builds a fresh AVPlayerItem + AVPlayer
// pair and installs all KVO observations and the end-of-playback
// notification. Bails out early if the Qt-side player is already gone.
- (void) prepareToPlayAsset:(AVURLAsset *)asset
                   withKeys:(NSArray *)requestedKeys
{
    if (!m_platformPlayer)
        return;

    //Make sure that the value of each key has loaded successfully.
    for (NSString *thisKey in requestedKeys)
    {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [asset statusOfValueForKey:thisKey error:&error];
#ifdef QT_DEBUG_AVF
        qDebug() << Q_FUNC_INFO << [thisKey UTF8String] << " status: " << keyStatus;
#endif
        if (keyStatus == AVKeyValueStatusFailed)
        {
            [self assetFailedToPrepareForPlayback:error];
            return;
        }
    }

    //Use the AVAsset playable property to detect whether the asset can be played.
#ifdef QT_DEBUG_AVF
    qDebug() << Q_FUNC_INFO << "isPlayable: " << [asset isPlayable];
#endif
    if (!asset.playable)
        qWarning() << "Asset reported to be not playable. Playback of this asset may not be possible.";

    //At this point we're ready to set up for playback of the asset.
    //Stop observing our prior AVPlayerItem, if we have one.
    if (m_playerItem)
    {
        //Remove existing player item key value observers and notifications.
        [self unloadMedia];
    }

    //Create a new instance of AVPlayerItem from the now successfully loaded AVAsset.
    m_playerItem = [AVPlayerItem playerItemWithAsset:asset];
    if (!m_playerItem) {
        qWarning() << "Failed to create player item";
        //Generate an error describing the failure.
        NSString *localizedDescription = NSLocalizedString(@"Item cannot be played", @"Item cannot be played description");
        NSString *localizedFailureReason = NSLocalizedString(@"The assets tracks were loaded, but couldn't create player item.", @"Item cannot be played failure reason");
        NSDictionary *errorDict = [NSDictionary dictionaryWithObjectsAndKeys:
                                   localizedDescription, NSLocalizedDescriptionKey,
                                   localizedFailureReason, NSLocalizedFailureReasonErrorKey,
                                   nil];
        NSError *assetCannotBePlayedError = [NSError errorWithDomain:@"StitchedStreamPlayer" code:0 userInfo:errorDict];

        [self assetFailedToPrepareForPlayback:assetCannotBePlayedError];
        return;
    }

    //Observe the player item "status" key to determine when it is ready to play.
    [m_playerItem addObserver:self
                   forKeyPath:AVF_STATUS_KEY
                      options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                      context:AVFMediaPlayerObserverStatusObservationContext];

    [m_playerItem addObserver:self
                   forKeyPath:@"presentationSize"
                      options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                      context:AVFMediaPlayerObserverPresentationSizeContext];

    [m_playerItem addObserver:self
                   forKeyPath:AVF_BUFFER_LIKELY_KEEP_UP_KEY
                      options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                      context:AVFMediaPlayerObserverBufferLikelyToKeepUpContext];

    [m_playerItem addObserver:self
                   forKeyPath:AVF_TRACKS_KEY
                      options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                      context:AVFMediaPlayerObserverTracksContext];

    //When the player item has played to its end time we'll toggle
    //the movie controller Pause button to be the Play button
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(playerItemDidReachEnd:)
                                                 name:AVPlayerItemDidPlayToEndTimeNotification
                                               object:m_playerItem];

    //Get a new AVPlayer initialized to play the specified player item.
    m_player = [AVPlayer playerWithPlayerItem:m_playerItem];
    // Balance the autoreleased factory result; released in -unloadMedia (MRC).
    [m_player retain];

    //Set the initial audio ouptut settings on new player object
    {
        m_platformPlayer.withPlatformPlayer([&](AVFMediaPlayer *player) {
            auto *audioOutput = player->m_audioOutput;
            // Without an audio output the player stays muted at unit volume.
            m_player.volume = (audioOutput ? audioOutput->volume : 1.);
            m_player.muted = (audioOutput ? audioOutput->muted : true);
            player->updateAudioOutputDevice();
        });
    }

    //Assign the output layer to the new player
    m_playerLayer.player = m_player;

    //Observe the AVPlayer "currentItem" property to find out when any
    //AVPlayer replaceCurrentItemWithPlayerItem: replacement will/did
    //occur.
    [m_player addObserver:self
               forKeyPath:AVF_CURRENT_ITEM_KEY
                  options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                  context:AVFMediaPlayerObserverCurrentItemObservationContext];

    //Observe the AVPlayer "rate" property to update the scrubber control.
    [m_player addObserver:self
               forKeyPath:AVF_RATE_KEY
                  options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                  context:AVFMediaPlayerObserverRateObservationContext];

    //Observe the duration for getting the buffer state
    [m_player addObserver:self
               forKeyPath:AVF_CURRENT_ITEM_DURATION_KEY
                  options:0
                  context:AVFMediaPlayerObserverCurrentItemDurationObservationContext];
#if defined(Q_OS_IOS)
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback withOptions:AVAudioSessionCategoryOptionMixWithOthers error:nil];
    [self setSessionActive:YES];
#endif
}
383
// Reports a media-load failure to the Qt player on its own thread.
// Defaults to FormatError; an underlying error from a non-AVFoundation
// domain (e.g. a network failure) is mapped to ResourceError instead.
-(void) assetFailedToPrepareForPlayback:(NSError *)error
{
    QMediaPlayer::Error errorCode = QMediaPlayer::FormatError;
    if (error) {
        NSError *underlyingError = error.userInfo[NSUnderlyingErrorKey];
        if (underlyingError && ![underlyingError.domain isEqualToString:AVFoundationErrorDomain])
            errorCode = QMediaPlayer::ResourceError;
    }
    m_platformPlayer.invokeWithPlatformPlayer([errorCode](AVFMediaPlayer *platformPlayer) {
        platformPlayer->processMediaLoadError(errorCode);
    });

#ifdef QT_DEBUG_AVF
    qDebug() << Q_FUNC_INFO;
    qDebug() << [[error localizedDescription] UTF8String];
    qDebug() << [[error localizedFailureReason] UTF8String];
    qDebug() << [[error localizedRecoverySuggestion] UTF8String];
#endif
}
403
// Handler for AVPlayerItemDidPlayToEndTimeNotification (registered in
// -prepareToPlayAsset:withKeys:): forwards end-of-stream to the Qt player.
- (void) playerItemDidReachEnd:(NSNotification *)notification
{
    Q_UNUSED(notification);

    m_platformPlayer.invokeWithPlatformPlayer([](AVFMediaPlayer *platformPlayer) {
        platformPlayer->processEOS();
    });
}
412
// Central KVO dispatch for all observations installed in
// -prepareToPlayAsset:withKeys:. The context pointer identifies which
// observation fired; all real work is forwarded to the Qt player's thread
// through invokeWithPlatformPlayer. Unknown contexts go to super, as KVO
// requires.
- (void) observeValueForKeyPath:(NSString*) path
                       ofObject:(id)object
                         change:(NSDictionary*)change
                        context:(void*)context
{
    //AVPlayerItem "status" property value observer.
    if (context == AVFMediaPlayerObserverStatusObservationContext)
    {
        AVPlayerStatus status = (AVPlayerStatus)[[change objectForKey:NSKeyValueChangeNewKey] integerValue];
        switch (status)
        {
            //Indicates that the status of the player is not yet known because
            //it has not tried to load new media resources for playback
            case AVPlayerStatusUnknown:
            {
                //QMetaObject::invokeMethod(m_session, "processLoadStateChange", Qt::AutoConnection);
            }
            break;

            case AVPlayerStatusReadyToPlay:
            {
                //Once the AVPlayerItem becomes ready to play, i.e.
                //[playerItem status] == AVPlayerItemStatusReadyToPlay,
                //its duration can be fetched from the item.

                m_platformPlayer.invokeWithPlatformPlayer([](AVFMediaPlayer *platformPlayer) {
                    platformPlayer->processLoadStateChange();
                });
            }
            break;

            case AVPlayerStatusFailed:
            {
                AVPlayerItem *playerItem = static_cast<AVPlayerItem*>(object);
                [self assetFailedToPrepareForPlayback:playerItem.error];

                m_platformPlayer.invokeWithPlatformPlayer([](AVFMediaPlayer *platformPlayer) {
                    platformPlayer->processLoadStateChange();
                });
            }
            break;
        }
    } else if (context == AVFMediaPlayerObserverPresentationSizeContext) {
        // Forward the native video size to Qt.
        QSize size(m_playerItem.presentationSize.width, m_playerItem.presentationSize.height);
        m_platformPlayer.invokeWithPlatformPlayer([size](AVFMediaPlayer *platformPlayer) {
            platformPlayer->nativeSizeChanged(size);
        });
    } else if (context == AVFMediaPlayerObserverBufferLikelyToKeepUpContext)
    {
        // Collapse the keep-up flag into a 0%/100% buffer progress and only
        // forward actual transitions.
        const bool isPlaybackLikelyToKeepUp = [m_playerItem isPlaybackLikelyToKeepUp];
        if (isPlaybackLikelyToKeepUp != m_bufferIsLikelyToKeepUp) {
            m_bufferIsLikelyToKeepUp = isPlaybackLikelyToKeepUp;
            int bufferProgress = isPlaybackLikelyToKeepUp ? 100 : 0;

            m_platformPlayer.invokeWithPlatformPlayer(
                    [bufferProgress](AVFMediaPlayer *platformPlayer) {
                        platformPlayer->processBufferStateChange(bufferProgress);
                    });
        }
    }
    else if (context == AVFMediaPlayerObserverTracksContext)
    {
        m_platformPlayer.invokeWithPlatformPlayer([](AVFMediaPlayer *platformPlayer) {
            platformPlayer->updateTracks();
        });
    }
    //AVPlayer "rate" property value observer.
    else if (context == AVFMediaPlayerObserverRateObservationContext) {
        //QMetaObject::invokeMethod(m_session, "setPlaybackRate", Qt::AutoConnection, Q_ARG(qreal, [m_player rate]));
    }
    //AVPlayer "currentItem" property observer.
    //Called when the AVPlayer replaceCurrentItemWithPlayerItem:
    //replacement will/did occur.
    else if (context == AVFMediaPlayerObserverCurrentItemObservationContext) {
        AVPlayerItem *newPlayerItem = [change objectForKey:NSKeyValueChangeNewKey];
        if (m_playerItem != newPlayerItem)
            m_playerItem = newPlayerItem;
    } else if (context == AVFMediaPlayerObserverCurrentItemDurationObservationContext) {
        // Convert CMTime to milliseconds before crossing to the Qt side.
        const CMTime time = [m_playerItem duration];
        const qint64 dur = static_cast<qint64>(float(time.value) / float(time.timescale) * 1000.0f);

        m_platformPlayer.invokeWithPlatformPlayer([dur](AVFMediaPlayer *platformPlayer) {
            platformPlayer->processDurationChange(dur);
        });
    } else {
        [super observeValueForKeyPath:path ofObject:object change:change context:context];
    }
}
501
// Detaches the Qt-side session (called from AVFMediaPlayer's destructor);
// any AVFoundation callbacks still in flight become no-ops afterwards.
- (void)clearSession
{
#ifdef QT_DEBUG_AVF
    qDebug() << Q_FUNC_INFO;
#endif
    m_platformPlayer.clear();
}
509
// MRC teardown: unregister all observations, drop the Qt back-pointer, and
// release everything this object retained in init/setURL:.
- (void) dealloc
{
#ifdef QT_DEBUG_AVF
    qDebug() << Q_FUNC_INFO;
#endif
    // Removes KVO observers/notifications and releases m_player.
    [self unloadMedia];

    m_platformPlayer.clear();

    if (m_URL) {
        [m_URL release];
    }

    [m_mimeType release];
    [m_playerLayer release];
    // 'videoTrack' is a 'retain' property, but still needs a
    // manual 'release' (i.e. setting to nil):
    self.videoTrack = nil;
    [super dealloc];
}
530
// AVAssetResourceLoader delegate: serves media data for the custom
// "iodevice" URL scheme out of the QIODevice supplied through
// AVFMediaPlayer::mediaStream(). Returns NO for any other scheme so
// AVFoundation handles the request itself, and NO if the Qt player or its
// stream is already gone.
- (BOOL) resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    Q_UNUSED(resourceLoader);

    if (![loadingRequest.request.URL.scheme isEqualToString:@"iodevice"])
        return NO;

    auto result = m_platformPlayer.withPlatformPlayer([&](AVFMediaPlayer *platformPlayer) {
        QIODevice *device = platformPlayer->mediaStream();
        if (!device)
            return NO;

        device->seek(loadingRequest.dataRequest.requestedOffset);
        if (loadingRequest.contentInformationRequest) {
            loadingRequest.contentInformationRequest.contentType = m_mimeType;
            loadingRequest.contentInformationRequest.contentLength = device->size();
            loadingRequest.contentInformationRequest.byteRangeAccessSupported = YES;
        }

        if (loadingRequest.dataRequest) {
            NSInteger requestedLength = loadingRequest.dataRequest.requestedLength;
            // Serve the request in chunks of at most 32 KiB.
            // (Was 32 * 1064 — a typo for 1024 producing odd-sized reads.)
            int maxBytes = qMin(32 * 1024, int(requestedLength));
            QByteArray buffer;
            buffer.resize(maxBytes);

            NSInteger submitted = 0;
            while (submitted < requestedLength) {
                qint64 len = device->read(buffer.data(), maxBytes);
                if (len < 1)
                    break;

                // Pass the raw bytes explicitly: relying on QByteArray's
                // implicit conversion to const char* fails to compile under
                // QT_NO_CAST_FROM_BYTEARRAY.
                [loadingRequest.dataRequest respondWithData:[NSData dataWithBytes:buffer.data()
                                                                           length:len]];
                submitted += len;
            }

            // Finish loading even if not all bytes submitted.
            [loadingRequest finishLoading];
        }

        return YES;
    });

    return result.value_or(NO);
}
576@end
577
// Constructs the platform player: creates the Objective-C observer (which
// holds a guarded back-pointer to this object), wires the timer used to
// poll the playback position, and installs the video renderer control.
AVFMediaPlayer::AVFMediaPlayer(QMediaPlayer *player)
    : QObject(player),
      QPlatformMediaPlayer(player),
      m_mediaStream(nullptr),
      m_rate(1.0),
      m_requestedPosition(-1),
      m_duration(0),
      m_bufferProgress(0)
{
    m_observer = [[AVFMediaPlayerObserver alloc] initWithMediaPlayerSession:this];
    connect(&m_playbackTimer, &QTimer::timeout, this, &AVFMediaPlayer::processPositionChange);
    setVideoOutput(new AVFVideoRendererControl(this));
}
591
593{
594#ifdef QT_DEBUG_AVF
595 qDebug() << Q_FUNC_INFO;
596#endif
597 //Detatch the session from the sessionObserver (which could still be alive trying to communicate with this session).
598 [m_observer clearSession];
599 [m_observer release];
600}
601
// Resolves the sink's platform implementation (or nullptr when the sink is
// removed) and hands it to the current video output.
void AVFMediaPlayer::setVideoSink(QVideoSink *sink)
{
    AVFVideoSink *platformSink = nullptr;
    if (sink)
        platformSink = static_cast<AVFVideoSink *>(sink->platformVideoSink());
    m_videoSink = platformSink;
    m_videoOutput->setVideoSink(m_videoSink);
}
607
609{
610#ifdef QT_DEBUG_AVF
611 qDebug() << Q_FUNC_INFO << output;
612#endif
613
614 if (m_videoOutput == output)
615 return;
616
617 //Set the current output layer to null to stop rendering
618 if (m_videoOutput) {
619 m_videoOutput->setLayer(nullptr);
620 }
621
622 m_videoOutput = output;
623
624 if (m_videoOutput && state() != QMediaPlayer::StoppedState)
625 m_videoOutput->setLayer([m_observer playerLayer]);
626}
627
629{
630#ifdef QT_DEBUG_AVF
631 qDebug() << Q_FUNC_INFO;
632#endif
633 AVAsset *currentAsset = [[m_observer playerItem] asset];
634 return currentAsset;
635}
636
638{
639 return m_resources;
640}
641
643{
644 return m_mediaStream;
645}
646
// Resolves relative local-file URLs to absolute ones and forwards the URL
// plus MIME type to the observer, which starts asynchronous asset loading.
static void setURL(AVFMediaPlayerObserver *observer, const QUrl &url, const QString &mimeType = QString())
{
    QUrl resolvedUrl = url;
    // AVFoundation cannot handle file URLs with a relative path
    if (url.isLocalFile() && !QDir::isAbsolutePath(url.path()))
        resolvedUrl = QUrl::fromLocalFile(QFileInfo(url.path()).absoluteFilePath());
    NSURL *nsurl = resolvedUrl.toNSURL();
    // Encode as UTF-8 to match -stringWithUTF8String:. The previous
    // toLatin1() produced invalid UTF-8 for non-ASCII MIME strings
    // (identical for the ASCII MIME types used in practice).
    [observer setURL:nsurl mimeType:[NSString stringWithUTF8String:mimeType.toUtf8().constData()]];
}
656
657
658void AVFMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
659{
660#ifdef QT_DEBUG_AVF
661 qDebug() << Q_FUNC_INFO << content.request().url();
662#endif
663
664 [m_observer unloadMedia];
665
666 m_resources = content;
667 resetStream(stream);
668
669 m_requestedPosition = -1;
670 orientationChanged(QtVideo::Rotation::None, false);
671 positionChanged(position());
672 if (m_duration != 0) {
673 m_duration = 0;
674 durationChanged(0);
675 }
676 if (!m_metaData.isEmpty()) {
677 m_metaData.clear();
678 metaDataChanged();
679 }
680 resetBufferProgress();
681 for (int i = 0; i < QPlatformMediaPlayer::NTrackTypes; ++i) {
682 tracks[i].clear();
683 nativeTracks[i].clear();
684 }
685 tracksChanged();
686
687 if (!m_mediaStream && content.isEmpty()) {
688 seekableChanged(false);
689 audioAvailableChanged(false);
690 videoAvailableChanged(false);
691
692 mediaStatusChanged(QMediaPlayer::NoMedia);
693 stateChanged(QMediaPlayer::StoppedState);
694
695 return;
696 }
697
698 mediaStatusChanged(QMediaPlayer::LoadingMedia);
699
700 if (m_mediaStream) {
701 // If there is a data, try to load it,
702 // otherwise wait for readyRead.
703 if (m_mediaStream->size())
705 } else {
706 //Load AVURLAsset
707 //initialize asset using content's URL
708 setURL(m_observer, m_resources);
709 }
710
711 stateChanged(QMediaPlayer::StoppedState);
712}
713
715{
716 AVPlayerItem *playerItem = [m_observer playerItem];
717
718 if (m_requestedPosition != -1)
719 return m_requestedPosition;
720
721 if (!playerItem)
722 return 0;
723
724 CMTime time = [playerItem currentTime];
725 return static_cast<quint64>(float(time.value) / float(time.timescale) * 1000.0f);
726}
727
729{
730#ifdef QT_DEBUG_AVF
731 qDebug() << Q_FUNC_INFO;
732#endif
733 return m_duration;
734}
735
737{
738#ifdef QT_DEBUG_AVF
739 qDebug() << Q_FUNC_INFO;
740#endif
741 return m_bufferProgress/100.;
742}
743
745{
746 AVPlayerItem *playerItem = [m_observer playerItem];
747
748 if (!playerItem)
749 return {};
750
751 if (state() == QMediaPlayer::StoppedState)
752 return {};
753
754 QMediaTimeRange timeRanges;
755
756 NSArray *ranges = [playerItem loadedTimeRanges];
757 for (NSValue *timeRange in ranges) {
758 CMTimeRange currentTimeRange = [timeRange CMTimeRangeValue];
759 qint64 startTime = qint64(float(currentTimeRange.start.value) / currentTimeRange.start.timescale * 1000.0);
760 timeRanges.addInterval(startTime, startTime + qint64(float(currentTimeRange.duration.value) / currentTimeRange.duration.timescale * 1000.0));
761 }
762 return timeRanges;
763}
764
766{
767 return m_rate;
768}
769
770void AVFMediaPlayer::setAudioOutput(QPlatformAudioOutput *output)
771{
772 if (m_audioOutput == output)
773 return;
774 if (m_audioOutput)
775 m_audioOutput->q->disconnect(this);
776 m_audioOutput = output;
777 if (m_audioOutput) {
778 connect(m_audioOutput->q, &QAudioOutput::deviceChanged, this, &AVFMediaPlayer::updateAudioOutputDevice);
779 connect(m_audioOutput->q, &QAudioOutput::volumeChanged, this, &AVFMediaPlayer::setVolume);
780 connect(m_audioOutput->q, &QAudioOutput::mutedChanged, this, &AVFMediaPlayer::setMuted);
781 //connect(m_audioOutput->q, &QAudioOutput::audioRoleChanged, this, &AVFMediaPlayer::setAudioRole);
782 }
784 setMuted(m_audioOutput ? m_audioOutput->muted : true);
785 setVolume(m_audioOutput ? m_audioOutput->volume : 1.);
786}
787
789{
790 return m_metaData;
791}
792
// Records the requested playback rate and, while actually playing, pushes
// it to the AVPlayer. Setting a non-zero rate on AVPlayer starts playback,
// so the rate is only applied in PlayingState.
void AVFMediaPlayer::setPlaybackRate(qreal rate)
{
#ifdef QT_DEBUG_AVF
    qDebug() << Q_FUNC_INFO << rate;
#endif

    // Ignore no-op changes (fuzzy float comparison).
    if (QtPrivate::fuzzyCompare(m_rate, rate))
        return;

    m_rate = rate;

    if (AVPlayer *avPlayer = [m_observer player]; avPlayer && state() == QMediaPlayer::PlayingState)
        [avPlayer setRate:m_rate];

    playbackRateChanged(m_rate);
}
810
812{
813#ifdef QT_DEBUG_AVF
814 qDebug() << Q_FUNC_INFO << pos;
815#endif
816
817 if (pos == position())
818 return;
819
820 AVPlayerItem *playerItem = [m_observer playerItem];
821 if (!playerItem) {
822 m_requestedPosition = pos;
823 positionChanged(m_requestedPosition);
824 return;
825 }
826
827 if (!isSeekable()) {
828 if (m_requestedPosition != -1) {
829 m_requestedPosition = -1;
830 positionChanged(position());
831 }
832 return;
833 }
834
835 pos = qMax(qint64(0), pos);
836 if (duration() > 0)
837 pos = qMin(pos, duration());
838 m_requestedPosition = pos;
839
840 CMTime newTime = [playerItem currentTime];
841 newTime.value = (pos / 1000.0f) * newTime.timescale;
842 [playerItem seekToTime:newTime toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero
843 completionHandler:^(BOOL finished) {
844 if (finished)
845 m_requestedPosition = -1;
846 }];
847
848 positionChanged(pos);
849
850 // Reset media status if the current status is EndOfMedia
851 if (mediaStatus() == QMediaPlayer::EndOfMedia) {
852 QMediaPlayer::MediaStatus newMediaStatus = (state() == QMediaPlayer::PausedState)
853 ? QMediaPlayer::BufferedMedia
854 : QMediaPlayer::LoadedMedia;
855 mediaStatusChanged(newMediaStatus);
856 }
857}
858
860{
861#ifdef QT_DEBUG_AVF
862 qDebug() << Q_FUNC_INFO << "currently: " << state();
863#endif
864
865 if (mediaStatus() == QMediaPlayer::NoMedia || mediaStatus() == QMediaPlayer::InvalidMedia)
866 return;
867
868 if (state() == QMediaPlayer::PlayingState)
869 return;
870
871 if (state() != QMediaPlayer::PausedState)
872 resetCurrentLoop();
873
874 if (m_videoOutput && m_videoSink)
875 m_videoOutput->setLayer([m_observer playerLayer]);
876
877 // Reset media status if the current status is EndOfMedia
878 if (mediaStatus() == QMediaPlayer::EndOfMedia)
879 setPosition(0);
880
881 if (mediaStatus() == QMediaPlayer::LoadedMedia
882 || mediaStatus() == QMediaPlayer::BufferedMedia) {
883 // Setting the rate starts playback
884 [[m_observer player] setRate:m_rate];
885 }
886
887 processLoadStateChange(QMediaPlayer::PlayingState);
888
889 stateChanged(QMediaPlayer::PlayingState);
890 m_playbackTimer.start(100);
891}
892
894{
895#ifdef QT_DEBUG_AVF
896 qDebug() << Q_FUNC_INFO << "currently: " << state();
897#endif
898
899 if (mediaStatus() == QMediaPlayer::NoMedia || mediaStatus() == QMediaPlayer::InvalidMedia)
900 return;
901
902 if (state() == QMediaPlayer::PausedState)
903 return;
904
905 stateChanged(QMediaPlayer::PausedState);
906
907 if (m_videoOutput && m_videoSink)
908 m_videoOutput->setLayer([m_observer playerLayer]);
909
910 [[m_observer player] pause];
911
912 // Reset media status if the current status is EndOfMedia
913 if (mediaStatus() == QMediaPlayer::EndOfMedia)
914 setPosition(0);
915
916 positionChanged(position());
917 m_playbackTimer.stop();
918}
919
921{
922#ifdef QT_DEBUG_AVF
923 qDebug() << Q_FUNC_INFO << "currently: " << state();
924#endif
925
926 if (state() == QMediaPlayer::StoppedState && mediaStatus() != QMediaPlayer::EndOfMedia)
927 return;
928
929 // AVPlayer doesn't have stop(), only pause() and play().
930 [[m_observer player] pause];
931 setPosition(0);
932
933 if (m_videoOutput)
934 m_videoOutput->setLayer(nullptr);
935
936 resetBufferProgress();
937
938 if (mediaStatus() == QMediaPlayer::BufferedMedia || mediaStatus() == QMediaPlayer::EndOfMedia)
939 mediaStatusChanged(QMediaPlayer::LoadedMedia);
940
941 stateChanged(QMediaPlayer::StoppedState);
942 m_playbackTimer.stop();
943}
944
// Applies the audio-output volume directly to the AVPlayer. When no player
// exists yet this is a no-op; the current output volume is re-applied when
// a player is created (see prepareToPlayAsset:withKeys:).
void AVFMediaPlayer::setVolume(float volume)
{
#ifdef QT_DEBUG_AVF
    qDebug() << Q_FUNC_INFO << volume;
#endif

    if (AVPlayer *avPlayer = [m_observer player])
        avPlayer.volume = volume;
}
955
// Applies the audio-output mute state directly to the AVPlayer. No-op while
// no player exists; the current mute state is re-applied when a player is
// created (see prepareToPlayAsset:withKeys:).
void AVFMediaPlayer::setMuted(bool muted)
{
#ifdef QT_DEBUG_AVF
    qDebug() << Q_FUNC_INFO << muted;
#endif

    if (AVPlayer *avPlayer = [m_observer player])
        avPlayer.muted = muted;
}
966
968{
969#ifdef Q_OS_MACOS
970 AVPlayer *player = [m_observer player];
971 if (!player)
972 return;
973
974 if (!m_audioOutput || m_audioOutput->device.id().isEmpty()) {
975 if (!m_audioOutput)
976 player.muted = true;
977 player.audioOutputDeviceUniqueID = nil;
978 } else {
979 NSString *str = QString::fromUtf8(m_audioOutput->device.id()).toNSString();
980 player.audioOutputDeviceUniqueID = str;
981 }
982#endif
983}
984
986{
987 if (doLoop()) {
988 positionChanged(duration());
989 setPosition(0);
990 [[m_observer player] setRate:m_rate];
991 return;
992 }
993
994 //AVPlayerItem has reached end of track/stream
995#ifdef QT_DEBUG_AVF
996 qDebug() << Q_FUNC_INFO;
997#endif
998 positionChanged(position());
999
1000 if (m_videoOutput)
1001 m_videoOutput->setLayer(nullptr);
1002
1003 resetBufferProgress();
1004
1005 stateChanged(QMediaPlayer::StoppedState);
1006 mediaStatusChanged(QMediaPlayer::EndOfMedia);
1007}
1008
// Re-evaluates the Qt media status after the AVPlayer's load state or the
// requested playback state changed. Emits the intermediate Loaded/Buffering
// transitions Qt clients expect before landing on the final status, and
// kicks off playback by setting the player rate when entering PlayingState.
void AVFMediaPlayer::processLoadStateChange(QMediaPlayer::PlaybackState newState)
{
    AVPlayerStatus currentStatus = [[m_observer player] status];

#ifdef QT_DEBUG_AVF
    qDebug() << Q_FUNC_INFO << currentStatus << ", " << mediaStatus() << ", " << newState;
#endif

    // Nothing to report while no media is set.
    if (mediaStatus() == QMediaPlayer::NoMedia)
        return;

    if (currentStatus == AVPlayerStatusReadyToPlay) {

        AVPlayerItem *playerItem = [m_observer playerItem];

        // Re-apply the pitch algorithm: it is a per-AVPlayerItem property.
        applyPitchCompensation(m_pitchCompensationEnabled);

        // get the meta data
        m_metaData = AVFMetaData::fromAsset(playerItem.asset);
        metaDataChanged();

        if (playerItem) {
            seekableChanged([[playerItem seekableTimeRanges] count] > 0);

            // Get the native size of the video, and reset the bounds of the player layer
            AVPlayerLayer *playerLayer = [m_observer playerLayer];
            if (m_observer.videoTrack && playerLayer) {
                if (!playerLayer.bounds.size.width || !playerLayer.bounds.size.height) {
                    playerLayer.bounds = CGRectMake(0.0f, 0.0f,
                                                    m_observer.videoTrack.assetTrack.naturalSize.width,
                                                    m_observer.videoTrack.assetTrack.naturalSize.height);
                }
            }

            // Honor a seek that was requested before the media was ready.
            if (m_requestedPosition != -1)
                setPosition(m_requestedPosition);
        }

        QMediaPlayer::MediaStatus newStatus = (newState != QMediaPlayer::StoppedState)
                                              ? QMediaPlayer::BufferedMedia
                                              : QMediaPlayer::LoadedMedia;

        if (newStatus != mediaStatus()) {
            if (newStatus == QMediaPlayer::BufferedMedia
                && mediaStatus() == QMediaPlayer::LoadingMedia) {
                // Emit intermediate transitions to match expected signal sequence
                mediaStatusChanged(QMediaPlayer::LoadedMedia);
                mediaStatusChanged(QMediaPlayer::BufferingMedia);
            } else if (newStatus == QMediaPlayer::BufferedMedia
                       && mediaStatus() == QMediaPlayer::LoadedMedia) {
                mediaStatusChanged(QMediaPlayer::BufferingMedia);
            }
            mediaStatusChanged(newStatus);
        }
    }

    if (newState == QMediaPlayer::PlayingState && [m_observer player]) {
        // Setting the rate is enough to start playback, no need to call play()
        [[m_observer player] setRate:m_rate];
        m_playbackTimer.start();
    }
}
1072
1073
1078
1079
{
    // Media failed to reach a playable state: fall back to StoppedState.
    // NOTE(review): the signature line is not visible in this chunk — from the
    // body this is most likely processLoadStateFailure(); confirm.
    stateChanged(QMediaPlayer::StoppedState);
}
1084
{
    // Reacts to a change in the buffering percentage.
    // NOTE(review): signature line not visible in this chunk; the parameter is
    // the integer percentage `bufferProgress` (likely processBufferStateChange).
    if (state() == QMediaPlayer::StoppedState)
        return;

    if (bufferProgress == m_bufferProgress)
        return;

    auto status = mediaStatus();
    // Buffered -> unbuffered.
    if (!bufferProgress) {
        status = QMediaPlayer::StalledMedia;
    } else if (status == QMediaPlayer::StalledMedia) {
        status = QMediaPlayer::BufferedMedia;
        // Resume playback.
        if (state() == QMediaPlayer::PlayingState) {
            [[m_observer player] setRate:m_rate];
            m_playbackTimer.start();
        }
    }

    // NOTE(review): emitted even when `status` is unchanged (e.g. progress
    // moves between two non-zero values while Buffered) — confirm intent.
    mediaStatusChanged(status);

    m_bufferProgress = bufferProgress;
    // Qt reports buffer progress as a [0.0, 1.0] fraction.
    bufferProgressChanged(bufferProgress / 100.);
}
1111
{
    // Cache and publish the new duration only when it actually changed.
    // NOTE(review): signature line not visible here; matches
    // processDurationChange(qint64 duration) from this class — confirm.
    if (duration == m_duration)
        return;

    m_duration = duration;
    durationChanged(duration);
}
1120
{
    // Periodic position notification; while stopped the position is not
    // advancing, so skip the signal. (Signature line not visible in this
    // chunk — presumably the playback-timer tick handler; confirm.)
    if (state() == QMediaPlayer::StoppedState)
        return;

    positionChanged(position());
}
1128
// Handles a failure to load the current media source: cancels any pending
// seek request, marks the media invalid, and reports the error to clients.
void AVFMediaPlayer::processMediaLoadError(QMediaPlayer::Error errorCode)
{
    // A queued seek can no longer be honored; drop it and re-publish position.
    if (m_requestedPosition != -1) {
        m_requestedPosition = -1;
        positionChanged(position());
    }

    mediaStatusChanged(QMediaPlayer::InvalidMedia);
    error(errorCode, tr("Failed to load media"));
}
1140
1145
{
    // Builds a synthetic iodevice:/// URL for custom-IO playback.
    // A file suffix helps AVFoundation pick a demuxer: prefer the source
    // URL's suffix, otherwise sniff the stream content's MIME type.
    // NOTE(review): signature line not visible here; presumably the
    // stream-ready handler wired up in resetStream() — confirm.
    QString suffix;
    if (!m_resources.isEmpty())
        suffix = QFileInfo(m_resources.path()).suffix();
    if (suffix.isEmpty() && m_mediaStream)
        suffix = QMimeDatabase().mimeTypeForData(m_mediaStream).preferredSuffix();
    const QString url = QStringLiteral("iodevice:///iodevice.") + suffix;
    setURL(m_observer, QUrl(url), suffix);
}
1156
{
    // The source QIODevice is being destroyed: drop our reference and
    // disconnect its signals before the pointer goes stale.
    resetStream(nullptr);
}
1161
{
    // Rebuilds the per-type track lists (metadata + native AVPlayerItemTrack)
    // from the current player item, and publishes audio/video availability.
    // NOTE(review): signature line not visible in this chunk — matches a
    // track-refresh handler (likely updateTracks()); confirm.

    // Remember whether this is the first population so subtitle defaults
    // are only applied once.
    bool firstLoad = true;
    for (int i = 0; i < QPlatformMediaPlayer::NTrackTypes; ++i) {
        if (tracks[i].count())
            firstLoad = false;
        tracks[i].clear();
        nativeTracks[i].clear();
    }
    bool hasAudio = false;
    bool hasVideo = false;
    AVPlayerItem *playerItem = [m_observer playerItem];
    if (playerItem) {
        // Check each track for audio and video content
        // (this local `tracks` shadows the member array — hence the explicit
        // this->tracks below).
        NSArray *tracks = playerItem.tracks;
        for (AVPlayerItemTrack *track in tracks) {
            AVAssetTrack *assetTrack = track.assetTrack;
            if (assetTrack) {
                int qtTrack = -1;
                if ([assetTrack.mediaType isEqualToString:AVMediaTypeAudio]) {
                    qtTrack = QPlatformMediaPlayer::AudioStream;
                    hasAudio = true;
                } else if ([assetTrack.mediaType isEqualToString:AVMediaTypeVideo]) {
                    qtTrack = QPlatformMediaPlayer::VideoStream;
                    hasVideo = true;
                    // New video track: recompute and publish its orientation.
                    if (m_observer.videoTrack != track) {
                        m_observer.videoTrack = track;
                        bool isMirrored = false;
                        QtVideo::Rotation orientation = QtVideo::Rotation::None;
                        videoOrientationForAssetTrack(assetTrack, orientation, isMirrored);
                        orientationChanged(orientation, isMirrored);
                    }
                }
                else if ([assetTrack.mediaType isEqualToString:AVMediaTypeSubtitle]) {
                    qtTrack = QPlatformMediaPlayer::SubtitleStream;
                }
                if (qtTrack != -1) {
                    QMediaMetaData metaData = AVFMetaData::fromAssetTrack(assetTrack);
                    this->tracks[qtTrack].append(metaData);
                    nativeTracks[qtTrack].append(track);
                }
            }
        }
        // subtitles are disabled by default
        if (firstLoad)
            setActiveTrack(SubtitleStream, -1);
    }
    audioAvailableChanged(hasAudio);
    videoAvailableChanged(hasVideo);
    tracksChanged();
}
1213
// Enables exactly the native track at `index` within the given type and
// disables all the others; index == -1 disables every track of that type.
void AVFMediaPlayer::setActiveTrack(QPlatformMediaPlayer::TrackType type, int index)
{
    const auto &t = nativeTracks[type];
    if (type == QPlatformMediaPlayer::SubtitleStream) {
        // subtitle streams are not always automatically enabled on macOS/iOS.
        // this hack ensures they get enabled and we actually get the text
        AVPlayerItem *playerItem = m_observer.m_playerItem;
        if (playerItem) {
            AVAsset *asset = playerItem.asset;
            if (!asset)
                return;
#if defined(Q_OS_VISIONOS)
            // visionOS only offers the asynchronous selection-group API.
            [asset loadMediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible
                                      completionHandler:[=](AVMediaSelectionGroup *group, NSError *error) {
                // FIXME: handle error
                if (error)
                    return;
                auto *options = group.options;
                if (options.count)
                    [playerItem selectMediaOption:options.firstObject inMediaSelectionGroup:group];
            }];
#else
            AVMediaSelectionGroup *group = [asset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
            if (!group)
                return;
            auto *options = group.options;
            if (options.count)
                [playerItem selectMediaOption:options.firstObject inMediaSelectionGroup:group];
#endif
        }
    }
    // Toggle the enabled flag on every native track of this type.
    for (int i = 0; i < t.count(); ++i)
        t.at(i).enabled = (i == index);
    activeTracksChanged();
}
1249
// Returns the index of the first enabled native track of the given type,
// or -1 when no track of that type is enabled.
int AVFMediaPlayer::activeTrack(QPlatformMediaPlayer::TrackType type)
{
    const auto &trackList = nativeTracks[type];
    const int trackTotal = trackList.count();
    for (int idx = 0; idx < trackTotal; ++idx) {
        if (trackList.at(idx).enabled)
            return idx;
    }
    return -1;
}
1258
// Number of native AVPlayerItem tracks discovered for the given type.
int AVFMediaPlayer::trackCount(QPlatformMediaPlayer::TrackType type)
{
    const auto &trackList = nativeTracks[type];
    return trackList.count();
}
1263
// Metadata for the given track of the given type; out-of-range indices
// yield a default-constructed (empty) QMediaMetaData.
QMediaMetaData AVFMediaPlayer::trackMetaData(QPlatformMediaPlayer::TrackType type, int trackNumber)
{
    const auto &metaList = tracks[type];
    const bool inRange = trackNumber >= 0 && trackNumber < metaList.count();
    return inRange ? metaList.at(trackNumber) : QMediaMetaData();
}
1271
// Swaps the active source stream, moving the readyRead/destroyed signal
// connections from the previous QIODevice (if any) to the new one.
// Passing nullptr simply detaches the current stream.
void AVFMediaPlayer::resetStream(QIODevice *stream)
{
    if (m_mediaStream != nullptr) {
        disconnect(m_mediaStream, &QIODevice::readyRead, this, &AVFMediaPlayer::streamReady);
        disconnect(m_mediaStream, &QIODevice::destroyed, this, &AVFMediaPlayer::streamDestroyed);
    }

    m_mediaStream = stream;

    if (m_mediaStream != nullptr) {
        connect(m_mediaStream, &QIODevice::readyRead, this, &AVFMediaPlayer::streamReady);
        connect(m_mediaStream, &QIODevice::destroyed, this, &AVFMediaPlayer::streamDestroyed);
    }
}
1286
// Selects the audio time-pitch algorithm on the current player item:
// Spectral preserves the original pitch at non-1x rates, while Varispeed
// lets the pitch follow the playback rate. No-op without a player item.
void AVFMediaPlayer::applyPitchCompensation(bool enabled)
{
    AVPlayerItem *item = [m_observer playerItem];
    if (!item)
        return;
    item.audioTimePitchAlgorithm = enabled ? AVAudioTimePitchAlgorithmSpectral
                                           : AVAudioTimePitchAlgorithmVarispeed;
}
1297
// Resets the cached buffer progress to zero, notifying listeners only when
// the value actually changes.
void AVFMediaPlayer::resetBufferProgress()
{
    if (m_bufferProgress == 0)
        return;
    m_bufferProgress = 0;
    bufferProgressChanged(0);
}
1305
{
    // Forward the new native video size to the sink, if one is attached.
    // NOTE(review): signature line not visible here; matches
    // nativeSizeChanged(QSize size) from this class — confirm.
    if (!m_videoSink)
        return;
    m_videoSink->setNativeSize(size);
}
1312
// Pushes the video track's rotation and mirroring state to the renderer;
// a no-op while no video output is attached.
void AVFMediaPlayer::orientationChanged(QtVideo::Rotation rotation, bool mirrored)
{
    if (m_videoOutput) {
        m_videoOutput->setVideoRotation(rotation);
        m_videoOutput->setVideoMirrored(mirrored);
    }
}
1321
// Derives the rotation angle and mirroring flag from a video track's
// preferredTransform. Outputs default to None/false for a nil track or an
// identity transform.
void AVFMediaPlayer::videoOrientationForAssetTrack(AVAssetTrack *videoTrack,
                                                   QtVideo::Rotation &angle,
                                                   bool &mirrored)
{
    angle = QtVideo::Rotation::None;
    mirrored = false;
    if (!videoTrack)
        return;

    const CGAffineTransform transform = videoTrack.preferredTransform;
    if (CGAffineTransformIsIdentity(transform))
        return;

    // determinant < 0 means the transform includes a reflection (mirror)
    const qreal det = transform.a * transform.d - transform.b * transform.c;
    mirrored = (det < 0.0);

    // Factor out mirror before computing rotation angle.
    // Negating the first column of a mirrored matrix yields a pure rotation.
    const qreal ra = mirrored ? -transform.a : transform.a;
    const qreal rb = mirrored ? -transform.b : transform.b;

    // qAtan2 yields degrees in (-180, 180]; normalize into [0, 360) so each
    // quadrant maps to exactly one comparison below. (The previous code also
    // compared against -270/-90/-180, which were unreachable after this
    // normalization.)
    qreal degrees = qRadiansToDegrees(qAtan2(rb, ra));
    if (degrees < 0)
        degrees += 360.0;

    if (QtPrivate::fuzzyCompare(degrees, qreal(90)))
        angle = QtVideo::Rotation::Clockwise90;
    else if (QtPrivate::fuzzyCompare(degrees, qreal(180)))
        angle = QtVideo::Rotation::Clockwise180;
    else if (QtPrivate::fuzzyCompare(degrees, qreal(270)))
        angle = QtVideo::Rotation::Clockwise270;
}
1358
{
    // Toggle pitch compensation; no-op when the setting is unchanged.
    // NOTE(review): signature line not visible here; matches
    // setPitchCompensation(bool enabled) from this class — confirm.
    if (m_pitchCompensationEnabled == enabled)
        return;

    applyPitchCompensation(enabled);

    m_pitchCompensationEnabled = enabled;
    pitchCompensationChanged(enabled);
}
1369
{
    // Cached flag; kept in sync by the setter above it in the file.
    return m_pitchCompensationEnabled;
}
1374
{
    // Pitch compensation is unconditionally reported as available on this
    // backend (AVPlayerItem exposes audioTimePitchAlgorithm on all targets).
    return QPlatformMediaPlayer::PitchCompensationAvailability::Available;
}
1380
1381#include "moc_avfmediaplayer_p.cpp"
static void * AVFMediaPlayerObserverCurrentItemObservationContext
static NSString *const AVF_BUFFER_LIKELY_KEEP_UP_KEY
static void * AVFMediaPlayerObserverPresentationSizeContext
static void * AVFMediaPlayerObserverTracksContext
static NSString *const AVF_STATUS_KEY
static NSString *const AVF_CURRENT_ITEM_DURATION_KEY
static void * AVFMediaPlayerObserverRateObservationContext
static void setURL(AVFMediaPlayerObserver *observer, const QUrl &url, const QString &mimeType=QString())
static NSString *const AVF_CURRENT_ITEM_KEY
static NSString *const AVF_PLAYABLE_KEY
static void * AVFMediaPlayerObserverBufferLikelyToKeepUpContext
static NSString *const AVF_RATE_KEY
static void * AVFMediaPlayerObserverCurrentItemDurationObservationContext
static void * AVFMediaPlayerObserverStatusObservationContext
qint64 duration() const override
void setVolume(float volume)
void processLoadStateChange()
void setPosition(qint64 pos) override
void setVideoSink(QVideoSink *sink) override
QMediaTimeRange availablePlaybackRanges() const override
void nativeSizeChanged(QSize size)
QMediaMetaData metaData() const override
void setVideoOutput(AVFVideoRendererControl *output)
void stop() override
float bufferProgress() const override
void processMediaLoadError(QMediaPlayer::Error errorCode)
void setMedia(const QUrl &content, QIODevice *stream) override
qint64 position() const override
void setPitchCompensation(bool enabled) override
void setMuted(bool muted)
void pause() override
void play() override
void processDurationChange(qint64 duration)
QUrl media() const override
bool pitchCompensation() const override
void processLoadStateFailure()
void updateAudioOutputDevice()
qreal playbackRate() const override
void processLoadStateChange(QMediaPlayer::PlaybackState newState)
QIODevice * mediaStream() const override
PitchCompensationAvailability pitchCompensationAvailability() const override
~AVFMediaPlayer() override
AVAsset * currentAssetHandle()
void processBufferStateChange(int bufferProgress)
void setVideoSink(AVFVideoSink *sink)