Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
avfmediaplayer.mm
Go to the documentation of this file.
1// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
6#include <avfvideosink_p.h>
7#include <avfmetadata_p.h>
8
9#include "qaudiooutput.h"
10#include "private/qplatformaudiooutput_p.h"
11
12#include <QtCore/qdir.h>
13#include <QtCore/qfileinfo.h>
14#include <QtCore/qmimedatabase.h>
15#include <QtCore/qpointer.h>
16#include <QtCore/qmath.h>
17#include <QtCore/qmutex.h>
18#include <QtCore/qthread.h>
19#include <QtCore/private/qexpected_p.h>
20
21#include <mutex>
22
23#import <AVFoundation/AVFoundation.h>
24
25QT_USE_NAMESPACE
26
27//AVAsset Keys
28static NSString* const AVF_TRACKS_KEY = @"tracks";
29static NSString* const AVF_PLAYABLE_KEY = @"playable";
30
31//AVPlayerItem keys
32static NSString* const AVF_STATUS_KEY = @"status";
33static NSString* const AVF_BUFFER_LIKELY_KEEP_UP_KEY = @"playbackLikelyToKeepUp";
34
35//AVPlayer keys
36static NSString* const AVF_RATE_KEY = @"rate";
37static NSString* const AVF_CURRENT_ITEM_KEY = @"currentItem";
38static NSString* const AVF_CURRENT_ITEM_DURATION_KEY = @"currentItem.duration";
39
48
// Bridge object between AVFoundation and the Qt platform player: owns the
// AVPlayer / AVPlayerItem / AVPlayerLayer trio, observes their KVO keys and
// notifications, and serves custom "iodevice" URLs as a resource-loader
// delegate. Lifetime is managed manually (this file uses MRC, not ARC).
@interface AVFMediaPlayerObserver : NSObject<AVAssetResourceLoaderDelegate>

// Read-only accessors for the underlying AVFoundation objects; the getter
// names drop the m_ prefix so callers read [observer player] etc.
@property (readonly, getter=player) AVPlayer* m_player;
@property (readonly, getter=playerItem) AVPlayerItem* m_playerItem;
@property (readonly, getter=playerLayer) AVPlayerLayer* m_playerLayer;
// Retained; released in -dealloc by assigning nil through the setter.
@property (retain) AVPlayerItemTrack *videoTrack;

// Couples the observer to the platform player it reports back to.
- (AVFMediaPlayerObserver *) initWithMediaPlayerSession:(AVFMediaPlayer *)session;
// Starts asynchronous loading of the asset behind 'url'; 'mimeType' is kept
// for the resource-loader content-information reply.
- (void) setURL:(NSURL *)url mimeType:(NSString *)mimeType;
// Tears down KVO observers, notifications and the current player/item.
- (void) unloadMedia;
// Second phase of loading: validates the requested keys and builds the
// AVPlayerItem/AVPlayer once the asset's values have loaded.
- (void) prepareToPlayAsset:(AVURLAsset *)asset withKeys:(NSArray *)requestedKeys;
// Routes a preparation failure to the platform player as a QMediaPlayer error.
- (void) assetFailedToPrepareForPlayback:(NSError *)error;
// AVPlayerItemDidPlayToEndTimeNotification handler.
- (void) playerItemDidReachEnd:(NSNotification *)notification;
// Central KVO dispatcher for all observed AVPlayer/AVPlayerItem keys.
- (void) observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object
                        change:(NSDictionary *)change context:(void *)context;
// Detaches the platform player so late callbacks become no-ops.
- (void)clearSession;
- (void) dealloc;
// AVAssetResourceLoaderDelegate entry point for the "iodevice" scheme.
- (BOOL) resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
@end
68
69#ifdef Q_OS_IOS
70// Alas, no such thing as 'class variable', hence globals:
71static unsigned sessionActivationCount;
72static QMutex sessionMutex;
73#endif // Q_OS_IOS
74
75namespace {
76
// Mutex-guarded back-pointer to the AVFMediaPlayer. AVFoundation delivers
// KVO callbacks and completion blocks on arbitrary queues, possibly after
// the platform player has been destroyed; every access therefore goes
// through this guard, and clearSession()/dealloc null the pointer under the
// same mutex so late callbacks degrade to no-ops.
struct GuardedPlatformPlayer
{
    mutable QMutex mutex;
    AVFMediaPlayer *player{};   // null once detached via clear()

    // True while a platform player is still attached.
    explicit operator bool() const
    {
        std::lock_guard guard(mutex);
        return player;
    }

    // Sentinel error type for withPlatformPlayer() when the player is gone.
    struct not_a_platform_player_t
    {
    };

    // Runs f(player) synchronously on the calling thread, returning f's
    // result in an expected, or not_a_platform_player_t if already cleared.
    // NOTE(review): f executes with the mutex held, so f must not re-enter
    // this wrapper or it will deadlock.
    template <typename Functor>
    auto withPlatformPlayer(Functor &&f)
            -> q23::expected<std::invoke_result_t<Functor, AVFMediaPlayer *>,
                             not_a_platform_player_t>
    {
        std::unique_lock guard(mutex);
        if (!player)
            return q23::unexpected{ not_a_platform_player_t{} };
        if constexpr (std::is_void_v<std::invoke_result_t<Functor, AVFMediaPlayer *>>) {
            f(player);
            return {};
        } else {
            return f(player);
        }
    }

    // Runs f(player) on the player's own thread: directly (mutex released
    // first) when already there, otherwise queued via invokeMethod with
    // 'player' as context object so the call is dropped if it is destroyed.
    template <typename Functor>
    void invokeWithPlatformPlayer(Functor f)
    {
        std::unique_lock guard(mutex);
        if (!player)
            return;

        if (player->thread()->isCurrentThread()) {
            // Unlock before calling out so f may re-enter the guard.
            guard.unlock();
            f(player);
        } else {
            QMetaObject::invokeMethod(player, [f = std::move(f), player = player]() {
                f(player);
            });
        }
    }

    // Detaches the platform player; subsequent callbacks become no-ops.
    void clear()
    {
        std::lock_guard<QMutex> guard(mutex);
        player = nullptr;
    }
};
131} // namespace
132
@implementation AVFMediaPlayerObserver {
@private
    GuardedPlatformPlayer m_platformPlayer; // thread-safe back-pointer to the Qt player
    AVPlayer *m_player;                     // retained; created in prepareToPlayAsset
    AVPlayerItem *m_playerItem;             // current item (autoreleased ownership)
    AVPlayerLayer *m_playerLayer;           // retained; created in init
    NSURL *m_URL;                           // copied media URL
    BOOL m_bufferIsLikelyToKeepUp;          // last reported keep-up state, for change detection
    NSData *m_data;                         // NOTE(review): unused in the visible code
    NSString *m_mimeType;                   // retained; advisory type for resource loading
#ifdef Q_OS_IOS
    BOOL m_activated;                       // whether this instance holds an audio-session activation
#endif
}

// Map the synthesized ivars onto the prefixed names declared above.
@synthesize m_player, m_playerItem, m_playerLayer;
149
150#ifdef Q_OS_IOS
/// Reference-counts AVAudioSession activation across all player instances:
/// the first balanced activation switches the shared session on, the last
/// balanced deactivation switches it off. Guarded by the global sessionMutex.
- (void)setSessionActive:(BOOL)active
{
    const QMutexLocker locker(&sessionMutex);

    if (!active) {
        // Refuse deactivations that were never matched by an activation.
        if (!sessionActivationCount || !m_activated) {
            qWarning("Unbalanced audio session deactivation, ignoring.");
            return;
        }
        --sessionActivationCount;
        m_activated = NO;
        if (sessionActivationCount == 0)
            [AVAudioSession.sharedInstance setActive:NO error:nil];
        return;
    }

    // Don't count the same player twice if already activated,
    // unless it tried to deactivate first:
    if (m_activated)
        return;
    if (sessionActivationCount == 0)
        [AVAudioSession.sharedInstance setActive:YES error:nil];
    ++sessionActivationCount;
    m_activated = YES;
}
174#endif // Q_OS_IOS
175
/// Designated initializer: attaches this observer to its platform player and
/// creates the (initially player-less) render layer.
- (AVFMediaPlayerObserver *) initWithMediaPlayerSession:(AVFMediaPlayer *)session
{
    self = [super init];
    if (!self)
        return nil;

    m_platformPlayer.player = session;
    m_bufferIsLikelyToKeepUp = FALSE;

    // Build the output layer up front; an AVPlayer is attached later, once
    // media has been prepared. Retained manually — this file uses MRC.
    AVPlayerLayer *layer = [AVPlayerLayer playerLayerWithPlayer:nil];
    [layer retain];
    layer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    layer.anchorPoint = CGPointMake(0.0f, 0.0f);
    m_playerLayer = layer;

    return self;
}
189
// Begins asynchronous loading of the media at 'url'. Stores the mime type
// for later resource-loader replies, then kicks off AVURLAsset key loading
// ("tracks", "playable"); prepareToPlayAsset:withKeys: runs on the main
// queue once the keys are available. All ownership below is manual (MRC).
- (void) setURL:(NSURL *)url mimeType:(NSString *)mimeType
{
    [m_mimeType release];
    m_mimeType = [mimeType retain];

    // NOTE(review): pointer-identity comparison — an equal-valued but
    // distinct NSURL instance triggers a reload; presumably intentional,
    // since callers always pass freshly converted URLs. Confirm.
    if (m_URL != url)
    {
        [m_URL release];
        m_URL = [url copy];

        //Create an asset for inspection of a resource referenced by a given URL.
        //Load the values for the asset keys "tracks", "playable".

        // use __block to avoid maintaining strong references on variables captured by the
        // following block callback
#if defined(Q_OS_IOS)
        // Sandboxed file URLs need explicit security-scoped access while loading.
        BOOL isAccessing = [m_URL startAccessingSecurityScopedResource];
#endif
        __block AVURLAsset *asset = [[AVURLAsset URLAssetWithURL:m_URL options:nil] retain];
        // We answer "iodevice" scheme requests ourselves on the main queue.
        [asset.resourceLoader setDelegate:self queue:dispatch_get_main_queue()];

        __block NSArray *requestedKeys = [[NSArray arrayWithObjects:AVF_TRACKS_KEY, AVF_PLAYABLE_KEY, nil] retain];

        // Keep self alive until the completion block has run; the matching
        // release happens inside the dispatched block below.
        __block AVFMediaPlayerObserver *blockSelf = [self retain];

        // Tells the asset to load the values of any of the specified keys that are not already loaded.
        [asset loadValuesAsynchronouslyForKeys:requestedKeys completionHandler:
         ^{
             // Hop back to the main queue before touching player state.
             dispatch_async( dispatch_get_main_queue(),
                            ^{
#if defined(Q_OS_IOS)
                                if (isAccessing)
                                    [m_URL stopAccessingSecurityScopedResource];
#endif
                                [blockSelf prepareToPlayAsset:asset withKeys:requestedKeys];
                                // Balance the explicit retains taken above.
                                [asset release];
                                [requestedKeys release];
                                [blockSelf release];
                            });
         }];
    }
}
232
// Tears down the current media: removes every KVO observer and notification
// registered in prepareToPlayAsset:withKeys: (removal must mirror the adds
// exactly, or KVO throws), stops and releases the player, detaches the
// render layer, and releases the iOS audio-session activation.
- (void) unloadMedia
{
    if (m_playerItem) {
        [m_playerItem removeObserver:self forKeyPath:@"presentationSize"];
        [m_playerItem removeObserver:self forKeyPath:AVF_STATUS_KEY];
        [m_playerItem removeObserver:self forKeyPath:AVF_BUFFER_LIKELY_KEEP_UP_KEY];
        [m_playerItem removeObserver:self forKeyPath:AVF_TRACKS_KEY];

        [[NSNotificationCenter defaultCenter] removeObserver:self
                                                        name:AVPlayerItemDidPlayToEndTimeNotification
                                                      object:m_playerItem];
        // Not released here: the item was obtained from an autoreleasing
        // factory and is owned by the AVPlayer.
        m_playerItem = nullptr;
    }
    if (m_player) {
        // Rate 0 stops playback before the observers come off.
        [m_player setRate:0.0];
        [m_player removeObserver:self forKeyPath:AVF_CURRENT_ITEM_DURATION_KEY];
        [m_player removeObserver:self forKeyPath:AVF_CURRENT_ITEM_KEY];
        [m_player removeObserver:self forKeyPath:AVF_RATE_KEY];
        [m_player replaceCurrentItemWithPlayerItem:nil];
        [m_player release];
        m_player = nullptr;
    }
    if (m_playerLayer)
        m_playerLayer.player = nil;
#if defined(Q_OS_IOS)
    // Drop this instance's audio-session activation (ref-counted globally).
    [self setSessionActive:NO];
#endif
}
261
// Second phase of media loading, run on the main queue after the asset keys
// ("tracks", "playable") have loaded: validates the keys, replaces any prior
// item, then builds a fresh AVPlayerItem + AVPlayer and registers all KVO
// observers and the end-of-playback notification. The observation-context
// tags referenced below are file-static markers declared earlier in this
// file (not visible in this excerpt).
- (void) prepareToPlayAsset:(AVURLAsset *)asset
                   withKeys:(NSArray *)requestedKeys
{
    // Bail out if the Qt-side player has already been destroyed.
    if (!m_platformPlayer)
        return;

    //Make sure that the value of each key has loaded successfully.
    for (NSString *thisKey in requestedKeys)
    {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [asset statusOfValueForKey:thisKey error:&error];
#ifdef QT_DEBUG_AVF
        qDebug() << Q_FUNC_INFO << [thisKey UTF8String] << " status: " << keyStatus;
#endif
        if (keyStatus == AVKeyValueStatusFailed)
        {
            [self assetFailedToPrepareForPlayback:error];
            return;
        }
    }

    //Use the AVAsset playable property to detect whether the asset can be played.
#ifdef QT_DEBUG_AVF
    qDebug() << Q_FUNC_INFO << "isPlayable: " << [asset isPlayable];
#endif
    // Not fatal: some assets report not-playable yet still play.
    if (!asset.playable)
        qWarning() << "Asset reported to be not playable. Playback of this asset may not be possible.";

    //At this point we're ready to set up for playback of the asset.
    //Stop observing our prior AVPlayerItem, if we have one.
    if (m_playerItem)
    {
        //Remove existing player item key value observers and notifications.
        [self unloadMedia];
    }

    //Create a new instance of AVPlayerItem from the now successfully loaded AVAsset.
    m_playerItem = [AVPlayerItem playerItemWithAsset:asset];
    if (!m_playerItem) {
        qWarning() << "Failed to create player item";
        //Generate an error describing the failure.
        NSString *localizedDescription = NSLocalizedString(@"Item cannot be played", @"Item cannot be played description");
        NSString *localizedFailureReason = NSLocalizedString(@"The assets tracks were loaded, but couldn't create player item.", @"Item cannot be played failure reason");
        NSDictionary *errorDict = [NSDictionary dictionaryWithObjectsAndKeys:
                                   localizedDescription, NSLocalizedDescriptionKey,
                                   localizedFailureReason, NSLocalizedFailureReasonErrorKey,
                                   nil];
        NSError *assetCannotBePlayedError = [NSError errorWithDomain:@"StitchedStreamPlayer" code:0 userInfo:errorDict];

        [self assetFailedToPrepareForPlayback:assetCannotBePlayedError];
        return;
    }

    // Every addObserver below has a matching removeObserver in unloadMedia;
    // keep the two lists in sync.
    //Observe the player item "status" key to determine when it is ready to play.
    [m_playerItem addObserver:self
                   forKeyPath:AVF_STATUS_KEY
                      options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                      context:AVFMediaPlayerObserverStatusObservationContext];

    [m_playerItem addObserver:self
                   forKeyPath:@"presentationSize"
                      options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                      context:AVFMediaPlayerObserverPresentationSizeContext];

    [m_playerItem addObserver:self
                   forKeyPath:AVF_BUFFER_LIKELY_KEEP_UP_KEY
                      options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                      context:AVFMediaPlayerObserverBufferLikelyToKeepUpContext];

    [m_playerItem addObserver:self
                   forKeyPath:AVF_TRACKS_KEY
                      options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                      context:AVFMediaPlayerObserverTracksContext];

    //When the player item has played to its end time we'll toggle
    //the movie controller Pause button to be the Play button
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(playerItemDidReachEnd:)
                                                 name:AVPlayerItemDidPlayToEndTimeNotification
                                               object:m_playerItem];

    //Get a new AVPlayer initialized to play the specified player item.
    m_player = [AVPlayer playerWithPlayerItem:m_playerItem];
    // Manual ownership (MRC); released in unloadMedia.
    [m_player retain];

    //Set the initial audio ouptut settings on new player object
    {
        m_platformPlayer.withPlatformPlayer([&](AVFMediaPlayer *player) {
            auto *audioOutput = player->m_audioOutput;
            // No audio output configured => full volume but muted.
            m_player.volume = (audioOutput ? audioOutput->volume : 1.);
            m_player.muted = (audioOutput ? audioOutput->muted : true);
            player->updateAudioOutputDevice();
        });
    }

    //Assign the output layer to the new player
    m_playerLayer.player = m_player;

    //Observe the AVPlayer "currentItem" property to find out when any
    //AVPlayer replaceCurrentItemWithPlayerItem: replacement will/did
    //occur.
    [m_player addObserver:self
               forKeyPath:AVF_CURRENT_ITEM_KEY
                  options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                  context:AVFMediaPlayerObserverCurrentItemObservationContext];

    //Observe the AVPlayer "rate" property to update the scrubber control.
    [m_player addObserver:self
               forKeyPath:AVF_RATE_KEY
                  options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                  context:AVFMediaPlayerObserverRateObservationContext];

    //Observe the duration for getting the buffer state
    [m_player addObserver:self
               forKeyPath:AVF_CURRENT_ITEM_DURATION_KEY
                  options:0
                  context:AVFMediaPlayerObserverCurrentItemDurationObservationContext];
#if defined(Q_OS_IOS)
    // Playback category + mix-with-others, then take a ref-counted activation.
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback withOptions:AVAudioSessionCategoryOptionMixWithOthers error:nil];
    [self setSessionActive:YES];
#endif
}
384
/// Reports a media-preparation failure to the platform player. An error
/// whose underlying cause lies outside AVFoundation's domain is classified
/// as a resource error; everything else counts as a format error.
-(void) assetFailedToPrepareForPlayback:(NSError *)error
{
    const auto classify = [](NSError *e) {
        if (e) {
            NSError *underlyingError = e.userInfo[NSUnderlyingErrorKey];
            if (underlyingError && ![underlyingError.domain isEqualToString:AVFoundationErrorDomain])
                return QMediaPlayer::ResourceError;
        }
        return QMediaPlayer::FormatError;
    };

    const QMediaPlayer::Error errorCode = classify(error);
    m_platformPlayer.invokeWithPlatformPlayer([errorCode](AVFMediaPlayer *player) {
        player->processMediaLoadError(errorCode);
    });

#ifdef QT_DEBUG_AVF
    qDebug() << Q_FUNC_INFO;
    qDebug() << [[error localizedDescription] UTF8String];
    qDebug() << [[error localizedFailureReason] UTF8String];
    qDebug() << [[error localizedRecoverySuggestion] UTF8String];
#endif
}
404
/// Handler for AVPlayerItemDidPlayToEndTimeNotification: forwards the
/// end-of-stream event to the platform player on its own thread.
- (void) playerItemDidReachEnd:(NSNotification *)notification
{
    Q_UNUSED(notification);

    m_platformPlayer.invokeWithPlatformPlayer(
            [](AVFMediaPlayer *player) { player->processEOS(); });
}
413
// Central KVO dispatcher. Each registered observation carries a file-static
// context tag (declared earlier in this file, outside this excerpt), which
// is how the branches below are distinguished; unknown contexts are passed
// to super, as KVO requires. Qt-side state changes are forwarded through
// invokeWithPlatformPlayer so they run on the platform player's thread.
- (void) observeValueForKeyPath:(NSString*) path
                       ofObject:(id)object
                         change:(NSDictionary*)change
                        context:(void*)context
{
    //AVPlayerItem "status" property value observer.
    if (context == AVFMediaPlayerObserverStatusObservationContext)
    {
        AVPlayerStatus status = (AVPlayerStatus)[[change objectForKey:NSKeyValueChangeNewKey] integerValue];
        switch (status)
        {
            //Indicates that the status of the player is not yet known because
            //it has not tried to load new media resources for playback
            case AVPlayerStatusUnknown:
            {
                //QMetaObject::invokeMethod(m_session, "processLoadStateChange", Qt::AutoConnection);
            }
            break;

            case AVPlayerStatusReadyToPlay:
            {
                //Once the AVPlayerItem becomes ready to play, i.e.
                //[playerItem status] == AVPlayerItemStatusReadyToPlay,
                //its duration can be fetched from the item.

                m_platformPlayer.invokeWithPlatformPlayer([](AVFMediaPlayer *platformPlayer) {
                    platformPlayer->processLoadStateChange();
                });
            }
            break;

            case AVPlayerStatusFailed:
            {
                // Report the item's error, then still drive the load-state
                // machine so the Qt side leaves the loading state.
                AVPlayerItem *playerItem = static_cast<AVPlayerItem*>(object);
                [self assetFailedToPrepareForPlayback:playerItem.error];

                m_platformPlayer.invokeWithPlatformPlayer([](AVFMediaPlayer *platformPlayer) {
                    platformPlayer->processLoadStateChange();
                });
            }
            break;
        }
    } else if (context == AVFMediaPlayerObserverPresentationSizeContext) {
        // Forward the item's native video size to the Qt side.
        QSize size(m_playerItem.presentationSize.width, m_playerItem.presentationSize.height);
        m_platformPlayer.invokeWithPlatformPlayer([size](AVFMediaPlayer *platformPlayer) {
            platformPlayer->nativeSizeChanged(size);
        });
    } else if (context == AVFMediaPlayerObserverBufferLikelyToKeepUpContext)
    {
        // Map AVFoundation's boolean keep-up flag onto a coarse 0/100
        // buffer-progress value, reporting only on changes.
        const bool isPlaybackLikelyToKeepUp = [m_playerItem isPlaybackLikelyToKeepUp];
        if (isPlaybackLikelyToKeepUp != m_bufferIsLikelyToKeepUp) {
            m_bufferIsLikelyToKeepUp = isPlaybackLikelyToKeepUp;
            int bufferProgress = isPlaybackLikelyToKeepUp ? 100 : 0;

            m_platformPlayer.invokeWithPlatformPlayer(
                    [bufferProgress](AVFMediaPlayer *platformPlayer) {
                        platformPlayer->processBufferStateChange(bufferProgress);
                    });
        }
    }
    else if (context == AVFMediaPlayerObserverTracksContext)
    {
        m_platformPlayer.invokeWithPlatformPlayer([](AVFMediaPlayer *platformPlayer) {
            platformPlayer->updateTracks();
        });
    }
    //AVPlayer "rate" property value observer.
    else if (context == AVFMediaPlayerObserverRateObservationContext) {
        //QMetaObject::invokeMethod(m_session, "setPlaybackRate", Qt::AutoConnection, Q_ARG(qreal, [m_player rate]));
    }
    //AVPlayer "currentItem" property observer.
    //Called when the AVPlayer replaceCurrentItemWithPlayerItem:
    //replacement will/did occur.
    else if (context == AVFMediaPlayerObserverCurrentItemObservationContext) {
        // Track the player's current item (no retain — matches the
        // autoreleased ownership used elsewhere in this file).
        AVPlayerItem *newPlayerItem = [change objectForKey:NSKeyValueChangeNewKey];
        if (m_playerItem != newPlayerItem)
            m_playerItem = newPlayerItem;
    } else if (context == AVFMediaPlayerObserverCurrentItemDurationObservationContext) {
        // Convert the CMTime duration to milliseconds for the Qt side.
        const CMTime time = [m_playerItem duration];
        const qint64 dur = static_cast<qint64>(float(time.value) / float(time.timescale) * 1000.0f);

        m_platformPlayer.invokeWithPlatformPlayer([dur](AVFMediaPlayer *platformPlayer) {
            platformPlayer->processDurationChange(dur);
        });
    } else {
        [super observeValueForKeyPath:path ofObject:object change:change context:context];
    }
}
502
// Detaches the platform player (called from its destructor) so that any KVO
// callbacks or blocks still in flight become harmless no-ops.
- (void)clearSession
{
#ifdef QT_DEBUG_AVF
    qDebug() << Q_FUNC_INFO;
#endif
    m_platformPlayer.clear();
}
510
// MRC teardown: unload media (removes observers, releases the player), then
// release every manually retained ivar. Order matters — unloadMedia must run
// while the observers are still registered.
- (void) dealloc
{
#ifdef QT_DEBUG_AVF
    qDebug() << Q_FUNC_INFO;
#endif
    [self unloadMedia];

    // Defensive: normally already cleared via clearSession.
    m_platformPlayer.clear();

    if (m_URL) {
        [m_URL release];
    }

    [m_mimeType release];
    [m_playerLayer release];
    // 'videoTrack' is a 'retain' property, but still needs a
    // manual 'release' (i.e. setting to nil):
    self.videoTrack = nil;
    [super dealloc];
}
531
/// AVAssetResourceLoaderDelegate: serves media data for the custom
/// "iodevice" URL scheme from the QIODevice supplied by the platform player.
/// Returns YES when the request was handled, NO for foreign schemes or when
/// the player/stream is gone. Runs on the main queue (set as the delegate
/// queue in setURL:mimeType:).
- (BOOL) resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    Q_UNUSED(resourceLoader);

    if (![loadingRequest.request.URL.scheme isEqualToString:@"iodevice"])
        return NO;

    auto result = m_platformPlayer.withPlatformPlayer([&](AVFMediaPlayer *platformPlayer) {
        QIODevice *device = platformPlayer->mediaStream();
        if (!device)
            return NO;

        // Best-effort seek: sequential devices may refuse and simply keep
        // their current position.
        device->seek(loadingRequest.dataRequest.requestedOffset);
        if (loadingRequest.contentInformationRequest) {
            loadingRequest.contentInformationRequest.contentType = m_mimeType;
            loadingRequest.contentInformationRequest.contentLength = device->size();
            loadingRequest.contentInformationRequest.byteRangeAccessSupported = YES;
        }

        if (loadingRequest.dataRequest) {
            NSInteger requestedLength = loadingRequest.dataRequest.requestedLength;
            // Stream in chunks of at most 32 KiB (was mistakenly 32 * 1064).
            const int maxBytes = qMin(32 * 1024, int(requestedLength));
            QByteArray buffer;
            buffer.resize(maxBytes);

            NSInteger submitted = 0;
            while (submitted < requestedLength) {
                // Clamp the read so we never hand AVFoundation more bytes
                // than it actually asked for.
                const qint64 chunk = qMin<qint64>(maxBytes, requestedLength - submitted);
                qint64 len = device->read(buffer.data(), chunk);
                if (len < 1)
                    break;

                [loadingRequest.dataRequest respondWithData:[NSData dataWithBytes:buffer.constData()
                                                             length:len]];
                submitted += len;
            }

            // Finish loading even if not all bytes submitted.
            [loadingRequest finishLoading];
        }

        return YES;
    });

    return result.value_or(NO);
}
577@end
578
// Constructs the platform player: creates the Objective-C observer bound to
// this instance, wires the periodic position-poll timer, and installs the
// default video renderer output.
AVFMediaPlayer::AVFMediaPlayer(QMediaPlayer *player)
    : QObject(player),
      QPlatformMediaPlayer(player),
      m_mediaStream(nullptr),
      m_rate(1.0),
      m_requestedPosition(-1),   // -1 == no pending seek
      m_duration(0),
      m_bufferProgress(0)
{
    m_observer = [[AVFMediaPlayerObserver alloc] initWithMediaPlayerSession:this];
    // AVPlayer has no position-changed callback; poll while playing.
    connect(&m_playbackTimer, &QTimer::timeout, this, &AVFMediaPlayer::processPositionChange);
    setVideoOutput(new AVFVideoRendererControl(this));
}
592
594{
595#ifdef QT_DEBUG_AVF
596 qDebug() << Q_FUNC_INFO;
597#endif
598 //Detatch the session from the sessionObserver (which could still be alive trying to communicate with this session).
599 [m_observer clearSession];
600 [m_observer release];
601}
602
603void AVFMediaPlayer::setVideoSink(QVideoSink *sink)
604{
605 m_videoSink = sink ? static_cast<AVFVideoSink *>(sink->platformVideoSink()): nullptr;
606 m_videoOutput->setVideoSink(m_videoSink);
607}
608
610{
611#ifdef QT_DEBUG_AVF
612 qDebug() << Q_FUNC_INFO << output;
613#endif
614
615 if (m_videoOutput == output)
616 return;
617
618 //Set the current output layer to null to stop rendering
619 if (m_videoOutput) {
620 m_videoOutput->setLayer(nullptr);
621 }
622
623 m_videoOutput = output;
624
625 if (m_videoOutput && state() != QMediaPlayer::StoppedState)
626 m_videoOutput->setLayer([m_observer playerLayer]);
627}
628
630{
631#ifdef QT_DEBUG_AVF
632 qDebug() << Q_FUNC_INFO;
633#endif
634 AVAsset *currentAsset = [[m_observer playerItem] asset];
635 return currentAsset;
636}
637
639{
640 return m_resources;
641}
642
644{
645 return m_mediaStream;
646}
647
// Resolves relative local-file URLs to absolute ones (AVFoundation cannot
// handle relative file paths) and forwards the result, plus the advisory
// mime type, to the observer.
static void setURL(AVFMediaPlayerObserver *observer, const QUrl &url, const QString &mimeType = QString())
{
    QUrl resolvedUrl = url;
    // AVFoundation cannot handle file URLs with a relative path
    if (url.isLocalFile() && !QDir::isAbsolutePath(url.path()))
        resolvedUrl = QUrl::fromLocalFile(QFileInfo(url.path()).absoluteFilePath());
    NSURL *nsurl = resolvedUrl.toNSURL();
    // Encode as UTF-8: stringWithUTF8String: expects UTF-8 bytes, so passing
    // toLatin1() output would garble (or nil out) any non-ASCII mime type.
    // Identical result for the plain-ASCII types used in practice.
    [observer setURL:nsurl mimeType:[NSString stringWithUTF8String:mimeType.toUtf8().constData()]];
}
657
658
659void AVFMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
660{
661#ifdef QT_DEBUG_AVF
662 qDebug() << Q_FUNC_INFO << content.request().url();
663#endif
664
665 [m_observer unloadMedia];
666
667 m_resources = content;
668 resetStream(stream);
669
670 m_requestedPosition = -1;
671 orientationChanged(QtVideo::Rotation::None, false);
672 positionChanged(position());
673 if (m_duration != 0) {
674 m_duration = 0;
675 durationChanged(0);
676 }
677 if (!m_metaData.isEmpty()) {
678 m_metaData.clear();
679 metaDataChanged();
680 }
681 resetBufferProgress();
682 for (int i = 0; i < QPlatformMediaPlayer::NTrackTypes; ++i) {
683 tracks[i].clear();
684 nativeTracks[i].clear();
685 }
686 tracksChanged();
687
688 if (!m_mediaStream && content.isEmpty()) {
689 seekableChanged(false);
690 audioAvailableChanged(false);
691 videoAvailableChanged(false);
692
693 mediaStatusChanged(QMediaPlayer::NoMedia);
694 stateChanged(QMediaPlayer::StoppedState);
695
696 return;
697 }
698
699 mediaStatusChanged(QMediaPlayer::LoadingMedia);
700
701 if (m_mediaStream) {
702 // If there is a data, try to load it,
703 // otherwise wait for readyRead.
704 if (m_mediaStream->size())
706 } else {
707 //Load AVURLAsset
708 //initialize asset using content's URL
709 setURL(m_observer, m_resources);
710 }
711
712 stateChanged(QMediaPlayer::StoppedState);
713}
714
716{
717 AVPlayerItem *playerItem = [m_observer playerItem];
718
719 if (m_requestedPosition != -1)
720 return m_requestedPosition;
721
722 if (!playerItem)
723 return 0;
724
725 CMTime time = [playerItem currentTime];
726 return static_cast<quint64>(float(time.value) / float(time.timescale) * 1000.0f);
727}
728
730{
731#ifdef QT_DEBUG_AVF
732 qDebug() << Q_FUNC_INFO;
733#endif
734 return m_duration;
735}
736
738{
739#ifdef QT_DEBUG_AVF
740 qDebug() << Q_FUNC_INFO;
741#endif
742 return m_bufferProgress/100.;
743}
744
746{
747 AVPlayerItem *playerItem = [m_observer playerItem];
748
749 if (!playerItem)
750 return {};
751
752 if (state() == QMediaPlayer::StoppedState)
753 return {};
754
755 QMediaTimeRange timeRanges;
756
757 NSArray *ranges = [playerItem loadedTimeRanges];
758 for (NSValue *timeRange in ranges) {
759 CMTimeRange currentTimeRange = [timeRange CMTimeRangeValue];
760 qint64 startTime = qint64(float(currentTimeRange.start.value) / currentTimeRange.start.timescale * 1000.0);
761 timeRanges.addInterval(startTime, startTime + qint64(float(currentTimeRange.duration.value) / currentTimeRange.duration.timescale * 1000.0));
762 }
763 return timeRanges;
764}
765
767{
768 return m_rate;
769}
770
771void AVFMediaPlayer::setAudioOutput(QPlatformAudioOutput *output)
772{
773 if (m_audioOutput == output)
774 return;
775 if (m_audioOutput)
776 m_audioOutput->q->disconnect(this);
777 m_audioOutput = output;
778 if (m_audioOutput) {
779 connect(m_audioOutput->q, &QAudioOutput::deviceChanged, this, &AVFMediaPlayer::updateAudioOutputDevice);
780 connect(m_audioOutput->q, &QAudioOutput::volumeChanged, this, &AVFMediaPlayer::setVolume);
781 connect(m_audioOutput->q, &QAudioOutput::mutedChanged, this, &AVFMediaPlayer::setMuted);
782 //connect(m_audioOutput->q, &QAudioOutput::audioRoleChanged, this, &AVFMediaPlayer::setAudioRole);
783 }
785 setMuted(m_audioOutput ? m_audioOutput->muted : true);
786 setVolume(m_audioOutput ? m_audioOutput->volume : 1.);
787}
788
790{
791 return m_metaData;
792}
793
794void AVFMediaPlayer::setPlaybackRate(qreal rate)
795{
796#ifdef QT_DEBUG_AVF
797 qDebug() << Q_FUNC_INFO << rate;
798#endif
799
800 if (QtPrivate::fuzzyCompare(m_rate, rate))
801 return;
802
803 m_rate = rate;
804
805 AVPlayer *player = [m_observer player];
806 if (player && state() == QMediaPlayer::PlayingState)
807 [player setRate:m_rate];
808
809 playbackRateChanged(m_rate);
810}
811
813{
814#ifdef QT_DEBUG_AVF
815 qDebug() << Q_FUNC_INFO << pos;
816#endif
817
818 if (pos == position())
819 return;
820
821 AVPlayerItem *playerItem = [m_observer playerItem];
822 if (!playerItem) {
823 m_requestedPosition = pos;
824 positionChanged(m_requestedPosition);
825 return;
826 }
827
828 if (!isSeekable()) {
829 if (m_requestedPosition != -1) {
830 m_requestedPosition = -1;
831 positionChanged(position());
832 }
833 return;
834 }
835
836 pos = qMax(qint64(0), pos);
837 if (duration() > 0)
838 pos = qMin(pos, duration());
839 m_requestedPosition = pos;
840
841 CMTime newTime = [playerItem currentTime];
842 newTime.value = (pos / 1000.0f) * newTime.timescale;
843 [playerItem seekToTime:newTime toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero
844 completionHandler:^(BOOL finished) {
845 if (finished)
846 m_requestedPosition = -1;
847 }];
848
849 positionChanged(pos);
850
851 // Reset media status if the current status is EndOfMedia
852 if (mediaStatus() == QMediaPlayer::EndOfMedia) {
853 QMediaPlayer::MediaStatus newMediaStatus = (state() == QMediaPlayer::PausedState)
854 ? QMediaPlayer::BufferedMedia
855 : QMediaPlayer::LoadedMedia;
856 mediaStatusChanged(newMediaStatus);
857 }
858}
859
861{
862#ifdef QT_DEBUG_AVF
863 qDebug() << Q_FUNC_INFO << "currently: " << state();
864#endif
865
866 if (mediaStatus() == QMediaPlayer::NoMedia || mediaStatus() == QMediaPlayer::InvalidMedia)
867 return;
868
869 if (state() == QMediaPlayer::PlayingState)
870 return;
871
872 if (state() != QMediaPlayer::PausedState)
873 resetCurrentLoop();
874
875 if (m_videoOutput && m_videoSink)
876 m_videoOutput->setLayer([m_observer playerLayer]);
877
878 // Reset media status if the current status is EndOfMedia
879 if (mediaStatus() == QMediaPlayer::EndOfMedia)
880 setPosition(0);
881
882 if (mediaStatus() == QMediaPlayer::LoadedMedia
883 || mediaStatus() == QMediaPlayer::BufferedMedia) {
884 // Setting the rate starts playback
885 [[m_observer player] setRate:m_rate];
886 }
887
888 processLoadStateChange(QMediaPlayer::PlayingState);
889
890 stateChanged(QMediaPlayer::PlayingState);
891 m_playbackTimer.start(100);
892}
893
895{
896#ifdef QT_DEBUG_AVF
897 qDebug() << Q_FUNC_INFO << "currently: " << state();
898#endif
899
900 if (mediaStatus() == QMediaPlayer::NoMedia || mediaStatus() == QMediaPlayer::InvalidMedia)
901 return;
902
903 if (state() == QMediaPlayer::PausedState)
904 return;
905
906 stateChanged(QMediaPlayer::PausedState);
907
908 if (m_videoOutput && m_videoSink)
909 m_videoOutput->setLayer([m_observer playerLayer]);
910
911 [[m_observer player] pause];
912
913 // Reset media status if the current status is EndOfMedia
914 if (mediaStatus() == QMediaPlayer::EndOfMedia)
915 setPosition(0);
916
917 positionChanged(position());
918 m_playbackTimer.stop();
919}
920
922{
923#ifdef QT_DEBUG_AVF
924 qDebug() << Q_FUNC_INFO << "currently: " << state();
925#endif
926
927 if (state() == QMediaPlayer::StoppedState && mediaStatus() != QMediaPlayer::EndOfMedia)
928 return;
929
930 // AVPlayer doesn't have stop(), only pause() and play().
931 [[m_observer player] pause];
932 setPosition(0);
933
934 if (m_videoOutput)
935 m_videoOutput->setLayer(nullptr);
936
937 resetBufferProgress();
938
939 if (mediaStatus() == QMediaPlayer::BufferedMedia || mediaStatus() == QMediaPlayer::EndOfMedia)
940 mediaStatusChanged(QMediaPlayer::LoadedMedia);
941
942 stateChanged(QMediaPlayer::StoppedState);
943 m_playbackTimer.stop();
944}
945
946void AVFMediaPlayer::setVolume(float volume)
947{
948#ifdef QT_DEBUG_AVF
949 qDebug() << Q_FUNC_INFO << volume;
950#endif
951
952 AVPlayer *player = [m_observer player];
953 if (player)
954 player.volume = volume;
955}
956
957void AVFMediaPlayer::setMuted(bool muted)
958{
959#ifdef QT_DEBUG_AVF
960 qDebug() << Q_FUNC_INFO << muted;
961#endif
962
963 AVPlayer *player = [m_observer player];
964 if (player)
965 player.muted = muted;
966}
967
969{
970#ifdef Q_OS_MACOS
971 AVPlayer *player = [m_observer player];
972 if (!player)
973 return;
974
975 if (!m_audioOutput || m_audioOutput->device.id().isEmpty()) {
976 if (!m_audioOutput)
977 player.muted = true;
978 player.audioOutputDeviceUniqueID = nil;
979 } else {
980 NSString *str = QString::fromUtf8(m_audioOutput->device.id()).toNSString();
981 player.audioOutputDeviceUniqueID = str;
982 }
983#endif
984}
985
987{
988 if (doLoop()) {
989 positionChanged(duration());
990 setPosition(0);
991 [[m_observer player] setRate:m_rate];
992 return;
993 }
994
995 //AVPlayerItem has reached end of track/stream
996#ifdef QT_DEBUG_AVF
997 qDebug() << Q_FUNC_INFO;
998#endif
999 positionChanged(position());
1000
1001 if (m_videoOutput)
1002 m_videoOutput->setLayer(nullptr);
1003
1004 resetBufferProgress();
1005
1006 stateChanged(QMediaPlayer::StoppedState);
1007 mediaStatusChanged(QMediaPlayer::EndOfMedia);
1008}
1009
// Drives the mediaStatus() state machine from the AVPlayer's load state.
// When the player reports ready-to-play: fetches metadata, publishes
// seekability and native size, applies any pending seek, and walks
// mediaStatus through the expected Loaded/Buffering/Buffered transitions.
// Independently, if the target state is Playing, playback is started by
// setting the rate.
void AVFMediaPlayer::processLoadStateChange(QMediaPlayer::PlaybackState newState)
{
    AVPlayerStatus currentStatus = [[m_observer player] status];

#ifdef QT_DEBUG_AVF
    qDebug() << Q_FUNC_INFO << currentStatus << ", " << mediaStatus() << ", " << newState;
#endif

    // Nothing to do when no media is set (e.g. a late callback after unload).
    if (mediaStatus() == QMediaPlayer::NoMedia)
        return;

    if (currentStatus == AVPlayerStatusReadyToPlay) {

        AVPlayerItem *playerItem = [m_observer playerItem];

        applyPitchCompensation(m_pitchCompensationEnabled);

        // get the meta data
        m_metaData = AVFMetaData::fromAsset(playerItem.asset);
        metaDataChanged();

        if (playerItem) {
            seekableChanged([[playerItem seekableTimeRanges] count] > 0);

            // Get the native size of the video, and reset the bounds of the player layer
            AVPlayerLayer *playerLayer = [m_observer playerLayer];
            if (m_observer.videoTrack && playerLayer) {
                // Only initialize bounds that are still unset (zero-sized).
                if (!playerLayer.bounds.size.width || !playerLayer.bounds.size.height) {
                    playerLayer.bounds = CGRectMake(0.0f, 0.0f,
                                                    m_observer.videoTrack.assetTrack.naturalSize.width,
                                                    m_observer.videoTrack.assetTrack.naturalSize.height);
                }
            }

            // Replay a seek requested before the item became seekable.
            if (m_requestedPosition != -1)
                setPosition(m_requestedPosition);
        }

        QMediaPlayer::MediaStatus newStatus = (newState != QMediaPlayer::StoppedState)
                ? QMediaPlayer::BufferedMedia
                : QMediaPlayer::LoadedMedia;

        if (newStatus != mediaStatus()) {
            if (newStatus == QMediaPlayer::BufferedMedia
                && mediaStatus() == QMediaPlayer::LoadingMedia) {
                // Emit intermediate transitions to match expected signal sequence
                mediaStatusChanged(QMediaPlayer::LoadedMedia);
                mediaStatusChanged(QMediaPlayer::BufferingMedia);
            } else if (newStatus == QMediaPlayer::BufferedMedia
                       && mediaStatus() == QMediaPlayer::LoadedMedia) {
                mediaStatusChanged(QMediaPlayer::BufferingMedia);
            }
            mediaStatusChanged(newStatus);
        }
    }

    if (newState == QMediaPlayer::PlayingState && [m_observer player]) {
        // Setting the rate is enough to start playback, no need to call play()
        [[m_observer player] setRate:m_rate];
        m_playbackTimer.start();
    }
}
1073
1074
1079
1080
{
    // Load failed: report that playback is back in the stopped state.
    // NOTE(review): signature line elided by the doc extraction — per the
    // class declaration this is processLoadStateFailure().
    stateChanged(QMediaPlayer::StoppedState);
}
1085
{
    // NOTE(review): signature line elided by the doc extraction — per the
    // class declaration this is processBufferStateChange(int bufferProgress).

    // Buffer-progress updates are only meaningful while playback is active.
    if (state() == QMediaPlayer::StoppedState)
        return;

    // Ignore no-op updates.
    if (bufferProgress == m_bufferProgress)
        return;

    auto status = mediaStatus();
    // Buffered -> unbuffered.
    if (!bufferProgress) {
        status = QMediaPlayer::StalledMedia;
    } else if (status == QMediaPlayer::StalledMedia) {
        status = QMediaPlayer::BufferedMedia;
        // Resume playback.
        if (state() == QMediaPlayer::PlayingState) {
            [[m_observer player] setRate:m_rate];
            m_playbackTimer.start();
        }
    }

    mediaStatusChanged(status);

    m_bufferProgress = bufferProgress;
    // m_bufferProgress is kept as 0..100; the signal carries a 0..1 fraction.
    bufferProgressChanged(bufferProgress / 100.);
}
1112
{
    // NOTE(review): signature line elided by the doc extraction — per the
    // class declaration this is processDurationChange(qint64 duration).

    // Only emit durationChanged() when the value actually changes.
    if (duration == m_duration)
        return;

    m_duration = duration;
    durationChanged(duration);
}
1121
{
    // Position ticks are irrelevant while stopped; suppress the signal.
    if (state() == QMediaPlayer::StoppedState)
        return;

    positionChanged(position());
}
1129
void AVFMediaPlayer::processMediaLoadError(QMediaPlayer::Error errorCode)
{
    // A pending seek can never be honoured now; drop it and re-publish the
    // (unchanged) playback position so observers stay consistent.
    const bool hadPendingSeek = (m_requestedPosition != -1);
    if (hadPendingSeek) {
        m_requestedPosition = -1;
        positionChanged(position());
    }

    // Surface the failure to the Qt front end with a translated message.
    setInvalidMediaWithError(errorCode, tr("Failed to load media"));
}
1139
1144
{
    // Build a synthetic iodevice:/// URL whose file suffix hints the container
    // format, since a raw QIODevice stream carries no file name of its own.
    QString suffix;
    if (!m_resources.isEmpty())
        suffix = QFileInfo(m_resources.path()).suffix();
    // Fall back to sniffing the stream's content when the URL gave no suffix.
    if (suffix.isEmpty() && m_mediaStream)
        suffix = QMimeDatabase().mimeTypeForData(m_mediaStream).preferredSuffix();
    const QString url = QStringLiteral("iodevice:///iodevice.") + suffix;
    setURL(m_observer, QUrl(url), suffix);
}
1155
{
    // The stream QIODevice is being destroyed; drop our reference and its
    // signal connections (resetStream(nullptr) does both).
    resetStream(nullptr);
}
1160
{
    // Rebuild the Qt-side track tables (metadata + native handles) from the
    // current AVPlayerItem and publish audio/video availability.
    // NOTE(review): signature line elided by the doc extraction — per the
    // class declaration this is updateTracks().

    // firstLoad is true only if every per-type list was empty beforehand,
    // i.e. this is the first population for the current media.
    bool firstLoad = true;
    for (int i = 0; i < QPlatformMediaPlayer::NTrackTypes; ++i) {
        if (tracks[i].count())
            firstLoad = false;
        tracks[i].clear();
        nativeTracks[i].clear();
    }
    bool hasAudio = false;
    bool hasVideo = false;
    AVPlayerItem *playerItem = [m_observer playerItem];
    if (playerItem) {
        // Check each track for audio and video content
        NSArray *tracks = playerItem.tracks;
        for (AVPlayerItemTrack *track in tracks) {
            AVAssetTrack *assetTrack = track.assetTrack;
            if (assetTrack) {
                // Map the AVFoundation media type onto Qt's stream type.
                int qtTrack = -1;
                if ([assetTrack.mediaType isEqualToString:AVMediaTypeAudio]) {
                    qtTrack = QPlatformMediaPlayer::AudioStream;
                    hasAudio = true;
                } else if ([assetTrack.mediaType isEqualToString:AVMediaTypeVideo]) {
                    qtTrack = QPlatformMediaPlayer::VideoStream;
                    hasVideo = true;
                    // Remember the video track and publish its orientation
                    // only when it actually changes.
                    if (m_observer.videoTrack != track) {
                        m_observer.videoTrack = track;
                        bool isMirrored = false;
                        QtVideo::Rotation orientation = QtVideo::Rotation::None;
                        videoOrientationForAssetTrack(assetTrack, orientation, isMirrored);
                        orientationChanged(orientation, isMirrored);
                    }
                }
                else if ([assetTrack.mediaType isEqualToString:AVMediaTypeSubtitle]) {
                    qtTrack = QPlatformMediaPlayer::SubtitleStream;
                }
                if (qtTrack != -1) {
                    QMediaMetaData metaData = AVFMetaData::fromAssetTrack(assetTrack);
                    // this-> disambiguates the member from the local NSArray *tracks.
                    this->tracks[qtTrack].append(metaData);
                    nativeTracks[qtTrack].append(track);
                }
            }
        }
        // subtitles are disabled by default
        if (firstLoad)
            setActiveTrack(SubtitleStream, -1);
    }
    audioAvailableChanged(hasAudio);
    videoAvailableChanged(hasVideo);
    tracksChanged();
}
1212
void AVFMediaPlayer::setActiveTrack(QPlatformMediaPlayer::TrackType type, int index)
{
    // Enable exactly the native track at `index` for the given stream type
    // (index -1 disables them all), then notify listeners.
    const auto &t = nativeTracks[type];
    if (type == QPlatformMediaPlayer::SubtitleStream) {
        // subtitle streams are not always automatically enabled on macOS/iOS.
        // this hack ensures they get enabled and we actually get the text
        AVPlayerItem *playerItem = m_observer.m_playerItem;
        if (playerItem) {
            AVAsset *asset = playerItem.asset;
            if (!asset)
                return;
#if defined(Q_OS_VISIONOS)
            // visionOS only provides the asynchronous selection-group API.
            [asset loadMediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible
                                completionHandler:[=](AVMediaSelectionGroup *group, NSError *error) {
                // FIXME: handle error
                if (error)
                    return;
                auto *options = group.options;
                if (options.count)
                    [playerItem selectMediaOption:options.firstObject inMediaSelectionGroup:group];
            }];
#else
            AVMediaSelectionGroup *group = [asset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
            if (!group)
                return;
            auto *options = group.options;
            if (options.count)
                [playerItem selectMediaOption:options.firstObject inMediaSelectionGroup:group];
#endif
        }
    }
    // Toggle the AVPlayerItemTrack.enabled flag on every track of this type.
    for (int i = 0; i < t.count(); ++i)
        t.at(i).enabled = (i == index);
    activeTracksChanged();
}
1248
int AVFMediaPlayer::activeTrack(QPlatformMediaPlayer::TrackType type)
{
    // Report the index of the first enabled native track of this type,
    // or -1 when none is currently active.
    const auto &trackList = nativeTracks[type];
    int index = 0;
    for (const auto &track : trackList) {
        if (track.enabled)
            return index;
        ++index;
    }
    return -1;
}
1257
int AVFMediaPlayer::trackCount(QPlatformMediaPlayer::TrackType type)
{
    // Number of native AVFoundation tracks discovered for this stream type.
    const auto &trackList = nativeTracks[type];
    return trackList.count();
}
1262
QMediaMetaData AVFMediaPlayer::trackMetaData(QPlatformMediaPlayer::TrackType type, int trackNumber)
{
    // Out-of-range indices yield an empty metadata object rather than asserting.
    const auto &metaDataList = tracks[type];
    const bool inRange = trackNumber >= 0 && trackNumber < metaDataList.count();
    return inRange ? metaDataList.at(trackNumber) : QMediaMetaData();
}
1270
void AVFMediaPlayer::resetStream(QIODevice *stream)
{
    // Detach our signal handlers from the outgoing stream, if any.
    if (m_mediaStream) {
        disconnect(m_mediaStream, &QIODevice::readyRead, this, &AVFMediaPlayer::streamReady);
        disconnect(m_mediaStream, &QIODevice::destroyed, this, &AVFMediaPlayer::streamDestroyed);
    }

    m_mediaStream = stream;

    // Attach the same handlers to the incoming stream, if any.
    if (stream) {
        connect(stream, &QIODevice::readyRead, this, &AVFMediaPlayer::streamReady);
        connect(stream, &QIODevice::destroyed, this, &AVFMediaPlayer::streamDestroyed);
    }
}
1285
void AVFMediaPlayer::applyPitchCompensation(bool enabled)
{
    // Select the time-pitch algorithm on the current item: Spectral keeps the
    // original pitch at non-unit rates, Varispeed lets it shift with the rate.
    if (AVPlayerItem *item = [m_observer playerItem]) {
        item.audioTimePitchAlgorithm = enabled ? AVAudioTimePitchAlgorithmSpectral
                                               : AVAudioTimePitchAlgorithmVarispeed;
    }
}
1296
void AVFMediaPlayer::resetBufferProgress()
{
    // Nothing to do when the buffer progress is already cleared.
    if (m_bufferProgress == 0)
        return;

    m_bufferProgress = 0;
    bufferProgressChanged(0);
}
1304
{
    // Propagate the video's native size to the attached sink, if any.
    if (!m_videoSink)
        return;
    m_videoSink->setNativeSize(size);
}
1311
void AVFMediaPlayer::orientationChanged(QtVideo::Rotation rotation, bool mirrored)
{
    // Forward the new display orientation to the renderer, if one is attached.
    if (m_videoOutput) {
        m_videoOutput->setVideoRotation(rotation);
        m_videoOutput->setVideoMirrored(mirrored);
    }
}
1320
void AVFMediaPlayer::videoOrientationForAssetTrack(AVAssetTrack *videoTrack,
                                                   QtVideo::Rotation &angle,
                                                   bool &mirrored)
{
    // Derive the rotation and mirroring Qt should apply for display from the
    // track's preferred display transform. Both out-parameters default to
    // "no rotation, not mirrored", and are left that way for a null track or
    // an identity transform.
    angle = QtVideo::Rotation::None;
    mirrored = false;
    if (!videoTrack)
        return;

    CGAffineTransform transform = videoTrack.preferredTransform;
    if (CGAffineTransformIsIdentity(transform))
        return;

    // determinant < 0 means the transform includes a reflection (mirror)
    qreal det = transform.a * transform.d - transform.b * transform.c;
    mirrored = (det < 0.0);

    // Factor out mirror before computing rotation angle.
    // Negating the first column of a mirrored matrix yields a pure rotation.
    qreal ra = mirrored ? -transform.a : transform.a;
    qreal rb = mirrored ? -transform.b : transform.b;

    // atan2 yields (-180, 180]; normalize to [0, 360). After this, the
    // negative-angle comparisons the previous version made (-90/-180/-270)
    // were unreachable, so only the positive right angles are matched.
    qreal degrees = qRadiansToDegrees(qAtan2(rb, ra));
    if (degrees < 0)
        degrees += 360.0;

    if (QtPrivate::fuzzyCompare(degrees, qreal(90)))
        angle = QtVideo::Rotation::Clockwise90;
    else if (QtPrivate::fuzzyCompare(degrees, qreal(270)))
        angle = QtVideo::Rotation::Clockwise270;
    else if (QtPrivate::fuzzyCompare(degrees, qreal(180)))
        angle = QtVideo::Rotation::Clockwise180;
    // Any other angle (including near-0) is reported as Rotation::None.
}
1357
{
    // Idempotent: bail out when the requested setting is already active,
    // so pitchCompensationChanged() only fires on real changes.
    if (m_pitchCompensationEnabled == enabled)
        return;

    applyPitchCompensation(enabled);

    m_pitchCompensationEnabled = enabled;
    pitchCompensationChanged(enabled);
}
1368
{
    // Current pitch-compensation setting as last applied.
    return m_pitchCompensationEnabled;
}
1373
{
    // Pitch compensation is unconditionally available on this backend.
    return QPlatformMediaPlayer::PitchCompensationAvailability::Available;
}
1379
1380#include "moc_avfmediaplayer_p.cpp"
static void * AVFMediaPlayerObserverCurrentItemObservationContext
static NSString *const AVF_BUFFER_LIKELY_KEEP_UP_KEY
static void * AVFMediaPlayerObserverPresentationSizeContext
static void * AVFMediaPlayerObserverTracksContext
static NSString *const AVF_STATUS_KEY
static NSString *const AVF_CURRENT_ITEM_DURATION_KEY
static void * AVFMediaPlayerObserverRateObservationContext
static void setURL(AVFMediaPlayerObserver *observer, const QUrl &url, const QString &mimeType=QString())
static NSString *const AVF_CURRENT_ITEM_KEY
static NSString *const AVF_PLAYABLE_KEY
static void * AVFMediaPlayerObserverBufferLikelyToKeepUpContext
static NSString *const AVF_RATE_KEY
static void * AVFMediaPlayerObserverCurrentItemDurationObservationContext
static void * AVFMediaPlayerObserverStatusObservationContext
qint64 duration() const override
void setVolume(float volume)
void processLoadStateChange()
void setPosition(qint64 pos) override
void setVideoSink(QVideoSink *sink) override
QMediaTimeRange availablePlaybackRanges() const override
void nativeSizeChanged(QSize size)
QMediaMetaData metaData() const override
void setVideoOutput(AVFVideoRendererControl *output)
void stop() override
float bufferProgress() const override
void processMediaLoadError(QMediaPlayer::Error errorCode)
void setMedia(const QUrl &content, QIODevice *stream) override
qint64 position() const override
void setPitchCompensation(bool enabled) override
void setMuted(bool muted)
void pause() override
void play() override
void processDurationChange(qint64 duration)
QUrl media() const override
bool pitchCompensation() const override
void processLoadStateFailure()
void updateAudioOutputDevice()
qreal playbackRate() const override
void processLoadStateChange(QMediaPlayer::PlaybackState newState)
QIODevice * mediaStream() const override
PitchCompensationAvailability pitchCompensationAvailability() const override
~AVFMediaPlayer() override
AVAsset * currentAssetHandle()
void processBufferStateChange(int bufferProgress)
void setVideoSink(AVFVideoSink *sink)