Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
avfmediaplayer.mm
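This file implements the Darwin (AVFoundation) backend of QMediaPlayer: the QPlatformMediaPlayer subclass that the public Qt Multimedia API delegates to on macOS and iOS. For orientation, here is a minimal sketch of how the public side is normally driven. It is not part of this file, it assumes the Qt Multimedia and Qt Multimedia Widgets modules are linked, and "movie.mp4" is just a placeholder path.

    // Sketch only -- illustrates the public API that AVFMediaPlayer backs.
    #include <QApplication>
    #include <QAudioOutput>
    #include <QMediaPlayer>
    #include <QUrl>
    #include <QVideoWidget>

    int main(int argc, char *argv[])
    {
        QApplication app(argc, argv);

        QMediaPlayer player;        // backed by AVFMediaPlayer on Apple platforms
        QAudioOutput audioOutput;   // reaches this file as a QPlatformAudioOutput
        QVideoWidget videoWidget;   // its video sink ends up as an AVFVideoSink

        player.setAudioOutput(&audioOutput);
        player.setVideoOutput(&videoWidget);
        player.setSource(QUrl::fromLocalFile("movie.mp4"));

        videoWidget.show();
        player.play();              // dispatched to AVFMediaPlayer::play()

        return app.exec();
    }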
1// Copyright (C) 2016 The Qt Company Ltd and/or its subsidiary(-ies).
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
4#include "avfmediaplayer_p.h"
5#include "avfvideorenderercontrol_p.h"
6#include <avfvideosink_p.h>
7#include <avfmetadata_p.h>
8
9#include "qaudiooutput.h"
10#include "private/qplatformaudiooutput_p.h"
11
12#include <qpointer.h>
13#include <QFileInfo>
14#include <QtCore/qmath.h>
15#include <QtCore/qmutex.h>
16
17#import <AVFoundation/AVFoundation.h>
18
19QT_USE_NAMESPACE
20
21//AVAsset Keys
22static NSString* const AVF_TRACKS_KEY = @"tracks";
23static NSString* const AVF_PLAYABLE_KEY = @"playable";
24
25//AVPlayerItem keys
26static NSString* const AVF_STATUS_KEY = @"status";
27static NSString* const AVF_BUFFER_LIKELY_KEEP_UP_KEY = @"playbackLikelyToKeepUp";
28
29//AVPlayer keys
30static NSString* const AVF_RATE_KEY = @"rate";
31static NSString* const AVF_CURRENT_ITEM_KEY = @"currentItem";
32static NSString* const AVF_CURRENT_ITEM_DURATION_KEY = @"currentItem.duration";
33
34static void *AVFMediaPlayerObserverRateObservationContext = &AVFMediaPlayerObserverRateObservationContext;
35static void *AVFMediaPlayerObserverStatusObservationContext = &AVFMediaPlayerObserverStatusObservationContext;
36static void *AVFMediaPlayerObserverPresentationSizeContext = &AVFMediaPlayerObserverPresentationSizeContext;
37static void *AVFMediaPlayerObserverBufferLikelyToKeepUpContext = &AVFMediaPlayerObserverBufferLikelyToKeepUpContext;
38static void *AVFMediaPlayerObserverTracksContext = &AVFMediaPlayerObserverTracksContext;
39static void *AVFMediaPlayerObserverCurrentItemObservationContext = &AVFMediaPlayerObserverCurrentItemObservationContext;
40static void *AVFMediaPlayerObserverCurrentItemDurationObservationContext = &AVFMediaPlayerObserverCurrentItemDurationObservationContext;
41
42
43@interface AVFMediaPlayerObserver : NSObject<AVAssetResourceLoaderDelegate>
44
45@property (readonly, getter=player) AVPlayer* m_player;
46@property (readonly, getter=playerItem) AVPlayerItem* m_playerItem;
47@property (readonly, getter=playerLayer) AVPlayerLayer* m_playerLayer;
48@property (readonly, getter=session) AVFMediaPlayer* m_session;
49@property (retain) AVPlayerItemTrack *videoTrack;
50
51- (AVFMediaPlayerObserver *) initWithMediaPlayerSession:(AVFMediaPlayer *)session;
52- (void) setURL:(NSURL *)url mimeType:(NSString *)mimeType;
53- (void) unloadMedia;
54- (void) prepareToPlayAsset:(AVURLAsset *)asset withKeys:(NSArray *)requestedKeys;
55- (void) assetFailedToPrepareForPlayback:(NSError *)error;
56- (void) playerItemDidReachEnd:(NSNotification *)notification;
57- (void) observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object
58 change:(NSDictionary *)change context:(void *)context;
59- (void) detatchSession;
60- (void) dealloc;
61- (BOOL) resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
62@end
63
64#ifdef Q_OS_IOS
65// Alas, no such thing as 'class variable', hence globals:
66static unsigned sessionActivationCount;
67static QMutex sessionMutex;
68#endif // Q_OS_IOS
69
70@implementation AVFMediaPlayerObserver
71{
72@private
73 AVFMediaPlayer *m_session;
74 AVPlayer *m_player;
75 AVPlayerItem *m_playerItem;
76 AVPlayerLayer *m_playerLayer;
77 NSURL *m_URL;
78 BOOL m_bufferIsLikelyToKeepUp;
79 NSData *m_data;
80 NSString *m_mimeType;
81#ifdef Q_OS_IOS
82 BOOL m_activated;
83#endif
84}
85
86@synthesize m_player, m_playerItem, m_playerLayer, m_session;
87
88#ifdef Q_OS_IOS
89- (void)setSessionActive:(BOOL)active
90{
91 const QMutexLocker lock(&sessionMutex);
92 if (active) {
93 // Don't count the same player twice if already activated,
94 // unless it tried to deactivate first:
95 if (m_activated)
96 return;
97 if (!sessionActivationCount)
98 [AVAudioSession.sharedInstance setActive:YES error:nil];
99 ++sessionActivationCount;
100 m_activated = YES;
101 } else {
102 if (!sessionActivationCount || !m_activated) {
103 qWarning("Unbalanced audio session deactivation, ignoring.");
104 return;
105 }
106 --sessionActivationCount;
107 m_activated = NO;
108 if (!sessionActivationCount)
109 [AVAudioSession.sharedInstance setActive:NO error:nil];
110 }
111}
112#endif // Q_OS_IOS
113
114- (AVFMediaPlayerObserver *) initWithMediaPlayerSession:(AVFMediaPlayer *)session
115{
116 if (!(self = [super init]))
117 return nil;
118
119 m_session = session;
120 m_bufferIsLikelyToKeepUp = FALSE;
121
122 m_playerLayer = [AVPlayerLayer playerLayerWithPlayer:nil];
123 [m_playerLayer retain];
124 m_playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
125 m_playerLayer.anchorPoint = CGPointMake(0.0f, 0.0f);
126 return self;
127}
128
129- (void) setURL:(NSURL *)url mimeType:(NSString *)mimeType
130{
131 if (!m_session)
132 return;
133
134 [m_mimeType release];
135 m_mimeType = [mimeType retain];
136
137 if (m_URL != url)
138 {
139 [m_URL release];
140 m_URL = [url copy];
141
142 //Create an asset for inspection of a resource referenced by a given URL.
143 //Load the values for the asset keys "tracks", "playable".
144
145 // use __block to avoid maintaining strong references on variables captured by the
146 // following block callback
147#if defined(Q_OS_IOS)
148 BOOL isAccessing = [m_URL startAccessingSecurityScopedResource];
149#endif
150 __block AVURLAsset *asset = [[AVURLAsset URLAssetWithURL:m_URL options:nil] retain];
151 [asset.resourceLoader setDelegate:self queue:dispatch_get_main_queue()];
152
153 __block NSArray *requestedKeys = [[NSArray arrayWithObjects:AVF_TRACKS_KEY, AVF_PLAYABLE_KEY, nil] retain];
154
155 __block AVFMediaPlayerObserver *blockSelf = [self retain];
156
157 // Tells the asset to load the values of any of the specified keys that are not already loaded.
158 [asset loadValuesAsynchronouslyForKeys:requestedKeys completionHandler:
159 ^{
160 dispatch_async( dispatch_get_main_queue(),
161 ^{
162#if defined(Q_OS_IOS)
163 if (isAccessing)
164 [m_URL stopAccessingSecurityScopedResource];
165#endif
166 [blockSelf prepareToPlayAsset:asset withKeys:requestedKeys];
167 [asset release];
168 [requestedKeys release];
169 [blockSelf release];
170 });
171 }];
172 }
173}
174
175- (void) unloadMedia
176{
177 if (m_playerItem) {
178 [m_playerItem removeObserver:self forKeyPath:@"presentationSize"];
179 [m_playerItem removeObserver:self forKeyPath:AVF_STATUS_KEY];
180 [m_playerItem removeObserver:self forKeyPath:AVF_BUFFER_LIKELY_KEEP_UP_KEY];
181 [m_playerItem removeObserver:self forKeyPath:AVF_TRACKS_KEY];
182
183 [[NSNotificationCenter defaultCenter] removeObserver:self
184 name:AVPlayerItemDidPlayToEndTimeNotification
185 object:m_playerItem];
186 m_playerItem = nullptr;
187 }
188 if (m_player) {
189 [m_player setRate:0.0];
190 [m_player removeObserver:self forKeyPath:AVF_CURRENT_ITEM_DURATION_KEY];
191 [m_player removeObserver:self forKeyPath:AVF_CURRENT_ITEM_KEY];
192 [m_player removeObserver:self forKeyPath:AVF_RATE_KEY];
193 [m_player release];
194 m_player = nullptr;
195 }
196 if (m_playerLayer)
197 m_playerLayer.player = nil;
198#if defined(Q_OS_IOS)
199 [self setSessionActive:NO];
200#endif
201}
202
203- (void) prepareToPlayAsset:(AVURLAsset *)asset
204 withKeys:(NSArray *)requestedKeys
205{
206 if (!m_session)
207 return;
208
209 //Make sure that the value of each key has loaded successfully.
210 for (NSString *thisKey in requestedKeys)
211 {
212 NSError *error = nil;
213 AVKeyValueStatus keyStatus = [asset statusOfValueForKey:thisKey error:&error];
214#ifdef QT_DEBUG_AVF
215 qDebug() << Q_FUNC_INFO << [thisKey UTF8String] << " status: " << keyStatus;
216#endif
217 if (keyStatus == AVKeyValueStatusFailed)
218 {
219 [self assetFailedToPrepareForPlayback:error];
220 return;
221 }
222 }
223
224 //Use the AVAsset playable property to detect whether the asset can be played.
225#ifdef QT_DEBUG_AVF
226 qDebug() << Q_FUNC_INFO << "isPlayable: " << [asset isPlayable];
227#endif
228 if (!asset.playable)
229 qWarning() << "Asset reported to be not playable. Playback of this asset may not be possible.";
230
231 //At this point we're ready to set up for playback of the asset.
232 //Stop observing our prior AVPlayerItem, if we have one.
233 if (m_playerItem)
234 {
235 //Remove existing player item key value observers and notifications.
236 [self unloadMedia];
237 }
238
239 //Create a new instance of AVPlayerItem from the now successfully loaded AVAsset.
240 m_playerItem = [AVPlayerItem playerItemWithAsset:asset];
241 if (!m_playerItem) {
242 qWarning() << "Failed to create player item";
243 //Generate an error describing the failure.
244 NSString *localizedDescription = NSLocalizedString(@"Item cannot be played", @"Item cannot be played description");
245 NSString *localizedFailureReason = NSLocalizedString(@"The assets tracks were loaded, but couldn't create player item.", @"Item cannot be played failure reason");
246 NSDictionary *errorDict = [NSDictionary dictionaryWithObjectsAndKeys:
247 localizedDescription, NSLocalizedDescriptionKey,
248 localizedFailureReason, NSLocalizedFailureReasonErrorKey,
249 nil];
250 NSError *assetCannotBePlayedError = [NSError errorWithDomain:@"StitchedStreamPlayer" code:0 userInfo:errorDict];
251
252 [self assetFailedToPrepareForPlayback:assetCannotBePlayedError];
253 return;
254 }
255
256 //Observe the player item "status" key to determine when it is ready to play.
257 [m_playerItem addObserver:self
258 forKeyPath:AVF_STATUS_KEY
259 options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
260 context:AVFMediaPlayerObserverStatusObservationContext];
261
262 [m_playerItem addObserver:self
263 forKeyPath:@"presentationSize"
264 options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
265 context:AVFMediaPlayerObserverPresentationSizeContext];
266
267 [m_playerItem addObserver:self
268 forKeyPath:AVF_BUFFER_LIKELY_KEEP_UP_KEY
269 options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
270 context:AVFMediaPlayerObserverBufferLikelyToKeepUpContext];
271
272 [m_playerItem addObserver:self
273 forKeyPath:AVF_TRACKS_KEY
274 options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
275 context:AVFMediaPlayerObserverTracksContext];
276
277 //When the player item has played to its end time we'll toggle
278 //the movie controller Pause button to be the Play button
279 [[NSNotificationCenter defaultCenter] addObserver:self
280 selector:@selector(playerItemDidReachEnd:)
281 name:AVPlayerItemDidPlayToEndTimeNotification
282 object:m_playerItem];
283
284 //Get a new AVPlayer initialized to play the specified player item.
285 m_player = [AVPlayer playerWithPlayerItem:m_playerItem];
286 [m_player retain];
287
288 //Set the initial audio output settings on the new player object
289 if (self.session) {
290 auto *audioOutput = m_session->m_audioOutput;
291 m_player.volume = (audioOutput ? audioOutput->volume : 1.);
292 m_player.muted = (audioOutput ? audioOutput->muted : true);
293 m_session->updateAudioOutputDevice();
294 }
295
296 //Assign the output layer to the new player
297 m_playerLayer.player = m_player;
298
299 //Observe the AVPlayer "currentItem" property to find out when any
300 //AVPlayer replaceCurrentItemWithPlayerItem: replacement will/did
301 //occur.
302 [m_player addObserver:self
303 forKeyPath:AVF_CURRENT_ITEM_KEY
304 options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
305 context:AVFMediaPlayerObserverCurrentItemObservationContext];
306
307 //Observe the AVPlayer "rate" property to update the scrubber control.
308 [m_player addObserver:self
309 forKeyPath:AVF_RATE_KEY
310 options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
311 context:AVFMediaPlayerObserverRateObservationContext];
312
313 //Observe the duration for getting the buffer state
314 [m_player addObserver:self
315 forKeyPath:AVF_CURRENT_ITEM_DURATION_KEY
316 options:0
317 context:AVFMediaPlayerObserverCurrentItemDurationObservationContext];
318#if defined(Q_OS_IOS)
319 [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback withOptions:AVAudioSessionCategoryOptionMixWithOthers error:nil];
320 [self setSessionActive:YES];
321#endif
322}
323
324-(void) assetFailedToPrepareForPlayback:(NSError *)error
325{
326 Q_UNUSED(error);
327 QMetaObject::invokeMethod(m_session, "processMediaLoadError", Qt::AutoConnection);
328#ifdef QT_DEBUG_AVF
329 qDebug() << Q_FUNC_INFO;
330 qDebug() << [[error localizedDescription] UTF8String];
331 qDebug() << [[error localizedFailureReason] UTF8String];
332 qDebug() << [[error localizedRecoverySuggestion] UTF8String];
333#endif
334}
335
336- (void) playerItemDidReachEnd:(NSNotification *)notification
337{
338 Q_UNUSED(notification);
339 if (self.session)
340 QMetaObject::invokeMethod(m_session, "processEOS", Qt::AutoConnection);
341}
342
343- (void) observeValueForKeyPath:(NSString*) path
344 ofObject:(id)object
345 change:(NSDictionary*)change
346 context:(void*)context
347{
348 //AVPlayerItem "status" property value observer.
349 if (context == AVFMediaPlayerObserverStatusObservationContext)
350 {
351 AVPlayerStatus status = (AVPlayerStatus)[[change objectForKey:NSKeyValueChangeNewKey] integerValue];
352 switch (status)
353 {
354 //Indicates that the status of the player is not yet known because
355 //it has not tried to load new media resources for playback
356 case AVPlayerStatusUnknown:
357 {
358 //QMetaObject::invokeMethod(m_session, "processLoadStateChange", Qt::AutoConnection);
359 }
360 break;
361
362 case AVPlayerStatusReadyToPlay:
363 {
364 //Once the AVPlayerItem becomes ready to play, i.e.
365 //[playerItem status] == AVPlayerItemStatusReadyToPlay,
366 //its duration can be fetched from the item.
367 if (self.session)
368 QMetaObject::invokeMethod(m_session, "processLoadStateChange", Qt::AutoConnection);
369 }
370 break;
371
372 case AVPlayerStatusFailed:
373 {
374 AVPlayerItem *playerItem = static_cast<AVPlayerItem*>(object);
375 [self assetFailedToPrepareForPlayback:playerItem.error];
376
377 if (self.session)
378 QMetaObject::invokeMethod(m_session, "processLoadStateFailure", Qt::AutoConnection);
379 }
380 break;
381 }
382 } else if (context == AVFMediaPlayerObserverPresentationSizeContext) {
383 QSize size(m_playerItem.presentationSize.width, m_playerItem.presentationSize.height);
384 QMetaObject::invokeMethod(m_session, "nativeSizeChanged", Qt::AutoConnection, Q_ARG(QSize, size));
385 } else if (context == AVFMediaPlayerObserverBufferLikelyToKeepUpContext)
386 {
387 const bool isPlaybackLikelyToKeepUp = [m_playerItem isPlaybackLikelyToKeepUp];
388 if (isPlaybackLikelyToKeepUp != m_bufferIsLikelyToKeepUp) {
389 m_bufferIsLikelyToKeepUp = isPlaybackLikelyToKeepUp;
390 QMetaObject::invokeMethod(m_session, "processBufferStateChange", Qt::AutoConnection,
391 Q_ARG(int, isPlaybackLikelyToKeepUp ? 100 : 0));
392 }
393 }
394 else if (context == AVFMediaPlayerObserverTracksContext)
395 {
396 QMetaObject::invokeMethod(m_session, "updateTracks", Qt::AutoConnection);
397 }
398 //AVPlayer "rate" property value observer.
399 else if (context == AVFMediaPlayerObserverRateObservationContext)
400 {
401 //QMetaObject::invokeMethod(m_session, "setPlaybackRate", Qt::AutoConnection, Q_ARG(qreal, [m_player rate]));
402 }
403 //AVPlayer "currentItem" property observer.
404 //Called when the AVPlayer replaceCurrentItemWithPlayerItem:
405 //replacement will/did occur.
406 else if (context == AVFMediaPlayerObserverCurrentItemObservationContext)
407 {
408 AVPlayerItem *newPlayerItem = [change objectForKey:NSKeyValueChangeNewKey];
409 if (m_playerItem != newPlayerItem)
410 m_playerItem = newPlayerItem;
411 }
412 else if (context == AVFMediaPlayerObserverCurrentItemDurationObservationContext)
413 {
414 const CMTime time = [m_playerItem duration];
415 const qint64 duration = static_cast<qint64>(float(time.value) / float(time.timescale) * 1000.0f);
416 if (self.session)
417 QMetaObject::invokeMethod(m_session, "processDurationChange", Qt::AutoConnection, Q_ARG(qint64, duration));
418 }
419 else
420 {
421 [super observeValueForKeyPath:path ofObject:object change:change context:context];
422 }
423}
424
425- (void) detatchSession
426{
427#ifdef QT_DEBUG_AVF
428 qDebug() << Q_FUNC_INFO;
429#endif
430 m_session = nullptr;
431}
432
433- (void) dealloc
434{
435#ifdef QT_DEBUG_AVF
436 qDebug() << Q_FUNC_INFO;
437#endif
438 [self unloadMedia];
439
440 if (m_URL) {
441 [m_URL release];
442 }
443
444 [m_mimeType release];
445 [m_playerLayer release];
446 // 'videoTrack' is a 'retain' property, but still needs a
447 // manual 'release' (i.e. setting to nil):
448 self.videoTrack = nil;
449 [super dealloc];
450}
451
452- (BOOL) resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
453{
454 Q_UNUSED(resourceLoader);
455
456 if (![loadingRequest.request.URL.scheme isEqualToString:@"iodevice"])
457 return NO;
458
459 QIODevice *device = m_session->mediaStream();
460 if (!device)
461 return NO;
462
463 device->seek(loadingRequest.dataRequest.requestedOffset);
464 if (loadingRequest.contentInformationRequest) {
465 loadingRequest.contentInformationRequest.contentType = m_mimeType;
466 loadingRequest.contentInformationRequest.contentLength = device->size();
467 loadingRequest.contentInformationRequest.byteRangeAccessSupported = YES;
468 }
469
470 if (loadingRequest.dataRequest) {
471 NSInteger requestedLength = loadingRequest.dataRequest.requestedLength;
472 int maxBytes = qMin(32 * 1064, int(requestedLength));
473 QByteArray buffer;
474 buffer.resize(maxBytes);
475
476 NSInteger submitted = 0;
477 while (submitted < requestedLength) {
478 qint64 len = device->read(buffer.data(), maxBytes);
479 if (len < 1)
480 break;
481
482 [loadingRequest.dataRequest respondWithData:[NSData dataWithBytes:buffer.constData() length:len]];
483 submitted += len;
484 }
485
486 // Finish loading even if not all bytes submitted.
487 [loadingRequest finishLoading];
488 }
489
490 return YES;
491}
492@end
493
494AVFMediaPlayer::AVFMediaPlayer(QMediaPlayer *player)
495 : QObject(player),
496 QPlatformMediaPlayer(player),
497 m_state(QMediaPlayer::StoppedState),
498 m_mediaStatus(QMediaPlayer::NoMedia),
499 m_mediaStream(nullptr),
500 m_rate(1.0),
501 m_requestedPosition(-1),
502 m_duration(0),
503 m_bufferProgress(0),
504 m_videoAvailable(false),
505 m_audioAvailable(false),
506 m_seekable(false)
507{
508 m_observer = [[AVFMediaPlayerObserver alloc] initWithMediaPlayerSession:this];
509 connect(&m_playbackTimer, &QTimer::timeout, this, &AVFMediaPlayer::processPositionChange);
510 setVideoOutput(new AVFVideoRendererControl(this));
511}
512
512
513AVFMediaPlayer::~AVFMediaPlayer()
514{
515#ifdef QT_DEBUG_AVF
516 qDebug() << Q_FUNC_INFO;
517#endif
518 //Detach the session from the sessionObserver (which could still be alive trying to communicate with this session).
519 [m_observer detatchSession];
520 [m_observer release];
521}
522
523void AVFMediaPlayer::setVideoSink(QVideoSink *sink)
524{
525 m_videoSink = sink ? static_cast<AVFVideoSink *>(sink->platformVideoSink()): nullptr;
526 m_videoOutput->setVideoSink(m_videoSink);
527}
528
528
529void AVFMediaPlayer::setVideoOutput(AVFVideoRendererControl *output)
530{
531#ifdef QT_DEBUG_AVF
532 qDebug() << Q_FUNC_INFO << output;
533#endif
534
535 if (m_videoOutput == output)
536 return;
537
538 //Set the current output layer to null to stop rendering
539 if (m_videoOutput) {
540 m_videoOutput->setLayer(nullptr);
541 }
542
543 m_videoOutput = output;
544
545 if (m_videoOutput && m_state != QMediaPlayer::StoppedState)
546 m_videoOutput->setLayer([m_observer playerLayer]);
547}
548
548
549AVAsset *AVFMediaPlayer::currentAssetHandle()
550{
551#ifdef QT_DEBUG_AVF
552 qDebug() << Q_FUNC_INFO;
553#endif
554 AVAsset *currentAsset = [[m_observer playerItem] asset];
555 return currentAsset;
556}
557
557
558QMediaPlayer::PlaybackState AVFMediaPlayer::state() const
559{
560 return m_state;
561}
562
562
563QMediaPlayer::MediaStatus AVFMediaPlayer::mediaStatus() const
564{
565 return m_mediaStatus;
566}
567
567
568QUrl AVFMediaPlayer::media() const
569{
570 return m_resources;
571}
572
572
573QIODevice *AVFMediaPlayer::mediaStream() const
574{
575 return m_mediaStream;
576}
577
578static void setURL(AVFMediaPlayerObserver *observer, const QByteArray &url, const QString &mimeType = QString())
579{
580 NSString *urlString = [NSString stringWithUTF8String:url.constData()];
581 NSURL *nsurl = [NSURL URLWithString:urlString];
582 [observer setURL:nsurl mimeType:[NSString stringWithUTF8String:mimeType.toLatin1().constData()]];
583}
584
585static void setStreamURL(AVFMediaPlayerObserver *observer, const QByteArray &url)
586{
587 setURL(observer, QByteArrayLiteral("iodevice://") + url, QFileInfo(QString::fromUtf8(url)).suffix());
588}
589
590void AVFMediaPlayer::setMedia(const QUrl &content, QIODevice *stream)
591{
592#ifdef QT_DEBUG_AVF
593 qDebug() << Q_FUNC_INFO << content.request().url();
594#endif
595
596 [m_observer unloadMedia];
597
598 m_resources = content;
599 resetStream(stream);
600
601 setAudioAvailable(false);
602 setVideoAvailable(false);
603 setSeekable(false);
604 m_requestedPosition = -1;
605 orientationChanged(QtVideo::Rotation::None, false);
606 positionChanged(position());
607 if (m_duration != 0) {
608 m_duration = 0;
609 durationChanged(0);
610 }
611 if (!m_metaData.isEmpty()) {
612 m_metaData.clear();
613 metaDataChanged();
614 }
615 for (int i = 0; i < QPlatformMediaPlayer::NTrackTypes; ++i) {
616 tracks[i].clear();
617 nativeTracks[i].clear();
618 }
619 tracksChanged();
620
621 const QMediaPlayer::MediaStatus oldMediaStatus = m_mediaStatus;
622 const QMediaPlayer::PlaybackState oldState = m_state;
623
624 if (!m_mediaStream && content.isEmpty()) {
625 m_mediaStatus = QMediaPlayer::NoMedia;
626 if (m_mediaStatus != oldMediaStatus)
627 mediaStatusChanged(m_mediaStatus);
628
629 m_state = QMediaPlayer::StoppedState;
630 if (m_state != oldState)
631 stateChanged(m_state);
632
633 return;
634 }
635
636 m_mediaStatus = QMediaPlayer::LoadingMedia;
637 if (m_mediaStatus != oldMediaStatus)
638 mediaStatusChanged(m_mediaStatus);
639
640 if (m_mediaStream) {
641 // If there is data, try to load it,
642 // otherwise wait for readyRead.
643 if (m_mediaStream->size())
644 setStreamURL(m_observer, m_resources.toEncoded());
645 } else {
646 //Load AVURLAsset
647 //initialize asset using content's URL
648 setURL(m_observer, m_resources.toEncoded());
649 }
650
651 m_state = QMediaPlayer::StoppedState;
652 if (m_state != oldState)
653 stateChanged(m_state);
654}
655
656qint64 AVFMediaPlayer::position() const
657{
658 AVPlayerItem *playerItem = [m_observer playerItem];
659
660 if (m_requestedPosition != -1)
661 return m_requestedPosition;
662
663 if (!playerItem)
664 return 0;
665
666 CMTime time = [playerItem currentTime];
667 return static_cast<quint64>(float(time.value) / float(time.timescale) * 1000.0f);
668}
669
670qint64 AVFMediaPlayer::duration() const
671{
672#ifdef QT_DEBUG_AVF
673 qDebug() << Q_FUNC_INFO;
674#endif
675 return m_duration;
676}
677
678float AVFMediaPlayer::bufferProgress() const
679{
680#ifdef QT_DEBUG_AVF
681 qDebug() << Q_FUNC_INFO;
682#endif
683 return m_bufferProgress/100.;
684}
685
686void AVFMediaPlayer::setAudioAvailable(bool available)
687{
688 if (m_audioAvailable == available)
689 return;
690
691 m_audioAvailable = available;
692 audioAvailableChanged(available);
693}
694
695bool AVFMediaPlayer::isAudioAvailable() const
696{
697 return m_audioAvailable;
698}
699
700void AVFMediaPlayer::setVideoAvailable(bool available)
701{
702 if (m_videoAvailable == available)
703 return;
704
705 m_videoAvailable = available;
706 videoAvailableChanged(available);
707}
708
709bool AVFMediaPlayer::isVideoAvailable() const
710{
711 return m_videoAvailable;
712}
713
714bool AVFMediaPlayer::isSeekable() const
715{
716 return m_seekable;
717}
718
719void AVFMediaPlayer::setSeekable(bool seekable)
720{
721 if (m_seekable == seekable)
722 return;
723
724 m_seekable = seekable;
725 seekableChanged(seekable);
726}
727
728QMediaTimeRange AVFMediaPlayer::availablePlaybackRanges() const
729{
730 AVPlayerItem *playerItem = [m_observer playerItem];
731
732 if (playerItem) {
733 QMediaTimeRange timeRanges;
734
735 NSArray *ranges = [playerItem loadedTimeRanges];
736 for (NSValue *timeRange in ranges) {
737 CMTimeRange currentTimeRange = [timeRange CMTimeRangeValue];
738 qint64 startTime = qint64(float(currentTimeRange.start.value) / currentTimeRange.start.timescale * 1000.0);
739 timeRanges.addInterval(startTime, startTime + qint64(float(currentTimeRange.duration.value) / currentTimeRange.duration.timescale * 1000.0));
740 }
741 if (!timeRanges.isEmpty())
742 return timeRanges;
743 }
744 return QMediaTimeRange(0, duration());
745}
746
747qreal AVFMediaPlayer::playbackRate() const
748{
749 return m_rate;
750}
751
752void AVFMediaPlayer::setAudioOutput(QPlatformAudioOutput *output)
753{
754 if (m_audioOutput == output)
755 return;
756 if (m_audioOutput)
757 m_audioOutput->q->disconnect(this);
758 m_audioOutput = output;
759 if (m_audioOutput) {
760 connect(m_audioOutput->q, &QAudioOutput::deviceChanged, this, &AVFMediaPlayer::updateAudioOutputDevice);
761 connect(m_audioOutput->q, &QAudioOutput::volumeChanged, this, &AVFMediaPlayer::setVolume);
762 connect(m_audioOutput->q, &QAudioOutput::mutedChanged, this, &AVFMediaPlayer::setMuted);
763 //connect(m_audioOutput->q, &QAudioOutput::audioRoleChanged, this, &AVFMediaPlayer::setAudioRole);
764 }
766 setMuted(m_audioOutput ? m_audioOutput->muted : true);
767 setVolume(m_audioOutput ? m_audioOutput->volume : 1.);
768}
769
770QMediaMetaData AVFMediaPlayer::metaData() const
771{
772 return m_metaData;
773}
774
775void AVFMediaPlayer::setPlaybackRate(qreal rate)
776{
777#ifdef QT_DEBUG_AVF
778 qDebug() << Q_FUNC_INFO << rate;
779#endif
780
781 if (qFuzzyCompare(m_rate, rate))
782 return;
783
784 m_rate = rate;
785
786 AVPlayer *player = [m_observer player];
787 if (player && m_state == QMediaPlayer::PlayingState)
788 [player setRate:m_rate];
789
790 playbackRateChanged(m_rate);
791}
792
793void AVFMediaPlayer::setPosition(qint64 pos)
794{
795#ifdef QT_DEBUG_AVF
796 qDebug() << Q_FUNC_INFO << pos;
797#endif
798
799 if (pos == position())
800 return;
801
802 AVPlayerItem *playerItem = [m_observer playerItem];
803 if (!playerItem) {
804 m_requestedPosition = pos;
805 positionChanged(m_requestedPosition);
806 return;
807 }
808
809 if (!isSeekable()) {
810 if (m_requestedPosition != -1) {
811 m_requestedPosition = -1;
812 positionChanged(position());
813 }
814 return;
815 }
816
817 pos = qMax(qint64(0), pos);
818 if (duration() > 0)
819 pos = qMin(pos, duration());
820 m_requestedPosition = pos;
821
822 CMTime newTime = [playerItem currentTime];
823 newTime.value = (pos / 1000.0f) * newTime.timescale;
824 [playerItem seekToTime:newTime toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero
825 completionHandler:^(BOOL finished) {
826 if (finished)
827 m_requestedPosition = -1;
828 }];
829
830 positionChanged(pos);
831
832 // Reset media status if the current status is EndOfMedia
833 if (m_mediaStatus == QMediaPlayer::EndOfMedia) {
834 QMediaPlayer::MediaStatus newMediaStatus = (m_state == QMediaPlayer::PausedState) ? QMediaPlayer::BufferedMedia
835 : QMediaPlayer::LoadedMedia;
836 mediaStatusChanged((m_mediaStatus = newMediaStatus));
837 }
838}
839
840void AVFMediaPlayer::play()
841{
842#ifdef QT_DEBUG_AVF
843 qDebug() << Q_FUNC_INFO << "currently: " << m_state;
844#endif
845
846 if (m_mediaStatus == QMediaPlayer::NoMedia || m_mediaStatus == QMediaPlayer::InvalidMedia)
847 return;
848
849 if (m_state == QMediaPlayer::PlayingState)
850 return;
851
852 resetCurrentLoop();
853
854 if (m_videoOutput && m_videoSink)
855 m_videoOutput->setLayer([m_observer playerLayer]);
856
857 // Reset media status if the current status is EndOfMedia
858 if (m_mediaStatus == QMediaPlayer::EndOfMedia)
859 setPosition(0);
860
861 if (m_mediaStatus == QMediaPlayer::LoadedMedia || m_mediaStatus == QMediaPlayer::BufferedMedia) {
862 // Setting the rate starts playback
863 [[m_observer player] setRate:m_rate];
864 }
865
866 m_state = QMediaPlayer::PlayingState;
867 processLoadStateChange();
868
869 stateChanged(m_state);
870 m_playbackTimer.start(100);
871}
872
873void AVFMediaPlayer::pause()
874{
875#ifdef QT_DEBUG_AVF
876 qDebug() << Q_FUNC_INFO << "currently: " << m_state;
877#endif
878
879 if (m_mediaStatus == QMediaPlayer::NoMedia)
880 return;
881
882 if (m_state == QMediaPlayer::PausedState)
883 return;
884
885 m_state = QMediaPlayer::PausedState;
886
887 if (m_videoOutput && m_videoSink)
888 m_videoOutput->setLayer([m_observer playerLayer]);
889
890 [[m_observer player] pause];
891
892 // Reset media status if the current status is EndOfMedia
893 if (m_mediaStatus == QMediaPlayer::EndOfMedia)
894 setPosition(0);
895
896 positionChanged(position());
897 stateChanged(m_state);
898 m_playbackTimer.stop();
899}
900
901void AVFMediaPlayer::stop()
902{
903#ifdef QT_DEBUG_AVF
904 qDebug() << Q_FUNC_INFO << "currently: " << m_state;
905#endif
906
907 if (m_state == QMediaPlayer::StoppedState)
908 return;
909
910 // AVPlayer doesn't have stop(), only pause() and play().
911 [[m_observer player] pause];
912 setPosition(0);
913
914 if (m_videoOutput)
915 m_videoOutput->setLayer(nullptr);
916
917 if (m_mediaStatus == QMediaPlayer::BufferedMedia)
918 mediaStatusChanged((m_mediaStatus = QMediaPlayer::LoadedMedia));
919
920 stateChanged((m_state = QMediaPlayer::StoppedState));
921 m_playbackTimer.stop();
922}
923
924void AVFMediaPlayer::setVolume(float volume)
925{
926#ifdef QT_DEBUG_AVF
927 qDebug() << Q_FUNC_INFO << volume;
928#endif
929
930 AVPlayer *player = [m_observer player];
931 if (player)
932 player.volume = volume;
933}
934
935void AVFMediaPlayer::setMuted(bool muted)
936{
937#ifdef QT_DEBUG_AVF
938 qDebug() << Q_FUNC_INFO << muted;
939#endif
940
941 AVPlayer *player = [m_observer player];
942 if (player)
943 player.muted = muted;
944}
945
946void AVFMediaPlayer::updateAudioOutputDevice()
947{
948#ifdef Q_OS_MACOS
949 AVPlayer *player = [m_observer player];
950 if (!player)
951 return;
952
953 if (!m_audioOutput || m_audioOutput->device.id().isEmpty()) {
954 if (!m_audioOutput)
955 player.muted = true;
956 player.audioOutputDeviceUniqueID = nil;
957 } else {
958 NSString *str = QString::fromUtf8(m_audioOutput->device.id()).toNSString();
959 player.audioOutputDeviceUniqueID = str;
960 }
961#endif
962}
963
964void AVFMediaPlayer::processEOS()
965{
966 if (doLoop()) {
967 setPosition(0);
968 [[m_observer player] setRate:m_rate];
969 return;
970 }
971
972 //AVPlayerItem has reached end of track/stream
973#ifdef QT_DEBUG_AVF
974 qDebug() << Q_FUNC_INFO;
975#endif
976 positionChanged(position());
977 m_mediaStatus = QMediaPlayer::EndOfMedia;
978 m_state = QMediaPlayer::StoppedState;
979
980 if (m_videoOutput)
981 m_videoOutput->setLayer(nullptr);
982
983 mediaStatusChanged(m_mediaStatus);
984 stateChanged(m_state);
985}
986
987void AVFMediaPlayer::processLoadStateChange(QMediaPlayer::PlaybackState newState)
988{
989 AVPlayerStatus currentStatus = [[m_observer player] status];
990
991#ifdef QT_DEBUG_AVF
992 qDebug() << Q_FUNC_INFO << currentStatus << ", " << m_mediaStatus << ", " << newState;
993#endif
994
995 if (m_mediaStatus == QMediaPlayer::NoMedia)
996 return;
997
998 if (currentStatus == AVPlayerStatusReadyToPlay) {
999
1000 AVPlayerItem *playerItem = [m_observer playerItem];
1001
1002 applyPitchCompensation(m_pitchCompensationEnabled);
1003
1004 // get the meta data
1005 m_metaData = AVFMetaData::fromAsset(playerItem.asset);
1006 metaDataChanged();
1007 updateTracks();
1008
1009 if (playerItem) {
1010 setSeekable([[playerItem seekableTimeRanges] count] > 0);
1011
1012 // Get the native size of the video, and reset the bounds of the player layer
1013 AVPlayerLayer *playerLayer = [m_observer playerLayer];
1014 if (m_observer.videoTrack && playerLayer) {
1015 if (!playerLayer.bounds.size.width || !playerLayer.bounds.size.height) {
1016 playerLayer.bounds = CGRectMake(0.0f, 0.0f,
1017 m_observer.videoTrack.assetTrack.naturalSize.width,
1018 m_observer.videoTrack.assetTrack.naturalSize.height);
1019 }
1020 }
1021
1022 if (m_requestedPosition != -1) {
1023 setPosition(m_requestedPosition);
1024 m_requestedPosition = -1;
1025 }
1026 }
1027
1028 QMediaPlayer::MediaStatus newStatus = (newState != QMediaPlayer::StoppedState)
1029 ? QMediaPlayer::BufferedMedia
1030 : QMediaPlayer::LoadedMedia;
1031
1032 if (newStatus != m_mediaStatus)
1033 mediaStatusChanged((m_mediaStatus = newStatus));
1034 }
1035
1036 if (newState == QMediaPlayer::PlayingState && [m_observer player]) {
1037 // Setting the rate is enough to start playback, no need to call play()
1038 [[m_observer player] setRate:m_rate];
1039 m_playbackTimer.start();
1040 }
1041}
1042
1043
1044void AVFMediaPlayer::processLoadStateChange()
1045{
1046 processLoadStateChange(m_state);
1047}
1048
1049
1050void AVFMediaPlayer::processLoadStateFailure()
1051{
1052 stateChanged((m_state = QMediaPlayer::StoppedState));
1053}
1054
1055void AVFMediaPlayer::processBufferStateChange(int bufferProgress)
1056{
1057 if (bufferProgress == m_bufferProgress)
1058 return;
1059
1060 auto status = m_mediaStatus;
1061 // Buffered -> unbuffered.
1062 if (!bufferProgress) {
1063 status = QMediaPlayer::StalledMedia;
1064 } else if (status == QMediaPlayer::StalledMedia) {
1065 status = QMediaPlayer::BufferedMedia;
1066 // Resume playback.
1067 if (m_state == QMediaPlayer::PlayingState) {
1068 [[m_observer player] setRate:m_rate];
1069 m_playbackTimer.start();
1070 }
1071 }
1072
1073 if (m_mediaStatus != status)
1074 mediaStatusChanged(m_mediaStatus = status);
1075
1076 m_bufferProgress = bufferProgress;
1077 bufferProgressChanged(bufferProgress / 100.);
1078}
1079
1080void AVFMediaPlayer::processDurationChange(qint64 duration)
1081{
1082 if (duration == m_duration)
1083 return;
1084
1085 m_duration = duration;
1086 durationChanged(duration);
1087}
1088
1089void AVFMediaPlayer::processPositionChange()
1090{
1091 if (m_state == QMediaPlayer::StoppedState)
1092 return;
1093
1094 positionChanged(position());
1095}
1096
1097void AVFMediaPlayer::processMediaLoadError()
1098{
1099 if (m_requestedPosition != -1) {
1100 m_requestedPosition = -1;
1101 positionChanged(position());
1102 }
1103
1104 mediaStatusChanged((m_mediaStatus = QMediaPlayer::InvalidMedia));
1105
1106 error(QMediaPlayer::FormatError, tr("Failed to load media"));
1107}
1108
1109void AVFMediaPlayer::streamReady()
1110{
1111 setStreamURL(m_observer, m_resources.toEncoded());
1112}
1113
1114void AVFMediaPlayer::streamDestroyed()
1115{
1116 resetStream(nullptr);
1117}
1118
1119void AVFMediaPlayer::updateTracks()
1120{
1121 bool firstLoad = true;
1122 for (int i = 0; i < QPlatformMediaPlayer::NTrackTypes; ++i) {
1123 if (tracks[i].count())
1124 firstLoad = false;
1125 tracks[i].clear();
1126 nativeTracks[i].clear();
1127 }
1128 AVPlayerItem *playerItem = [m_observer playerItem];
1129 if (playerItem) {
1130 // Check each track for audio and video content
1131 NSArray *tracks = playerItem.tracks;
1132 for (AVPlayerItemTrack *track in tracks) {
1133 AVAssetTrack *assetTrack = track.assetTrack;
1134 if (assetTrack) {
1135 int qtTrack = -1;
1136 if ([assetTrack.mediaType isEqualToString:AVMediaTypeAudio]) {
1137 qtTrack = QPlatformMediaPlayer::AudioStream;
1138 setAudioAvailable(true);
1139 } else if ([assetTrack.mediaType isEqualToString:AVMediaTypeVideo]) {
1140 qtTrack = QPlatformMediaPlayer::VideoStream;
1141 setVideoAvailable(true);
1142 if (m_observer.videoTrack != track) {
1143 m_observer.videoTrack = track;
1144 bool isMirrored = false;
1145 QtVideo::Rotation orientation = QtVideo::Rotation::None;
1146 videoOrientationForAssetTrack(assetTrack, orientation, isMirrored);
1147 orientationChanged(orientation, isMirrored);
1148 }
1149 }
1150 else if ([assetTrack.mediaType isEqualToString:AVMediaTypeSubtitle]) {
1151 qtTrack = QPlatformMediaPlayer::SubtitleStream;
1152 }
1153 if (qtTrack != -1) {
1154 QMediaMetaData metaData = AVFMetaData::fromAssetTrack(assetTrack);
1155 this->tracks[qtTrack].append(metaData);
1156 nativeTracks[qtTrack].append(track);
1157 }
1158 }
1159 }
1160 // subtitles are disabled by default
1161 if (firstLoad)
1162 setActiveTrack(SubtitleStream, -1);
1163 }
1164 tracksChanged();
1165}
1166
1167void AVFMediaPlayer::setActiveTrack(QPlatformMediaPlayer::TrackType type, int index)
1168{
1169 const auto &t = nativeTracks[type];
1170 if (type == QPlatformMediaPlayer::SubtitleStream) {
1171 // subtitle streams are not always automatically enabled on macOS/iOS.
1172 // this hack ensures they get enabled so we actually get the text
1173 AVPlayerItem *playerItem = m_observer.m_playerItem;
1174 if (playerItem) {
1175 AVAsset *asset = playerItem.asset;
1176 if (!asset)
1177 return;
1178#if defined(Q_OS_VISIONOS)
1179 [asset loadMediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible
1180 completionHandler:[=](AVMediaSelectionGroup *group, NSError *error) {
1181 // FIXME: handle error
1182 if (error)
1183 return;
1184 auto *options = group.options;
1185 if (options.count)
1186 [playerItem selectMediaOption:options.firstObject inMediaSelectionGroup:group];
1187 }];
1188#else
1189 AVMediaSelectionGroup *group = [asset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
1190 if (!group)
1191 return;
1192 auto *options = group.options;
1193 if (options.count)
1194 [playerItem selectMediaOption:options.firstObject inMediaSelectionGroup:group];
1195#endif
1196 }
1197 }
1198 for (int i = 0; i < t.count(); ++i)
1199 t.at(i).enabled = (i == index);
1200 activeTracksChanged();
1201}
1202
1203int AVFMediaPlayer::activeTrack(QPlatformMediaPlayer::TrackType type)
1204{
1205 const auto &t = nativeTracks[type];
1206 for (int i = 0; i < t.count(); ++i)
1207 if (t.at(i).enabled)
1208 return i;
1209 return -1;
1210}
1211
1212int AVFMediaPlayer::trackCount(QPlatformMediaPlayer::TrackType type)
1213{
1214 return nativeTracks[type].count();
1215}
1216
1217QMediaMetaData AVFMediaPlayer::trackMetaData(QPlatformMediaPlayer::TrackType type, int trackNumber)
1218{
1219 const auto &t = tracks[type];
1220 if (trackNumber < 0 || trackNumber >= t.count())
1221 return QMediaMetaData();
1222 return t.at(trackNumber);
1223}
1224
1225void AVFMediaPlayer::resetStream(QIODevice *stream)
1226{
1227 if (m_mediaStream) {
1228 disconnect(m_mediaStream, &QIODevice::readyRead, this, &AVFMediaPlayer::streamReady);
1229 disconnect(m_mediaStream, &QIODevice::destroyed, this, &AVFMediaPlayer::streamDestroyed);
1230 }
1231
1232 m_mediaStream = stream;
1233
1234 if (m_mediaStream) {
1235 connect(m_mediaStream, &QIODevice::readyRead, this, &AVFMediaPlayer::streamReady);
1236 connect(m_mediaStream, &QIODevice::destroyed, this, &AVFMediaPlayer::streamDestroyed);
1237 }
1238}
1239
1240void AVFMediaPlayer::applyPitchCompensation(bool enabled)
1241{
1242 AVPlayerItem *playerItem = [m_observer playerItem];
1243 if (playerItem) {
1244 if (enabled)
1245 playerItem.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmSpectral;
1246 else
1247 playerItem.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmVarispeed;
1248 }
1249}
1250
1250
1251void AVFMediaPlayer::nativeSizeChanged(QSize size)
1252{
1253 if (!m_videoSink)
1254 return;
1255 m_videoSink->setNativeSize(size);
1256}
1257
1258void AVFMediaPlayer::orientationChanged(QtVideo::Rotation rotation, bool mirrored)
1259{
1260 if (!m_videoOutput)
1261 return;
1262
1263 m_videoOutput->setVideoRotation(rotation);
1264 m_videoOutput->setVideoMirrored(mirrored);
1265}
1266
1267void AVFMediaPlayer::videoOrientationForAssetTrack(AVAssetTrack *videoTrack,
1268 QtVideo::Rotation &angle,
1269 bool &mirrored)
1270{
1271 angle = QtVideo::Rotation::None;
1272 if (videoTrack) {
1273 CGAffineTransform transform = videoTrack.preferredTransform;
1274 if (CGAffineTransformIsIdentity(transform))
1275 return;
1276 qreal delta = transform.a * transform.d - transform.b * transform.c;
1277 qreal radians = qAtan2(transform.b, transform.a);
1278 qreal degrees = qRadiansToDegrees(radians);
1279 qreal scaleX = (transform.a/qAbs(transform.a)) * qSqrt(qPow(transform.a, 2) + qPow(transform.c, 2));
1280 qreal scaleY = (transform.d/qAbs(transform.d)) * qSqrt(qPow(transform.b, 2) + qPow(transform.d, 2));
1281
1282 if (delta < 0.0) { // flipped
1283 if (scaleX < 0.0) {
1284 // vertical flip
1285 degrees = -degrees;
1286 } else if (scaleY < 0.0) {
1287 // horizontal flip
1288 degrees = (180 + (int)degrees) % 360;
1289 }
1290 mirrored = true;
1291 }
1292
1293 if (qFuzzyCompare(degrees, qreal(90)) || qFuzzyCompare(degrees, qreal(-270))) {
1294 angle = QtVideo::Rotation::Clockwise90;
1295 } else if (qFuzzyCompare(degrees, qreal(-90)) || qFuzzyCompare(degrees, qreal(270))) {
1296 angle = QtVideo::Rotation::Clockwise270;
1297 } else if (qFuzzyCompare(degrees, qreal(180)) || qFuzzyCompare(degrees, qreal(-180))) {
1298 angle = QtVideo::Rotation::Clockwise180;
1299 }
1300 }
1301}
1302
1303void AVFMediaPlayer::setPitchCompensation(bool enabled)
1304{
1305 if (m_pitchCompensationEnabled == enabled)
1306 return;
1307
1308 applyPitchCompensation(enabled);
1309
1310 m_pitchCompensationEnabled = enabled;
1311 pitchCompensationChanged(enabled);
1312}
1313
1314bool AVFMediaPlayer::pitchCompensation() const
1315{
1316 return m_pitchCompensationEnabled;
1317}
1318
1319QPlatformMediaPlayer::PitchCompensationAvailability
1320AVFMediaPlayer::pitchCompensationAvailability() const
1321{
1322 return QPlatformMediaPlayer::PitchCompensationAvailability::Available;
1323}
1324
1325#include "moc_avfmediaplayer_p.cpp"
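The observer class above funnels every AVFoundation key-value notification through a single observeValueForKeyPath: implementation and tells the registrations apart by context pointer (the AVFMediaPlayerObserver*Context statics near the top of the file). As a self-contained illustration of that idiom, here is a minimal sketch; DemoSource, DemoObserver and kDemoValueContext are invented names for the example and are not Qt or AVFoundation API.

    #import <Foundation/Foundation.h>

    // Unique context pointer: its address, not its value, identifies the registration.
    static void *kDemoValueContext = &kDemoValueContext;

    @interface DemoSource : NSObject
    @property int value;
    @end
    @implementation DemoSource
    @end

    @interface DemoObserver : NSObject
    - (void)startObserving:(DemoSource *)source;
    @end

    @implementation DemoObserver
    - (void)startObserving:(DemoSource *)source
    {
        [source addObserver:self
                 forKeyPath:@"value"
                    options:NSKeyValueObservingOptionNew
                    context:kDemoValueContext];
    }

    - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object
                            change:(NSDictionary *)change context:(void *)context
    {
        if (context == kDemoValueContext) {
            // Only handle notifications registered with our own context;
            // everything else is forwarded to the superclass, as in the player observer.
            NSLog(@"value changed to %@", change[NSKeyValueChangeNewKey]);
        } else {
            [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
        }
    }
    @end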