// Snapshot metadata (non-code residue): 5 years ago · Sep 26, 2020, 02:50 AM
1#import <React/RCTConvert.h>
2#import "RCTVideo.h"
3#import <React/RCTBridgeModule.h>
4#import <React/RCTEventDispatcher.h>
5#import <React/UIView+React.h>
6#include <MediaAccessibility/MediaAccessibility.h>
7#include <AVFoundation/AVFoundation.h>
8
// Key paths observed via KVO on AVPlayerItem (status, buffering, metadata),
// AVPlayerLayer (readyForDisplay) and AVPlayer (rate, external playback).
static NSString *const statusKeyPath = @"status";
static NSString *const playbackLikelyToKeepUpKeyPath = @"playbackLikelyToKeepUp";
static NSString *const playbackBufferEmptyKeyPath = @"playbackBufferEmpty";
static NSString *const readyForDisplayKeyPath = @"readyForDisplay";
static NSString *const playbackRate = @"rate";                 // AVPlayer.rate
static NSString *const timedMetadata = @"timedMetadata";
static NSString *const externalPlaybackActive = @"externalPlaybackActive";

// Sentinel for "no track selected" when resolving text/audio track props.
static int const RCTVideoUnset = -1;

// Logging helper: forwards to NSLog in DEBUG builds, compiles to a no-op otherwise.
#ifdef DEBUG
  #define DebugLog(...) NSLog(__VA_ARGS__)
#else
  #define DebugLog(...) (void)0
#endif
24
@implementation RCTVideo
{
  // Core playback objects. _playerLayer is used when controls == NO,
  // _playerViewController when controls == YES.
  AVPlayer *_player;
  AVPlayerItem *_playerItem;
  NSDictionary *_source;            // last `src` prop received from JS
  BOOL _playerItemObserversSet;     // guards paired KVO add/remove on _playerItem
  BOOL _playerBufferEmpty;
  AVPlayerLayer *_playerLayer;
  BOOL _playerLayerObserverSet;
  RCTVideoPlayerViewController *_playerViewController;
  NSURL *_videoURL;

  /* Required to publish events */
  RCTEventDispatcher *_eventDispatcher;
  BOOL _playbackRateObserverRegistered;            // KVO on _player "rate"
  BOOL _isExternalPlaybackActiveObserverRegistered; // KVO on _player external playback
  BOOL _videoLoadStarted;           // set by setSrc:, cleared once onVideoLoad fires

  bool _pendingSeek;
  float _pendingSeekTime;
  float _lastSeekTime;

  /* For sending videoProgress events */
  Float64 _progressUpdateInterval;  // milliseconds (converted to seconds when used)
  BOOL _controls;
  id _timeObserver;                 // token from addPeriodicTimeObserverForInterval:

  /* Keep track of any modifiers, need to be applied after each play */
  float _volume;
  float _rate;
  float _maxBitRate;

  BOOL _muted;
  BOOL _paused;
  BOOL _repeat;
  BOOL _allowsExternalPlayback;
  NSArray * _textTracks;            // sideloaded text tracks (see playerItemPrepareText:)
  NSDictionary * _selectedTextTrack;
  NSDictionary * _selectedAudioTrack;
  BOOL _playbackStalled;
  BOOL _playInBackground;
  BOOL _playWhenInactive;
  BOOL _pictureInPicture;
  NSString * _ignoreSilentSwitch;   // "inherit", "ignore" or "obey"
  NSString * _resizeMode;
  BOOL _fullscreen;
  BOOL _fullscreenAutorotate;
  NSString * _fullscreenOrientation;
  BOOL _fullscreenPlayerPresented;
  NSString *_filterName;
  BOOL _filterEnabled;
  UIViewController * _presentingViewController;
#if __has_include(<react-native-video/RCTVideoCache.h>)
  RCTVideoCache * _videoCache;
#endif
#if TARGET_OS_IOS
  // Completion handler stashed while PiP is being dismissed; see
  // setRestoreUserInterfaceForPIPStopCompletionHandler:.
  void (^__strong _Nonnull _restoreUserInterfaceForPIPStopCompletionHandler)(BOOL);
  AVPictureInPictureController *_pipController;
#endif
}
85
/// Designated initializer. Stores the event dispatcher, seeds every playback
/// modifier with its default value and registers for app-lifecycle and
/// audio-route notifications.
- (instancetype)initWithEventDispatcher:(RCTEventDispatcher *)eventDispatcher
{
  self = [super init];
  if (self) {
    _eventDispatcher = eventDispatcher;

    // Observer bookkeeping.
    _playbackRateObserverRegistered = NO;
    _isExternalPlaybackActiveObserverRegistered = NO;
    _playbackStalled = NO;

    // Playback modifier defaults.
    _rate = 1.0;
    _volume = 1.0;
    _resizeMode = @"AVLayerVideoGravityResizeAspectFill";
    _fullscreenAutorotate = YES;
    _fullscreenOrientation = @"all";
    _pendingSeek = false;
    _pendingSeekTime = 0.0f;
    _lastSeekTime = 0.0f;
    _progressUpdateInterval = 250;
    _controls = NO;
    _playerBufferEmpty = YES;
    _playInBackground = false;
    _allowsExternalPlayback = YES;
    _playWhenInactive = false;
    _pictureInPicture = false;
    _ignoreSilentSwitch = @"inherit"; // inherit, ignore, obey
#if TARGET_OS_IOS
    _restoreUserInterfaceForPIPStopCompletionHandler = NULL;
#endif
#if __has_include(<react-native-video/RCTVideoCache.h>)
    _videoCache = [RCTVideoCache sharedInstance];
#endif

    // App lifecycle + audio route notifications.
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
    [center addObserver:self
               selector:@selector(applicationWillResignActive:)
                   name:UIApplicationWillResignActiveNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(applicationDidEnterBackground:)
                   name:UIApplicationDidEnterBackgroundNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(applicationWillEnterForeground:)
                   name:UIApplicationWillEnterForegroundNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(audioRouteChanged:)
                   name:AVAudioSessionRouteChangeNotification
                 object:nil];
  }

  return self;
}
139
/// Builds the view controller used for the native-controls / fullscreen
/// presentation, wired to this view as its delegate and sized to our bounds.
/// Note: `playerItem` is accepted for signature compatibility but unused here.
- (RCTVideoPlayerViewController*)createPlayerViewController:(AVPlayer*)player
                                             withPlayerItem:(AVPlayerItem*)playerItem {
  RCTVideoPlayerViewController *playerVC = [[RCTVideoPlayerViewController alloc] init];
  playerVC.showsPlaybackControls = YES;
  playerVC.rctDelegate = self;
  playerVC.preferredOrientation = _fullscreenOrientation;
  playerVC.view.frame = self.bounds;
  playerVC.player = player;
  return playerVC;
}
151
152/* ---------------------------------------------------------
153 ** Get the duration for a AVPlayerItem.
154 ** ------------------------------------------------------- */
155
/// Duration of the current player item, or kCMTimeInvalid until the item
/// has reached AVPlayerItemStatusReadyToPlay.
- (CMTime)playerItemDuration
{
  AVPlayerItem *item = _player.currentItem;
  if (item.status != AVPlayerItemStatusReadyToPlay) {
    return kCMTimeInvalid;
  }
  return item.duration;
}
166
/// First seekable time range of the current item, or kCMTimeRangeZero when
/// the item is not ready (or reports no seekable ranges — firstObject is
/// nil-safe and CMTimeRangeValue on nil yields a zeroed struct).
- (CMTimeRange)playerItemSeekableTimeRange
{
  AVPlayerItem *item = _player.currentItem;
  if (item.status != AVPlayerItemStatusReadyToPlay) {
    return kCMTimeRangeZero;
  }
  return item.seekableTimeRanges.firstObject.CMTimeRangeValue;
}
177
/// Installs a periodic time observer that drives videoProgress events.
/// _progressUpdateInterval is in milliseconds, so divide by 1000 for the
/// seconds value CMTimeMakeWithSeconds expects.
-(void)addPlayerTimeObserver
{
  const Float64 intervalSeconds = _progressUpdateInterval / 1000;
  // @see endScrubbing in AVPlayerDemoPlaybackViewController.m
  // of https://developer.apple.com/library/ios/samplecode/AVPlayerDemo/Introduction/Intro.html
  __weak RCTVideo *weakSelf = self;
  CMTime interval = CMTimeMakeWithSeconds(intervalSeconds, NSEC_PER_SEC);
  _timeObserver = [_player addPeriodicTimeObserverForInterval:interval
                                                        queue:NULL
                                                   usingBlock:^(CMTime time) {
                                                     [weakSelf sendProgressUpdate];
                                                   }];
}
189
190/* Cancels the previously registered time observer. */
/* Cancels the previously registered time observer; safe to call when none is set. */
-(void)removePlayerTimeObserver
{
  if (!_timeObserver) {
    return;
  }
  [_player removeTimeObserver:_timeObserver];
  _timeObserver = nil;
}
199
200#pragma mark - Progress
201
/// Tears down notifications, the player layer and all KVO registrations.
/// The player observers are removed only when their registration flag is set:
/// calling removeObserver:forKeyPath: on an object we never observed raises
/// NSRangeException, and setSrc: may already have detached them.
- (void)dealloc
{
  [[NSNotificationCenter defaultCenter] removeObserver:self];
  [self removePlayerLayer];
  [self removePlayerItemObservers];
  if (_playbackRateObserverRegistered) {
    [_player removeObserver:self forKeyPath:playbackRate context:nil];
    _playbackRateObserverRegistered = NO;
  }
  if (_isExternalPlaybackActiveObserverRegistered) {
    [_player removeObserver:self forKeyPath:externalPlaybackActive context:nil];
    _isExternalPlaybackActiveObserverRegistered = NO;
  }
}
210
211#pragma mark - App lifecycle handlers
212
/// Pauses playback when the app resigns active, unless configured to keep
/// playing (playInBackground / playWhenInactive) or already paused.
- (void)applicationWillResignActive:(NSNotification *)notification
{
  BOOL shouldKeepPlaying = _playInBackground || _playWhenInactive || _paused;
  if (shouldKeepPlaying) {
    return;
  }
  [_player pause];
  [_player setRate:0.0];
}
220
/// Detaches the player from its layer / view controller on backgrounding so
/// audio keeps playing. See https://developer.apple.com/library/ios/qa/qa1668/_index.html
- (void)applicationDidEnterBackground:(NSNotification *)notification
{
  if (!_playInBackground) {
    return;
  }
  [_playerLayer setPlayer:nil];
  [_playerViewController setPlayer:nil];
}
229
/// Reapplies playback modifiers on foregrounding and, if background playback
/// was enabled, reattaches the player detached in applicationDidEnterBackground:.
- (void)applicationWillEnterForeground:(NSNotification *)notification
{
  [self applyModifiers];
  if (!_playInBackground) {
    return;
  }
  [_playerLayer setPlayer:_player];
  [_playerViewController setPlayer:_player];
}
238
239#pragma mark - Audio events
240
/// Forwards "old device unavailable" route changes (e.g. headphones unplugged)
/// to JS as onVideoAudioBecomingNoisy. The event block is nil-checked before
/// invocation — calling a nil block crashes, and the JS side may not have
/// registered a handler (every other event in this class is guarded the same way).
- (void)audioRouteChanged:(NSNotification *)notification
{
  NSNumber *reason = [[notification userInfo] objectForKey:AVAudioSessionRouteChangeReasonKey];
  if (reason.unsignedIntValue == AVAudioSessionRouteChangeReasonOldDeviceUnavailable
      && self.onVideoAudioBecomingNoisy) {
    self.onVideoAudioBecomingNoisy(@{@"target": self.reactTag});
  }
}
249
250#pragma mark - Progress
251
/// Periodic-time-observer callback: posts an in-app progress notification and
/// emits onVideoProgress to JS. No-ops until the item is ready and reports a
/// valid duration.
- (void)sendProgressUpdate
{
  AVPlayerItem *currentItem = [_player currentItem];
  if (currentItem == nil || currentItem.status != AVPlayerItemStatusReadyToPlay) {
    return;
  }

  CMTime itemDuration = [self playerItemDuration];
  if (CMTIME_IS_INVALID(itemDuration)) {
    return;
  }

  CMTime now = _player.currentTime;
  const Float64 durationSecs = CMTimeGetSeconds(itemDuration);
  const Float64 positionSecs = CMTimeGetSeconds(now);

  // Broadcast normalized progress (0..1) for any in-app listeners.
  [[NSNotificationCenter defaultCenter] postNotificationName:@"RCTVideo_progress"
                                                      object:nil
                                                    userInfo:@{@"progress": @(positionSecs / durationSecs)}];

  if (positionSecs >= 0 && self.onVideoProgress) {
    self.onVideoProgress(@{
      @"currentTime": @((float)positionSecs),
      @"playableDuration": [self calculatePlayableDuration],
      @"atValue": @(now.value),
      @"atTimescale": @(now.timescale),
      @"target": self.reactTag,
      @"seekableDuration": [self calculateSeekableDuration],
    });
  }
}
281
282/*!
283 * Calculates and returns the playable duration of the current player item using its loaded time ranges.
284 *
285 * \returns The playable duration of the current player item in seconds.
286 */
287- (NSNumber *)calculatePlayableDuration
288{
289 AVPlayerItem *video = _player.currentItem;
290 if (video.status == AVPlayerItemStatusReadyToPlay) {
291 __block CMTimeRange effectiveTimeRange;
292 [video.loadedTimeRanges enumerateObjectsUsingBlock:^(id obj, NSUInteger idx, BOOL *stop) {
293 CMTimeRange timeRange = [obj CMTimeRangeValue];
294 if (CMTimeRangeContainsTime(timeRange, video.currentTime)) {
295 effectiveTimeRange = timeRange;
296 *stop = YES;
297 }
298 }];
299 Float64 playableDuration = CMTimeGetSeconds(CMTimeRangeGetEnd(effectiveTimeRange));
300 if (playableDuration > 0) {
301 return [NSNumber numberWithFloat:playableDuration];
302 }
303 }
304 return [NSNumber numberWithInteger:0];
305}
306
/// Seconds of seekable content in the current item, or 0 when the seekable
/// range's duration is not a numeric CMTime.
- (NSNumber *)calculateSeekableDuration
{
  CMTimeRange seekableRange = [self playerItemSeekableTimeRange];
  if (!CMTIME_IS_NUMERIC(seekableRange.duration)) {
    return [NSNumber numberWithInteger:0];
  }
  return [NSNumber numberWithFloat:CMTimeGetSeconds(seekableRange.duration)];
}
316
/// Registers KVO on the current player item. timedMetadata needs the new
/// value delivered in the change dictionary; the others only trigger a re-read.
- (void)addPlayerItemObservers
{
  for (NSString *keyPath in @[statusKeyPath, playbackBufferEmptyKeyPath, playbackLikelyToKeepUpKeyPath]) {
    [_playerItem addObserver:self forKeyPath:keyPath options:0 context:nil];
  }
  [_playerItem addObserver:self forKeyPath:timedMetadata options:NSKeyValueObservingOptionNew context:nil];
  _playerItemObserversSet = YES;
}
325
326/* Fixes https://github.com/brentvatne/react-native-video/issues/43
327 * Crashes caused when trying to remove the observer when there is no
328 * observer set */
329- (void)removePlayerItemObservers
330{
331 if (_playerItemObserversSet) {
332 [_playerItem removeObserver:self forKeyPath:statusKeyPath];
333 [_playerItem removeObserver:self forKeyPath:playbackBufferEmptyKeyPath];
334 [_playerItem removeObserver:self forKeyPath:playbackLikelyToKeepUpKeyPath];
335 [_playerItem removeObserver:self forKeyPath:timedMetadata];
336 _playerItemObserversSet = NO;
337 }
338}
339
340#pragma mark - Player and source
341
/// `src` prop setter. Tears down observers bound to the previous item, then
/// rebuilds the player asynchronously on the main queue so that sibling react
/// props (filter, maxBitRate, textTracks, ...) are applied before item creation.
- (void)setSrc:(NSDictionary *)source
{
  _source = source;
  [self removePlayerLayer];
  [self removePlayerTimeObserver];
  [self removePlayerItemObservers];

  dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t) 0), dispatch_get_main_queue(), ^{

    // perform on next run loop, otherwise other passed react-props may not be set
    [self playerItemForSource:source withCallback:^(AVPlayerItem * playerItem) {
      _playerItem = playerItem;
      [self addPlayerItemObservers];
      [self setFilter:_filterName];
      [self setMaxBitRate:_maxBitRate];

      [_player pause];

      // Detach KVO from the outgoing AVPlayer before replacing it; removing
      // an observer that was never added would throw, hence the flags.
      if (_playbackRateObserverRegistered) {
        [_player removeObserver:self forKeyPath:playbackRate context:nil];
        _playbackRateObserverRegistered = NO;
      }
      if (_isExternalPlaybackActiveObserverRegistered) {
        [_player removeObserver:self forKeyPath:externalPlaybackActive context:nil];
        _isExternalPlaybackActiveObserverRegistered = NO;
      }

      // Fresh player for the new item; keep playing past the end ourselves so
      // repeat / onVideoEnd handling stays in playerItemDidReachEnd:.
      _player = [AVPlayer playerWithPlayerItem:_playerItem];
      [self touchPlayerLayer];
      _player.actionAtItemEnd = AVPlayerActionAtItemEndNone;

      [_player addObserver:self forKeyPath:playbackRate options:0 context:nil];
      _playbackRateObserverRegistered = YES;

      [_player addObserver:self forKeyPath:externalPlaybackActive options:0 context:nil];
      _isExternalPlaybackActiveObserverRegistered = YES;

      [self addPlayerTimeObserver];

      //Perform on next run loop, otherwise onVideoLoadStart is nil
      if (self.onVideoLoadStart) {
        id uri = [source objectForKey:@"uri"];
        id type = [source objectForKey:@"type"];
        self.onVideoLoadStart(@{@"src": @{
                                    @"uri": uri ? uri : [NSNull null],
                                    @"type": type ? type : [NSNull null],
                                    @"isNetwork": [NSNumber numberWithBool:(bool)[source objectForKey:@"isNetwork"]]},
                                @"target": self.reactTag
                                });
      }
    }];
  });
  // NOTE(review): set synchronously — i.e. before the async setup above has
  // actually run. onVideoLoad checks this flag later; confirm ordering is intended.
  _videoLoadStarted = YES;
}
396
/// Resolves a local file path (or file:// URL string) to an NSURL.
/// Falls back to searching the app's Documents directory — the sandbox path
/// can change between launches, so only the path relative to Documents/ is
/// trusted. Returns nil when no matching file exists.
- (NSURL*) urlFilePath:(NSString*) filepath {
  // A file URL string must START with the scheme; the original containsString:
  // check would also misclassify a plain path that merely embeds "file://".
  if ([filepath hasPrefix:@"file://"]) {
    return [NSURL URLWithString:filepath];
  }

  // if no file found, check if the file exists in the Document directory
  NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
  NSString* relativeFilePath = [filepath lastPathComponent];
  // the file may be multiple levels below the documents directory
  NSArray* fileComponents = [filepath componentsSeparatedByString:@"Documents/"];
  if (fileComponents.count > 1) {
    relativeFilePath = [fileComponents objectAtIndex:1];
  }

  NSString *path = [paths.firstObject stringByAppendingPathComponent:relativeFilePath];
  if ([[NSFileManager defaultManager] fileExistsAtPath:path]) {
    return [NSURL fileURLWithPath:path];
  }
  return nil;
}
417
/// Builds the AVPlayerItem for playback. Without sideloaded text tracks the
/// asset is used directly; otherwise the video/audio tracks plus each loadable
/// text track are merged into an AVMutableComposition.
/// @param asset        The source media asset.
/// @param assetOptions Options forwarded when loading remote text tracks.
/// @param handler      Receives the finished AVPlayerItem.
- (void)playerItemPrepareText:(AVAsset *)asset assetOptions:(NSDictionary * __nullable)assetOptions withCallback:(void(^)(AVPlayerItem *))handler
{
  if (!_textTracks || _textTracks.count==0) {
    handler([AVPlayerItem playerItemWithAsset:asset]);
    return;
  }

  // AVPlayer can't airplay AVMutableCompositions
  _allowsExternalPlayback = NO;

  // sideload text tracks
  AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];

  AVAssetTrack *videoAsset = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
  if (!videoAsset) {
    // No video track to compose against (e.g. audio-only asset): fall back to
    // the plain asset — inserting a nil track would raise NSInvalidArgumentException.
    handler([AVPlayerItem playerItemWithAsset:asset]);
    return;
  }
  AVMutableCompositionTrack *videoCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
  [videoCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration)
                          ofTrack:videoAsset
                           atTime:kCMTimeZero
                            error:nil];

  // Only add an audio track when the asset has one; soundless videos would
  // otherwise crash on the nil-track insert.
  AVAssetTrack *audioAsset = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
  if (audioAsset) {
    AVMutableCompositionTrack *audioCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [audioCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration)
                            ofTrack:audioAsset
                             atTime:kCMTimeZero
                              error:nil];
  }

  NSMutableArray* validTextTracks = [NSMutableArray array];
  for (int i = 0; i < _textTracks.count; ++i) {
    AVURLAsset *textURLAsset;
    NSString *textUri = [_textTracks objectAtIndex:i][@"uri"];
    if ([[textUri lowercaseString] hasPrefix:@"http"]) {
      textURLAsset = [AVURLAsset URLAssetWithURL:[NSURL URLWithString:textUri] options:assetOptions];
    } else {
      textURLAsset = [AVURLAsset URLAssetWithURL:[self urlFilePath:textUri] options:nil];
    }
    AVAssetTrack *textTrackAsset = [textURLAsset tracksWithMediaType:AVMediaTypeText].firstObject;
    if (!textTrackAsset) continue; // fix when there's no textTrackAsset
    [validTextTracks addObject:[_textTracks objectAtIndex:i]];
    AVMutableCompositionTrack *textCompTrack = [mixComposition
                                                addMutableTrackWithMediaType:AVMediaTypeText
                                                preferredTrackID:kCMPersistentTrackID_Invalid];
    [textCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration)
                           ofTrack:textTrackAsset
                            atTime:kCMTimeZero
                             error:nil];
  }
  // Drop tracks that failed to load so JS-side track indices stay in sync.
  if (validTextTracks.count != _textTracks.count) {
    [self setTextTracks:validTextTracks];
  }

  handler([AVPlayerItem playerItemWithAsset:mixComposition]);
}
471
/// Creates an AVPlayerItem for the JS source descriptor
/// ({ uri, type, isNetwork, isAsset, shouldCache, ... }) and hands it to
/// `handler`, routing through the cache and/or text-track sideloading as needed.
- (void)playerItemForSource:(NSDictionary *)source withCallback:(void(^)(AVPlayerItem *))handler
{
  bool isNetwork = [RCTConvert BOOL:[source objectForKey:@"isNetwork"]];
  bool isAsset = [RCTConvert BOOL:[source objectForKey:@"isAsset"]];
  bool shouldCache = [RCTConvert BOOL:[source objectForKey:@"shouldCache"]];
  NSString *uri = [source objectForKey:@"uri"];
  NSString *type = [source objectForKey:@"type"];
  if (!uri || [uri isEqualToString:@""]) {
    DebugLog(@"Could not find video URL in source '%@'", source);
    return;
  }

  NSURL *url = isNetwork || isAsset
    ? [NSURL URLWithString:uri]
    : [[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]];
  NSMutableDictionary *assetOptions = [[NSMutableDictionary alloc] init];

  if (isNetwork) {
    /* Per #1091, this is not a public API.
     * We need to either get approval from Apple to use this or use a different approach.
     NSDictionary *headers = [source objectForKey:@"requestHeaders"];
     if ([headers count] > 0) {
       [assetOptions setObject:headers forKey:@"AVURLAssetHTTPHeaderFieldsKey"];
     }
     */
    // Forward shared cookies so authenticated streams keep working.
    NSArray *cookies = [[NSHTTPCookieStorage sharedHTTPCookieStorage] cookies];
    [assetOptions setObject:cookies forKey:AVURLAssetHTTPCookiesKey];

#if __has_include(<react-native-video/RCTVideoCache.h>)
    if (shouldCache) {
      if (!_textTracks || !_textTracks.count) {
        [self playerItemForSourceUsingCache:uri assetOptions:assetOptions withCallback:handler];
        return;
      }
      /* The DVURLAsset created by cache doesn't have a tracksWithMediaType property, so trying
       * to bring in the text track code will crash. I suspect this is because the asset hasn't fully loaded.
       * Until this is fixed, we need to bypass caching when text tracks are specified.
       * (The warning below previously fired on the opposite branch — when
       * caching WAS used — which made it meaningless.)
       */
      DebugLog(@"Caching is not supported for uri '%@' because text tracks are not compatible with the cache. Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md", uri);
    }
#endif

    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:assetOptions];
    [self playerItemPrepareText:asset assetOptions:assetOptions withCallback:handler];
    return;
  } else if (isAsset) {
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
    [self playerItemPrepareText:asset assetOptions:assetOptions withCallback:handler];
    return;
  }

  // Bundled resource: `url` was already resolved to the bundle file above,
  // so reuse it instead of recomputing the same path.
  AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
  [self playerItemPrepareText:asset assetOptions:assetOptions withCallback:handler];
}
524
525#if __has_include(<react-native-video/RCTVideoCache.h>)
526
/// Resolves `uri` through RCTVideoCache. Cache hits are played back directly;
/// uncacheable URIs (no/unsupported file extension) fall back to a plain
/// AVURLAsset; cache misses are wrapped in a DVURLAsset whose loader delegate
/// (below) stores downloaded data back into the cache.
- (void)playerItemForSourceUsingCache:(NSString *)uri assetOptions:(NSDictionary *)options withCallback:(void(^)(AVPlayerItem *))handler {
  NSURL *url = [NSURL URLWithString:uri];
  [_videoCache getItemForUri:uri withCallback:^(RCTVideoCacheStatus videoCacheStatus, AVAsset * _Nullable cachedAsset) {
    switch (videoCacheStatus) {
      case RCTVideoCacheStatusMissingFileExtension: {
        DebugLog(@"Could not generate cache key for uri '%@'. It is currently not supported to cache urls that do not include a file extension. The video file will not be cached. Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md", uri);
        AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:options];
        [self playerItemPrepareText:asset assetOptions:options withCallback:handler];
        return;
      }
      case RCTVideoCacheStatusUnsupportedFileExtension: {
        DebugLog(@"Could not generate cache key for uri '%@'. The file extension of that uri is currently not supported. The video file will not be cached. Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md", uri);
        AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:options];
        [self playerItemPrepareText:asset assetOptions:options withCallback:handler];
        return;
      }
      default:
        // Cache hit: play the stored asset directly. A miss falls through to
        // the DVURLAsset path below.
        if (cachedAsset) {
          DebugLog(@"Playing back uri '%@' from cache", uri);
          // See note in playerItemForSource about not being able to support text tracks & caching
          handler([AVPlayerItem playerItemWithAsset:cachedAsset]);
          return;
        }
    }

    // Cache miss: stream via DVURLAsset; its loaderDelegate callback persists
    // the downloaded bytes into the cache (see dvAssetLoaderDelegate:didLoadData:forURL:).
    DVURLAsset *asset = [[DVURLAsset alloc] initWithURL:url options:options networkTimeout:10000];
    asset.loaderDelegate = self;

    /* More granular code to have control over the DVURLAsset
    DVAssetLoaderDelegate *resourceLoaderDelegate = [[DVAssetLoaderDelegate alloc] initWithURL:url];
    resourceLoaderDelegate.delegate = self;
    NSURLComponents *components = [[NSURLComponents alloc] initWithURL:url resolvingAgainstBaseURL:NO];
    components.scheme = [DVAssetLoaderDelegate scheme];
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:[components URL] options:options];
    [asset.resourceLoader setDelegate:resourceLoaderDelegate queue:dispatch_get_main_queue()];
    */

    handler([AVPlayerItem playerItemWithAsset:asset]);
  }];
}
567
568#pragma mark - DVAssetLoaderDelegate
569
/// DVAssetLoaderDelegate callback: persists fully-downloaded media data into
/// the video cache so subsequent plays of the same URL are served locally.
- (void)dvAssetLoaderDelegate:(DVAssetLoaderDelegate *)loaderDelegate
                  didLoadData:(NSData *)data
                       forURL:(NSURL *)url {
  [_videoCache storeItem:data forUri:[url absoluteString] withCallback:^(BOOL success) {
    // The original log ended in "successfully ?" and ignored the flag —
    // report the actual outcome instead.
    DebugLog(@"Cache data store %@ for '%@'", success ? @"succeeded" : @"failed", [url absoluteString]);
  }];
}
577
578#endif
579
/// Central KVO dispatcher. Routes changes from the player layer
/// (readyForDisplay), the player item (timedMetadata, status, buffering),
/// the player (rate, external playback) and the fullscreen controller's
/// contentOverlayView (frame) to the corresponding JS events.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{

  // Player-layer readiness — first frame can be rendered.
  if([keyPath isEqualToString:readyForDisplayKeyPath] && [change objectForKey:NSKeyValueChangeNewKey] && self.onReadyForDisplay) {
    self.onReadyForDisplay(@{@"target": self.reactTag});
    return;
  }
  if (object == _playerItem) {
    // When timeMetadata is read the event onTimedMetadata is triggered
    if ([keyPath isEqualToString:timedMetadata]) {
      // NOTE(review): @"new" is the literal value of NSKeyValueChangeNewKey;
      // prefer the constant to guard against SDK changes.
      NSArray<AVMetadataItem *> *items = [change objectForKey:@"new"];
      if (items && ![items isEqual:[NSNull null]] && items.count > 0) {
        NSMutableArray *array = [NSMutableArray new];
        for (AVMetadataItem *item in items) {
          NSString *value = (NSString *)item.value;
          NSString *identifier = item.identifier;

          if (![value isEqual: [NSNull null]]) {
            NSDictionary *dictionary = [[NSDictionary alloc] initWithObjects:@[value, identifier] forKeys:@[@"value", @"identifier"]];

            [array addObject:dictionary];
          }
        }

        // NOTE(review): unlike most events in this class, onTimedMetadata is
        // invoked without a nil-check — verify JS always registers it.
        self.onTimedMetadata(@{
                               @"target": self.reactTag,
                               @"metadata": array
                               });
      }
    }

    if ([keyPath isEqualToString:statusKeyPath]) {
      // Handle player item status change.
      if (_playerItem.status == AVPlayerItemStatusReadyToPlay) {
        float duration = CMTimeGetSeconds(_playerItem.asset.duration);

        // Live streams report an indefinite (NaN) duration; surface 0 to JS.
        if (isnan(duration)) {
          duration = 0.0;
        }

        NSObject *width = @"undefined";
        NSObject *height = @"undefined";
        NSString *orientation = @"undefined";

        // Derive naturalSize/orientation from the first video track, taking
        // the preferred transform into account for rotated recordings.
        if ([_playerItem.asset tracksWithMediaType:AVMediaTypeVideo].count > 0) {
          AVAssetTrack *videoTrack = [[_playerItem.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
          width = [NSNumber numberWithFloat:videoTrack.naturalSize.width];
          height = [NSNumber numberWithFloat:videoTrack.naturalSize.height];
          CGAffineTransform preferredTransform = [videoTrack preferredTransform];

          if ((videoTrack.naturalSize.width == preferredTransform.tx
               && videoTrack.naturalSize.height == preferredTransform.ty)
              || (preferredTransform.tx == 0 && preferredTransform.ty == 0))
          {
            orientation = @"landscape";
          } else {
            orientation = @"portrait";
          }
        }

        // Emit onVideoLoad only for the load kicked off by setSrc:
        // (_videoLoadStarted), not for later status flips.
        if (self.onVideoLoad && _videoLoadStarted) {
          self.onVideoLoad(@{@"duration": [NSNumber numberWithFloat:duration],
                             @"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(_playerItem.currentTime)],
                             @"canPlayReverse": [NSNumber numberWithBool:_playerItem.canPlayReverse],
                             @"canPlayFastForward": [NSNumber numberWithBool:_playerItem.canPlayFastForward],
                             @"canPlaySlowForward": [NSNumber numberWithBool:_playerItem.canPlaySlowForward],
                             @"canPlaySlowReverse": [NSNumber numberWithBool:_playerItem.canPlaySlowReverse],
                             @"canStepBackward": [NSNumber numberWithBool:_playerItem.canStepBackward],
                             @"canStepForward": [NSNumber numberWithBool:_playerItem.canStepForward],
                             @"naturalSize": @{
                                 @"width": width,
                                 @"height": height,
                                 @"orientation": orientation
                                 },
                             @"audioTracks": [self getAudioTrackInfo],
                             @"textTracks": [self getTextTrackInfo],
                             @"target": self.reactTag});
        }
        _videoLoadStarted = NO;

        [self attachListeners];
        [self applyModifiers];
      } else if (_playerItem.status == AVPlayerItemStatusFailed && self.onVideoError) {
        self.onVideoError(@{@"error": @{@"code": [NSNumber numberWithInteger: _playerItem.error.code],
                                        @"domain": _playerItem.error.domain},
                            @"target": self.reactTag});
      }
    } else if ([keyPath isEqualToString:playbackBufferEmptyKeyPath]) {
      // NOTE(review): onVideoBuffer is called without a nil-check here and below.
      _playerBufferEmpty = YES;
      self.onVideoBuffer(@{@"isBuffering": @(YES), @"target": self.reactTag});
    } else if ([keyPath isEqualToString:playbackLikelyToKeepUpKeyPath]) {
      // Continue playing (or not if paused) after being paused due to hitting an unbuffered zone.
      if ((!(_controls || _fullscreenPlayerPresented) || _playerBufferEmpty) && _playerItem.playbackLikelyToKeepUp) {
        [self setPaused:_paused];
      }
      _playerBufferEmpty = NO;
      self.onVideoBuffer(@{@"isBuffering": @(NO), @"target": self.reactTag});
    }
  } else if (object == _player) {
    if([keyPath isEqualToString:playbackRate]) {
      if(self.onPlaybackRateChange) {
        self.onPlaybackRateChange(@{@"playbackRate": [NSNumber numberWithFloat:_player.rate],
                                    @"target": self.reactTag});
      }
      // A rate change back to > 0 after a stall means playback resumed.
      if(_playbackStalled && _player.rate > 0) {
        if(self.onPlaybackResume) {
          self.onPlaybackResume(@{@"playbackRate": [NSNumber numberWithFloat:_player.rate],
                                  @"target": self.reactTag});
        }
        _playbackStalled = NO;
      }
    }
    else if([keyPath isEqualToString:externalPlaybackActive]) {
      if(self.onVideoExternalPlaybackChange) {
        self.onVideoExternalPlaybackChange(@{@"isExternalPlaybackActive": [NSNumber numberWithBool:_player.isExternalPlaybackActive],
                                             @"target": self.reactTag});
      }
    }
  } else if (object == _playerViewController.contentOverlayView) {
    // when controls==true, this is a hack to reset the rootview when rotation happens in fullscreen
    if ([keyPath isEqualToString:@"frame"]) {

      CGRect oldRect = [change[NSKeyValueChangeOldKey] CGRectValue];
      CGRect newRect = [change[NSKeyValueChangeNewKey] CGRectValue];

      if (!CGRectEqualToRect(oldRect, newRect)) {
        if (CGRectEqualToRect(newRect, [UIScreen mainScreen].bounds)) {
          NSLog(@"in fullscreen");
        } else NSLog(@"not fullscreen");

        // Force the root view to re-fit the (possibly rotated) screen.
        [self.reactViewController.view setFrame:[UIScreen mainScreen].bounds];
        [self.reactViewController.view setNeedsLayout];
      }

      return;
    }
  } else if ([super respondsToSelector:@selector(observeValueForKeyPath:ofObject:change:context:)]) {
    // Unhandled observation — forward to super per KVO convention.
    [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
  }
}
720
/// Subscribes to player-item notifications (end of playback, stall, access
/// log). Each pair removes the observer first so repeated calls — the item
/// becomes ready more than once — never double-register.
- (void)attachListeners
{
  NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
  AVPlayerItem *currentItem = [_player currentItem];

  // listen for end of file
  [center removeObserver:self
                    name:AVPlayerItemDidPlayToEndTimeNotification
                  object:currentItem];
  [center addObserver:self
             selector:@selector(playerItemDidReachEnd:)
                 name:AVPlayerItemDidPlayToEndTimeNotification
               object:currentItem];

  [center removeObserver:self
                    name:AVPlayerItemPlaybackStalledNotification
                  object:nil];
  [center addObserver:self
             selector:@selector(playbackStalled:)
                 name:AVPlayerItemPlaybackStalledNotification
               object:nil];

  [center removeObserver:self
                    name:AVPlayerItemNewAccessLogEntryNotification
                  object:nil];
  [center addObserver:self
             selector:@selector(handleAVPlayerAccess:)
                 name:AVPlayerItemNewAccessLogEntryNotification
               object:nil];
}
749
/// Fires on each new access-log entry for the playing item. Currently a stub:
/// the log values are fetched but not yet forwarded to JS (see TODO below),
/// so `lastEvent` is intentionally unused for now.
- (void)handleAVPlayerAccess:(NSNotification *)notification {
  AVPlayerItemAccessLog *accessLog = [((AVPlayerItem *)notification.object) accessLog];
  AVPlayerItemAccessLogEvent *lastEvent = accessLog.events.lastObject;

  /* TODO: get this working
  if (self.onBandwidthUpdate) {
    self.onBandwidthUpdate(@{@"bitrate": [NSNumber numberWithFloat:lastEvent.observedBitrate]});
  }
  */
}
760
/// AVPlayerItemPlaybackStalledNotification handler: notifies JS and records
/// the stall so the rate observer can later emit onPlaybackResume.
- (void)playbackStalled:(NSNotification *)notification
{
  if (self.onPlaybackStalled) {
    self.onPlaybackStalled(@{@"target": self.reactTag});
  }
  _playbackStalled = YES;
}
768
/// End-of-playback handler: emits onVideoEnd, then either loops (seek to
/// zero + reapply modifiers) when `repeat` is set, or stops progress updates.
- (void)playerItemDidReachEnd:(NSNotification *)notification
{
  if (self.onVideoEnd) {
    self.onVideoEnd(@{@"target": self.reactTag});
  }

  if (!_repeat) {
    [self removePlayerTimeObserver];
    return;
  }

  AVPlayerItem *endedItem = [notification object];
  [endedItem seekToTime:kCMTimeZero];
  [self applyModifiers];
}
783
784#pragma mark - Prop setters
785
/// `resizeMode` prop setter. Applies the AVLayerVideoGravity value to
/// whichever presentation is active (view controller when native controls
/// are shown, raw player layer otherwise) and remembers it for later rebuilds.
- (void)setResizeMode:(NSString*)mode
{
  if (_controls) {
    _playerViewController.videoGravity = mode;
  } else {
    _playerLayer.videoGravity = mode;
  }
  _resizeMode = mode;
}
798
/// `playInBackground` prop setter; consulted by the app-lifecycle handlers
/// to decide whether to detach/keep the player on backgrounding.
- (void)setPlayInBackground:(BOOL)playInBackground
{
  _playInBackground = playInBackground;
}
803
/// `allowsExternalPlayback` prop setter; stored for player rebuilds and
/// pushed straight to the live AVPlayer (no-op while _player is nil).
- (void)setAllowsExternalPlayback:(BOOL)allowsExternalPlayback
{
  _allowsExternalPlayback = allowsExternalPlayback;
  _player.allowsExternalPlayback = allowsExternalPlayback;
}
809
/// `playWhenInactive` prop setter; consulted by applicationWillResignActive:
/// to decide whether to pause on e.g. control-center overlay.
- (void)setPlayWhenInactive:(BOOL)playWhenInactive
{
  _playWhenInactive = playWhenInactive;
}
814
/// `pictureInPicture` prop setter (iOS only). Starts or stops the PiP
/// controller on the main queue when the requested state differs from both
/// the stored prop and the controller's actual state.
- (void)setPictureInPicture:(BOOL)pictureInPicture
{
  #if TARGET_OS_IOS
  if (_pictureInPicture == pictureInPicture) {
    return;
  }
  _pictureInPicture = pictureInPicture;

  if (!_pipController) {
    return;
  }

  BOOL pipActive = [_pipController isPictureInPictureActive];
  if (pictureInPicture && !pipActive) {
    dispatch_async(dispatch_get_main_queue(), ^{
      [_pipController startPictureInPicture];
    });
  } else if (!pictureInPicture && pipActive) {
    dispatch_async(dispatch_get_main_queue(), ^{
      [_pipController stopPictureInPicture];
    });
  }
  #endif
}
834
835#if TARGET_OS_IOS
/// Invokes (once) the completion handler AVKit gave us when PiP stopped,
/// telling it whether the UI was restored, then clears it so it can never
/// be called twice.
- (void)setRestoreUserInterfaceForPIPStopCompletionHandler:(BOOL)restore
{
  void (^completion)(BOOL) = _restoreUserInterfaceForPIPStopCompletionHandler;
  if (completion == NULL) {
    return;
  }
  _restoreUserInterfaceForPIPStopCompletionHandler = NULL;
  completion(restore);
}
843
/// Lazily creates the AVPictureInPictureController for the current player
/// layer. No-op when one already exists, no layer is attached yet, or the
/// device doesn't support PiP.
- (void)setupPipController {
  if (_pipController) return;
  if (!_playerLayer) return;
  if (![AVPictureInPictureController isPictureInPictureSupported]) return;

  // Create new controller passing reference to the AVPlayerLayer
  _pipController = [[AVPictureInPictureController alloc] initWithPlayerLayer:_playerLayer];
  _pipController.delegate = self;
}
851#endif
852
/// `ignoreSilentSwitch` prop setter ("inherit" | "ignore" | "obey").
/// Re-applies modifiers so the audio-session category in setPaused: picks up
/// the new mode immediately.
- (void)setIgnoreSilentSwitch:(NSString *)ignoreSilentSwitch
{
  _ignoreSilentSwitch = ignoreSilentSwitch;
  [self applyModifiers];
}
858
/// Pauses or resumes playback. On resume the audio-session category is
/// first set according to the silent-switch policy, then the cached rate
/// is restored — `play` alone would reset the rate to 1.0.
- (void)setPaused:(BOOL)paused
{
  if (paused) {
    [_player pause];
    [_player setRate:0.0];
  } else {
    AVAudioSession *session = [AVAudioSession sharedInstance];
    if ([_ignoreSilentSwitch isEqualToString:@"ignore"]) {
      // Keep playing audio even when the hardware mute switch is on.
      [session setCategory:AVAudioSessionCategoryPlayback error:nil];
    } else if ([_ignoreSilentSwitch isEqualToString:@"obey"]) {
      [session setCategory:AVAudioSessionCategoryAmbient error:nil];
    }
    [_player play];
    [_player setRate:_rate];
  }

  _paused = paused;
}
876
/// Current playback position in seconds, or 0 when no item is loaded.
- (float)getCurrentTime
{
  if (_playerItem == nil) {
    return 0;
  }
  return CMTimeGetSeconds(_playerItem.currentTime);
}
881
/// Legacy seek prop: forwards to setSeek: with a default 100ms tolerance.
- (void)setCurrentTime:(float)currentTime
{
  [self setSeek:@{
    @"time": @(currentTime),
    @"tolerance": @(100)
  }];
}
890
/// Seeks to info[@"time"] (seconds) with info[@"tolerance"] (milliseconds
/// of slack on either side). If the item is not ready the request is
/// remembered in _pendingSeek/_pendingSeekTime for later replay. Fires
/// onVideoSeek when the seek completes.
- (void)setSeek:(NSDictionary *)info
{
  NSNumber *seekTime = info[@"time"];
  NSNumber *seekTolerance = info[@"tolerance"];

  // Timescale 1000 => CMTime values below are expressed in milliseconds.
  int timeScale = 1000;

  AVPlayerItem *item = _player.currentItem;
  if (item && item.status == AVPlayerItemStatusReadyToPlay) {
    // TODO check loadedTimeRanges

    CMTime cmSeekTime = CMTimeMakeWithSeconds([seekTime floatValue], timeScale);
    CMTime current = item.currentTime;
    // TODO figure out a good tolerance level
    CMTime tolerance = CMTimeMake([seekTolerance floatValue], timeScale);
    BOOL wasPaused = _paused;

    // Skip the whole dance when we're already exactly at the target time.
    if (CMTimeCompare(current, cmSeekTime) != 0) {
      // Pause while seeking; playback is resumed in the completion handler.
      if (!wasPaused) [_player pause];
      [_player seekToTime:cmSeekTime toleranceBefore:tolerance toleranceAfter:tolerance completionHandler:^(BOOL finished) {
        // Reinstall the periodic observer if it was torn down meanwhile
        // (addPlayerTimeObserver is defined elsewhere in this class).
        if (!_timeObserver) {
          [self addPlayerTimeObserver];
        }
        if (!wasPaused) {
          [self setPaused:false];
        }
        if(self.onVideoSeek) {
          self.onVideoSeek(@{@"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(item.currentTime)],
                             @"seekTime": seekTime,
                             @"target": self.reactTag});
        }
      }];

      _pendingSeek = false;
    }

  } else {
    // Item not ready yet — remember the request so it can be applied once
    // the item reaches AVPlayerItemStatusReadyToPlay.
    // TODO: See if this makes sense and if so, actually implement it
    _pendingSeek = true;
    _pendingSeekTime = [seekTime floatValue];
  }
}
933
/// Caches the desired playback rate; applied through applyModifiers so it
/// survives pause/resume cycles.
- (void)setRate:(float)rate
{
  _rate = rate;
  [self applyModifiers];
}
939
/// Caches the muted flag; applyModifiers pushes it onto the player.
- (void)setMuted:(BOOL)muted
{
  _muted = muted;
  [self applyModifiers];
}
945
/// Caches the desired volume; applyModifiers pushes it onto the player
/// (unless muted, in which case volume is forced to 0 there).
- (void)setVolume:(float)volume
{
  _volume = volume;
  [self applyModifiers];
}
951
/// Caps the stream's peak bit rate on the current item
/// (AVFoundation documents 0 as "no limit").
- (void)setMaxBitRate:(float)maxBitRate {
  _maxBitRate = maxBitRate;
  _playerItem.preferredPeakBitRate = maxBitRate;
}
956
957
/// Re-applies every cached playback modifier to the current player/item.
/// Run after (re)creating the player or whenever a setting changes so that
/// volume, mute, bitrate, track selection, resize mode, repeat, paused
/// state, controls and external playback all reflect the stored props.
- (void)applyModifiers
{
  if (_muted) {
    // NOTE(review): volume is only zeroed when native controls are off —
    // presumably so the controls UI keeps ownership of the volume; confirm.
    if (!_controls) {
      [_player setVolume:0];
    }
    [_player setMuted:YES];
  } else {
    [_player setVolume:_volume];
    [_player setMuted:NO];
  }

  // Each setter below re-pushes its cached ivar onto the player/item.
  [self setMaxBitRate:_maxBitRate];
  [self setSelectedAudioTrack:_selectedAudioTrack];
  [self setSelectedTextTrack:_selectedTextTrack];
  [self setResizeMode:_resizeMode];
  [self setRepeat:_repeat];
  [self setPaused:_paused];
  [self setControls:_controls];
  [self setAllowsExternalPlayback:_allowsExternalPlayback];
}
979
/// Stores the repeat flag; consulted when playback finishes (handled
/// elsewhere in this class).
- (void)setRepeat:(BOOL)repeat {
  _repeat = repeat;
}
983
/// Selects a media option (audio or legible track) on the current item
/// according to `criteria`:
///   - type "disabled":         deselect (option stays nil)
///   - type "language"/"title": match by extendedLanguageTag / metadata title
///   - type "index":            select by position in the group
///   - anything else ("system"): let AVFoundation choose automatically
/// Selecting nil (no match) disables the whole group.
- (void)setMediaSelectionTrackForCharacteristic:(AVMediaCharacteristic)characteristic
                                   withCriteria:(NSDictionary *)criteria
{
  NSString *type = criteria[@"type"];
  AVMediaSelectionGroup *group = [_player.currentItem.asset
                                  mediaSelectionGroupForMediaCharacteristic:characteristic];
  AVMediaSelectionOption *mediaOption;

  if ([type isEqualToString:@"disabled"]) {
    // Do nothing. We want to ensure option is nil
  } else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) {
    NSString *value = criteria[@"value"];
    for (int i = 0; i < group.options.count; ++i) {
      AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i];
      NSString *optionValue;
      if ([type isEqualToString:@"language"]) {
        optionValue = [currentOption extendedLanguageTag];
      } else {
        // Guard against options with no metadata value: the previous
        // unchecked objectAtIndex:0 threw NSRangeException on an empty
        // array (getTextTrackInfo already guards the same access).
        NSArray *metadataValues = [[currentOption commonMetadata] valueForKey:@"value"];
        optionValue = metadataValues.count > 0 ? [metadataValues objectAtIndex:0] : nil;
      }
      if ([value isEqualToString:optionValue]) {
        mediaOption = currentOption;
        break;
      }
    }
  //} else if ([type isEqualToString:@"default"]) {
  //  option = group.defaultOption; */
  } else if ([type isEqualToString:@"index"]) {
    if ([criteria[@"value"] isKindOfClass:[NSNumber class]]) {
      int index = [criteria[@"value"] intValue];
      // Explicitly reject negative indexes instead of relying on the
      // signed/unsigned comparison to skip them.
      if (index >= 0 && group.options.count > index) {
        mediaOption = [group.options objectAtIndex:index];
      }
    }
  } else { // default. invalid type or "system"
    [_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group];
    return;
  }

  // If a match isn't found, option will be nil and text tracks will be disabled
  [_player.currentItem selectMediaOption:mediaOption inMediaSelectionGroup:group];
}
1028
/// Remembers the requested audio-track criteria and applies them to the
/// item's audible media-selection group.
- (void)setSelectedAudioTrack:(NSDictionary *)selectedAudioTrack {
  _selectedAudioTrack = selectedAudioTrack;
  [self setMediaSelectionTrackForCharacteristic:AVMediaCharacteristicAudible
                                   withCriteria:selectedAudioTrack];
}
1034
/// Remembers the requested text-track criteria. Sideloaded tracks are
/// applied by enabling/disabling item tracks; embedded (e.g. HLS) tracks
/// go through the legible media-selection group.
- (void)setSelectedTextTrack:(NSDictionary *)selectedTextTrack {
  _selectedTextTrack = selectedTextTrack;
  if (_textTracks) { // sideloaded text tracks
    [self setSideloadedText];
    return;
  }
  // text tracks included in the HLS playlist
  [self setMediaSelectionTrackForCharacteristic:AVMediaCharacteristicLegible
                                   withCriteria:selectedTextTrack];
}
1044
/// Enables the sideloaded text track matching _selectedTextTrack and
/// disables every other legible track on the current item.
/// Criteria types mirror the streaming path: "disabled", "language",
/// "title", "index". When nothing matches and the user has captions
/// enabled in system accessibility settings, falls back to the track in
/// the system language, or the first text track.
- (void) setSideloadedText {
  NSString *type = _selectedTextTrack[@"type"];
  NSArray *textTracks = [self getTextTrackInfo];

  // The first few tracks will be audio & video track
  int firstTextIndex = 0;
  for (firstTextIndex = 0; firstTextIndex < _player.currentItem.tracks.count; ++firstTextIndex) {
    if ([_player.currentItem.tracks[firstTextIndex].assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible]) {
      break;
    }
  }

  int selectedTrackIndex = RCTVideoUnset;

  if ([type isEqualToString:@"disabled"]) {
    // Do nothing. We want to ensure option is nil
  } else if ([type isEqualToString:@"language"]) {
    NSString *selectedValue = _selectedTextTrack[@"value"];
    for (int i = 0; i < textTracks.count; ++i) {
      NSDictionary *currentTextTrack = [textTracks objectAtIndex:i];
      if ([selectedValue isEqualToString:currentTextTrack[@"language"]]) {
        selectedTrackIndex = i;
        break;
      }
    }
  } else if ([type isEqualToString:@"title"]) {
    NSString *selectedValue = _selectedTextTrack[@"value"];
    for (int i = 0; i < textTracks.count; ++i) {
      NSDictionary *currentTextTrack = [textTracks objectAtIndex:i];
      if ([selectedValue isEqualToString:currentTextTrack[@"title"]]) {
        selectedTrackIndex = i;
        break;
      }
    }
  } else if ([type isEqualToString:@"index"]) {
    if ([_selectedTextTrack[@"value"] isKindOfClass:[NSNumber class]]) {
      int index = [_selectedTextTrack[@"value"] intValue];
      if (textTracks.count > index) {
        selectedTrackIndex = index;
      }
    }
  }

  // in the situation that a selected text track is not available (eg. specifies a textTrack not available)
  if (![type isEqualToString:@"disabled"] && selectedTrackIndex == RCTVideoUnset) {
    // The "Copy" CF function returns a +1 reference; CFBridgingRelease
    // transfers ownership to ARC. The previous plain __bridge cast never
    // released the array, leaking it on every call.
    NSArray *captionSettings = (NSArray *)CFBridgingRelease(
        MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(kMACaptionAppearanceDomainUser));
    if ([captionSettings containsObject:AVMediaCharacteristicTranscribesSpokenDialogForAccessibility]) {
      selectedTrackIndex = 0; // If we can't find a match, use the first available track
      NSString *systemLanguage = [[NSLocale preferredLanguages] firstObject];
      for (int i = 0; i < textTracks.count; ++i) {
        NSDictionary *currentTextTrack = [textTracks objectAtIndex:i];
        if ([systemLanguage isEqualToString:currentTextTrack[@"language"]]) {
          selectedTrackIndex = i;
          break;
        }
      }
    }
  }

  // Enable exactly the chosen text track (offset past the audio/video
  // tracks); every other legible track is disabled.
  for (int i = firstTextIndex; i < _player.currentItem.tracks.count; ++i) {
    BOOL isEnabled = NO;
    if (selectedTrackIndex != RCTVideoUnset) {
      isEnabled = i == selectedTrackIndex + firstTextIndex;
    }
    [_player.currentItem.tracks[i] setEnabled:isEnabled];
  }
}
1113
/// Applies _selectedTextTrack to the item's legible (subtitle/caption)
/// media-selection group. The matching algorithm was a byte-for-byte copy
/// of setMediaSelectionTrackForCharacteristic:withCriteria:, so delegate
/// to it rather than keeping two copies of the logic in sync by hand.
-(void) setStreamingText {
  [self setMediaSelectionTrackForCharacteristic:AVMediaCharacteristicLegible
                                   withCriteria:_selectedTextTrack];
}
1156
/// Stores sideloaded text-track descriptors. If a selection arrived
/// before the tracks themselves, re-apply it now.
- (void)setTextTracks:(NSArray*) textTracks
{
  _textTracks = textTracks;

  // in case textTracks was set after selectedTextTrack
  if (_selectedTextTrack) {
    [self setSelectedTextTrack:_selectedTextTrack];
  }
}
1164
/// Describes the item's audible media-selection options as an array of
/// {index, title, language} dictionaries (empty strings when unknown).
- (NSArray *)getAudioTrackInfo
{
  AVMediaSelectionGroup *group = [_player.currentItem.asset
                                  mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
  NSMutableArray *tracks = [NSMutableArray array];
  for (int idx = 0; idx < group.options.count; ++idx) {
    AVMediaSelectionOption *option = group.options[idx];
    NSArray *metadataValues = [[option commonMetadata] valueForKey:@"value"];
    NSString *title = metadataValues.count > 0 ? [metadataValues objectAtIndex:0] : @"";
    NSString *language = [option extendedLanguageTag] ?: @"";
    [tracks addObject:@{@"index": @(idx),
                        @"title": title,
                        @"language": language}];
  }
  return tracks;
}
1187
/// Describes the available text tracks as {index, title, language}
/// dictionaries. Sideloaded tracks win; otherwise the legible options of
/// the streaming asset are enumerated.
- (NSArray *)getTextTrackInfo
{
  // if sideloaded, textTracks will already be set
  if (_textTracks) return _textTracks;

  // streaming video: extract the legible media-selection options
  AVMediaSelectionGroup *group = [_player.currentItem.asset
                                  mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
  NSMutableArray *tracks = [NSMutableArray array];
  for (int idx = 0; idx < group.options.count; ++idx) {
    AVMediaSelectionOption *option = group.options[idx];
    NSArray *metadataValues = [[option commonMetadata] valueForKey:@"value"];
    NSString *title = metadataValues.count > 0 ? [metadataValues objectAtIndex:0] : @"";
    NSString *language = [option extendedLanguageTag] ?: @"";
    [tracks addObject:@{@"index": @(idx),
                        @"title": title,
                        @"language": language}];
  }
  return tracks;
}
1214
/// YES while the fullscreen player view controller is presented.
- (BOOL)getFullscreen
{
  return _fullscreenPlayerPresented;
}
1219
/// Presents or dismisses the native fullscreen player.
/// Presenting: ensures a player view controller exists, locates the
/// nearest view controller to present from, and fires the
/// WillPresent/DidPresent JS events around the modal presentation.
/// Dismissing: routes through the RCTVideoPlayerViewControllerDelegate
/// callbacks so internal state and JS events stay consistent.
- (void)setFullscreen:(BOOL) fullscreen {
  if( fullscreen && !_fullscreenPlayerPresented && _player )
  {
    // Ensure player view controller is not null
    if( !_playerViewController )
    {
      [self usePlayerViewController];
    }
    // Set presentation style to fullscreen
    [_playerViewController setModalPresentationStyle:UIModalPresentationFullScreen];

    // Find the nearest view controller
    UIViewController *viewController = [self firstAvailableUIViewController];
    if( !viewController )
    {
      // Fall back to the key window's root view controller.
      // NOTE(review): UIApplication.keyWindow is deprecated on iOS 13+
      // multi-scene apps — confirm deployment target before relying on it.
      UIWindow *keyWindow = [[UIApplication sharedApplication] keyWindow];
      viewController = keyWindow.rootViewController;
      if( viewController.childViewControllers.count > 0 )
      {
        viewController = viewController.childViewControllers.lastObject;
      }
    }
    if( viewController )
    {
      _presentingViewController = viewController;
      if(self.onVideoFullscreenPlayerWillPresent) {
        self.onVideoFullscreenPlayerWillPresent(@{@"target": self.reactTag});
      }
      [viewController presentViewController:_playerViewController animated:true completion:^{
        // Playback controls are always shown while fullscreen.
        _playerViewController.showsPlaybackControls = YES;
        _fullscreenPlayerPresented = fullscreen;
        _playerViewController.autorotate = _fullscreenAutorotate;
        if(self.onVideoFullscreenPlayerDidPresent) {
          self.onVideoFullscreenPlayerDidPresent(@{@"target": self.reactTag});
        }
      }];
    }
  }
  else if ( !fullscreen && _fullscreenPlayerPresented )
  {
    [self videoPlayerViewControllerWillDismiss:_playerViewController];
    [_presentingViewController dismissViewControllerAnimated:true completion:^{
      [self videoPlayerViewControllerDidDismiss:_playerViewController];
    }];
  }
}
1266
/// Updates the autorotate preference, applying it live when the
/// fullscreen player is currently on screen.
- (void)setFullscreenAutorotate:(BOOL)autorotate {
  _fullscreenAutorotate = autorotate;
  if (!_fullscreenPlayerPresented) {
    return;
  }
  _playerViewController.autorotate = autorotate;
}
1273
/// Updates the preferred fullscreen orientation, applying it live when
/// the fullscreen player is currently on screen.
- (void)setFullscreenOrientation:(NSString *)orientation {
  _fullscreenOrientation = orientation;
  if (!_fullscreenPlayerPresented) {
    return;
  }
  _playerViewController.preferredOrientation = orientation;
}
1280
/// Switches rendering to an AVPlayerViewController (native controls).
/// Creates the controller lazily, applies the resize mode before the view
/// is attached, embeds it as a child view controller when controls are on,
/// and registers KVO for readyForDisplay and the content overlay's frame.
- (void)usePlayerViewController
{
  if( _player )
  {
    if (!_playerViewController) {
      _playerViewController = [self createPlayerViewController:_player withPlayerItem:_playerItem];
    }
    // to prevent video from being animated when resizeMode is 'cover'
    // resize mode must be set before subview is added
    [self setResizeMode:_resizeMode];

    if (_controls) {
      UIViewController *viewController = [self reactViewController];
      [viewController addChildViewController:_playerViewController];
      [self addSubview:_playerViewController.view];
    }

    // NOTE(review): these observers are registered on every call, even when
    // the controller already existed; a repeated invocation would add
    // duplicate KVO registrations that teardown removes only once —
    // confirm call sites before invoking this more than once per controller.
    [_playerViewController addObserver:self forKeyPath:readyForDisplayKeyPath options:NSKeyValueObservingOptionNew context:nil];

    [_playerViewController.contentOverlayView addObserver:self forKeyPath:@"frame" options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld context:NULL];
  }
}
/// Lazily creates the AVPlayerLayer for the current player and attaches
/// it to this view's layer. No-op without a player or when a layer
/// already exists.
-(void)touchPlayerLayer{
  if (!_player) {
    return;
  }
  if (_playerLayer) {
    return;
  }

  _playerLayer = [AVPlayerLayer playerLayerWithPlayer:_player];
  _playerLayer.frame = self.bounds;
  _playerLayer.needsDisplayOnBoundsChange = YES;

  [self.layer addSublayer:_playerLayer];
  self.layer.needsDisplayOnBoundsChange = YES;
}
/// Switches rendering to a raw AVPlayerLayer (no native controls).
/// Creates the layer if needed, applies the resize mode before attaching
/// (prevents the 'cover' animation), and registers the readyForDisplay
/// KVO observer exactly once.
- (void)usePlayerLayer
{
  if( _player )
  {
    [self touchPlayerLayer];

    // to prevent video from being animated when resizeMode is 'cover'
    // resize mode must be set before layer is added
    [self setResizeMode:_resizeMode];
    // Guard against double registration: previously a second call added a
    // second KVO observer while _playerLayerObserverSet stayed YES, so
    // removePlayerLayer removed only one and left a dangling registration.
    if (!_playerLayerObserverSet) {
      [_playerLayer addObserver:self forKeyPath:readyForDisplayKeyPath options:NSKeyValueObservingOptionNew context:nil];
      _playerLayerObserverSet = YES;
    }

    // Re-adding the already-attached layer moves it to the front of
    // self.layer's sublayers; kept for parity with prior behavior.
    [self.layer addSublayer:_playerLayer];
    self.layer.needsDisplayOnBoundsChange = YES;
    #if TARGET_OS_IOS
    [self setupPipController];
    #endif
  }
}
1333
/// Toggles between native controls (AVPlayerViewController) and a bare
/// AVPlayerLayer. Also runs on the first call (neither surface exists yet)
/// so the initial rendering surface gets created.
- (void)setControls:(BOOL)controls
{
  if( _controls != controls || (!_playerLayer && !_playerViewController) )
  {
    _controls = controls;
    if( _controls )
    {
      [self removePlayerLayer];
      [self usePlayerViewController];
    }
    else
    {
      // Unregister the KVO observers usePlayerViewController added before
      // discarding the controller; previously the controller was dropped
      // with observers still attached (they were only removed in
      // removeFromSuperview, which then no-ops on the nil ivar).
      // Messaging nil is a safe no-op when no controller exists.
      [_playerViewController.contentOverlayView removeObserver:self forKeyPath:@"frame"];
      [_playerViewController removeObserver:self forKeyPath:readyForDisplayKeyPath];
      [_playerViewController.view removeFromSuperview];
      _playerViewController = nil;
      [self usePlayerLayer];
    }
  }
}
1352
/// Changes how often videoProgress events fire; restarts the periodic
/// time observer only when one is currently installed.
- (void)setProgressUpdateInterval:(float)progressUpdateInterval
{
  _progressUpdateInterval = progressUpdateInterval;

  if (!_timeObserver) {
    return;
  }
  [self removePlayerTimeObserver];
  [self addPlayerTimeObserver];
}
1362
/// Detaches and releases the player layer, first unregistering its
/// readyForDisplay observer if one was installed.
- (void)removePlayerLayer
{
  if (_playerLayerObserverSet) {
    [_playerLayer removeObserver:self forKeyPath:readyForDisplayKeyPath];
    _playerLayerObserverSet = NO;
  }
  [_playerLayer removeFromSuperlayer];
  _playerLayer = nil;
}
1372
1373#pragma mark - RCTVideoPlayerViewControllerDelegate
1374
/// Notifies JS that our presented fullscreen player is about to dismiss.
- (void)videoPlayerViewControllerWillDismiss:(AVPlayerViewController *)playerViewController
{
  BOOL isOurPresentedController =
      (_playerViewController == playerViewController) && _fullscreenPlayerPresented;
  if (isOurPresentedController && self.onVideoFullscreenPlayerWillDismiss)
  {
    self.onVideoFullscreenPlayerWillDismiss(@{@"target": self.reactTag});
  }
}
1382
/// Clears fullscreen presentation state once dismissal completes, then
/// re-applies all playback modifiers to the inline player and tells JS.
- (void)videoPlayerViewControllerDidDismiss:(AVPlayerViewController *)playerViewController
{
  if (_playerViewController != playerViewController || !_fullscreenPlayerPresented) {
    return;
  }
  _fullscreenPlayerPresented = false;
  _presentingViewController = nil;
  _playerViewController = nil;
  [self applyModifiers];
  if (self.onVideoFullscreenPlayerDidDismiss) {
    self.onVideoFullscreenPlayerDidDismiss(@{@"target": self.reactTag});
  }
}
1396
/// Applies a named Core Image filter to the current item's video frames.
/// Skipped when filtering is disabled, for HLS sources (CI filters are
/// not applied to streamed playlists here), or before an asset exists.
- (void)setFilter:(NSString *)filterName {
  _filterName = filterName;

  if (!_filterEnabled) {
    return;
  }

  // BUGFIX: the original messaged rangeOfString: on a possibly-nil uri;
  // messaging nil returns a zeroed NSRange whose location (0) is not
  // NSNotFound, so a missing uri was silently treated as HLS and the
  // filter never applied. Check for nil explicitly.
  NSString *uri = [_source objectForKey:@"uri"];
  if (uri != nil && [uri rangeOfString:@"m3u8"].location != NSNotFound) {
    return; // filters don't work for HLS... return
  }
  if (!_playerItem.asset) {
    return;
  }

  // An unknown filter name yields a nil CIFilter; frames pass through.
  CIFilter *filter = [CIFilter filterWithName:filterName];
  _playerItem.videoComposition = [AVVideoComposition
                                  videoCompositionWithAsset:_playerItem.asset
                                  applyingCIFiltersWithHandler:^(AVAsynchronousCIImageFilteringRequest *_Nonnull request) {
    if (filter == nil) {
      [request finishWithImage:request.sourceImage context:nil];
    } else {
      CIImage *image = request.sourceImage.imageByClampingToExtent;
      [filter setValue:image forKey:kCIInputImageKey];
      CIImage *output = [filter.outputImage imageByCroppingToRect:request.sourceImage.extent];
      [request finishWithImage:output context:nil];
    }
  }];
}
1422
/// Opt-in switch for Core Image filtering; consulted by setFilter:.
- (void)setFilterEnabled:(BOOL)filterEnabled {
  _filterEnabled = filterEnabled;
}
1426
1427#pragma mark - React View Management
1428
/// React wants to insert a child view. Children are only supported when
/// native controls are active (they live in the AVPlayerViewController's
/// contentOverlayView), so enable controls if nothing is configured yet.
- (void)insertReactSubview:(UIView *)view atIndex:(NSInteger)atIndex
{
  // We are early in the game and somebody wants to set a subview.
  // That can only be in the context of playerViewController.
  BOOL nothingConfiguredYet = !_controls && !_playerLayer && !_playerViewController;
  if (nothingConfiguredYet) {
    [self setControls:true];
  }

  if (!_controls) {
    RCTLogError(@"video cannot have any subviews");
    return;
  }

  view.frame = self.bounds;
  [_playerViewController.contentOverlayView insertSubview:view atIndex:atIndex];
}
1449
/// Removes a React child; only legal while native controls host children.
- (void)removeReactSubview:(UIView *)subview
{
  if (!_controls) {
    RCTLogError(@"video cannot have any subviews");
    return;
  }
  [subview removeFromSuperview];
}
1462
/// Keeps the active rendering surface (controller view or player layer)
/// sized to our bounds. Layer resizing is wrapped in a zero-duration
/// CATransaction so the frame change does not implicitly animate.
- (void)layoutSubviews
{
  [super layoutSubviews];

  if (_controls) {
    _playerViewController.view.frame = self.bounds;

    // also adjust all subviews of contentOverlayView
    for (UIView *overlaySubview in _playerViewController.contentOverlayView.subviews) {
      overlaySubview.frame = self.bounds;
    }
    return;
  }

  [CATransaction begin];
  [CATransaction setAnimationDuration:0];
  _playerLayer.frame = self.bounds;
  [CATransaction commit];
}
1483
1484#pragma mark - Lifecycle
1485
/// Full teardown when React removes this view: stop playback, unregister
/// every KVO/notification observer this class registered, and release the
/// player, layer and view controller. Observers must come off before the
/// observed objects are released.
- (void)removeFromSuperview
{
  [_player pause];
  if (_playbackRateObserverRegistered) {
    [_player removeObserver:self forKeyPath:playbackRate context:nil];
    _playbackRateObserverRegistered = NO;
  }
  if (_isExternalPlaybackActiveObserverRegistered) {
    [_player removeObserver:self forKeyPath:externalPlaybackActive context:nil];
    _isExternalPlaybackActiveObserverRegistered = NO;
  }
  _player = nil;

  [self removePlayerLayer];

  // Safe when _playerViewController is nil: messaging nil is a no-op.
  [_playerViewController.contentOverlayView removeObserver:self forKeyPath:@"frame"];
  [_playerViewController removeObserver:self forKeyPath:readyForDisplayKeyPath];
  [_playerViewController.view removeFromSuperview];
  _playerViewController.rctDelegate = nil;
  _playerViewController.player = nil;
  _playerViewController = nil;

  [self removePlayerTimeObserver];
  [self removePlayerItemObservers];

  _eventDispatcher = nil;
  [[NSNotificationCenter defaultCenter] removeObserver:self];

  [super removeFromSuperview];
}
1516
1517#pragma mark - Export
1518
/// Exports the currently loaded asset (with any applied videoComposition)
/// to a uniquely named MP4 under the caches directory and resolves with
/// its file URL. Rejects when no asset is loaded, when the export session
/// cannot be created, or when the export fails / is cancelled.
- (void)save:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject {

  AVAsset *asset = _playerItem.asset;

  if (asset == nil) {
    reject(@"ERROR_ASSET_NIL", @"Asset is nil", nil);
    return;
  }

  AVAssetExportSession *exportSession = [AVAssetExportSession
                                         exportSessionWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
  if (exportSession == nil) {
    reject(@"ERROR_COULD_NOT_CREATE_EXPORT_SESSION", @"Could not create export session", nil);
    return;
  }

  // (Removed a dead NSSearchPathForDirectoriesInDomains local — the
  // destination comes from cacheDirectoryPath/generatePathInDirectory.)
  NSString *path = [self generatePathInDirectory:[[self cacheDirectoryPath] stringByAppendingPathComponent:@"Videos"]
                                   withExtension:@".mp4"];
  NSURL *url = [NSURL fileURLWithPath:path];
  exportSession.outputFileType = AVFileTypeMPEG4;
  exportSession.outputURL = url;
  exportSession.videoComposition = _playerItem.videoComposition;
  exportSession.shouldOptimizeForNetworkUse = true;

  [exportSession exportAsynchronouslyWithCompletionHandler:^{
    switch ([exportSession status]) {
      case AVAssetExportSessionStatusFailed:
        reject(@"ERROR_COULD_NOT_EXPORT_VIDEO", @"Could not export video", exportSession.error);
        break;
      case AVAssetExportSessionStatusCancelled:
        reject(@"ERROR_EXPORT_SESSION_CANCELLED", @"Export session was cancelled", exportSession.error);
        break;
      default:
        // Completed (and any other terminal status) resolves with the URI.
        resolve(@{@"uri": url.absoluteString});
        break;
    }
  }];
}
1566
/// Ensures `path` exists as a directory (creating intermediates as
/// needed). Returns NO when directory creation fails.
- (BOOL)ensureDirExistsWithPath:(NSString *)path {
  BOOL isDir = NO;
  BOOL exists = [[NSFileManager defaultManager] fileExistsAtPath:path isDirectory:&isDir];
  if (exists && isDir) {
    return YES;
  }
  // Check the method's BOOL result rather than the error out-parameter:
  // Cocoa only guarantees *error is meaningful when the call reports
  // failure (the previous `if (error)` test inspected an unreliable value).
  NSError *error = nil;
  if (![[NSFileManager defaultManager] createDirectoryAtPath:path
                                 withIntermediateDirectories:YES
                                                  attributes:nil
                                                       error:&error]) {
    return NO;
  }
  return YES;
}
1579
/// Returns a unique file path inside `directory` (created on demand).
/// The file name is a fresh UUID plus `extension` (which includes the dot).
- (NSString *)generatePathInDirectory:(NSString *)directory withExtension:(NSString *)extension {
  [self ensureDirExistsWithPath:directory];
  NSString *uniqueName = [[[NSUUID UUID] UUIDString] stringByAppendingString:extension];
  return [directory stringByAppendingPathComponent:uniqueName];
}
1585
/// Path of the user-domain Caches directory.
- (NSString *)cacheDirectoryPath {
  NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
  return [paths objectAtIndex:0];
}
1590
1591#pragma mark - Picture in Picture
1592
1593#if TARGET_OS_IOS
/// AVPictureInPictureControllerDelegate: PiP ended — report to JS.
- (void)pictureInPictureControllerDidStopPictureInPicture:(AVPictureInPictureController *)pictureInPictureController {
  if (!self.onPictureInPictureStatusChanged) {
    return;
  }
  self.onPictureInPictureStatusChanged(@{@"isActive": @(NO)});
}
1601
/// AVPictureInPictureControllerDelegate: PiP began — report to JS.
- (void)pictureInPictureControllerDidStartPictureInPicture:(AVPictureInPictureController *)pictureInPictureController {
  if (!self.onPictureInPictureStatusChanged) {
    return;
  }
  self.onPictureInPictureStatusChanged(@{@"isActive": @(YES)});
}
1609
// AVPictureInPictureControllerDelegate: intentionally empty — JS is
// notified in the Did-stop callback instead.
- (void)pictureInPictureControllerWillStopPictureInPicture:(AVPictureInPictureController *)pictureInPictureController {

}
1613
// AVPictureInPictureControllerDelegate: intentionally empty — JS is
// notified in the Did-start callback instead.
- (void)pictureInPictureControllerWillStartPictureInPicture:(AVPictureInPictureController *)pictureInPictureController {

}
1617
// AVPictureInPictureControllerDelegate: start failures are currently
// swallowed. NOTE(review): consider surfacing the error to JS.
- (void)pictureInPictureController:(AVPictureInPictureController *)pictureInPictureController failedToStartPictureInPictureWithError:(NSError *)error {

}
1621
/// Called when PiP stops and UIKit wants the app UI restored before
/// completing. The handler is stashed and forwarded to JS; JS must call
/// setRestoreUserInterfaceForPIPStopCompletionHandler: to complete it.
- (void)pictureInPictureController:(AVPictureInPictureController *)pictureInPictureController restoreUserInterfaceForPictureInPictureStopWithCompletionHandler:(void (^)(BOOL))completionHandler {
  // A still-pending handler means JS never completed the previous cycle.
  NSAssert(_restoreUserInterfaceForPIPStopCompletionHandler == NULL, @"restoreUserInterfaceForPIPStopCompletionHandler was not called after picture in picture was exited.");
  if (self.onRestoreUserInterfaceForPictureInPictureStop) {
    self.onRestoreUserInterfaceForPictureInPictureStop(@{});
  }
  _restoreUserInterfaceForPIPStopCompletionHandler = completionHandler;
}
1629#endif
1630
1631@end
1632