diff --git a/Video.js b/Video.js index 93434b3e..daccb171 100644 --- a/Video.js +++ b/Video.js @@ -1,6 +1,6 @@ import React, {Component} from 'react'; import PropTypes from 'prop-types'; -import {StyleSheet, requireNativeComponent, NativeModules, View, ViewPropTypes, Image, Platform} from 'react-native'; +import {StyleSheet, requireNativeComponent, NativeModules, View, ViewPropTypes, Image, Platform, findNodeHandle} from 'react-native'; import resolveAssetSource from 'react-native/Libraries/Image/resolveAssetSource'; import TextTrackType from './TextTrackType'; import VideoResizeMode from './VideoResizeMode.js'; @@ -71,6 +71,10 @@ export default class Video extends Component { this.setNativeProps({ fullscreen: false }); }; + saveAsync = async (options?) => { + return await NativeModules.VideoManager.save(options, findNodeHandle(this._root)); + } + _assignRoot = (component) => { this._root = component; }; diff --git a/ios/Video/RCTVideo.h b/ios/Video/RCTVideo.h index e43fbe50..eee5bca2 100644 --- a/ios/Video/RCTVideo.h +++ b/ios/Video/RCTVideo.h @@ -4,6 +4,7 @@ #import "RCTVideoPlayerViewController.h" #import "RCTVideoPlayerViewControllerDelegate.h" #import +#import #if __has_include() #import @@ -41,4 +42,6 @@ - (AVPlayerViewController*)createPlayerViewController:(AVPlayer*)player withPlayerItem:(AVPlayerItem*)playerItem; +- (void)save:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject; + @end diff --git a/ios/Video/RCTVideo.m b/ios/Video/RCTVideo.m index 51ef8949..537db77f 100644 --- a/ios/Video/RCTVideo.m +++ b/ios/Video/RCTVideo.m @@ -13,129 +13,127 @@ static NSString *const readyForDisplayKeyPath = @"readyForDisplay"; static NSString *const playbackRate = @"rate"; static NSString *const timedMetadata = @"timedMetadata"; static NSString *const externalPlaybackActive = @"externalPlaybackActive"; -static NSDictionary* filters = nil; +static NSDictionary *filters = nil; static int const RCTVideoUnset = -1; #ifdef DEBUG - #define DebugLog(...) NSLog(__VA_ARGS__) +#define DebugLog(...) NSLog(__VA_ARGS__) #else - #define DebugLog(...) (void)0 +#define DebugLog(...) 
(void)0 #endif -@implementation RCTVideo -{ - AVPlayer *_player; - AVPlayerItem *_playerItem; - BOOL _playerItemObserversSet; - BOOL _playerBufferEmpty; - AVPlayerLayer *_playerLayer; - BOOL _playerLayerObserverSet; - RCTVideoPlayerViewController *_playerViewController; - NSURL *_videoURL; +@implementation RCTVideo { + AVPlayer *_player; + AVPlayerItem *_playerItem; + BOOL _playerItemObserversSet; + BOOL _playerBufferEmpty; + AVPlayerLayer *_playerLayer; + BOOL _playerLayerObserverSet; + RCTVideoPlayerViewController *_playerViewController; + NSURL *_videoURL; - /* Required to publish events */ - RCTEventDispatcher *_eventDispatcher; - BOOL _playbackRateObserverRegistered; - BOOL _isExternalPlaybackActiveObserverRegistered; - BOOL _videoLoadStarted; + /* Required to publish events */ + RCTEventDispatcher *_eventDispatcher; + BOOL _playbackRateObserverRegistered; + BOOL _isExternalPlaybackActiveObserverRegistered; + BOOL _videoLoadStarted; - bool _pendingSeek; - float _pendingSeekTime; - float _lastSeekTime; + bool _pendingSeek; + float _pendingSeekTime; + float _lastSeekTime; - /* For sending videoProgress events */ - Float64 _progressUpdateInterval; - BOOL _controls; - id _timeObserver; + /* For sending videoProgress events */ + Float64 _progressUpdateInterval; + BOOL _controls; + id _timeObserver; - /* Keep track of any modifiers, need to be applied after each play */ - float _volume; - float _rate; - BOOL _muted; - BOOL _paused; - BOOL _repeat; - BOOL _allowsExternalPlayback; - NSArray * _textTracks; - NSDictionary * _selectedTextTrack; - NSDictionary * _selectedAudioTrack; - BOOL _playbackStalled; - BOOL _playInBackground; - BOOL _playWhenInactive; - NSString * _ignoreSilentSwitch; - NSString * _resizeMode; - NSString * _filter; - BOOL _fullscreen; - NSString * _fullscreenOrientation; - BOOL _fullscreenPlayerPresented; - UIViewController * _presentingViewController; + /* Keep track of any modifiers, need to be applied after each play */ + float _volume; + float _rate; + BOOL _muted; + BOOL _paused; + BOOL _repeat; + BOOL _allowsExternalPlayback; + NSArray *_textTracks; + NSDictionary *_selectedTextTrack; + NSDictionary *_selectedAudioTrack; + BOOL _playbackStalled; + BOOL _playInBackground; + BOOL _playWhenInactive; + NSString *_ignoreSilentSwitch; + NSString *_resizeMode; + NSString *_filter; + BOOL _fullscreen; + NSString *_fullscreenOrientation; + BOOL _fullscreenPlayerPresented; + UIViewController *_presentingViewController; #if __has_include() - RCTVideoCache * _videoCache; + RCTVideoCache * _videoCache; #endif } -- (instancetype)initWithEventDispatcher:(RCTEventDispatcher *)eventDispatcher -{ - if ((self = [super init])) { +- (instancetype)initWithEventDispatcher:(RCTEventDispatcher *)eventDispatcher { + if ((self = [super init])) { - filters = @{ - @"Normal": @"", - @"Country": @"CISepiaTone", - @"Winter": @"CIPhotoEffectProcess", - @"Black N White": @"CIPhotoEffectNoir", - @"Sunrise": @"CIPhotoEffectTransfer", - @"Artistic": @"CIColorPosterize", - }; + filters = @{ + @"Normal": @"", + @"Country": @"CISepiaTone", + @"Winter": @"CIPhotoEffectProcess", + @"Black N White": @"CIPhotoEffectNoir", + @"Sunrise": @"CIPhotoEffectTransfer", + @"Artistic": @"CIColorPosterize", + }; - _eventDispatcher = eventDispatcher; + _eventDispatcher = eventDispatcher; - _playbackRateObserverRegistered = NO; - _isExternalPlaybackActiveObserverRegistered = NO; - _playbackStalled = NO; - _rate = 1.0; - _volume = 1.0; - _resizeMode = @"AVLayerVideoGravityResizeAspectFill"; - _fullscreenOrientation = 
@"all"; - _pendingSeek = false; - _pendingSeekTime = 0.0f; - _lastSeekTime = 0.0f; - _progressUpdateInterval = 250; - _controls = NO; - _playerBufferEmpty = YES; - _playInBackground = false; - _allowsExternalPlayback = YES; - _playWhenInactive = false; - _ignoreSilentSwitch = @"inherit"; // inherit, ignore, obey + _playbackRateObserverRegistered = NO; + _isExternalPlaybackActiveObserverRegistered = NO; + _playbackStalled = NO; + _rate = 1.0; + _volume = 1.0; + _resizeMode = @"AVLayerVideoGravityResizeAspectFill"; + _fullscreenOrientation = @"all"; + _pendingSeek = false; + _pendingSeekTime = 0.0f; + _lastSeekTime = 0.0f; + _progressUpdateInterval = 250; + _controls = NO; + _playerBufferEmpty = YES; + _playInBackground = false; + _allowsExternalPlayback = YES; + _playWhenInactive = false; + _ignoreSilentSwitch = @"inherit"; // inherit, ignore, obey #if __has_include() - _videoCache = [RCTVideoCache sharedInstance]; + _videoCache = [RCTVideoCache sharedInstance]; #endif - [[NSNotificationCenter defaultCenter] addObserver:self - selector:@selector(applicationWillResignActive:) - name:UIApplicationWillResignActiveNotification - object:nil]; + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(applicationWillResignActive:) + name:UIApplicationWillResignActiveNotification + object:nil]; - [[NSNotificationCenter defaultCenter] addObserver:self - selector:@selector(applicationDidEnterBackground:) - name:UIApplicationDidEnterBackgroundNotification - object:nil]; + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(applicationDidEnterBackground:) + name:UIApplicationDidEnterBackgroundNotification + object:nil]; - [[NSNotificationCenter defaultCenter] addObserver:self - selector:@selector(applicationWillEnterForeground:) - name:UIApplicationWillEnterForegroundNotification - object:nil]; + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(applicationWillEnterForeground:) + name:UIApplicationWillEnterForegroundNotification + object:nil]; - [[NSNotificationCenter defaultCenter] addObserver:self - selector:@selector(audioRouteChanged:) - name:AVAudioSessionRouteChangeNotification - object:nil]; - } + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(audioRouteChanged:) + name:AVAudioSessionRouteChangeNotification + object:nil]; + } - return self; + return self; } -- (RCTVideoPlayerViewController*)createPlayerViewController:(AVPlayer*)player - withPlayerItem:(AVPlayerItem*)playerItem { - RCTVideoPlayerViewController* viewController = [[RCTVideoPlayerViewController alloc] init]; +- (RCTVideoPlayerViewController *)createPlayerViewController:(AVPlayer *)player + withPlayerItem:(AVPlayerItem *)playerItem { + RCTVideoPlayerViewController *viewController = [[RCTVideoPlayerViewController alloc] init]; viewController.showsPlaybackControls = YES; viewController.rctDelegate = self; viewController.preferredOrientation = _fullscreenOrientation; @@ -150,127 +148,116 @@ static int const RCTVideoUnset = -1; ** Get the duration for a AVPlayerItem. 
** ------------------------------------------------------- */ -- (CMTime)playerItemDuration -{ - AVPlayerItem *playerItem = [_player currentItem]; - if (playerItem.status == AVPlayerItemStatusReadyToPlay) - { - return([playerItem duration]); - } +- (CMTime)playerItemDuration { + AVPlayerItem *playerItem = [_player currentItem]; + if (playerItem.status == AVPlayerItemStatusReadyToPlay) { + return ([playerItem duration]); + } - return(kCMTimeInvalid); + return (kCMTimeInvalid); } -- (CMTimeRange)playerItemSeekableTimeRange -{ - AVPlayerItem *playerItem = [_player currentItem]; - if (playerItem.status == AVPlayerItemStatusReadyToPlay) - { - return [playerItem seekableTimeRanges].firstObject.CMTimeRangeValue; - } +- (CMTimeRange)playerItemSeekableTimeRange { + AVPlayerItem *playerItem = [_player currentItem]; + if (playerItem.status == AVPlayerItemStatusReadyToPlay) { + return [playerItem seekableTimeRanges].firstObject.CMTimeRangeValue; + } - return (kCMTimeRangeZero); + return (kCMTimeRangeZero); } --(void)addPlayerTimeObserver -{ - const Float64 progressUpdateIntervalMS = _progressUpdateInterval / 1000; - // @see endScrubbing in AVPlayerDemoPlaybackViewController.m - // of https://developer.apple.com/library/ios/samplecode/AVPlayerDemo/Introduction/Intro.html - __weak RCTVideo *weakSelf = self; - _timeObserver = [_player addPeriodicTimeObserverForInterval:CMTimeMakeWithSeconds(progressUpdateIntervalMS, NSEC_PER_SEC) - queue:NULL - usingBlock:^(CMTime time) { [weakSelf sendProgressUpdate]; } - ]; +- (void)addPlayerTimeObserver { + const Float64 progressUpdateIntervalMS = _progressUpdateInterval / 1000; + // @see endScrubbing in AVPlayerDemoPlaybackViewController.m + // of https://developer.apple.com/library/ios/samplecode/AVPlayerDemo/Introduction/Intro.html + __weak RCTVideo *weakSelf = self; + _timeObserver = [_player addPeriodicTimeObserverForInterval:CMTimeMakeWithSeconds(progressUpdateIntervalMS, NSEC_PER_SEC) + queue:NULL + usingBlock:^(CMTime time) { + [weakSelf sendProgressUpdate]; + } + ]; } /* Cancels the previously registered time observer. */ --(void)removePlayerTimeObserver -{ - if (_timeObserver) - { - [_player removeTimeObserver:_timeObserver]; - _timeObserver = nil; - } -} - -#pragma mark - Progress - -- (void)dealloc -{ - [[NSNotificationCenter defaultCenter] removeObserver:self]; - [self removePlayerLayer]; - [self removePlayerItemObservers]; - [_player removeObserver:self forKeyPath:playbackRate context:nil]; -} - -#pragma mark - App lifecycle handlers - -- (void)applicationWillResignActive:(NSNotification *)notification -{ - if (_playInBackground || _playWhenInactive || _paused) return; - - [_player pause]; - [_player setRate:0.0]; -} - -- (void)applicationDidEnterBackground:(NSNotification *)notification -{ - if (_playInBackground) { - // Needed to play sound in background. 
See https://developer.apple.com/library/ios/qa/qa1668/_index.html - [_playerLayer setPlayer:nil]; - } -} - -- (void)applicationWillEnterForeground:(NSNotification *)notification -{ - [self applyModifiers]; - if (_playInBackground) { - [_playerLayer setPlayer:_player]; - } -} - -#pragma mark - Audio events - -- (void)audioRouteChanged:(NSNotification *)notification -{ - NSNumber *reason = [[notification userInfo] objectForKey:AVAudioSessionRouteChangeReasonKey]; - NSNumber *previousRoute = [[notification userInfo] objectForKey:AVAudioSessionRouteChangePreviousRouteKey]; - if (reason.unsignedIntValue == AVAudioSessionRouteChangeReasonOldDeviceUnavailable) { - self.onVideoAudioBecomingNoisy(@{@"target": self.reactTag}); +- (void)removePlayerTimeObserver { + if (_timeObserver) { + [_player removeTimeObserver:_timeObserver]; + _timeObserver = nil; } } #pragma mark - Progress -- (void)sendProgressUpdate -{ - AVPlayerItem *video = [_player currentItem]; - if (video == nil || video.status != AVPlayerItemStatusReadyToPlay) { - return; - } +- (void)dealloc { + [[NSNotificationCenter defaultCenter] removeObserver:self]; + [self removePlayerLayer]; + [self removePlayerItemObservers]; + [_player removeObserver:self forKeyPath:playbackRate context:nil]; +} - CMTime playerDuration = [self playerItemDuration]; - if (CMTIME_IS_INVALID(playerDuration)) { - return; - } +#pragma mark - App lifecycle handlers - CMTime currentTime = _player.currentTime; - const Float64 duration = CMTimeGetSeconds(playerDuration); - const Float64 currentTimeSecs = CMTimeGetSeconds(currentTime); +- (void)applicationWillResignActive:(NSNotification *)notification { + if (_playInBackground || _playWhenInactive || _paused) return; - [[NSNotificationCenter defaultCenter] postNotificationName:@"RCTVideo_progress" object:nil userInfo:@{@"progress": [NSNumber numberWithDouble: currentTimeSecs / duration]}]; + [_player pause]; + [_player setRate:0.0]; +} - if( currentTimeSecs >= 0 && self.onVideoProgress) { - self.onVideoProgress(@{ - @"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(currentTime)], - @"playableDuration": [self calculatePlayableDuration], - @"atValue": [NSNumber numberWithLongLong:currentTime.value], - @"atTimescale": [NSNumber numberWithInt:currentTime.timescale], - @"target": self.reactTag, - @"seekableDuration": [self calculateSeekableDuration], - }); - } +- (void)applicationDidEnterBackground:(NSNotification *)notification { + if (_playInBackground) { + // Needed to play sound in background. 
See https://developer.apple.com/library/ios/qa/qa1668/_index.html + [_playerLayer setPlayer:nil]; + } +} + +- (void)applicationWillEnterForeground:(NSNotification *)notification { + [self applyModifiers]; + if (_playInBackground) { + [_playerLayer setPlayer:_player]; + } +} + +#pragma mark - Audio events + +- (void)audioRouteChanged:(NSNotification *)notification { + NSNumber *reason = [[notification userInfo] objectForKey:AVAudioSessionRouteChangeReasonKey]; + NSNumber *previousRoute = [[notification userInfo] objectForKey:AVAudioSessionRouteChangePreviousRouteKey]; + if (reason.unsignedIntValue == AVAudioSessionRouteChangeReasonOldDeviceUnavailable) { + self.onVideoAudioBecomingNoisy(@{@"target": self.reactTag}); + } +} + +#pragma mark - Progress + +- (void)sendProgressUpdate { + AVPlayerItem *video = [_player currentItem]; + if (video == nil || video.status != AVPlayerItemStatusReadyToPlay) { + return; + } + + CMTime playerDuration = [self playerItemDuration]; + if (CMTIME_IS_INVALID(playerDuration)) { + return; + } + + CMTime currentTime = _player.currentTime; + const Float64 duration = CMTimeGetSeconds(playerDuration); + const Float64 currentTimeSecs = CMTimeGetSeconds(currentTime); + + [[NSNotificationCenter defaultCenter] postNotificationName:@"RCTVideo_progress" object:nil userInfo:@{@"progress": [NSNumber numberWithDouble:currentTimeSecs / duration]}]; + + if (currentTimeSecs >= 0 && self.onVideoProgress) { + self.onVideoProgress(@{ + @"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(currentTime)], + @"playableDuration": [self calculatePlayableDuration], + @"atValue": [NSNumber numberWithLongLong:currentTime.value], + @"atTimescale": [NSNumber numberWithInt:currentTime.timescale], + @"target": self.reactTag, + @"seekableDuration": [self calculateSeekableDuration], + }); + } } /*! @@ -278,232 +265,224 @@ static int const RCTVideoUnset = -1; * * \returns The playable duration of the current player item in seconds. 
*/ -- (NSNumber *)calculatePlayableDuration -{ - AVPlayerItem *video = _player.currentItem; - if (video.status == AVPlayerItemStatusReadyToPlay) { - __block CMTimeRange effectiveTimeRange; - [video.loadedTimeRanges enumerateObjectsUsingBlock:^(id obj, NSUInteger idx, BOOL *stop) { - CMTimeRange timeRange = [obj CMTimeRangeValue]; - if (CMTimeRangeContainsTime(timeRange, video.currentTime)) { - effectiveTimeRange = timeRange; - *stop = YES; - } - }]; - Float64 playableDuration = CMTimeGetSeconds(CMTimeRangeGetEnd(effectiveTimeRange)); - if (playableDuration > 0) { - return [NSNumber numberWithFloat:playableDuration]; +- (NSNumber *)calculatePlayableDuration { + AVPlayerItem *video = _player.currentItem; + if (video.status == AVPlayerItemStatusReadyToPlay) { + __block CMTimeRange effectiveTimeRange; + [video.loadedTimeRanges enumerateObjectsUsingBlock:^(id obj, NSUInteger idx, BOOL *stop) { + CMTimeRange timeRange = [obj CMTimeRangeValue]; + if (CMTimeRangeContainsTime(timeRange, video.currentTime)) { + effectiveTimeRange = timeRange; + *stop = YES; + } + }]; + Float64 playableDuration = CMTimeGetSeconds(CMTimeRangeGetEnd(effectiveTimeRange)); + if (playableDuration > 0) { + return [NSNumber numberWithFloat:playableDuration]; + } } - } - return [NSNumber numberWithInteger:0]; + return [NSNumber numberWithInteger:0]; } -- (NSNumber *)calculateSeekableDuration -{ - CMTimeRange timeRange = [self playerItemSeekableTimeRange]; - if (CMTIME_IS_NUMERIC(timeRange.duration)) - { - return [NSNumber numberWithFloat:CMTimeGetSeconds(timeRange.duration)]; - } - return [NSNumber numberWithInteger:0]; +- (NSNumber *)calculateSeekableDuration { + CMTimeRange timeRange = [self playerItemSeekableTimeRange]; + if (CMTIME_IS_NUMERIC(timeRange.duration)) { + return [NSNumber numberWithFloat:CMTimeGetSeconds(timeRange.duration)]; + } + return [NSNumber numberWithInteger:0]; } -- (void)addPlayerItemObservers -{ - [_playerItem addObserver:self forKeyPath:statusKeyPath options:0 context:nil]; - [_playerItem addObserver:self forKeyPath:playbackBufferEmptyKeyPath options:0 context:nil]; - [_playerItem addObserver:self forKeyPath:playbackLikelyToKeepUpKeyPath options:0 context:nil]; - [_playerItem addObserver:self forKeyPath:timedMetadata options:NSKeyValueObservingOptionNew context:nil]; - _playerItemObserversSet = YES; +- (void)addPlayerItemObservers { + [_playerItem addObserver:self forKeyPath:statusKeyPath options:0 context:nil]; + [_playerItem addObserver:self forKeyPath:playbackBufferEmptyKeyPath options:0 context:nil]; + [_playerItem addObserver:self forKeyPath:playbackLikelyToKeepUpKeyPath options:0 context:nil]; + [_playerItem addObserver:self forKeyPath:timedMetadata options:NSKeyValueObservingOptionNew context:nil]; + _playerItemObserversSet = YES; } /* Fixes https://github.com/brentvatne/react-native-video/issues/43 * Crashes caused when trying to remove the observer when there is no * observer set */ -- (void)removePlayerItemObservers -{ - if (_playerItemObserversSet) { - [_playerItem removeObserver:self forKeyPath:statusKeyPath]; - [_playerItem removeObserver:self forKeyPath:playbackBufferEmptyKeyPath]; - [_playerItem removeObserver:self forKeyPath:playbackLikelyToKeepUpKeyPath]; - [_playerItem removeObserver:self forKeyPath:timedMetadata]; - _playerItemObserversSet = NO; - } +- (void)removePlayerItemObservers { + if (_playerItemObserversSet) { + [_playerItem removeObserver:self forKeyPath:statusKeyPath]; + [_playerItem removeObserver:self forKeyPath:playbackBufferEmptyKeyPath]; + [_playerItem 
removeObserver:self forKeyPath:playbackLikelyToKeepUpKeyPath]; + [_playerItem removeObserver:self forKeyPath:timedMetadata]; + _playerItemObserversSet = NO; + } } #pragma mark - Player and source -- (void)setSrc:(NSDictionary *)source -{ - [self removePlayerLayer]; - [self removePlayerTimeObserver]; - [self removePlayerItemObservers]; +- (void)setSrc:(NSDictionary *)source { + [self removePlayerLayer]; + [self removePlayerTimeObserver]; + [self removePlayerItemObservers]; - dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t) 0), dispatch_get_main_queue(), ^{ + dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t) 0), dispatch_get_main_queue(), ^{ - // perform on next run loop, otherwise other passed react-props may not be set - [self playerItemForSource:source withCallback:^(AVPlayerItem * playerItem) { - _playerItem = playerItem; - [self addPlayerItemObservers]; + // perform on next run loop, otherwise other passed react-props may not be set + [self playerItemForSource:source withCallback:^(AVPlayerItem *playerItem) { + _playerItem = playerItem; + [self addPlayerItemObservers]; - [_player pause]; - [_playerViewController.view removeFromSuperview]; - _playerViewController = nil; + [_player pause]; + [_playerViewController.view removeFromSuperview]; + _playerViewController = nil; - if (_playbackRateObserverRegistered) { - [_player removeObserver:self forKeyPath:playbackRate context:nil]; - _playbackRateObserverRegistered = NO; - } - if (_isExternalPlaybackActiveObserverRegistered) { - [_player removeObserver:self forKeyPath:externalPlaybackActive context:nil]; - _isExternalPlaybackActiveObserverRegistered = NO; - } + if (_playbackRateObserverRegistered) { + [_player removeObserver:self forKeyPath:playbackRate context:nil]; + _playbackRateObserverRegistered = NO; + } + if (_isExternalPlaybackActiveObserverRegistered) { + [_player removeObserver:self forKeyPath:externalPlaybackActive context:nil]; + _isExternalPlaybackActiveObserverRegistered = NO; + } - _player = [AVPlayer playerWithPlayerItem:_playerItem]; - _player.actionAtItemEnd = AVPlayerActionAtItemEndNone; + _player = [AVPlayer playerWithPlayerItem:_playerItem]; + _player.actionAtItemEnd = AVPlayerActionAtItemEndNone; - [_player addObserver:self forKeyPath:playbackRate options:0 context:nil]; - _playbackRateObserverRegistered = YES; + [_player addObserver:self forKeyPath:playbackRate options:0 context:nil]; + _playbackRateObserverRegistered = YES; - [_player addObserver:self forKeyPath:externalPlaybackActive options:0 context:nil]; - _isExternalPlaybackActiveObserverRegistered = YES; + [_player addObserver:self forKeyPath:externalPlaybackActive options:0 context:nil]; + _isExternalPlaybackActiveObserverRegistered = YES; - [self addPlayerTimeObserver]; + [self addPlayerTimeObserver]; - //Perform on next run loop, otherwise onVideoLoadStart is nil - if (self.onVideoLoadStart) { - id uri = [source objectForKey:@"uri"]; - id type = [source objectForKey:@"type"]; - self.onVideoLoadStart(@{@"src": @{ - @"uri": uri ? uri : [NSNull null], - @"type": type ? type : [NSNull null], - @"isNetwork": [NSNumber numberWithBool:(bool)[source objectForKey:@"isNetwork"]]}, - @"target": self.reactTag - }); - } - }]; - }); - _videoLoadStarted = YES; + //Perform on next run loop, otherwise onVideoLoadStart is nil + if (self.onVideoLoadStart) { + id uri = [source objectForKey:@"uri"]; + id type = [source objectForKey:@"type"]; + self.onVideoLoadStart(@{@"src": @{ + @"uri": uri ? uri : [NSNull null], + @"type": type ? 
type : [NSNull null], + @"isNetwork": [NSNumber numberWithBool:(bool) [source objectForKey:@"isNetwork"]]}, + @"target": self.reactTag + }); + } + }]; + }); + _videoLoadStarted = YES; } -- (NSURL*) urlFilePath:(NSString*) filepath { - if ([filepath containsString:@"file://"]) { - return [NSURL URLWithString:filepath]; - } - - // if no file found, check if the file exists in the Document directory - NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); - NSString* relativeFilePath = [filepath lastPathComponent]; - // the file may be multiple levels below the documents directory - NSArray* fileComponents = [filepath componentsSeparatedByString:@"Documents/"]; - if (fileComponents.count > 1) { - relativeFilePath = [fileComponents objectAtIndex:1]; - } - - NSString *path = [paths.firstObject stringByAppendingPathComponent:relativeFilePath]; - if ([[NSFileManager defaultManager] fileExistsAtPath:path]) { - return [NSURL fileURLWithPath:path]; - } - return nil; -} - -- (void)playerItemPrepareText:(AVAsset *)asset assetOptions:(NSDictionary * __nullable)assetOptions withCallback:(void(^)(AVPlayerItem *))handler -{ - if (!_textTracks) { - handler([AVPlayerItem playerItemWithAsset:asset]); - return; - } - - // sideload text tracks - AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init]; - - AVAssetTrack *videoAsset = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject; - AVMutableCompositionTrack *videoCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; - [videoCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration) - ofTrack:videoAsset - atTime:kCMTimeZero - error:nil]; - - AVAssetTrack *audioAsset = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject; - AVMutableCompositionTrack *audioCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; - [audioCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration) - ofTrack:audioAsset - atTime:kCMTimeZero - error:nil]; - - NSMutableArray* validTextTracks = [NSMutableArray array]; - for (int i = 0; i < _textTracks.count; ++i) { - AVURLAsset *textURLAsset; - NSString *textUri = [_textTracks objectAtIndex:i][@"uri"]; - if ([[textUri lowercaseString] hasPrefix:@"http"]) { - textURLAsset = [AVURLAsset URLAssetWithURL:[NSURL URLWithString:textUri] options:assetOptions]; - } else { - textURLAsset = [AVURLAsset URLAssetWithURL:[self urlFilePath:textUri] options:nil]; +- (NSURL *)urlFilePath:(NSString *)filepath { + if ([filepath containsString:@"file://"]) { + return [NSURL URLWithString:filepath]; } - AVAssetTrack *textTrackAsset = [textURLAsset tracksWithMediaType:AVMediaTypeText].firstObject; - if (!textTrackAsset) continue; // fix when there's no textTrackAsset - [validTextTracks addObject:[_textTracks objectAtIndex:i]]; - AVMutableCompositionTrack *textCompTrack = [mixComposition - addMutableTrackWithMediaType:AVMediaTypeText - preferredTrackID:kCMPersistentTrackID_Invalid]; - [textCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration) - ofTrack:textTrackAsset - atTime:kCMTimeZero - error:nil]; - } - if (validTextTracks.count != _textTracks.count) { - [self setTextTracks:validTextTracks]; - } - handler([AVPlayerItem playerItemWithAsset:mixComposition]); + // if no file found, check if the file exists in the Document directory + NSArray *paths = 
NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); + NSString *relativeFilePath = [filepath lastPathComponent]; + // the file may be multiple levels below the documents directory + NSArray *fileComponents = [filepath componentsSeparatedByString:@"Documents/"]; + if (fileComponents.count > 1) { + relativeFilePath = [fileComponents objectAtIndex:1]; + } + + NSString *path = [paths.firstObject stringByAppendingPathComponent:relativeFilePath]; + if ([[NSFileManager defaultManager] fileExistsAtPath:path]) { + return [NSURL fileURLWithPath:path]; + } + return nil; } -- (void)playerItemForSource:(NSDictionary *)source withCallback:(void(^)(AVPlayerItem *))handler -{ - bool isNetwork = [RCTConvert BOOL:[source objectForKey:@"isNetwork"]]; - bool isAsset = [RCTConvert BOOL:[source objectForKey:@"isAsset"]]; - NSString *uri = [source objectForKey:@"uri"]; - NSString *type = [source objectForKey:@"type"]; +- (void)playerItemPrepareText:(AVAsset *)asset assetOptions:(NSDictionary *__nullable)assetOptions withCallback:(void (^)(AVPlayerItem *))handler { + if (!_textTracks) { + handler([AVPlayerItem playerItemWithAsset:asset]); + return; + } - NSURL *url = isNetwork || isAsset - ? [NSURL URLWithString:uri] - : [[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]]; - NSMutableDictionary *assetOptions = [[NSMutableDictionary alloc] init]; + // sideload text tracks + AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init]; - if (isNetwork) { - /* Per #1091, this is not a public API. - * We need to either get approval from Apple to use this or use a different approach. - NSDictionary *headers = [source objectForKey:@"requestHeaders"]; - if ([headers count] > 0) { - [assetOptions setObject:headers forKey:@"AVURLAssetHTTPHeaderFieldsKey"]; - } - */ - NSArray *cookies = [[NSHTTPCookieStorage sharedHTTPCookieStorage] cookies]; - [assetOptions setObject:cookies forKey:AVURLAssetHTTPCookiesKey]; + AVAssetTrack *videoAsset = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject; + AVMutableCompositionTrack *videoCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; + [videoCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration) + ofTrack:videoAsset + atTime:kCMTimeZero + error:nil]; + + AVAssetTrack *audioAsset = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject; + AVMutableCompositionTrack *audioCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; + [audioCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration) + ofTrack:audioAsset + atTime:kCMTimeZero + error:nil]; + + NSMutableArray *validTextTracks = [NSMutableArray array]; + for (int i = 0; i < _textTracks.count; ++i) { + AVURLAsset *textURLAsset; + NSString *textUri = [_textTracks objectAtIndex:i][@"uri"]; + if ([[textUri lowercaseString] hasPrefix:@"http"]) { + textURLAsset = [AVURLAsset URLAssetWithURL:[NSURL URLWithString:textUri] options:assetOptions]; + } else { + textURLAsset = [AVURLAsset URLAssetWithURL:[self urlFilePath:textUri] options:nil]; + } + AVAssetTrack *textTrackAsset = [textURLAsset tracksWithMediaType:AVMediaTypeText].firstObject; + if (!textTrackAsset) continue; // fix when there's no textTrackAsset + [validTextTracks addObject:[_textTracks objectAtIndex:i]]; + AVMutableCompositionTrack *textCompTrack = [mixComposition + 
addMutableTrackWithMediaType:AVMediaTypeText + preferredTrackID:kCMPersistentTrackID_Invalid]; + [textCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration) + ofTrack:textTrackAsset + atTime:kCMTimeZero + error:nil]; + } + if (validTextTracks.count != _textTracks.count) { + [self setTextTracks:validTextTracks]; + } + + handler([AVPlayerItem playerItemWithAsset:mixComposition]); +} + +- (void)playerItemForSource:(NSDictionary *)source withCallback:(void (^)(AVPlayerItem *))handler { + bool isNetwork = [RCTConvert BOOL:[source objectForKey:@"isNetwork"]]; + bool isAsset = [RCTConvert BOOL:[source objectForKey:@"isAsset"]]; + NSString *uri = [source objectForKey:@"uri"]; + NSString *type = [source objectForKey:@"type"]; + + NSURL *url = isNetwork || isAsset + ? [NSURL URLWithString:uri] + : [[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]]; + NSMutableDictionary *assetOptions = [[NSMutableDictionary alloc] init]; + + if (isNetwork) { + /* Per #1091, this is not a public API. + * We need to either get approval from Apple to use this or use a different approach. + NSDictionary *headers = [source objectForKey:@"requestHeaders"]; + if ([headers count] > 0) { + [assetOptions setObject:headers forKey:@"AVURLAssetHTTPHeaderFieldsKey"]; + } + */ + NSArray *cookies = [[NSHTTPCookieStorage sharedHTTPCookieStorage] cookies]; + [assetOptions setObject:cookies forKey:AVURLAssetHTTPCookiesKey]; #if __has_include() - if (!_textTracks) { - /* The DVURLAsset created by cache doesn't have a tracksWithMediaType property, so trying - * to bring in the text track code will crash. I suspect this is because the asset hasn't fully loaded. - * Until this is fixed, we need to bypass caching when text tracks are specified. - */ - DebugLog(@"Caching is not supported for uri '%@' because text tracks are not compatible with the cache. Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md", uri); - [self playerItemForSourceUsingCache:uri assetOptions:assetOptions withCallback:handler]; - return; - } + if (!_textTracks) { + /* The DVURLAsset created by cache doesn't have a tracksWithMediaType property, so trying + * to bring in the text track code will crash. I suspect this is because the asset hasn't fully loaded. + * Until this is fixed, we need to bypass caching when text tracks are specified. + */ + DebugLog(@"Caching is not supported for uri '%@' because text tracks are not compatible with the cache. 
Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md", uri); + [self playerItemForSourceUsingCache:uri assetOptions:assetOptions withCallback:handler]; + return; + } #endif - AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:assetOptions]; - [self playerItemPrepareText:asset assetOptions:assetOptions withCallback:handler]; - return; - } else if (isAsset) { - AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil]; - [self playerItemPrepareText:asset assetOptions:assetOptions withCallback:handler]; - return; - } + AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:assetOptions]; + [self playerItemPrepareText:asset assetOptions:assetOptions withCallback:handler]; + return; + } else if (isAsset) { + AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil]; + [self playerItemPrepareText:asset assetOptions:assetOptions withCallback:handler]; + return; + } - AVURLAsset *asset = [AVURLAsset URLAssetWithURL:[[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]] options:nil]; - [self playerItemPrepareText:asset assetOptions:assetOptions withCallback:handler]; + AVURLAsset *asset = [AVURLAsset URLAssetWithURL:[[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]] options:nil]; + [self playerItemPrepareText:asset assetOptions:assetOptions withCallback:handler]; } #if __has_include() @@ -561,366 +540,343 @@ static int const RCTVideoUnset = -1; #endif -- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context -{ - if (object == _playerItem) { - // When timeMetadata is read the event onTimedMetadata is triggered - if ([keyPath isEqualToString:timedMetadata]) { - NSArray *items = [change objectForKey:@"new"]; - if (items && ![items isEqual:[NSNull null]] && items.count > 0) { - NSMutableArray *array = [NSMutableArray new]; - for (AVMetadataItem *item in items) { - NSString *value = (NSString *)item.value; - NSString *identifier = item.identifier; +- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context { + if (object == _playerItem) { + // When timeMetadata is read the event onTimedMetadata is triggered + if ([keyPath isEqualToString:timedMetadata]) { + NSArray *items = [change objectForKey:@"new"]; + if (items && ![items isEqual:[NSNull null]] && items.count > 0) { + NSMutableArray *array = [NSMutableArray new]; + for (AVMetadataItem *item in items) { + NSString *value = (NSString *) item.value; + NSString *identifier = item.identifier; - if (![value isEqual: [NSNull null]]) { - NSDictionary *dictionary = [[NSDictionary alloc] initWithObjects:@[value, identifier] forKeys:@[@"value", @"identifier"]]; + if (![value isEqual:[NSNull null]]) { + NSDictionary *dictionary = [[NSDictionary alloc] initWithObjects:@[value, identifier] forKeys:@[@"value", @"identifier"]]; - [array addObject:dictionary]; - } + [array addObject:dictionary]; + } + } + + self.onTimedMetadata(@{ + @"target": self.reactTag, + @"metadata": array + }); + } } - self.onTimedMetadata(@{ - @"target": self.reactTag, - @"metadata": array - }); - } - } + if ([keyPath isEqualToString:statusKeyPath]) { + // Handle player item status change. + if (_playerItem.status == AVPlayerItemStatusReadyToPlay) { + float duration = CMTimeGetSeconds(_playerItem.asset.duration); - if ([keyPath isEqualToString:statusKeyPath]) { - // Handle player item status change. 
- if (_playerItem.status == AVPlayerItemStatusReadyToPlay) { - float duration = CMTimeGetSeconds(_playerItem.asset.duration); + if (isnan(duration)) { + duration = 0.0; + } - if (isnan(duration)) { - duration = 0.0; - } + NSObject *width = @"undefined"; + NSObject *height = @"undefined"; + NSString *orientation = @"undefined"; - NSObject *width = @"undefined"; - NSObject *height = @"undefined"; - NSString *orientation = @"undefined"; + if ([_playerItem.asset tracksWithMediaType:AVMediaTypeVideo].count > 0) { + AVAssetTrack *videoTrack = [[_playerItem.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; + width = [NSNumber numberWithFloat:videoTrack.naturalSize.width]; + height = [NSNumber numberWithFloat:videoTrack.naturalSize.height]; + CGAffineTransform preferredTransform = [videoTrack preferredTransform]; - if ([_playerItem.asset tracksWithMediaType:AVMediaTypeVideo].count > 0) { - AVAssetTrack *videoTrack = [[_playerItem.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; - width = [NSNumber numberWithFloat:videoTrack.naturalSize.width]; - height = [NSNumber numberWithFloat:videoTrack.naturalSize.height]; - CGAffineTransform preferredTransform = [videoTrack preferredTransform]; + if ((videoTrack.naturalSize.width == preferredTransform.tx + && videoTrack.naturalSize.height == preferredTransform.ty) + || (preferredTransform.tx == 0 && preferredTransform.ty == 0)) { + orientation = @"landscape"; + } else { + orientation = @"portrait"; + } + } - if ((videoTrack.naturalSize.width == preferredTransform.tx - && videoTrack.naturalSize.height == preferredTransform.ty) - || (preferredTransform.tx == 0 && preferredTransform.ty == 0)) - { - orientation = @"landscape"; - } else { - orientation = @"portrait"; - } - } - - if (self.onVideoLoad && _videoLoadStarted) { - self.onVideoLoad(@{@"duration": [NSNumber numberWithFloat:duration], - @"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(_playerItem.currentTime)], - @"canPlayReverse": [NSNumber numberWithBool:_playerItem.canPlayReverse], - @"canPlayFastForward": [NSNumber numberWithBool:_playerItem.canPlayFastForward], - @"canPlaySlowForward": [NSNumber numberWithBool:_playerItem.canPlaySlowForward], - @"canPlaySlowReverse": [NSNumber numberWithBool:_playerItem.canPlaySlowReverse], - @"canStepBackward": [NSNumber numberWithBool:_playerItem.canStepBackward], - @"canStepForward": [NSNumber numberWithBool:_playerItem.canStepForward], - @"naturalSize": @{ - @"width": width, - @"height": height, - @"orientation": orientation - }, - @"audioTracks": [self getAudioTrackInfo], - @"textTracks": [self getTextTrackInfo], - @"target": self.reactTag}); - } - _videoLoadStarted = NO; - - [self attachListeners]; - [self applyModifiers]; - } else if (_playerItem.status == AVPlayerItemStatusFailed && self.onVideoError) { - self.onVideoError(@{@"error": @{@"code": [NSNumber numberWithInteger: _playerItem.error.code], - @"domain": _playerItem.error.domain}, + if (self.onVideoLoad && _videoLoadStarted) { + self.onVideoLoad(@{@"duration": [NSNumber numberWithFloat:duration], + @"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(_playerItem.currentTime)], + @"canPlayReverse": [NSNumber numberWithBool:_playerItem.canPlayReverse], + @"canPlayFastForward": [NSNumber numberWithBool:_playerItem.canPlayFastForward], + @"canPlaySlowForward": [NSNumber numberWithBool:_playerItem.canPlaySlowForward], + @"canPlaySlowReverse": [NSNumber numberWithBool:_playerItem.canPlaySlowReverse], + @"canStepBackward": [NSNumber 
numberWithBool:_playerItem.canStepBackward], + @"canStepForward": [NSNumber numberWithBool:_playerItem.canStepForward], + @"naturalSize": @{ + @"width": width, + @"height": height, + @"orientation": orientation + }, + @"audioTracks": [self getAudioTrackInfo], + @"textTracks": [self getTextTrackInfo], @"target": self.reactTag}); - } - } else if ([keyPath isEqualToString:playbackBufferEmptyKeyPath]) { - _playerBufferEmpty = YES; - self.onVideoBuffer(@{@"isBuffering": @(YES), @"target": self.reactTag}); - } else if ([keyPath isEqualToString:playbackLikelyToKeepUpKeyPath]) { - // Continue playing (or not if paused) after being paused due to hitting an unbuffered zone. - if ((!(_controls || _fullscreenPlayerPresented) || _playerBufferEmpty) && _playerItem.playbackLikelyToKeepUp) { - [self setPaused:_paused]; - } - _playerBufferEmpty = NO; - self.onVideoBuffer(@{@"isBuffering": @(NO), @"target": self.reactTag}); - } - } else if (object == _playerLayer) { - if([keyPath isEqualToString:readyForDisplayKeyPath] && [change objectForKey:NSKeyValueChangeNewKey]) { - if([change objectForKey:NSKeyValueChangeNewKey] && self.onReadyForDisplay) { - self.onReadyForDisplay(@{@"target": self.reactTag}); - } - } - } else if (object == _player) { - if([keyPath isEqualToString:playbackRate]) { - if(self.onPlaybackRateChange) { - self.onPlaybackRateChange(@{@"playbackRate": [NSNumber numberWithFloat:_player.rate], - @"target": self.reactTag}); - } - if(_playbackStalled && _player.rate > 0) { - if(self.onPlaybackResume) { - self.onPlaybackResume(@{@"playbackRate": [NSNumber numberWithFloat:_player.rate], - @"target": self.reactTag}); + } + _videoLoadStarted = NO; + + [self attachListeners]; + [self applyModifiers]; + } else if (_playerItem.status == AVPlayerItemStatusFailed && self.onVideoError) { + self.onVideoError(@{@"error": @{@"code": [NSNumber numberWithInteger:_playerItem.error.code], + @"domain": _playerItem.error.domain}, + @"target": self.reactTag}); + } + } else if ([keyPath isEqualToString:playbackBufferEmptyKeyPath]) { + _playerBufferEmpty = YES; + self.onVideoBuffer(@{@"isBuffering": @(YES), @"target": self.reactTag}); + } else if ([keyPath isEqualToString:playbackLikelyToKeepUpKeyPath]) { + // Continue playing (or not if paused) after being paused due to hitting an unbuffered zone. 
+ if ((!(_controls || _fullscreenPlayerPresented) || _playerBufferEmpty) && _playerItem.playbackLikelyToKeepUp) { + [self setPaused:_paused]; + } + _playerBufferEmpty = NO; + self.onVideoBuffer(@{@"isBuffering": @(NO), @"target": self.reactTag}); } - _playbackStalled = NO; - } - } - else if([keyPath isEqualToString:externalPlaybackActive]) { - if(self.onVideoExternalPlaybackChange) { - self.onVideoExternalPlaybackChange(@{@"isExternalPlaybackActive": [NSNumber numberWithBool:_player.isExternalPlaybackActive], - @"target": self.reactTag}); + } else if (object == _playerLayer) { + if ([keyPath isEqualToString:readyForDisplayKeyPath] && [change objectForKey:NSKeyValueChangeNewKey]) { + if ([change objectForKey:NSKeyValueChangeNewKey] && self.onReadyForDisplay) { + self.onReadyForDisplay(@{@"target": self.reactTag}); + } } + } else if (object == _player) { + if ([keyPath isEqualToString:playbackRate]) { + if (self.onPlaybackRateChange) { + self.onPlaybackRateChange(@{@"playbackRate": [NSNumber numberWithFloat:_player.rate], + @"target": self.reactTag}); + } + if (_playbackStalled && _player.rate > 0) { + if (self.onPlaybackResume) { + self.onPlaybackResume(@{@"playbackRate": [NSNumber numberWithFloat:_player.rate], + @"target": self.reactTag}); + } + _playbackStalled = NO; + } + } else if ([keyPath isEqualToString:externalPlaybackActive]) { + if (self.onVideoExternalPlaybackChange) { + self.onVideoExternalPlaybackChange(@{@"isExternalPlaybackActive": [NSNumber numberWithBool:_player.isExternalPlaybackActive], + @"target": self.reactTag}); + } + } + } else { + [super observeValueForKeyPath:keyPath ofObject:object change:change context:context]; } - } else { - [super observeValueForKeyPath:keyPath ofObject:object change:change context:context]; - } } -- (void)attachListeners -{ - // listen for end of file - [[NSNotificationCenter defaultCenter] removeObserver:self - name:AVPlayerItemDidPlayToEndTimeNotification - object:[_player currentItem]]; - [[NSNotificationCenter defaultCenter] addObserver:self - selector:@selector(playerItemDidReachEnd:) - name:AVPlayerItemDidPlayToEndTimeNotification - object:[_player currentItem]]; +- (void)attachListeners { + // listen for end of file + [[NSNotificationCenter defaultCenter] removeObserver:self + name:AVPlayerItemDidPlayToEndTimeNotification + object:[_player currentItem]]; + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(playerItemDidReachEnd:) + name:AVPlayerItemDidPlayToEndTimeNotification + object:[_player currentItem]]; - [[NSNotificationCenter defaultCenter] removeObserver:self - name:AVPlayerItemPlaybackStalledNotification - object:nil]; - [[NSNotificationCenter defaultCenter] addObserver:self - selector:@selector(playbackStalled:) - name:AVPlayerItemPlaybackStalledNotification - object:nil]; + [[NSNotificationCenter defaultCenter] removeObserver:self + name:AVPlayerItemPlaybackStalledNotification + object:nil]; + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(playbackStalled:) + name:AVPlayerItemPlaybackStalledNotification + object:nil]; } -- (void)playbackStalled:(NSNotification *)notification -{ - if(self.onPlaybackStalled) { - self.onPlaybackStalled(@{@"target": self.reactTag}); - } - _playbackStalled = YES; +- (void)playbackStalled:(NSNotification *)notification { + if (self.onPlaybackStalled) { + self.onPlaybackStalled(@{@"target": self.reactTag}); + } + _playbackStalled = YES; } -- (void)playerItemDidReachEnd:(NSNotification *)notification -{ - if(self.onVideoEnd) { - 
self.onVideoEnd(@{@"target": self.reactTag}); - } +- (void)playerItemDidReachEnd:(NSNotification *)notification { + if (self.onVideoEnd) { + self.onVideoEnd(@{@"target": self.reactTag}); + } - if (_repeat) { - AVPlayerItem *item = [notification object]; - [item seekToTime:kCMTimeZero]; - [self applyModifiers]; - } else { - [self removePlayerTimeObserver]; - } + if (_repeat) { + AVPlayerItem *item = [notification object]; + [item seekToTime:kCMTimeZero]; + [self applyModifiers]; + } else { + [self removePlayerTimeObserver]; + } } #pragma mark - Prop setters -- (void)setResizeMode:(NSString*)mode -{ - if( _controls ) - { - _playerViewController.videoGravity = mode; - } - else - { - _playerLayer.videoGravity = mode; - } - _resizeMode = mode; +- (void)setResizeMode:(NSString *)mode { + if (_controls) { + _playerViewController.videoGravity = mode; + } else { + _playerLayer.videoGravity = mode; + } + _resizeMode = mode; } -- (void)setPlayInBackground:(BOOL)playInBackground -{ - _playInBackground = playInBackground; +- (void)setPlayInBackground:(BOOL)playInBackground { + _playInBackground = playInBackground; } -- (void)setAllowsExternalPlayback:(BOOL)allowsExternalPlayback -{ +- (void)setAllowsExternalPlayback:(BOOL)allowsExternalPlayback { _allowsExternalPlayback = allowsExternalPlayback; _player.allowsExternalPlayback = _allowsExternalPlayback; } -- (void)setPlayWhenInactive:(BOOL)playWhenInactive -{ - _playWhenInactive = playWhenInactive; +- (void)setPlayWhenInactive:(BOOL)playWhenInactive { + _playWhenInactive = playWhenInactive; } -- (void)setIgnoreSilentSwitch:(NSString *)ignoreSilentSwitch -{ - _ignoreSilentSwitch = ignoreSilentSwitch; - [self applyModifiers]; +- (void)setIgnoreSilentSwitch:(NSString *)ignoreSilentSwitch { + _ignoreSilentSwitch = ignoreSilentSwitch; + [self applyModifiers]; } -- (void)setPaused:(BOOL)paused -{ - if (paused) { - [_player pause]; - [_player setRate:0.0]; - } else { - if([_ignoreSilentSwitch isEqualToString:@"ignore"]) { - [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:nil]; - } else if([_ignoreSilentSwitch isEqualToString:@"obey"]) { - [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryAmbient error:nil]; - } - [_player play]; - [_player setRate:_rate]; - } - - _paused = paused; -} - -- (float)getCurrentTime -{ - return _playerItem != NULL ? 
CMTimeGetSeconds(_playerItem.currentTime) : 0; -} - -- (void)setCurrentTime:(float)currentTime -{ - NSDictionary *info = @{ - @"time": [NSNumber numberWithFloat:currentTime], - @"tolerance": [NSNumber numberWithInt:100] - }; - [self setSeek:info]; -} - -- (void)setSeek:(NSDictionary *)info -{ - NSNumber *seekTime = info[@"time"]; - NSNumber *seekTolerance = info[@"tolerance"]; - - int timeScale = 1000; - - AVPlayerItem *item = _player.currentItem; - if (item && item.status == AVPlayerItemStatusReadyToPlay) { - // TODO check loadedTimeRanges - - CMTime cmSeekTime = CMTimeMakeWithSeconds([seekTime floatValue], timeScale); - CMTime current = item.currentTime; - // TODO figure out a good tolerance level - CMTime tolerance = CMTimeMake([seekTolerance floatValue], timeScale); - BOOL wasPaused = _paused; - - if (CMTimeCompare(current, cmSeekTime) != 0) { - if (!wasPaused) [_player pause]; - [_player seekToTime:cmSeekTime toleranceBefore:tolerance toleranceAfter:tolerance completionHandler:^(BOOL finished) { - if (!_timeObserver) { - [self addPlayerTimeObserver]; +- (void)setPaused:(BOOL)paused { + if (paused) { + [_player pause]; + [_player setRate:0.0]; + } else { + if ([_ignoreSilentSwitch isEqualToString:@"ignore"]) { + [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:nil]; + } else if ([_ignoreSilentSwitch isEqualToString:@"obey"]) { + [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryAmbient error:nil]; } - if (!wasPaused) { - [self setPaused:false]; - } - if(self.onVideoSeek) { - self.onVideoSeek(@{@"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(item.currentTime)], - @"seekTime": seekTime, - @"target": self.reactTag}); - } - }]; - - _pendingSeek = false; + [_player play]; + [_player setRate:_rate]; } - } else { - // TODO: See if this makes sense and if so, actually implement it - _pendingSeek = true; - _pendingSeekTime = [seekTime floatValue]; - } + _paused = paused; } -- (void)setRate:(float)rate -{ - _rate = rate; - [self applyModifiers]; +- (float)getCurrentTime { + return _playerItem != NULL ? 
CMTimeGetSeconds(_playerItem.currentTime) : 0; } -- (void)setMuted:(BOOL)muted -{ - _muted = muted; - [self applyModifiers]; +- (void)setCurrentTime:(float)currentTime { + NSDictionary *info = @{ + @"time": [NSNumber numberWithFloat:currentTime], + @"tolerance": [NSNumber numberWithInt:100] + }; + [self setSeek:info]; } -- (void)setVolume:(float)volume -{ - _volume = volume; - [self applyModifiers]; +- (void)setSeek:(NSDictionary *)info { + NSNumber *seekTime = info[@"time"]; + NSNumber *seekTolerance = info[@"tolerance"]; + + int timeScale = 1000; + + AVPlayerItem *item = _player.currentItem; + if (item && item.status == AVPlayerItemStatusReadyToPlay) { + // TODO check loadedTimeRanges + + CMTime cmSeekTime = CMTimeMakeWithSeconds([seekTime floatValue], timeScale); + CMTime current = item.currentTime; + // TODO figure out a good tolerance level + CMTime tolerance = CMTimeMake([seekTolerance floatValue], timeScale); + BOOL wasPaused = _paused; + + if (CMTimeCompare(current, cmSeekTime) != 0) { + if (!wasPaused) [_player pause]; + [_player seekToTime:cmSeekTime toleranceBefore:tolerance toleranceAfter:tolerance completionHandler:^(BOOL finished) { + if (!_timeObserver) { + [self addPlayerTimeObserver]; + } + if (!wasPaused) { + [self setPaused:false]; + } + if (self.onVideoSeek) { + self.onVideoSeek(@{@"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(item.currentTime)], + @"seekTime": seekTime, + @"target": self.reactTag}); + } + }]; + + _pendingSeek = false; + } + + } else { + // TODO: See if this makes sense and if so, actually implement it + _pendingSeek = true; + _pendingSeekTime = [seekTime floatValue]; + } } -- (void)applyModifiers -{ - if (_muted) { - [_player setVolume:0]; - [_player setMuted:YES]; - } else { - [_player setVolume:_volume]; - [_player setMuted:NO]; - } +- (void)setRate:(float)rate { + _rate = rate; + [self applyModifiers]; +} - [self setSelectedAudioTrack:_selectedAudioTrack]; - [self setSelectedTextTrack:_selectedTextTrack]; - [self setResizeMode:_resizeMode]; - [self setRepeat:_repeat]; - [self setPaused:_paused]; - [self setFilter:_filter]; - [self setControls:_controls]; - [self setAllowsExternalPlayback:_allowsExternalPlayback]; +- (void)setMuted:(BOOL)muted { + _muted = muted; + [self applyModifiers]; +} + +- (void)setVolume:(float)volume { + _volume = volume; + [self applyModifiers]; +} + +- (void)applyModifiers { + if (_muted) { + [_player setVolume:0]; + [_player setMuted:YES]; + } else { + [_player setVolume:_volume]; + [_player setMuted:NO]; + } + + [self setSelectedAudioTrack:_selectedAudioTrack]; + [self setSelectedTextTrack:_selectedTextTrack]; + [self setResizeMode:_resizeMode]; + [self setRepeat:_repeat]; + [self setPaused:_paused]; + [self setFilter:_filter]; + [self setControls:_controls]; + [self setAllowsExternalPlayback:_allowsExternalPlayback]; } - (void)setRepeat:(BOOL)repeat { - _repeat = repeat; + _repeat = repeat; } - (void)setMediaSelectionTrackForCharacteristic:(AVMediaCharacteristic)characteristic - withCriteria:(NSDictionary *)criteria -{ + withCriteria:(NSDictionary *)criteria { NSString *type = criteria[@"type"]; AVMediaSelectionGroup *group = [_player.currentItem.asset - mediaSelectionGroupForMediaCharacteristic:characteristic]; + mediaSelectionGroupForMediaCharacteristic:characteristic]; AVMediaSelectionOption *mediaOption; if ([type isEqualToString:@"disabled"]) { - // Do nothing. We want to ensure option is nil + // Do nothing. 
We want to ensure option is nil } else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) { NSString *value = criteria[@"value"]; for (int i = 0; i < group.options.count; ++i) { AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i]; NSString *optionValue; if ([type isEqualToString:@"language"]) { - optionValue = [currentOption extendedLanguageTag]; + optionValue = [currentOption extendedLanguageTag]; } else { - optionValue = [[[currentOption commonMetadata] - valueForKey:@"value"] - objectAtIndex:0]; + optionValue = [[[currentOption commonMetadata] + valueForKey:@"value"] + objectAtIndex:0]; } if ([value isEqualToString:optionValue]) { - mediaOption = currentOption; - break; + mediaOption = currentOption; + break; } - } - //} else if ([type isEqualToString:@"default"]) { - // option = group.defaultOption; */ + } + //} else if ([type isEqualToString:@"default"]) { + // option = group.defaultOption; */ } else if ([type isEqualToString:@"index"]) { if ([criteria[@"value"] isKindOfClass:[NSNumber class]]) { - int index = [criteria[@"value"] intValue]; - if (group.options.count > index) { - mediaOption = [group.options objectAtIndex:index]; - } + int index = [criteria[@"value"] intValue]; + if (group.options.count > index) { + mediaOption = [group.options objectAtIndex:index]; + } } } else { // default. invalid type or "system" - [_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group]; - return; + [_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group]; + return; } // If a match isn't found, option will be nil and text tracks will be disabled @@ -934,234 +890,223 @@ static int const RCTVideoUnset = -1; } - (void)setSelectedTextTrack:(NSDictionary *)selectedTextTrack { - _selectedTextTrack = selectedTextTrack; - if (_textTracks) { // sideloaded text tracks - [self setSideloadedText]; - } else { // text tracks included in the HLS playlist - [self setMediaSelectionTrackForCharacteristic:AVMediaCharacteristicLegible - withCriteria:_selectedTextTrack]; - } + _selectedTextTrack = selectedTextTrack; + if (_textTracks) { // sideloaded text tracks + [self setSideloadedText]; + } else { // text tracks included in the HLS playlist + [self setMediaSelectionTrackForCharacteristic:AVMediaCharacteristicLegible + withCriteria:_selectedTextTrack]; + } } -- (void) setSideloadedText { - NSString *type = _selectedTextTrack[@"type"]; - NSArray *textTracks = [self getTextTrackInfo]; +- (void)setSideloadedText { + NSString *type = _selectedTextTrack[@"type"]; + NSArray *textTracks = [self getTextTrackInfo]; - // The first few tracks will be audio & video track - int firstTextIndex = 0; - for (firstTextIndex = 0; firstTextIndex < _player.currentItem.tracks.count; ++firstTextIndex) { - if ([_player.currentItem.tracks[firstTextIndex].assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible]) { - break; - } - } - - int selectedTrackIndex = RCTVideoUnset; - - if ([type isEqualToString:@"disabled"]) { - // Do nothing. 
We want to ensure option is nil - } else if ([type isEqualToString:@"language"]) { - NSString *selectedValue = _selectedTextTrack[@"value"]; - for (int i = 0; i < textTracks.count; ++i) { - NSDictionary *currentTextTrack = [textTracks objectAtIndex:i]; - if ([selectedValue isEqualToString:currentTextTrack[@"language"]]) { - selectedTrackIndex = i; - break; - } - } - } else if ([type isEqualToString:@"title"]) { - NSString *selectedValue = _selectedTextTrack[@"value"]; - for (int i = 0; i < textTracks.count; ++i) { - NSDictionary *currentTextTrack = [textTracks objectAtIndex:i]; - if ([selectedValue isEqualToString:currentTextTrack[@"title"]]) { - selectedTrackIndex = i; - break; - } - } - } else if ([type isEqualToString:@"index"]) { - if ([_selectedTextTrack[@"value"] isKindOfClass:[NSNumber class]]) { - int index = [_selectedTextTrack[@"value"] intValue]; - if (textTracks.count > index) { - selectedTrackIndex = index; - } - } - } - - // in the situation that a selected text track is not available (eg. specifies a textTrack not available) - if (![type isEqualToString:@"disabled"] && selectedTrackIndex == RCTVideoUnset) { - CFArrayRef captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(kMACaptionAppearanceDomainUser); - NSArray *captionSettings = (__bridge NSArray*)captioningMediaCharacteristics; - if ([captionSettings containsObject:AVMediaCharacteristicTranscribesSpokenDialogForAccessibility]) { - selectedTrackIndex = 0; // If we can't find a match, use the first available track - NSString *systemLanguage = [[NSLocale preferredLanguages] firstObject]; - for (int i = 0; i < textTracks.count; ++i) { - NSDictionary *currentTextTrack = [textTracks objectAtIndex:i]; - if ([systemLanguage isEqualToString:currentTextTrack[@"language"]]) { - selectedTrackIndex = i; - break; + // The first few tracks will be audio & video track + int firstTextIndex = 0; + for (firstTextIndex = 0; firstTextIndex < _player.currentItem.tracks.count; ++firstTextIndex) { + if ([_player.currentItem.tracks[firstTextIndex].assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible]) { + break; } - } } - } - for (int i = firstTextIndex; i < _player.currentItem.tracks.count; ++i) { - BOOL isEnabled = NO; - if (selectedTrackIndex != RCTVideoUnset) { - isEnabled = i == selectedTrackIndex + firstTextIndex; + int selectedTrackIndex = RCTVideoUnset; + + if ([type isEqualToString:@"disabled"]) { + // Do nothing. We want to ensure option is nil + } else if ([type isEqualToString:@"language"]) { + NSString *selectedValue = _selectedTextTrack[@"value"]; + for (int i = 0; i < textTracks.count; ++i) { + NSDictionary *currentTextTrack = [textTracks objectAtIndex:i]; + if ([selectedValue isEqualToString:currentTextTrack[@"language"]]) { + selectedTrackIndex = i; + break; + } + } + } else if ([type isEqualToString:@"title"]) { + NSString *selectedValue = _selectedTextTrack[@"value"]; + for (int i = 0; i < textTracks.count; ++i) { + NSDictionary *currentTextTrack = [textTracks objectAtIndex:i]; + if ([selectedValue isEqualToString:currentTextTrack[@"title"]]) { + selectedTrackIndex = i; + break; + } + } + } else if ([type isEqualToString:@"index"]) { + if ([_selectedTextTrack[@"value"] isKindOfClass:[NSNumber class]]) { + int index = [_selectedTextTrack[@"value"] intValue]; + if (textTracks.count > index) { + selectedTrackIndex = index; + } + } + } + + // in the situation that a selected text track is not available (eg. 
specifies a textTrack not available) + if (![type isEqualToString:@"disabled"] && selectedTrackIndex == RCTVideoUnset) { + CFArrayRef captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(kMACaptionAppearanceDomainUser); + NSArray *captionSettings = (__bridge NSArray *) captioningMediaCharacteristics; + if ([captionSettings containsObject:AVMediaCharacteristicTranscribesSpokenDialogForAccessibility]) { + selectedTrackIndex = 0; // If we can't find a match, use the first available track + NSString *systemLanguage = [[NSLocale preferredLanguages] firstObject]; + for (int i = 0; i < textTracks.count; ++i) { + NSDictionary *currentTextTrack = [textTracks objectAtIndex:i]; + if ([systemLanguage isEqualToString:currentTextTrack[@"language"]]) { + selectedTrackIndex = i; + break; + } + } + } + } + + for (int i = firstTextIndex; i < _player.currentItem.tracks.count; ++i) { + BOOL isEnabled = NO; + if (selectedTrackIndex != RCTVideoUnset) { + isEnabled = i == selectedTrackIndex + firstTextIndex; + } + [_player.currentItem.tracks[i] setEnabled:isEnabled]; } - [_player.currentItem.tracks[i] setEnabled:isEnabled]; - } } --(void) setStreamingText { - NSString *type = _selectedTextTrack[@"type"]; - AVMediaSelectionGroup *group = [_player.currentItem.asset - mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible]; - AVMediaSelectionOption *mediaOption; +- (void)setStreamingText { + NSString *type = _selectedTextTrack[@"type"]; + AVMediaSelectionGroup *group = [_player.currentItem.asset + mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible]; + AVMediaSelectionOption *mediaOption; - if ([type isEqualToString:@"disabled"]) { - // Do nothing. We want to ensure option is nil - } else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) { - NSString *value = _selectedTextTrack[@"value"]; - for (int i = 0; i < group.options.count; ++i) { - AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i]; - NSString *optionValue; - if ([type isEqualToString:@"language"]) { - optionValue = [currentOption extendedLanguageTag]; - } else { - optionValue = [[[currentOption commonMetadata] + if ([type isEqualToString:@"disabled"]) { + // Do nothing. We want to ensure option is nil + } else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) { + NSString *value = _selectedTextTrack[@"value"]; + for (int i = 0; i < group.options.count; ++i) { + AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i]; + NSString *optionValue; + if ([type isEqualToString:@"language"]) { + optionValue = [currentOption extendedLanguageTag]; + } else { + optionValue = [[[currentOption commonMetadata] valueForKey:@"value"] - objectAtIndex:0]; - } - if ([value isEqualToString:optionValue]) { - mediaOption = currentOption; - break; - } + objectAtIndex:0]; + } + if ([value isEqualToString:optionValue]) { + mediaOption = currentOption; + break; + } + } + //} else if ([type isEqualToString:@"default"]) { + // option = group.defaultOption; */ + } else if ([type isEqualToString:@"index"]) { + if ([_selectedTextTrack[@"value"] isKindOfClass:[NSNumber class]]) { + int index = [_selectedTextTrack[@"value"] intValue]; + if (group.options.count > index) { + mediaOption = [group.options objectAtIndex:index]; + } + } + } else { // default. 
invalid type or "system" + [_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group]; + return; } - //} else if ([type isEqualToString:@"default"]) { - // option = group.defaultOption; */ - } else if ([type isEqualToString:@"index"]) { - if ([_selectedTextTrack[@"value"] isKindOfClass:[NSNumber class]]) { - int index = [_selectedTextTrack[@"value"] intValue]; - if (group.options.count > index) { - mediaOption = [group.options objectAtIndex:index]; - } - } - } else { // default. invalid type or "system" - [_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group]; - return; - } - // If a match isn't found, option will be nil and text tracks will be disabled - [_player.currentItem selectMediaOption:mediaOption inMediaSelectionGroup:group]; + // If a match isn't found, option will be nil and text tracks will be disabled + [_player.currentItem selectMediaOption:mediaOption inMediaSelectionGroup:group]; } -- (void)setTextTracks:(NSArray*) textTracks; -{ - _textTracks = textTracks; +- (void)setTextTracks:(NSArray *)textTracks; { + _textTracks = textTracks; - // in case textTracks was set after selectedTextTrack - if (_selectedTextTrack) [self setSelectedTextTrack:_selectedTextTrack]; + // in case textTracks was set after selectedTextTrack + if (_selectedTextTrack) [self setSelectedTextTrack:_selectedTextTrack]; } -- (NSArray *)getAudioTrackInfo -{ +- (NSArray *)getAudioTrackInfo { NSMutableArray *audioTracks = [[NSMutableArray alloc] init]; AVMediaSelectionGroup *group = [_player.currentItem.asset - mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible]; + mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible]; for (int i = 0; i < group.options.count; ++i) { AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i]; NSString *title = @""; NSArray *values = [[currentOption commonMetadata] valueForKey:@"value"]; if (values.count > 0) { - title = [values objectAtIndex:0]; + title = [values objectAtIndex:0]; } NSString *language = [currentOption extendedLanguageTag] ? [currentOption extendedLanguageTag] : @""; NSDictionary *audioTrack = @{ - @"index": [NSNumber numberWithInt:i], - @"title": title, - @"language": language - }; + @"index": [NSNumber numberWithInt:i], + @"title": title, + @"language": language + }; [audioTracks addObject:audioTrack]; } return audioTracks; } -- (NSArray *)getTextTrackInfo -{ - // if sideloaded, textTracks will already be set - if (_textTracks) return _textTracks; +- (NSArray *)getTextTrackInfo { + // if sideloaded, textTracks will already be set + if (_textTracks) return _textTracks; - // if streaming video, we extract the text tracks - NSMutableArray *textTracks = [[NSMutableArray alloc] init]; - AVMediaSelectionGroup *group = [_player.currentItem.asset - mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible]; - for (int i = 0; i < group.options.count; ++i) { - AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i]; - NSString *title = @""; - NSArray *values = [[currentOption commonMetadata] valueForKey:@"value"]; - if (values.count > 0) { - title = [values objectAtIndex:0]; - } - NSString *language = [currentOption extendedLanguageTag] ? 
[currentOption extendedLanguageTag] : @""; - NSDictionary *textTrack = @{ - @"index": [NSNumber numberWithInt:i], - @"title": title, - @"language": language - }; - [textTracks addObject:textTrack]; - } - return textTracks; -} - -- (BOOL)getFullscreen -{ - return _fullscreenPlayerPresented; -} - -- (void)setFullscreen:(BOOL) fullscreen { - if( fullscreen && !_fullscreenPlayerPresented && _player ) - { - // Ensure player view controller is not null - if( !_playerViewController ) - { - [self usePlayerViewController]; - } - // Set presentation style to fullscreen - [_playerViewController setModalPresentationStyle:UIModalPresentationFullScreen]; - - // Find the nearest view controller - UIViewController *viewController = [self firstAvailableUIViewController]; - if( !viewController ) - { - UIWindow *keyWindow = [[UIApplication sharedApplication] keyWindow]; - viewController = keyWindow.rootViewController; - if( viewController.childViewControllers.count > 0 ) - { - viewController = viewController.childViewControllers.lastObject; - } - } - if( viewController ) - { - _presentingViewController = viewController; - if(self.onVideoFullscreenPlayerWillPresent) { - self.onVideoFullscreenPlayerWillPresent(@{@"target": self.reactTag}); - } - [viewController presentViewController:_playerViewController animated:true completion:^{ - _playerViewController.showsPlaybackControls = YES; - _fullscreenPlayerPresented = fullscreen; - if(self.onVideoFullscreenPlayerDidPresent) { - self.onVideoFullscreenPlayerDidPresent(@{@"target": self.reactTag}); + // if streaming video, we extract the text tracks + NSMutableArray *textTracks = [[NSMutableArray alloc] init]; + AVMediaSelectionGroup *group = [_player.currentItem.asset + mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible]; + for (int i = 0; i < group.options.count; ++i) { + AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i]; + NSString *title = @""; + NSArray *values = [[currentOption commonMetadata] valueForKey:@"value"]; + if (values.count > 0) { + title = [values objectAtIndex:0]; } - }]; + NSString *language = [currentOption extendedLanguageTag] ? 
[currentOption extendedLanguageTag] : @""; + NSDictionary *textTrack = @{ + @"index": [NSNumber numberWithInt:i], + @"title": title, + @"language": language + }; + [textTracks addObject:textTrack]; + } + return textTracks; +} + +- (BOOL)getFullscreen { + return _fullscreenPlayerPresented; +} + +- (void)setFullscreen:(BOOL)fullscreen { + if (fullscreen && !_fullscreenPlayerPresented && _player) { + // Ensure player view controller is not null + if (!_playerViewController) { + [self usePlayerViewController]; + } + // Set presentation style to fullscreen + [_playerViewController setModalPresentationStyle:UIModalPresentationFullScreen]; + + // Find the nearest view controller + UIViewController *viewController = [self firstAvailableUIViewController]; + if (!viewController) { + UIWindow *keyWindow = [[UIApplication sharedApplication] keyWindow]; + viewController = keyWindow.rootViewController; + if (viewController.childViewControllers.count > 0) { + viewController = viewController.childViewControllers.lastObject; + } + } + if (viewController) { + _presentingViewController = viewController; + if (self.onVideoFullscreenPlayerWillPresent) { + self.onVideoFullscreenPlayerWillPresent(@{@"target": self.reactTag}); + } + [viewController presentViewController:_playerViewController animated:true completion:^{ + _playerViewController.showsPlaybackControls = YES; + _fullscreenPlayerPresented = fullscreen; + if (self.onVideoFullscreenPlayerDidPresent) { + self.onVideoFullscreenPlayerDidPresent(@{@"target": self.reactTag}); + } + }]; + } + } else if (!fullscreen && _fullscreenPlayerPresented) { + [self videoPlayerViewControllerWillDismiss:_playerViewController]; + [_presentingViewController dismissViewControllerAnimated:true completion:^{ + [self videoPlayerViewControllerDidDismiss:_playerViewController]; + }]; } - } - else if ( !fullscreen && _fullscreenPlayerPresented ) - { - [self videoPlayerViewControllerWillDismiss:_playerViewController]; - [_presentingViewController dismissViewControllerAnimated:true completion:^{ - [self videoPlayerViewControllerDidDismiss:_playerViewController]; - }]; - } } - (void)setFilter:(NSString *)filter { @@ -1202,190 +1147,233 @@ static int const RCTVideoUnset = -1; } - (void)setFullscreenOrientation:(NSString *)orientation { - _fullscreenOrientation = orientation; - if (_fullscreenPlayerPresented) { - _playerViewController.preferredOrientation = orientation; - } -} - -- (void)usePlayerViewController -{ - if( _player ) - { - _playerViewController = [self createPlayerViewController:_player withPlayerItem:_playerItem]; - // to prevent video from being animated when resizeMode is 'cover' - // resize mode must be set before subview is added - [self setResizeMode:_resizeMode]; - [self addSubview:_playerViewController.view]; - } -} - -- (void)usePlayerLayer -{ - if( _player ) - { - _playerLayer = [AVPlayerLayer playerLayerWithPlayer:_player]; - _playerLayer.frame = self.bounds; - _playerLayer.needsDisplayOnBoundsChange = YES; - - // to prevent video from being animated when resizeMode is 'cover' - // resize mode must be set before layer is added - [self setResizeMode:_resizeMode]; - [_playerLayer addObserver:self forKeyPath:readyForDisplayKeyPath options:NSKeyValueObservingOptionNew context:nil]; - _playerLayerObserverSet = YES; - - [self.layer addSublayer:_playerLayer]; - self.layer.needsDisplayOnBoundsChange = YES; - } -} - -- (void)setControls:(BOOL)controls -{ - if( _controls != controls || (!_playerLayer && !_playerViewController) ) - { - _controls = controls; - if( _controls 
) - { - [self removePlayerLayer]; - [self usePlayerViewController]; + _fullscreenOrientation = orientation; + if (_fullscreenPlayerPresented) { + _playerViewController.preferredOrientation = orientation; } - else - { - [_playerViewController.view removeFromSuperview]; - _playerViewController = nil; - [self usePlayerLayer]; +} + +- (void)usePlayerViewController { + if (_player) { + _playerViewController = [self createPlayerViewController:_player withPlayerItem:_playerItem]; + // to prevent video from being animated when resizeMode is 'cover' + // resize mode must be set before subview is added + [self setResizeMode:_resizeMode]; + [self addSubview:_playerViewController.view]; } - } } -- (void)setProgressUpdateInterval:(float)progressUpdateInterval -{ - _progressUpdateInterval = progressUpdateInterval; +- (void)usePlayerLayer { + if (_player) { + _playerLayer = [AVPlayerLayer playerLayerWithPlayer:_player]; + _playerLayer.frame = self.bounds; + _playerLayer.needsDisplayOnBoundsChange = YES; - if (_timeObserver) { - [self removePlayerTimeObserver]; - [self addPlayerTimeObserver]; - } + // to prevent video from being animated when resizeMode is 'cover' + // resize mode must be set before layer is added + [self setResizeMode:_resizeMode]; + [_playerLayer addObserver:self forKeyPath:readyForDisplayKeyPath options:NSKeyValueObservingOptionNew context:nil]; + _playerLayerObserverSet = YES; + + [self.layer addSublayer:_playerLayer]; + self.layer.needsDisplayOnBoundsChange = YES; + } } -- (void)removePlayerLayer -{ - [_playerLayer removeFromSuperlayer]; - if (_playerLayerObserverSet) { - [_playerLayer removeObserver:self forKeyPath:readyForDisplayKeyPath]; - _playerLayerObserverSet = NO; - } - _playerLayer = nil; +- (void)setControls:(BOOL)controls { + if (_controls != controls || (!_playerLayer && !_playerViewController)) { + _controls = controls; + if (_controls) { + [self removePlayerLayer]; + [self usePlayerViewController]; + } else { + [_playerViewController.view removeFromSuperview]; + _playerViewController = nil; + [self usePlayerLayer]; + } + } +} + +- (void)setProgressUpdateInterval:(float)progressUpdateInterval { + _progressUpdateInterval = progressUpdateInterval; + + if (_timeObserver) { + [self removePlayerTimeObserver]; + [self addPlayerTimeObserver]; + } +} + +- (void)removePlayerLayer { + [_playerLayer removeFromSuperlayer]; + if (_playerLayerObserverSet) { + [_playerLayer removeObserver:self forKeyPath:readyForDisplayKeyPath]; + _playerLayerObserverSet = NO; + } + _playerLayer = nil; } #pragma mark - RCTVideoPlayerViewControllerDelegate -- (void)videoPlayerViewControllerWillDismiss:(AVPlayerViewController *)playerViewController -{ - if (_playerViewController == playerViewController && _fullscreenPlayerPresented && self.onVideoFullscreenPlayerWillDismiss) - { - self.onVideoFullscreenPlayerWillDismiss(@{@"target": self.reactTag}); - } +- (void)videoPlayerViewControllerWillDismiss:(AVPlayerViewController *)playerViewController { + if (_playerViewController == playerViewController && _fullscreenPlayerPresented && self.onVideoFullscreenPlayerWillDismiss) { + self.onVideoFullscreenPlayerWillDismiss(@{@"target": self.reactTag}); + } } -- (void)videoPlayerViewControllerDidDismiss:(AVPlayerViewController *)playerViewController -{ - if (_playerViewController == playerViewController && _fullscreenPlayerPresented) - { - _fullscreenPlayerPresented = false; - _presentingViewController = nil; - _playerViewController = nil; - [self applyModifiers]; - if(self.onVideoFullscreenPlayerDidDismiss) { - 
self.onVideoFullscreenPlayerDidDismiss(@{@"target": self.reactTag}); +- (void)videoPlayerViewControllerDidDismiss:(AVPlayerViewController *)playerViewController { + if (_playerViewController == playerViewController && _fullscreenPlayerPresented) { + _fullscreenPlayerPresented = false; + _presentingViewController = nil; + _playerViewController = nil; + [self applyModifiers]; + if (self.onVideoFullscreenPlayerDidDismiss) { + self.onVideoFullscreenPlayerDidDismiss(@{@"target": self.reactTag}); + } } - } } #pragma mark - React View Management -- (void)insertReactSubview:(UIView *)view atIndex:(NSInteger)atIndex -{ - // We are early in the game and somebody wants to set a subview. - // That can only be in the context of playerViewController. - if( !_controls && !_playerLayer && !_playerViewController ) - { - [self setControls:true]; - } - - if( _controls ) - { - view.frame = self.bounds; - [_playerViewController.contentOverlayView insertSubview:view atIndex:atIndex]; - } - else - { - RCTLogError(@"video cannot have any subviews"); - } - return; -} - -- (void)removeReactSubview:(UIView *)subview -{ - if( _controls ) - { - [subview removeFromSuperview]; - } - else - { - RCTLogError(@"video cannot have any subviews"); - } - return; -} - -- (void)layoutSubviews -{ - [super layoutSubviews]; - if( _controls ) - { - _playerViewController.view.frame = self.bounds; - - // also adjust all subviews of contentOverlayView - for (UIView* subview in _playerViewController.contentOverlayView.subviews) { - subview.frame = self.bounds; +- (void)insertReactSubview:(UIView *)view atIndex:(NSInteger)atIndex { + // We are early in the game and somebody wants to set a subview. + // That can only be in the context of playerViewController. + if (!_controls && !_playerLayer && !_playerViewController) { + [self setControls:true]; + } + + if (_controls) { + view.frame = self.bounds; + [_playerViewController.contentOverlayView insertSubview:view atIndex:atIndex]; + } else { + RCTLogError(@"video cannot have any subviews"); + } + return; +} + +- (void)removeReactSubview:(UIView *)subview { + if (_controls) { + [subview removeFromSuperview]; + } else { + RCTLogError(@"video cannot have any subviews"); + } + return; +} + +- (void)layoutSubviews { + [super layoutSubviews]; + if (_controls) { + _playerViewController.view.frame = self.bounds; + + // also adjust all subviews of contentOverlayView + for (UIView *subview in _playerViewController.contentOverlayView.subviews) { + subview.frame = self.bounds; + } + } else { + [CATransaction begin]; + [CATransaction setAnimationDuration:0]; + _playerLayer.frame = self.bounds; + [CATransaction commit]; } - } - else - { - [CATransaction begin]; - [CATransaction setAnimationDuration:0]; - _playerLayer.frame = self.bounds; - [CATransaction commit]; - } } #pragma mark - Lifecycle -- (void)removeFromSuperview -{ - [_player pause]; - if (_playbackRateObserverRegistered) { - [_player removeObserver:self forKeyPath:playbackRate context:nil]; - _playbackRateObserverRegistered = NO; - } - if (_isExternalPlaybackActiveObserverRegistered) { - [_player removeObserver:self forKeyPath:externalPlaybackActive context:nil]; - _isExternalPlaybackActiveObserverRegistered = NO; - } - _player = nil; +- (void)removeFromSuperview { + [_player pause]; + if (_playbackRateObserverRegistered) { + [_player removeObserver:self forKeyPath:playbackRate context:nil]; + _playbackRateObserverRegistered = NO; + } + if (_isExternalPlaybackActiveObserverRegistered) { + [_player removeObserver:self 
forKeyPath:externalPlaybackActive context:nil];
+        _isExternalPlaybackActiveObserverRegistered = NO;
+    }
+    _player = nil;

-  [self removePlayerLayer];
+    [self removePlayerLayer];

-  [_playerViewController.view removeFromSuperview];
-  _playerViewController = nil;
+    [_playerViewController.view removeFromSuperview];
+    _playerViewController = nil;

-  [self removePlayerTimeObserver];
-  [self removePlayerItemObservers];
+    [self removePlayerTimeObserver];
+    [self removePlayerItemObservers];

-  _eventDispatcher = nil;
-  [[NSNotificationCenter defaultCenter] removeObserver:self];
+    _eventDispatcher = nil;
+    [[NSNotificationCenter defaultCenter] removeObserver:self];

-  [super removeFromSuperview];
+    [super removeFromSuperview];
+}
+
+- (void)save:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject {
+
+    AVAsset *asset = _playerItem.asset;
+
+    if (asset != nil) {
+
+        AVAssetExportSession *exportSession = [AVAssetExportSession
+                                               exportSessionWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
+
+        if (exportSession != nil) {
+            NSString *path = nil;
+            NSArray *array = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
+            path = [self generatePathInDirectory:[[self cacheDirectoryPath] stringByAppendingPathComponent:@"Filter"]
+                                   withExtension:@".mp4"];
+            NSURL *url = [NSURL fileURLWithPath:path];
+            exportSession.outputFileType = AVFileTypeMPEG4;
+            exportSession.outputURL = url;
+            exportSession.videoComposition = _playerItem.videoComposition;
+            exportSession.shouldOptimizeForNetworkUse = true;
+            [exportSession exportAsynchronouslyWithCompletionHandler:^{
+
+                switch ([exportSession status]) {
+                    case AVAssetExportSessionStatusFailed:
+                        reject(@"ERROR_COULD_NOT_EXPORT_VIDEO", @"Could not export video", exportSession.error);
+                        break;
+                    case AVAssetExportSessionStatusCancelled:
+                        reject(@"ERROR_EXPORT_SESSION_CANCELLED", @"Export session was cancelled", exportSession.error);
+                        break;
+                    default:
+                        resolve(@{@"uri": url.absoluteString});
+                        break;
+                }
+
+            }];
+
+        } else {
+
+            reject(@"ERROR_COULD_NOT_CREATE_EXPORT_SESSION", @"Could not create export session", nil);
+
+        }
+
+    } else {
+
+        reject(@"ERROR_ASSET_NIL", @"Asset is nil", nil);
+
+    }
+}
+
+- (BOOL)ensureDirExistsWithPath:(NSString *)path {
+    BOOL isDir = NO;
+    NSError *error;
+    BOOL exists = [[NSFileManager defaultManager] fileExistsAtPath:path isDirectory:&isDir];
+    if (!(exists && isDir)) {
+        [[NSFileManager defaultManager] createDirectoryAtPath:path withIntermediateDirectories:YES attributes:nil error:&error];
+        if (error) {
+            return NO;
+        }
+    }
+    return YES;
+}
+
+- (NSString *)generatePathInDirectory:(NSString *)directory withExtension:(NSString *)extension {
+    NSString *fileName = [[[NSUUID UUID] UUIDString] stringByAppendingString:extension];
+    [self ensureDirExistsWithPath:directory];
+    return [directory stringByAppendingPathComponent:fileName];
+}
+
+- (NSString *)cacheDirectoryPath {
+    NSArray *array = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
+    return array[0];
 }

 @end
diff --git a/ios/Video/RCTVideoManager.h b/ios/Video/RCTVideoManager.h
index e19a9e1f..b3bfccb5 100644
--- a/ios/Video/RCTVideoManager.h
+++ b/ios/Video/RCTVideoManager.h
@@ -1,5 +1,6 @@
 #import <React/RCTViewManager.h>
+#import <React/RCTBridgeModule.h>

-@interface RCTVideoManager : RCTViewManager
+@interface RCTVideoManager : RCTViewManager <RCTBridgeModule>

 @end
diff --git a/ios/Video/RCTVideoManager.m b/ios/Video/RCTVideoManager.m
index 055d2213..f476852d 100644
--- a/ios/Video/RCTVideoManager.m
+++ b/ios/Video/RCTVideoManager.m
@@ -1,14 +1,13 @@
 #import "RCTVideoManager.h"
 #import "RCTVideo.h"
 #import <React/RCTBridge.h>
+#import <React/RCTUIManager.h>
 #import <AVFoundation/AVFoundation.h>

 @implementation RCTVideoManager

 RCT_EXPORT_MODULE();

-@synthesize bridge = _bridge;
-
 - (UIView *)view
 {
   return [[RCTVideo alloc] initWithEventDispatcher:self.bridge.eventDispatcher];
@@ -16,7 +15,7 @@ RCT_EXPORT_MODULE();

 - (dispatch_queue_t)methodQueue
 {
-  return dispatch_get_main_queue();
+  return self.bridge.uiManager.methodQueue;
 }

 RCT_EXPORT_VIEW_PROPERTY(src, NSDictionary);
@@ -59,6 +58,22 @@ RCT_EXPORT_VIEW_PROPERTY(onPlaybackStalled, RCTBubblingEventBlock);
 RCT_EXPORT_VIEW_PROPERTY(onPlaybackResume, RCTBubblingEventBlock);
 RCT_EXPORT_VIEW_PROPERTY(onPlaybackRateChange, RCTBubblingEventBlock);
 RCT_EXPORT_VIEW_PROPERTY(onVideoExternalPlaybackChange, RCTBubblingEventBlock);
+RCT_EXPORT_VIEW_PROPERTY(onVideoSaved, RCTBubblingEventBlock);
+RCT_REMAP_METHOD(save,
+                 options:(NSDictionary *)options
+                 reactTag:(nonnull NSNumber *)reactTag
+                 resolver:(RCTPromiseResolveBlock)resolve
+                 rejecter:(RCTPromiseRejectBlock)reject)
+{
+  [self.bridge.uiManager prependUIBlock:^(__unused RCTUIManager *uiManager, NSDictionary *viewRegistry) {
+    RCTVideo *view = viewRegistry[reactTag];
+    if (![view isKindOfClass:[RCTVideo class]]) {
+      RCTLogError(@"Invalid view returned from registry, expecting RCTVideo, got: %@", view);
+    } else {
+      [view save:options resolve:resolve reject:reject];
+    }
+  }];
+}

 - (NSDictionary *)constantsToExport {
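A minimal JavaScript sketch of how the promise-based method exported above could be invoked from the app side; the helper name exportCurrentVideo, the ref argument, and the empty options object are illustrative assumptions rather than part of this patch. Per the native code, the call resolves with { uri } pointing at the exported .mp4 in the caches directory and rejects with the ERROR_* codes defined in RCTVideo.m.

// Hypothetical usage sketch (not part of the diff).
import { NativeModules, findNodeHandle } from 'react-native';

async function exportCurrentVideo(videoComponentRef) {
  // RCT_REMAP_METHOD(save, ...) exposes VideoManager.save(options, reactTag) as a Promise:
  // it resolves with { uri: 'file://...' } on success and rejects with
  // ERROR_COULD_NOT_EXPORT_VIDEO, ERROR_EXPORT_SESSION_CANCELLED, etc. on failure.
  const { uri } = await NativeModules.VideoManager.save({}, findNodeHandle(videoComponentRef));
  return uri;
}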