react-native-video/RCTVideo.m

419 lines
11 KiB
Mathematica
Raw Normal View History

2015-04-08 13:02:42 -06:00
#import "RCTConvert.h"
2015-03-30 23:07:55 -06:00
#import "RCTVideo.h"
#import "RCTBridgeModule.h"
#import "RCTEventDispatcher.h"
#import "UIView+React.h"
#import <AVFoundation/AVFoundation.h>
2015-03-30 23:07:55 -06:00
2015-04-08 02:58:00 -06:00
NSString *const RNVideoEventLoaded = @"videoLoaded";
NSString *const RNVideoEventLoading = @"videoLoading";
NSString *const RNVideoEventProgress = @"videoProgress";
2015-04-08 16:15:57 -06:00
NSString *const RNVideoEventSeek = @"videoSeek";
2015-04-08 02:58:00 -06:00
NSString *const RNVideoEventLoadingError = @"videoLoadError";
NSString *const RNVideoEventEnd = @"videoEnd";
static NSString *const statusKeyPath = @"status";
2015-03-30 23:07:55 -06:00
@implementation RCTVideo
{
2015-04-07 16:24:49 -06:00
AVPlayer *_player;
AVPlayerItem *_playerItem;
BOOL _playerItemObserverSet;
2015-04-07 16:24:49 -06:00
AVPlayerLayer *_playerLayer;
NSURL *_videoURL;
2015-04-07 16:24:49 -06:00
/* Required to publish events */
RCTEventDispatcher *_eventDispatcher;
2015-04-08 16:15:57 -06:00
bool _pendingSeek;
float _pendingSeekTime;
float _lastSeekTime;
2015-04-07 16:24:49 -06:00
/* For sending videoProgress events */
id _progressUpdateTimer;
int _progressUpdateInterval;
NSDate *_prevProgressUpdateTime;
/* Keep track of any modifiers, need to be applied after each play */
float _volume;
float _rate;
BOOL _muted;
BOOL _paused;
BOOL _repeat;
NSString * _resizeMode;
2015-03-30 23:07:55 -06:00
}
- (instancetype)initWithEventDispatcher:(RCTEventDispatcher *)eventDispatcher
{
2015-03-30 23:07:55 -06:00
if ((self = [super init])) {
_eventDispatcher = eventDispatcher;
2015-06-16 22:14:14 -06:00
_rate = 1.0;
_volume = 1.0;
2015-06-16 22:14:14 -06:00
_resizeMode = @"AVLayerVideoGravityResizeAspectFill";
2015-04-08 16:15:57 -06:00
_pendingSeek = false;
_pendingSeekTime = 0.0f;
_lastSeekTime = 0.0f;
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(applicationWillResignActive:)
name:UIApplicationWillResignActiveNotification
object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(applicationWillEnterForeground:)
name:UIApplicationWillEnterForegroundNotification
object:nil];
2015-03-30 23:07:55 -06:00
}
2015-04-18 12:21:12 -06:00
2015-03-30 23:07:55 -06:00
return self;
}
- (void)dealloc
{
[[NSNotificationCenter defaultCenter] removeObserver:self];
}
#pragma mark - App lifecycle handlers
- (void)applicationWillResignActive:(NSNotification *)notification
{
if (!_paused) {
[self stopProgressTimer];
[_player setRate:0.0];
}
}
- (void)applicationWillEnterForeground:(NSNotification *)notification
{
[self startProgressTimer];
[self applyModifiers];
}
#pragma mark - Progress
- (void)sendProgressUpdate
{
AVPlayerItem *video = [_player currentItem];
if (video == nil || video.status != AVPlayerItemStatusReadyToPlay) {
return;
}
if (_prevProgressUpdateTime == nil ||
(([_prevProgressUpdateTime timeIntervalSinceNow] * -1000.0) >= _progressUpdateInterval)) {
2015-04-08 02:58:00 -06:00
[_eventDispatcher sendInputEventWithName:RNVideoEventProgress body:@{
@"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(video.currentTime)],
@"playableDuration": [self calculatePlayableDuration],
@"target": self.reactTag
}];
_prevProgressUpdateTime = [NSDate date];
}
}
/*!
* Calculates and returns the playable duration of the current player item using its loaded time ranges.
*
* \returns The playable duration of the current player item in seconds.
*/
- (NSNumber *)calculatePlayableDuration {
AVPlayerItem *video = _player.currentItem;
if (video.status == AVPlayerItemStatusReadyToPlay) {
__block CMTimeRange effectiveTimeRange;
[video.loadedTimeRanges enumerateObjectsUsingBlock:^(id obj, NSUInteger idx, BOOL *stop) {
CMTimeRange timeRange = [obj CMTimeRangeValue];
if (CMTimeRangeContainsTime(timeRange, video.currentTime)) {
effectiveTimeRange = timeRange;
*stop = YES;
}
}];
Float64 playableDuration = CMTimeGetSeconds(CMTimeRangeGetEnd(effectiveTimeRange));
if (playableDuration > 0) {
return [NSNumber numberWithFloat:playableDuration];
}
}
return [NSNumber numberWithInteger:0];
}
- (void)stopProgressTimer
{
[_progressUpdateTimer invalidate];
}
- (void)startProgressTimer
{
_progressUpdateInterval = 250;
_prevProgressUpdateTime = nil;
[self stopProgressTimer];
_progressUpdateTimer = [CADisplayLink displayLinkWithTarget:self selector:@selector(sendProgressUpdate)];
[_progressUpdateTimer addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSDefaultRunLoopMode];
}
- (void)addPlayerItemObserver
{
[_playerItem addObserver:self forKeyPath:statusKeyPath options:0 context:nil];
_playerItemObserverSet = YES;
}
/* Fixes https://github.com/brentvatne/react-native-video/issues/43
* Crashes caused when trying to remove the observer when there is no
* observer set */
- (void)removePlayerItemObserver
{
if (_playerItemObserverSet) {
[_playerItem removeObserver:self forKeyPath:statusKeyPath];
_playerItemObserverSet = NO;
}
}
#pragma mark - Player and source
- (void)setSrc:(NSDictionary *)source
{
[self removePlayerItemObserver];
_playerItem = [self playerItemForSource:source];
[self addPlayerItemObserver];
[_player pause];
[_playerLayer removeFromSuperlayer];
_player = [AVPlayer playerWithPlayerItem:_playerItem];
_player.actionAtItemEnd = AVPlayerActionAtItemEndNone;
_playerLayer = [AVPlayerLayer playerLayerWithPlayer:_player];
_playerLayer.frame = self.bounds;
_playerLayer.needsDisplayOnBoundsChange = YES;
[self applyModifiers];
[self.layer addSublayer:_playerLayer];
self.layer.needsDisplayOnBoundsChange = YES;
[_eventDispatcher sendInputEventWithName:RNVideoEventLoading body:@{
@"src": @{
@"uri": [source objectForKey:@"uri"],
@"type": [source objectForKey:@"type"],
@"isNetwork":[NSNumber numberWithBool:(bool)[source objectForKey:@"isNetwork"]]
},
@"target": self.reactTag
}];
}
- (AVPlayerItem*)playerItemForSource:(NSDictionary *)source
{
bool isNetwork = [RCTConvert BOOL:[source objectForKey:@"isNetwork"]];
bool isAsset = [RCTConvert BOOL:[source objectForKey:@"isAsset"]];
NSString *uri = [source objectForKey:@"uri"];
NSString *type = [source objectForKey:@"type"];
NSURL *url = (isNetwork || isAsset) ?
[NSURL URLWithString:uri] :
[[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]];
if (isAsset) {
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
return [AVPlayerItem playerItemWithAsset:asset];
}
return [AVPlayerItem playerItemWithURL:url];
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
if (object == _playerItem) {
if (_playerItem.status == AVPlayerItemStatusReadyToPlay) {
2015-05-11 00:54:58 -06:00
float duration = CMTimeGetSeconds(_playerItem.asset.duration);
if (isnan(duration)) {
duration = 0.0;
}
2015-04-08 02:58:00 -06:00
[_eventDispatcher sendInputEventWithName:RNVideoEventLoaded body:@{
2015-05-11 00:54:58 -06:00
@"duration": [NSNumber numberWithFloat:duration],
@"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(_playerItem.currentTime)],
@"canPlayReverse": [NSNumber numberWithBool:_playerItem.canPlayReverse],
@"canPlayFastForward": [NSNumber numberWithBool:_playerItem.canPlayFastForward],
@"canPlaySlowForward": [NSNumber numberWithBool:_playerItem.canPlaySlowForward],
@"canPlaySlowReverse": [NSNumber numberWithBool:_playerItem.canPlaySlowReverse],
@"canStepBackward": [NSNumber numberWithBool:_playerItem.canStepBackward],
@"canStepForward": [NSNumber numberWithBool:_playerItem.canStepForward],
@"target": self.reactTag
}];
[self startProgressTimer];
[self attachListeners];
[self applyModifiers];
} else if(_playerItem.status == AVPlayerItemStatusFailed) {
[_eventDispatcher sendInputEventWithName:RNVideoEventLoadingError body:@{
@"error": @{
2015-06-27 17:03:24 -06:00
@"code": [NSNumber numberWithInteger: _playerItem.error.code],
@"domain": _playerItem.error.domain
},
@"target": self.reactTag
}];
}
} else {
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
}
2015-03-30 23:07:55 -06:00
}
- (void)attachListeners
{
// listen for end of file
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(playerItemDidReachEnd:)
name:AVPlayerItemDidPlayToEndTimeNotification
object:[_player currentItem]];
}
- (void)playerItemDidReachEnd:(NSNotification *)notification
{
[_eventDispatcher sendInputEventWithName:RNVideoEventEnd body:@{
@"target": self.reactTag
}];
if (_repeat) {
AVPlayerItem *item = [notification object];
[item seekToTime:kCMTimeZero];
[self applyModifiers];
}
}
#pragma mark - Prop setters
- (void)setResizeMode:(NSString*)mode
{
_resizeMode = mode;
_playerLayer.videoGravity = mode;
}
- (void)setPaused:(BOOL)paused
{
if (paused) {
[self stopProgressTimer];
[_player setRate:0.0];
} else {
[self startProgressTimer];
[_player setRate:_rate];
}
_paused = paused;
}
- (void)setSeek:(float)seekTime
{
2015-04-08 16:15:57 -06:00
int timeScale = 10000;
AVPlayerItem *item = _player.currentItem;
if (item && item.status == AVPlayerItemStatusReadyToPlay) {
// TODO check loadedTimeRanges
CMTime cmSeekTime = CMTimeMakeWithSeconds(seekTime, timeScale);
CMTime current = item.currentTime;
// TODO figure out a good tolerance level
CMTime tolerance = CMTimeMake(1000, timeScale);
2015-04-08 16:15:57 -06:00
if (CMTimeCompare(current, cmSeekTime) != 0) {
[_player seekToTime:cmSeekTime toleranceBefore:tolerance toleranceAfter:tolerance completionHandler:^(BOOL finished) {
[_eventDispatcher sendInputEventWithName:RNVideoEventSeek body:@{
@"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(item.currentTime)],
@"seekTime": [NSNumber numberWithFloat:seekTime],
@"target": self.reactTag
}];
}];
_pendingSeek = false;
2015-04-08 16:15:57 -06:00
}
} else {
// TODO see if this makes sense and if so,
// actually implement it
_pendingSeek = true;
_pendingSeekTime = seekTime;
}
}
- (void)setRate:(float)rate
{
_rate = rate;
[self applyModifiers];
}
- (void)setMuted:(BOOL)muted
{
_muted = muted;
[self applyModifiers];
}
- (void)setVolume:(float)volume
{
_volume = volume;
[self applyModifiers];
}
- (void)applyModifiers
{
if (_muted) {
[_player setVolume:0];
[_player setMuted:YES];
} else {
[_player setVolume:_volume];
[_player setMuted:NO];
}
[self setResizeMode:_resizeMode];
[self setRepeat:_repeat];
[self setPaused:_paused];
}
- (void)setRepeat:(BOOL)repeat {
_repeat = repeat;
2015-03-31 00:29:15 -06:00
}
#pragma mark - React View Management
- (void)insertReactSubview:(UIView *)view atIndex:(NSInteger)atIndex
{
2015-03-30 23:07:55 -06:00
RCTLogError(@"video cannot have any subviews");
return;
}
- (void)removeReactSubview:(UIView *)subview
{
2015-03-30 23:07:55 -06:00
RCTLogError(@"video cannot have any subviews");
return;
}
- (void)layoutSubviews
{
2015-03-30 23:07:55 -06:00
[super layoutSubviews];
[CATransaction begin];
[CATransaction setAnimationDuration:0];
_playerLayer.frame = self.bounds;
[CATransaction commit];
}
#pragma mark - Lifecycle
- (void)removeFromSuperview
{
[_progressUpdateTimer invalidate];
2015-04-07 11:41:34 -06:00
_prevProgressUpdateTime = nil;
2015-04-10 01:15:19 -06:00
[_player pause];
_player = nil;
[_playerLayer removeFromSuperlayer];
_playerLayer = nil;
[self removePlayerItemObserver];
2015-04-10 01:15:19 -06:00
_eventDispatcher = nil;
[[NSNotificationCenter defaultCenter] removeObserver:self];
[super removeFromSuperview];
2015-03-30 23:07:55 -06:00
}
@end