updated basic example, added filterEnabled flag, check for HLS playlist before applying filter

parent bba7e9ed07
commit 67a963328a

README.md (10 changed lines)
@@ -260,6 +260,7 @@ var styles = StyleSheet.create({
 * [bufferConfig](#bufferconfig)
 * [controls](#controls)
 * [filter](#filter)
+* [filterEnabled](#filterEnabled)
 * [fullscreen](#fullscreen)
 * [fullscreenAutorotate](#fullscreenautorotate)
 * [fullscreenOrientation](#fullscreenorientation)
@@ -379,6 +380,15 @@ For more details on these filters refer to the [iOS docs](https://developer.appl
 Notes:
 1. Using a filter can impact CPU usage. A workaround is to save the video with the filter and then load the saved video.
 2. Video filter is currently not supported on HLS playlists.
+3. `filterEnabled` must be set to `true`
 
+Platforms: iOS
+
+#### filterEnabled
+Enable video filter.
+
+* **false (default)** - Don't enable filter
+* **true** - Enable filter
+
 Platforms: iOS
 
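For quick reference, a minimal usage sketch of the two props documented above (the source URI is a placeholder; filters only take effect on iOS and are skipped for HLS sources):

```javascript
import React from 'react';
import Video, { FilterType } from 'react-native-video';

// filter selects the effect; filterEnabled must be true for it to be applied.
const SepiaPlayer = () => (
  <Video
    source={{ uri: 'https://example.com/clip.mp4' }} // placeholder, non-HLS source
    filter={FilterType.SEPIA}
    filterEnabled={true}
    style={{ width: 320, height: 180 }}
  />
);

export default SepiaPlayer;
```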
Video.js (1 changed line)

@@ -300,6 +300,7 @@ Video.propTypes = {
     FilterType.TRANSFER,
     FilterType.SEPIA
   ]),
+  filterEnabled: PropTypes.bool,
   /* Native only */
   src: PropTypes.object,
   seek: PropTypes.oneOfType([
Basic example (VideoPlayer):

@@ -13,7 +13,26 @@ import {
   View,
 } from 'react-native';
 
-import Video from 'react-native-video';
+import Video,{FilterType} from 'react-native-video';
+
+const filterTypes = [
+  FilterType.NONE,
+  FilterType.INVERT,
+  FilterType.MONOCHROME,
+  FilterType.POSTERIZE,
+  FilterType.FALSE,
+  FilterType.MAXIMUMCOMPONENT,
+  FilterType.MINIMUMCOMPONENT,
+  FilterType.CHROME,
+  FilterType.FADE,
+  FilterType.INSTANT,
+  FilterType.MONO,
+  FilterType.NOIR,
+  FilterType.PROCESS,
+  FilterType.TONAL,
+  FilterType.TRANSFER,
+  FilterType.SEPIA
+];
 
 class VideoPlayer extends Component {
   constructor(props) {

@@ -34,6 +53,8 @@ class VideoPlayer extends Component {
     skin: 'custom',
     ignoreSilentSwitch: null,
     isBuffering: false,
+    filter: FilterType.NONE,
+    filterEnabled: true
   };
 
   onLoad(data) {
@@ -57,6 +78,20 @@ class VideoPlayer extends Component {
     }
   }
 
+  setFilter(step) {
+    let index = filterTypes.indexOf(this.state.filter) + step;
+
+    if (index === filterTypes.length) {
+      index = 0;
+    } else if (index === -1) {
+      index = filterTypes.length - 1;
+    }
+
+    this.setState({
+      filter: filterTypes[index]
+    })
+  }
+
   renderSkinControl(skin) {
     const isSelected = this.state.skin == skin;
     const selectControls = skin == 'native' || skin == 'embed';
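The setFilter(step) helper added above cycles through filterTypes and wraps around at both ends; a small standalone sketch of the same logic, using an abbreviated list (cycleFilter is just an illustrative name; the full array is defined earlier in the example):

```javascript
import { FilterType } from 'react-native-video';

// Same wrap-around logic as setFilter(step), written as a pure function.
const filterTypes = [FilterType.NONE, FilterType.INVERT, FilterType.SEPIA]; // abbreviated

function cycleFilter(current, step) {
  let index = filterTypes.indexOf(current) + step;
  if (index === filterTypes.length) index = 0;              // stepped past the end -> first entry
  else if (index === -1) index = filterTypes.length - 1;    // stepped before the start -> last entry
  return filterTypes[index];
}

cycleFilter(FilterType.SEPIA, 1);   // -> FilterType.NONE  ("Next Filter" past the end)
cycleFilter(FilterType.NONE, -1);   // -> FilterType.SEPIA ("Previous Filter" before the start)
```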
@@ -141,6 +176,8 @@ class VideoPlayer extends Component {
             onProgress={this.onProgress}
             onEnd={() => { AlertIOS.alert('Done!') }}
             repeat={true}
+            filter={this.state.filter}
+            filterEnabled={this.state.filterEnabled}
           />
         </TouchableOpacity>
 

@@ -151,6 +188,21 @@ class VideoPlayer extends Component {
           {this.renderSkinControl('native')}
           {this.renderSkinControl('embed')}
         </View>
+        {
+          (this.state.filterEnabled) ?
+            <View style={styles.skinControl}>
+              <TouchableOpacity onPress={() => {
+                this.setFilter(-1)
+              }}>
+                <Text style={styles.controlOption}>Previous Filter</Text>
+              </TouchableOpacity>
+              <TouchableOpacity onPress={() => {
+                this.setFilter(1)
+              }}>
+                <Text style={styles.controlOption}>Next Filter</Text>
+              </TouchableOpacity>
+            </View> : null
+        }
         </View>
         <View style={styles.generalControls}>
           <View style={styles.rateControl}>

@@ -212,6 +264,8 @@ class VideoPlayer extends Component {
             onEnd={() => { AlertIOS.alert('Done!') }}
             repeat={true}
             controls={this.state.controls}
+            filter={this.state.filter}
+            filterEnabled={this.state.filterEnabled}
           />
         </View>
         <View style={styles.controls}>

@@ -221,6 +275,21 @@ class VideoPlayer extends Component {
           {this.renderSkinControl('native')}
           {this.renderSkinControl('embed')}
         </View>
+        {
+          (this.state.filterEnabled) ?
+            <View style={styles.skinControl}>
+              <TouchableOpacity onPress={() => {
+                this.setFilter(-1)
+              }}>
+                <Text style={styles.controlOption}>Previous Filter</Text>
+              </TouchableOpacity>
+              <TouchableOpacity onPress={() => {
+                this.setFilter(1)
+              }}>
+                <Text style={styles.controlOption}>Next Filter</Text>
+              </TouchableOpacity>
+            </View> : null
+        }
         </View>
         <View style={styles.generalControls}>
           <View style={styles.rateControl}>
RCTVideo.m:

@@ -26,28 +26,29 @@ static int const RCTVideoUnset = -1;
 {
   AVPlayer *_player;
   AVPlayerItem *_playerItem;
+  NSDictionary *_source;
   BOOL _playerItemObserversSet;
   BOOL _playerBufferEmpty;
   AVPlayerLayer *_playerLayer;
   BOOL _playerLayerObserverSet;
   RCTVideoPlayerViewController *_playerViewController;
   NSURL *_videoURL;
 
   /* Required to publish events */
   RCTEventDispatcher *_eventDispatcher;
   BOOL _playbackRateObserverRegistered;
   BOOL _isExternalPlaybackActiveObserverRegistered;
   BOOL _videoLoadStarted;
 
   bool _pendingSeek;
   float _pendingSeekTime;
   float _lastSeekTime;
 
   /* For sending videoProgress events */
   Float64 _progressUpdateInterval;
   BOOL _controls;
   id _timeObserver;
 
   /* Keep track of any modifiers, need to be applied after each play */
   float _volume;
   float _rate;
@@ -68,6 +69,7 @@ static int const RCTVideoUnset = -1;
   NSString * _fullscreenOrientation;
   BOOL _fullscreenPlayerPresented;
   NSString *_filterName;
+  BOOL _filterEnabled;
   UIViewController * _presentingViewController;
 #if __has_include(<react-native-video/RCTVideoCache.h>)
   RCTVideoCache * _videoCache;
@@ -78,7 +80,7 @@ static int const RCTVideoUnset = -1;
 {
   if ((self = [super init])) {
     _eventDispatcher = eventDispatcher;
 
     _playbackRateObserverRegistered = NO;
     _isExternalPlaybackActiveObserverRegistered = NO;
     _playbackStalled = NO;

@@ -104,23 +106,23 @@ static int const RCTVideoUnset = -1;
                                              selector:@selector(applicationWillResignActive:)
                                                  name:UIApplicationWillResignActiveNotification
                                                object:nil];
 
     [[NSNotificationCenter defaultCenter] addObserver:self
                                              selector:@selector(applicationDidEnterBackground:)
                                                  name:UIApplicationDidEnterBackgroundNotification
                                                object:nil];
 
     [[NSNotificationCenter defaultCenter] addObserver:self
                                              selector:@selector(applicationWillEnterForeground:)
                                                  name:UIApplicationWillEnterForegroundNotification
                                                object:nil];
 
     [[NSNotificationCenter defaultCenter] addObserver:self
                                              selector:@selector(audioRouteChanged:)
                                                  name:AVAudioSessionRouteChangeNotification
                                                object:nil];
   }
 
   return self;
 }
 

@@ -130,7 +132,7 @@ static int const RCTVideoUnset = -1;
   viewController.showsPlaybackControls = YES;
   viewController.rctDelegate = self;
   viewController.preferredOrientation = _fullscreenOrientation;
 
   viewController.view.frame = self.bounds;
   viewController.player = player;
   viewController.view.frame = self.bounds;

@@ -148,7 +150,7 @@ static int const RCTVideoUnset = -1;
   {
     return([playerItem duration]);
   }
 
   return(kCMTimeInvalid);
 }
 

@@ -159,7 +161,7 @@ static int const RCTVideoUnset = -1;
   {
     return [playerItem seekableTimeRanges].firstObject.CMTimeRangeValue;
   }
 
   return (kCMTimeRangeZero);
 }
 

@@ -200,7 +202,7 @@ static int const RCTVideoUnset = -1;
 - (void)applicationWillResignActive:(NSNotification *)notification
 {
   if (_playInBackground || _playWhenInactive || _paused) return;
 
   [_player pause];
   [_player setRate:0.0];
 }

@@ -240,18 +242,18 @@ static int const RCTVideoUnset = -1;
   if (video == nil || video.status != AVPlayerItemStatusReadyToPlay) {
     return;
   }
 
   CMTime playerDuration = [self playerItemDuration];
   if (CMTIME_IS_INVALID(playerDuration)) {
     return;
   }
 
   CMTime currentTime = _player.currentTime;
   const Float64 duration = CMTimeGetSeconds(playerDuration);
   const Float64 currentTimeSecs = CMTimeGetSeconds(currentTime);
 
   [[NSNotificationCenter defaultCenter] postNotificationName:@"RCTVideo_progress" object:nil userInfo:@{@"progress": [NSNumber numberWithDouble: currentTimeSecs / duration]}];
 
   if( currentTimeSecs >= 0 && self.onVideoProgress) {
     self.onVideoProgress(@{
       @"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(currentTime)],
@@ -326,6 +328,7 @@ static int const RCTVideoUnset = -1;
 
 - (void)setSrc:(NSDictionary *)source
 {
+  _source = source;
   [self removePlayerLayer];
   [self removePlayerTimeObserver];
   [self removePlayerItemObservers];
@@ -341,7 +344,7 @@ static int const RCTVideoUnset = -1;
   [_player pause];
   [_playerViewController.view removeFromSuperview];
   _playerViewController = nil;
 
   if (_playbackRateObserverRegistered) {
     [_player removeObserver:self forKeyPath:playbackRate context:nil];
     _playbackRateObserverRegistered = NO;

@@ -350,16 +353,16 @@ static int const RCTVideoUnset = -1;
     [_player removeObserver:self forKeyPath:externalPlaybackActive context:nil];
     _isExternalPlaybackActiveObserverRegistered = NO;
   }
 
   _player = [AVPlayer playerWithPlayerItem:_playerItem];
   _player.actionAtItemEnd = AVPlayerActionAtItemEndNone;
 
   [_player addObserver:self forKeyPath:playbackRate options:0 context:nil];
   _playbackRateObserverRegistered = YES;
 
   [_player addObserver:self forKeyPath:externalPlaybackActive options:0 context:nil];
   _isExternalPlaybackActiveObserverRegistered = YES;
 
   [self addPlayerTimeObserver];
 
   //Perform on next run loop, otherwise onVideoLoadStart is nil

@@ -382,7 +385,7 @@ static int const RCTVideoUnset = -1;
   if ([filepath containsString:@"file://"]) {
     return [NSURL URLWithString:filepath];
   }
 
   // if no file found, check if the file exists in the Document directory
   NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
   NSString* relativeFilePath = [filepath lastPathComponent];

@@ -391,7 +394,7 @@ static int const RCTVideoUnset = -1;
   if (fileComponents.count > 1) {
     relativeFilePath = [fileComponents objectAtIndex:1];
   }
 
   NSString *path = [paths.firstObject stringByAppendingPathComponent:relativeFilePath];
   if ([[NSFileManager defaultManager] fileExistsAtPath:path]) {
     return [NSURL fileURLWithPath:path];

@@ -408,21 +411,21 @@ static int const RCTVideoUnset = -1;
 
   // sideload text tracks
   AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
 
   AVAssetTrack *videoAsset = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
   AVMutableCompositionTrack *videoCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
   [videoCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration)
                           ofTrack:videoAsset
                            atTime:kCMTimeZero
                             error:nil];
 
   AVAssetTrack *audioAsset = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
   AVMutableCompositionTrack *audioCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
   [audioCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration)
                           ofTrack:audioAsset
                            atTime:kCMTimeZero
                             error:nil];
 
   NSMutableArray* validTextTracks = [NSMutableArray array];
   for (int i = 0; i < _textTracks.count; ++i) {
     AVURLAsset *textURLAsset;

@@ -461,7 +464,7 @@ static int const RCTVideoUnset = -1;
       ? [NSURL URLWithString:uri]
       : [[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]];
     NSMutableDictionary *assetOptions = [[NSMutableDictionary alloc] init];
 
     if (isNetwork) {
       /* Per #1091, this is not a public API.
        * We need to either get approval from Apple to use this or use a different approach.

@@ -527,7 +530,7 @@ static int const RCTVideoUnset = -1;
 
     DVURLAsset *asset = [[DVURLAsset alloc] initWithURL:url options:options networkTimeout:10000];
     asset.loaderDelegate = self;
 
     /* More granular code to have control over the DVURLAsset
     DVAssetLoaderDelegate *resourceLoaderDelegate = [[DVAssetLoaderDelegate alloc] initWithURL:url];
     resourceLoaderDelegate.delegate = self;

@@ -564,40 +567,40 @@ static int const RCTVideoUnset = -1;
     for (AVMetadataItem *item in items) {
       NSString *value = (NSString *)item.value;
       NSString *identifier = item.identifier;
 
       if (![value isEqual: [NSNull null]]) {
         NSDictionary *dictionary = [[NSDictionary alloc] initWithObjects:@[value, identifier] forKeys:@[@"value", @"identifier"]];
 
         [array addObject:dictionary];
       }
     }
 
     self.onTimedMetadata(@{
       @"target": self.reactTag,
       @"metadata": array
     });
   }
 }
 
 if ([keyPath isEqualToString:statusKeyPath]) {
   // Handle player item status change.
   if (_playerItem.status == AVPlayerItemStatusReadyToPlay) {
     float duration = CMTimeGetSeconds(_playerItem.asset.duration);
 
     if (isnan(duration)) {
       duration = 0.0;
     }
 
     NSObject *width = @"undefined";
     NSObject *height = @"undefined";
     NSString *orientation = @"undefined";
 
     if ([_playerItem.asset tracksWithMediaType:AVMediaTypeVideo].count > 0) {
       AVAssetTrack *videoTrack = [[_playerItem.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
       width = [NSNumber numberWithFloat:videoTrack.naturalSize.width];
       height = [NSNumber numberWithFloat:videoTrack.naturalSize.height];
       CGAffineTransform preferredTransform = [videoTrack preferredTransform];
 
       if ((videoTrack.naturalSize.width == preferredTransform.tx
         && videoTrack.naturalSize.height == preferredTransform.ty)
         || (preferredTransform.tx == 0 && preferredTransform.ty == 0))
@@ -607,7 +610,7 @@ static int const RCTVideoUnset = -1;
         orientation = @"portrait";
       }
     }
 
     if (self.onVideoLoad && _videoLoadStarted) {
       self.onVideoLoad(@{@"duration": [NSNumber numberWithFloat:duration],
                          @"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(_playerItem.currentTime)],

@@ -627,7 +630,7 @@ static int const RCTVideoUnset = -1;
                          @"target": self.reactTag});
     }
     _videoLoadStarted = NO;
 
     [self attachListeners];
     [self applyModifiers];
   } else if (_playerItem.status == AVPlayerItemStatusFailed && self.onVideoError) {

@@ -687,7 +690,7 @@ static int const RCTVideoUnset = -1;
                                            selector:@selector(playerItemDidReachEnd:)
                                                name:AVPlayerItemDidPlayToEndTimeNotification
                                              object:[_player currentItem]];
 
   [[NSNotificationCenter defaultCenter] removeObserver:self
                                                   name:AVPlayerItemPlaybackStalledNotification
                                                 object:nil];

@@ -710,7 +713,7 @@ static int const RCTVideoUnset = -1;
   if(self.onVideoEnd) {
     self.onVideoEnd(@{@"target": self.reactTag});
   }
 
   if (_repeat) {
     AVPlayerItem *item = [notification object];
     [item seekToTime:kCMTimeZero];

@@ -771,7 +774,7 @@ static int const RCTVideoUnset = -1;
     [_player play];
     [_player setRate:_rate];
   }
 
   _paused = paused;
 }
 

@@ -793,19 +796,19 @@ static int const RCTVideoUnset = -1;
 {
   NSNumber *seekTime = info[@"time"];
   NSNumber *seekTolerance = info[@"tolerance"];
 
   int timeScale = 1000;
 
   AVPlayerItem *item = _player.currentItem;
   if (item && item.status == AVPlayerItemStatusReadyToPlay) {
     // TODO check loadedTimeRanges
 
     CMTime cmSeekTime = CMTimeMakeWithSeconds([seekTime floatValue], timeScale);
     CMTime current = item.currentTime;
     // TODO figure out a good tolerance level
     CMTime tolerance = CMTimeMake([seekTolerance floatValue], timeScale);
     BOOL wasPaused = _paused;
 
     if (CMTimeCompare(current, cmSeekTime) != 0) {
       if (!wasPaused) [_player pause];
       [_player seekToTime:cmSeekTime toleranceBefore:tolerance toleranceAfter:tolerance completionHandler:^(BOOL finished) {

@@ -821,10 +824,10 @@ static int const RCTVideoUnset = -1;
                             @"target": self.reactTag});
         }
       }];
 
       _pendingSeek = false;
     }
 
   } else {
     // TODO: See if this makes sense and if so, actually implement it
     _pendingSeek = true;

@@ -859,7 +862,7 @@ static int const RCTVideoUnset = -1;
     [_player setVolume:_volume];
     [_player setMuted:NO];
   }
 
   [self setSelectedAudioTrack:_selectedAudioTrack];
   [self setSelectedTextTrack:_selectedTextTrack];
   [self setResizeMode:_resizeMode];

@@ -880,7 +883,7 @@ static int const RCTVideoUnset = -1;
   AVMediaSelectionGroup *group = [_player.currentItem.asset
                                   mediaSelectionGroupForMediaCharacteristic:characteristic];
   AVMediaSelectionOption *mediaOption;
 
   if ([type isEqualToString:@"disabled"]) {
     // Do nothing. We want to ensure option is nil
   } else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) {

@@ -913,7 +916,7 @@ static int const RCTVideoUnset = -1;
     [_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group];
     return;
   }
 
   // If a match isn't found, option will be nil and text tracks will be disabled
   [_player.currentItem selectMediaOption:mediaOption inMediaSelectionGroup:group];
 }
@@ -937,7 +940,7 @@ static int const RCTVideoUnset = -1;
 - (void) setSideloadedText {
   NSString *type = _selectedTextTrack[@"type"];
   NSArray *textTracks = [self getTextTrackInfo];
 
   // The first few tracks will be audio & video track
   int firstTextIndex = 0;
   for (firstTextIndex = 0; firstTextIndex < _player.currentItem.tracks.count; ++firstTextIndex) {

@@ -945,9 +948,9 @@ static int const RCTVideoUnset = -1;
       break;
     }
   }
 
   int selectedTrackIndex = RCTVideoUnset;
 
   if ([type isEqualToString:@"disabled"]) {
     // Do nothing. We want to ensure option is nil
   } else if ([type isEqualToString:@"language"]) {

@@ -976,7 +979,7 @@ static int const RCTVideoUnset = -1;
       }
     }
   }
 
   // in the situation that a selected text track is not available (eg. specifies a textTrack not available)
   if (![type isEqualToString:@"disabled"] && selectedTrackIndex == RCTVideoUnset) {
     CFArrayRef captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(kMACaptionAppearanceDomainUser);

@@ -993,7 +996,7 @@ static int const RCTVideoUnset = -1;
       }
     }
   }
 
   for (int i = firstTextIndex; i < _player.currentItem.tracks.count; ++i) {
     BOOL isEnabled = NO;
     if (selectedTrackIndex != RCTVideoUnset) {

@@ -1008,7 +1011,7 @@ static int const RCTVideoUnset = -1;
   AVMediaSelectionGroup *group = [_player.currentItem.asset
                                   mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
   AVMediaSelectionOption *mediaOption;
 
   if ([type isEqualToString:@"disabled"]) {
     // Do nothing. We want to ensure option is nil
   } else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) {

@@ -1041,7 +1044,7 @@ static int const RCTVideoUnset = -1;
     [_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group];
     return;
   }
 
   // If a match isn't found, option will be nil and text tracks will be disabled
   [_player.currentItem selectMediaOption:mediaOption inMediaSelectionGroup:group];
 }

@@ -1049,7 +1052,7 @@ static int const RCTVideoUnset = -1;
 - (void)setTextTracks:(NSArray*) textTracks;
 {
   _textTracks = textTracks;
 
   // in case textTracks was set after selectedTextTrack
   if (_selectedTextTrack) [self setSelectedTextTrack:_selectedTextTrack];
 }

@@ -1081,7 +1084,7 @@ static int const RCTVideoUnset = -1;
 {
   // if sideloaded, textTracks will already be set
   if (_textTracks) return _textTracks;
 
   // if streaming video, we extract the text tracks
   NSMutableArray *textTracks = [[NSMutableArray alloc] init];
   AVMediaSelectionGroup *group = [_player.currentItem.asset

@@ -1119,7 +1122,7 @@ static int const RCTVideoUnset = -1;
   }
   // Set presentation style to fullscreen
   [_playerViewController setModalPresentationStyle:UIModalPresentationFullScreen];
 
   // Find the nearest view controller
   UIViewController *viewController = [self firstAvailableUIViewController];
   if( !viewController )

@@ -1189,13 +1192,13 @@ static int const RCTVideoUnset = -1;
     _playerLayer = [AVPlayerLayer playerLayerWithPlayer:_player];
     _playerLayer.frame = self.bounds;
     _playerLayer.needsDisplayOnBoundsChange = YES;
 
     // to prevent video from being animated when resizeMode is 'cover'
     // resize mode must be set before layer is added
     [self setResizeMode:_resizeMode];
     [_playerLayer addObserver:self forKeyPath:readyForDisplayKeyPath options:NSKeyValueObservingOptionNew context:nil];
     _playerLayerObserverSet = YES;
 
     [self.layer addSublayer:_playerLayer];
     self.layer.needsDisplayOnBoundsChange = YES;
   }

@@ -1223,7 +1226,7 @@ static int const RCTVideoUnset = -1;
 - (void)setProgressUpdateInterval:(float)progressUpdateInterval
 {
   _progressUpdateInterval = progressUpdateInterval;
 
   if (_timeObserver) {
     [self removePlayerTimeObserver];
     [self addPlayerTimeObserver];
@@ -1266,12 +1269,17 @@ static int const RCTVideoUnset = -1;
 
 - (void)setFilter:(NSString *)filterName {
   _filterName = filterName;
 
+  if (!_filterEnabled) {
+    return;
+  } else if ([[_source objectForKey:@"uri"] rangeOfString:@"m3u8"].location != NSNotFound) {
+    return; // filters don't work for HLS... return
+  }
+
   AVAsset *asset = _playerItem.asset;
 
   if (!asset) {
     return;
-  } else if (!_playerItem.videoComposition && (filterName == nil || [filterName isEqualToString:@""])) {
-    return; // Setting up an empty filter has a cost so avoid whenever possible
   }
   // TODO: filters don't work for HLS, check & return
 
|
|||||||
}];
|
}];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
- (void)setFilterEnabled:(BOOL)filterEnabled {
|
||||||
|
_filterEnabled = filterEnabled;
|
||||||
|
}
|
||||||
|
|
||||||
#pragma mark - React View Management
|
#pragma mark - React View Management
|
||||||
|
|
||||||
- (void)insertReactSubview:(UIView *)view atIndex:(NSInteger)atIndex
|
- (void)insertReactSubview:(UIView *)view atIndex:(NSInteger)atIndex
|
||||||
@@ -1300,7 +1312,7 @@ static int const RCTVideoUnset = -1;
   {
     [self setControls:true];
   }
 
   if( _controls )
   {
     view.frame = self.bounds;

@@ -1332,7 +1344,7 @@ static int const RCTVideoUnset = -1;
   if( _controls )
   {
     _playerViewController.view.frame = self.bounds;
 
     // also adjust all subviews of contentOverlayView
     for (UIView* subview in _playerViewController.contentOverlayView.subviews) {
       subview.frame = self.bounds;

@@ -1361,18 +1373,18 @@ static int const RCTVideoUnset = -1;
     _isExternalPlaybackActiveObserverRegistered = NO;
   }
   _player = nil;
 
   [self removePlayerLayer];
 
   [_playerViewController.view removeFromSuperview];
   _playerViewController = nil;
 
   [self removePlayerTimeObserver];
   [self removePlayerItemObservers];
 
   _eventDispatcher = nil;
   [[NSNotificationCenter defaultCenter] removeObserver:self];
 
   [super removeFromSuperview];
 }
 
RCTVideoManager.m:

@@ -39,6 +39,7 @@ RCT_EXPORT_VIEW_PROPERTY(fullscreen, BOOL);
 RCT_EXPORT_VIEW_PROPERTY(fullscreenAutorotate, BOOL);
 RCT_EXPORT_VIEW_PROPERTY(fullscreenOrientation, NSString);
 RCT_EXPORT_VIEW_PROPERTY(filter, NSString);
+RCT_EXPORT_VIEW_PROPERTY(filterEnabled, BOOL);
 RCT_EXPORT_VIEW_PROPERTY(progressUpdateInterval, float);
 /* Should support: onLoadStart, onLoad, and onError to stay consistent with Image */
 RCT_EXPORT_VIEW_PROPERTY(onVideoLoadStart, RCTBubblingEventBlock);
package.json:

@@ -1,6 +1,6 @@
 {
   "name": "react-native-video",
-  "version": "4.0.1",
+  "version": "4.0.2",
   "description": "A <Video /> element for react-native",
   "main": "Video.js",
   "license": "MIT",