diff --git a/README.md b/README.md
index a7c8430e..5dc8affc 100644
--- a/README.md
+++ b/README.md
@@ -260,6 +260,7 @@ var styles = StyleSheet.create({
* [bufferConfig](#bufferconfig)
* [controls](#controls)
* [filter](#filter)
+* [filterEnabled](#filterenabled)
* [fullscreen](#fullscreen)
* [fullscreenAutorotate](#fullscreenautorotate)
* [fullscreenOrientation](#fullscreenorientation)
@@ -379,6 +380,15 @@ For more details on these filters refer to the [iOS docs](https://developer.appl
Notes:
1. Using a filter can impact CPU usage. A workaround is to save the video with the filter and then load the saved video.
2. Video filter is currently not supported on HLS playlists.
+3. `filterEnabled` must be set to `true` for the `filter` prop to take effect
+
+Platforms: iOS
+
+#### filterEnabled
+Enable or disable the video filter set via the `filter` prop.
+
+* **false (default)** - Don't enable filter
+* **true** - Enable filter
Platforms: iOS
diff --git a/Video.js b/Video.js
index 77b02f0a..a0ca9574 100644
--- a/Video.js
+++ b/Video.js
@@ -300,6 +300,7 @@ Video.propTypes = {
FilterType.TRANSFER,
FilterType.SEPIA
]),
+ filterEnabled: PropTypes.bool,
/* Native only */
src: PropTypes.object,
seek: PropTypes.oneOfType([
diff --git a/examples/basic/index.ios.js b/examples/basic/index.ios.js
index 2abd3b9f..1bc0ac47 100644
--- a/examples/basic/index.ios.js
+++ b/examples/basic/index.ios.js
@@ -13,7 +13,26 @@ import {
View,
} from 'react-native';
-import Video from 'react-native-video';
+import Video,{FilterType} from 'react-native-video';
+
+const filterTypes = [
+ FilterType.NONE,
+ FilterType.INVERT,
+ FilterType.MONOCHROME,
+ FilterType.POSTERIZE,
+ FilterType.FALSE,
+ FilterType.MAXIMUMCOMPONENT,
+ FilterType.MINIMUMCOMPONENT,
+ FilterType.CHROME,
+ FilterType.FADE,
+ FilterType.INSTANT,
+ FilterType.MONO,
+ FilterType.NOIR,
+ FilterType.PROCESS,
+ FilterType.TONAL,
+ FilterType.TRANSFER,
+ FilterType.SEPIA
+];
class VideoPlayer extends Component {
constructor(props) {
@@ -34,6 +53,8 @@ class VideoPlayer extends Component {
skin: 'custom',
ignoreSilentSwitch: null,
isBuffering: false,
+ filter: FilterType.NONE,
+ filterEnabled: true
};
onLoad(data) {
@@ -57,6 +78,20 @@ class VideoPlayer extends Component {
}
}
+ setFilter(step) {
+ let index = filterTypes.indexOf(this.state.filter) + step;
+
+ if (index === filterTypes.length) {
+ index = 0;
+ } else if (index === -1) {
+ index = filterTypes.length - 1;
+ }
+
+ this.setState({
+ filter: filterTypes[index]
+ })
+ }
+
renderSkinControl(skin) {
const isSelected = this.state.skin == skin;
const selectControls = skin == 'native' || skin == 'embed';
@@ -141,6 +176,8 @@ class VideoPlayer extends Component {
onProgress={this.onProgress}
onEnd={() => { AlertIOS.alert('Done!') }}
repeat={true}
+ filter={this.state.filter}
+ filterEnabled={this.state.filterEnabled}
/>
@@ -151,6 +188,21 @@ class VideoPlayer extends Component {
{this.renderSkinControl('native')}
{this.renderSkinControl('embed')}
+ {
+ (this.state.filterEnabled) ?
+
+ {
+ this.setFilter(-1)
+ }}>
+ Previous Filter
+
+ {
+ this.setFilter(1)
+ }}>
+ Next Filter
+
+ : null
+ }
@@ -212,6 +264,8 @@ class VideoPlayer extends Component {
onEnd={() => { AlertIOS.alert('Done!') }}
repeat={true}
controls={this.state.controls}
+ filter={this.state.filter}
+ filterEnabled={this.state.filterEnabled}
/>
@@ -221,6 +275,21 @@ class VideoPlayer extends Component {
{this.renderSkinControl('native')}
{this.renderSkinControl('embed')}
+ {
+ (this.state.filterEnabled) ?
+
+ {
+ this.setFilter(-1)
+ }}>
+ Previous Filter
+
+ {
+ this.setFilter(1)
+ }}>
+ Next Filter
+
+ : null
+ }
diff --git a/ios/Video/RCTVideo.m b/ios/Video/RCTVideo.m
index 5c247795..2c8190ca 100644
--- a/ios/Video/RCTVideo.m
+++ b/ios/Video/RCTVideo.m
@@ -26,28 +26,29 @@ static int const RCTVideoUnset = -1;
{
AVPlayer *_player;
AVPlayerItem *_playerItem;
+ NSDictionary *_source;
BOOL _playerItemObserversSet;
BOOL _playerBufferEmpty;
AVPlayerLayer *_playerLayer;
BOOL _playerLayerObserverSet;
RCTVideoPlayerViewController *_playerViewController;
NSURL *_videoURL;
-
+
/* Required to publish events */
RCTEventDispatcher *_eventDispatcher;
BOOL _playbackRateObserverRegistered;
BOOL _isExternalPlaybackActiveObserverRegistered;
BOOL _videoLoadStarted;
-
+
bool _pendingSeek;
float _pendingSeekTime;
float _lastSeekTime;
-
+
/* For sending videoProgress events */
Float64 _progressUpdateInterval;
BOOL _controls;
id _timeObserver;
-
+
/* Keep track of any modifiers, need to be applied after each play */
float _volume;
float _rate;
@@ -68,6 +69,7 @@ static int const RCTVideoUnset = -1;
NSString * _fullscreenOrientation;
BOOL _fullscreenPlayerPresented;
NSString *_filterName;
+ BOOL _filterEnabled;
UIViewController * _presentingViewController;
#if __has_include()
RCTVideoCache * _videoCache;
@@ -78,7 +80,7 @@ static int const RCTVideoUnset = -1;
{
if ((self = [super init])) {
_eventDispatcher = eventDispatcher;
-
+
_playbackRateObserverRegistered = NO;
_isExternalPlaybackActiveObserverRegistered = NO;
_playbackStalled = NO;
@@ -104,23 +106,23 @@ static int const RCTVideoUnset = -1;
selector:@selector(applicationWillResignActive:)
name:UIApplicationWillResignActiveNotification
object:nil];
-
+
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(applicationDidEnterBackground:)
name:UIApplicationDidEnterBackgroundNotification
object:nil];
-
+
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(applicationWillEnterForeground:)
name:UIApplicationWillEnterForegroundNotification
object:nil];
-
+
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(audioRouteChanged:)
name:AVAudioSessionRouteChangeNotification
object:nil];
}
-
+
return self;
}
@@ -130,7 +132,7 @@ static int const RCTVideoUnset = -1;
viewController.showsPlaybackControls = YES;
viewController.rctDelegate = self;
viewController.preferredOrientation = _fullscreenOrientation;
-
+
viewController.view.frame = self.bounds;
viewController.player = player;
viewController.view.frame = self.bounds;
@@ -148,7 +150,7 @@ static int const RCTVideoUnset = -1;
{
return([playerItem duration]);
}
-
+
return(kCMTimeInvalid);
}
@@ -159,7 +161,7 @@ static int const RCTVideoUnset = -1;
{
return [playerItem seekableTimeRanges].firstObject.CMTimeRangeValue;
}
-
+
return (kCMTimeRangeZero);
}
@@ -200,7 +202,7 @@ static int const RCTVideoUnset = -1;
- (void)applicationWillResignActive:(NSNotification *)notification
{
if (_playInBackground || _playWhenInactive || _paused) return;
-
+
[_player pause];
[_player setRate:0.0];
}
@@ -240,18 +242,18 @@ static int const RCTVideoUnset = -1;
if (video == nil || video.status != AVPlayerItemStatusReadyToPlay) {
return;
}
-
+
CMTime playerDuration = [self playerItemDuration];
if (CMTIME_IS_INVALID(playerDuration)) {
return;
}
-
+
CMTime currentTime = _player.currentTime;
const Float64 duration = CMTimeGetSeconds(playerDuration);
const Float64 currentTimeSecs = CMTimeGetSeconds(currentTime);
-
+
[[NSNotificationCenter defaultCenter] postNotificationName:@"RCTVideo_progress" object:nil userInfo:@{@"progress": [NSNumber numberWithDouble: currentTimeSecs / duration]}];
-
+
if( currentTimeSecs >= 0 && self.onVideoProgress) {
self.onVideoProgress(@{
@"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(currentTime)],
@@ -326,6 +328,7 @@ static int const RCTVideoUnset = -1;
- (void)setSrc:(NSDictionary *)source
{
+ _source = source;
[self removePlayerLayer];
[self removePlayerTimeObserver];
[self removePlayerItemObservers];
@@ -341,7 +344,7 @@ static int const RCTVideoUnset = -1;
[_player pause];
[_playerViewController.view removeFromSuperview];
_playerViewController = nil;
-
+
if (_playbackRateObserverRegistered) {
[_player removeObserver:self forKeyPath:playbackRate context:nil];
_playbackRateObserverRegistered = NO;
@@ -350,16 +353,16 @@ static int const RCTVideoUnset = -1;
[_player removeObserver:self forKeyPath:externalPlaybackActive context:nil];
_isExternalPlaybackActiveObserverRegistered = NO;
}
-
+
_player = [AVPlayer playerWithPlayerItem:_playerItem];
_player.actionAtItemEnd = AVPlayerActionAtItemEndNone;
-
+
[_player addObserver:self forKeyPath:playbackRate options:0 context:nil];
_playbackRateObserverRegistered = YES;
-
+
[_player addObserver:self forKeyPath:externalPlaybackActive options:0 context:nil];
_isExternalPlaybackActiveObserverRegistered = YES;
-
+
[self addPlayerTimeObserver];
//Perform on next run loop, otherwise onVideoLoadStart is nil
@@ -382,7 +385,7 @@ static int const RCTVideoUnset = -1;
if ([filepath containsString:@"file://"]) {
return [NSURL URLWithString:filepath];
}
-
+
// if no file found, check if the file exists in the Document directory
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString* relativeFilePath = [filepath lastPathComponent];
@@ -391,7 +394,7 @@ static int const RCTVideoUnset = -1;
if (fileComponents.count > 1) {
relativeFilePath = [fileComponents objectAtIndex:1];
}
-
+
NSString *path = [paths.firstObject stringByAppendingPathComponent:relativeFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:path]) {
return [NSURL fileURLWithPath:path];
@@ -408,21 +411,21 @@ static int const RCTVideoUnset = -1;
// sideload text tracks
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
-
+
AVAssetTrack *videoAsset = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
AVMutableCompositionTrack *videoCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[videoCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration)
ofTrack:videoAsset
atTime:kCMTimeZero
error:nil];
-
+
AVAssetTrack *audioAsset = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
AVMutableCompositionTrack *audioCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[audioCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration)
ofTrack:audioAsset
atTime:kCMTimeZero
error:nil];
-
+
NSMutableArray* validTextTracks = [NSMutableArray array];
for (int i = 0; i < _textTracks.count; ++i) {
AVURLAsset *textURLAsset;
@@ -461,7 +464,7 @@ static int const RCTVideoUnset = -1;
? [NSURL URLWithString:uri]
: [[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]];
NSMutableDictionary *assetOptions = [[NSMutableDictionary alloc] init];
-
+
if (isNetwork) {
/* Per #1091, this is not a public API.
* We need to either get approval from Apple to use this or use a different approach.
@@ -527,7 +530,7 @@ static int const RCTVideoUnset = -1;
DVURLAsset *asset = [[DVURLAsset alloc] initWithURL:url options:options networkTimeout:10000];
asset.loaderDelegate = self;
-
+
/* More granular code to have control over the DVURLAsset
DVAssetLoaderDelegate *resourceLoaderDelegate = [[DVAssetLoaderDelegate alloc] initWithURL:url];
resourceLoaderDelegate.delegate = self;
@@ -564,40 +567,40 @@ static int const RCTVideoUnset = -1;
for (AVMetadataItem *item in items) {
NSString *value = (NSString *)item.value;
NSString *identifier = item.identifier;
-
+
if (![value isEqual: [NSNull null]]) {
NSDictionary *dictionary = [[NSDictionary alloc] initWithObjects:@[value, identifier] forKeys:@[@"value", @"identifier"]];
-
+
[array addObject:dictionary];
}
}
-
+
self.onTimedMetadata(@{
@"target": self.reactTag,
@"metadata": array
});
}
}
-
+
if ([keyPath isEqualToString:statusKeyPath]) {
// Handle player item status change.
if (_playerItem.status == AVPlayerItemStatusReadyToPlay) {
float duration = CMTimeGetSeconds(_playerItem.asset.duration);
-
+
if (isnan(duration)) {
duration = 0.0;
}
-
+
NSObject *width = @"undefined";
NSObject *height = @"undefined";
NSString *orientation = @"undefined";
-
+
if ([_playerItem.asset tracksWithMediaType:AVMediaTypeVideo].count > 0) {
AVAssetTrack *videoTrack = [[_playerItem.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
width = [NSNumber numberWithFloat:videoTrack.naturalSize.width];
height = [NSNumber numberWithFloat:videoTrack.naturalSize.height];
CGAffineTransform preferredTransform = [videoTrack preferredTransform];
-
+
if ((videoTrack.naturalSize.width == preferredTransform.tx
&& videoTrack.naturalSize.height == preferredTransform.ty)
|| (preferredTransform.tx == 0 && preferredTransform.ty == 0))
@@ -607,7 +610,7 @@ static int const RCTVideoUnset = -1;
orientation = @"portrait";
}
}
-
+
if (self.onVideoLoad && _videoLoadStarted) {
self.onVideoLoad(@{@"duration": [NSNumber numberWithFloat:duration],
@"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(_playerItem.currentTime)],
@@ -627,7 +630,7 @@ static int const RCTVideoUnset = -1;
@"target": self.reactTag});
}
_videoLoadStarted = NO;
-
+
[self attachListeners];
[self applyModifiers];
} else if (_playerItem.status == AVPlayerItemStatusFailed && self.onVideoError) {
@@ -687,7 +690,7 @@ static int const RCTVideoUnset = -1;
selector:@selector(playerItemDidReachEnd:)
name:AVPlayerItemDidPlayToEndTimeNotification
object:[_player currentItem]];
-
+
[[NSNotificationCenter defaultCenter] removeObserver:self
name:AVPlayerItemPlaybackStalledNotification
object:nil];
@@ -710,7 +713,7 @@ static int const RCTVideoUnset = -1;
if(self.onVideoEnd) {
self.onVideoEnd(@{@"target": self.reactTag});
}
-
+
if (_repeat) {
AVPlayerItem *item = [notification object];
[item seekToTime:kCMTimeZero];
@@ -771,7 +774,7 @@ static int const RCTVideoUnset = -1;
[_player play];
[_player setRate:_rate];
}
-
+
_paused = paused;
}
@@ -793,19 +796,19 @@ static int const RCTVideoUnset = -1;
{
NSNumber *seekTime = info[@"time"];
NSNumber *seekTolerance = info[@"tolerance"];
-
+
int timeScale = 1000;
-
+
AVPlayerItem *item = _player.currentItem;
if (item && item.status == AVPlayerItemStatusReadyToPlay) {
// TODO check loadedTimeRanges
-
+
CMTime cmSeekTime = CMTimeMakeWithSeconds([seekTime floatValue], timeScale);
CMTime current = item.currentTime;
// TODO figure out a good tolerance level
CMTime tolerance = CMTimeMake([seekTolerance floatValue], timeScale);
BOOL wasPaused = _paused;
-
+
if (CMTimeCompare(current, cmSeekTime) != 0) {
if (!wasPaused) [_player pause];
[_player seekToTime:cmSeekTime toleranceBefore:tolerance toleranceAfter:tolerance completionHandler:^(BOOL finished) {
@@ -821,10 +824,10 @@ static int const RCTVideoUnset = -1;
@"target": self.reactTag});
}
}];
-
+
_pendingSeek = false;
}
-
+
} else {
// TODO: See if this makes sense and if so, actually implement it
_pendingSeek = true;
@@ -859,7 +862,7 @@ static int const RCTVideoUnset = -1;
[_player setVolume:_volume];
[_player setMuted:NO];
}
-
+
[self setSelectedAudioTrack:_selectedAudioTrack];
[self setSelectedTextTrack:_selectedTextTrack];
[self setResizeMode:_resizeMode];
@@ -880,7 +883,7 @@ static int const RCTVideoUnset = -1;
AVMediaSelectionGroup *group = [_player.currentItem.asset
mediaSelectionGroupForMediaCharacteristic:characteristic];
AVMediaSelectionOption *mediaOption;
-
+
if ([type isEqualToString:@"disabled"]) {
// Do nothing. We want to ensure option is nil
} else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) {
@@ -913,7 +916,7 @@ static int const RCTVideoUnset = -1;
[_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group];
return;
}
-
+
// If a match isn't found, option will be nil and text tracks will be disabled
[_player.currentItem selectMediaOption:mediaOption inMediaSelectionGroup:group];
}
@@ -937,7 +940,7 @@ static int const RCTVideoUnset = -1;
- (void) setSideloadedText {
NSString *type = _selectedTextTrack[@"type"];
NSArray *textTracks = [self getTextTrackInfo];
-
+
// The first few tracks will be audio & video track
int firstTextIndex = 0;
for (firstTextIndex = 0; firstTextIndex < _player.currentItem.tracks.count; ++firstTextIndex) {
@@ -945,9 +948,9 @@ static int const RCTVideoUnset = -1;
break;
}
}
-
+
int selectedTrackIndex = RCTVideoUnset;
-
+
if ([type isEqualToString:@"disabled"]) {
// Do nothing. We want to ensure option is nil
} else if ([type isEqualToString:@"language"]) {
@@ -976,7 +979,7 @@ static int const RCTVideoUnset = -1;
}
}
}
-
+
// in the situation that a selected text track is not available (eg. specifies a textTrack not available)
if (![type isEqualToString:@"disabled"] && selectedTrackIndex == RCTVideoUnset) {
CFArrayRef captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(kMACaptionAppearanceDomainUser);
@@ -993,7 +996,7 @@ static int const RCTVideoUnset = -1;
}
}
}
-
+
for (int i = firstTextIndex; i < _player.currentItem.tracks.count; ++i) {
BOOL isEnabled = NO;
if (selectedTrackIndex != RCTVideoUnset) {
@@ -1008,7 +1011,7 @@ static int const RCTVideoUnset = -1;
AVMediaSelectionGroup *group = [_player.currentItem.asset
mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
AVMediaSelectionOption *mediaOption;
-
+
if ([type isEqualToString:@"disabled"]) {
// Do nothing. We want to ensure option is nil
} else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) {
@@ -1041,7 +1044,7 @@ static int const RCTVideoUnset = -1;
[_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group];
return;
}
-
+
// If a match isn't found, option will be nil and text tracks will be disabled
[_player.currentItem selectMediaOption:mediaOption inMediaSelectionGroup:group];
}
@@ -1049,7 +1052,7 @@ static int const RCTVideoUnset = -1;
- (void)setTextTracks:(NSArray*) textTracks;
{
_textTracks = textTracks;
-
+
// in case textTracks was set after selectedTextTrack
if (_selectedTextTrack) [self setSelectedTextTrack:_selectedTextTrack];
}
@@ -1081,7 +1084,7 @@ static int const RCTVideoUnset = -1;
{
// if sideloaded, textTracks will already be set
if (_textTracks) return _textTracks;
-
+
// if streaming video, we extract the text tracks
NSMutableArray *textTracks = [[NSMutableArray alloc] init];
AVMediaSelectionGroup *group = [_player.currentItem.asset
@@ -1119,7 +1122,7 @@ static int const RCTVideoUnset = -1;
}
// Set presentation style to fullscreen
[_playerViewController setModalPresentationStyle:UIModalPresentationFullScreen];
-
+
// Find the nearest view controller
UIViewController *viewController = [self firstAvailableUIViewController];
if( !viewController )
@@ -1189,13 +1192,13 @@ static int const RCTVideoUnset = -1;
_playerLayer = [AVPlayerLayer playerLayerWithPlayer:_player];
_playerLayer.frame = self.bounds;
_playerLayer.needsDisplayOnBoundsChange = YES;
-
+
// to prevent video from being animated when resizeMode is 'cover'
// resize mode must be set before layer is added
[self setResizeMode:_resizeMode];
[_playerLayer addObserver:self forKeyPath:readyForDisplayKeyPath options:NSKeyValueObservingOptionNew context:nil];
_playerLayerObserverSet = YES;
-
+
[self.layer addSublayer:_playerLayer];
self.layer.needsDisplayOnBoundsChange = YES;
}
@@ -1223,7 +1226,7 @@ static int const RCTVideoUnset = -1;
- (void)setProgressUpdateInterval:(float)progressUpdateInterval
{
_progressUpdateInterval = progressUpdateInterval;
-
+
if (_timeObserver) {
[self removePlayerTimeObserver];
[self addPlayerTimeObserver];
@@ -1266,12 +1269,17 @@ static int const RCTVideoUnset = -1;
- (void)setFilter:(NSString *)filterName {
_filterName = filterName;
+
+ if (!_filterEnabled) {
+ return;
+ } else if ([[_source objectForKey:@"uri"] rangeOfString:@"m3u8"].location != NSNotFound) {
+ return; // filters don't work for HLS... return
+ }
+
AVAsset *asset = _playerItem.asset;
-
+
if (!asset) {
return;
- } else if (!_playerItem.videoComposition && (filterName == nil || [filterName isEqualToString:@""])) {
- return; // Setting up an empty filter has a cost so avoid whenever possible
}
// TODO: filters don't work for HLS, check & return
@@ -1290,6 +1298,10 @@ static int const RCTVideoUnset = -1;
}];
}
+- (void)setFilterEnabled:(BOOL)filterEnabled {
+ _filterEnabled = filterEnabled;
+}
+
#pragma mark - React View Management
- (void)insertReactSubview:(UIView *)view atIndex:(NSInteger)atIndex
@@ -1300,7 +1312,7 @@ static int const RCTVideoUnset = -1;
{
[self setControls:true];
}
-
+
if( _controls )
{
view.frame = self.bounds;
@@ -1332,7 +1344,7 @@ static int const RCTVideoUnset = -1;
if( _controls )
{
_playerViewController.view.frame = self.bounds;
-
+
// also adjust all subviews of contentOverlayView
for (UIView* subview in _playerViewController.contentOverlayView.subviews) {
subview.frame = self.bounds;
@@ -1361,18 +1373,18 @@ static int const RCTVideoUnset = -1;
_isExternalPlaybackActiveObserverRegistered = NO;
}
_player = nil;
-
+
[self removePlayerLayer];
-
+
[_playerViewController.view removeFromSuperview];
_playerViewController = nil;
-
+
[self removePlayerTimeObserver];
[self removePlayerItemObservers];
-
+
_eventDispatcher = nil;
[[NSNotificationCenter defaultCenter] removeObserver:self];
-
+
[super removeFromSuperview];
}
diff --git a/ios/Video/RCTVideoManager.m b/ios/Video/RCTVideoManager.m
index 9823dcfb..d9578dc3 100644
--- a/ios/Video/RCTVideoManager.m
+++ b/ios/Video/RCTVideoManager.m
@@ -39,6 +39,7 @@ RCT_EXPORT_VIEW_PROPERTY(fullscreen, BOOL);
RCT_EXPORT_VIEW_PROPERTY(fullscreenAutorotate, BOOL);
RCT_EXPORT_VIEW_PROPERTY(fullscreenOrientation, NSString);
RCT_EXPORT_VIEW_PROPERTY(filter, NSString);
+RCT_EXPORT_VIEW_PROPERTY(filterEnabled, BOOL);
RCT_EXPORT_VIEW_PROPERTY(progressUpdateInterval, float);
/* Should support: onLoadStart, onLoad, and onError to stay consistent with Image */
RCT_EXPORT_VIEW_PROPERTY(onVideoLoadStart, RCTBubblingEventBlock);
diff --git a/package.json b/package.json
index 3392e479..8419ce14 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "react-native-video",
- "version": "4.0.1",
+ "version": "4.0.2",
"description": "A element for react-native",
"main": "Video.js",
"license": "MIT",