Merge branch 'master' of git://github.com/react-native-community/react-native-video into react-native-community-master
@@ -7,14 +7,14 @@
|
||||
objects = {
|
||||
|
||||
/* Begin PBXBuildFile section */
|
||||
31CAFB211CADA8CD009BCF6F /* UIView+FindUIViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 31CAFB201CADA8CD009BCF6F /* UIView+FindUIViewController.m */; };
|
||||
31CAFB2F1CADC77F009BCF6F /* RCTVideoPlayerViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 31CAFB2E1CADC77F009BCF6F /* RCTVideoPlayerViewController.m */; };
|
||||
641E284D1F0EECF100443AF6 /* RCTVideo.m in Sources */ = {isa = PBXBuildFile; fileRef = BBD49E3A1AC8DEF000610F8E /* RCTVideo.m */; };
|
||||
641E284E1F0EECF100443AF6 /* RCTVideoPlayerViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 31CAFB2E1CADC77F009BCF6F /* RCTVideoPlayerViewController.m */; };
|
||||
641E284F1F0EECF100443AF6 /* RCTVideoManager.m in Sources */ = {isa = PBXBuildFile; fileRef = BBD49E3C1AC8DEF000610F8E /* RCTVideoManager.m */; };
|
||||
641E28501F0EECF100443AF6 /* UIView+FindUIViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 31CAFB201CADA8CD009BCF6F /* UIView+FindUIViewController.m */; };
|
||||
BBD49E3F1AC8DEF000610F8E /* RCTVideo.m in Sources */ = {isa = PBXBuildFile; fileRef = BBD49E3A1AC8DEF000610F8E /* RCTVideo.m */; };
|
||||
BBD49E401AC8DEF000610F8E /* RCTVideoManager.m in Sources */ = {isa = PBXBuildFile; fileRef = BBD49E3C1AC8DEF000610F8E /* RCTVideoManager.m */; };
|
||||
D1107C0A2110259000073188 /* UIView+FindUIViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = D1107C032110259000073188 /* UIView+FindUIViewController.m */; };
|
||||
D1107C0B2110259000073188 /* UIView+FindUIViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = D1107C032110259000073188 /* UIView+FindUIViewController.m */; };
|
||||
D1107C0C2110259000073188 /* RCTVideo.m in Sources */ = {isa = PBXBuildFile; fileRef = D1107C052110259000073188 /* RCTVideo.m */; };
|
||||
D1107C0D2110259000073188 /* RCTVideo.m in Sources */ = {isa = PBXBuildFile; fileRef = D1107C052110259000073188 /* RCTVideo.m */; };
|
||||
D1107C0E2110259000073188 /* RCTVideoManager.m in Sources */ = {isa = PBXBuildFile; fileRef = D1107C062110259000073188 /* RCTVideoManager.m */; };
|
||||
D1107C0F2110259000073188 /* RCTVideoManager.m in Sources */ = {isa = PBXBuildFile; fileRef = D1107C062110259000073188 /* RCTVideoManager.m */; };
|
||||
D1107C102110259000073188 /* RCTVideoPlayerViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = D1107C082110259000073188 /* RCTVideoPlayerViewController.m */; };
|
||||
D1107C112110259000073188 /* RCTVideoPlayerViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = D1107C082110259000073188 /* RCTVideoPlayerViewController.m */; };
|
||||
/* End PBXBuildFile section */
|
||||
|
||||
/* Begin PBXCopyFilesBuildPhase section */
|
||||
@@ -40,16 +40,16 @@
|
||||
|
||||
/* Begin PBXFileReference section */
|
||||
134814201AA4EA6300B7C361 /* libRCTVideo.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libRCTVideo.a; sourceTree = BUILT_PRODUCTS_DIR; };
|
||||
31CAFB1F1CADA8CD009BCF6F /* UIView+FindUIViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "UIView+FindUIViewController.h"; sourceTree = "<group>"; };
|
||||
31CAFB201CADA8CD009BCF6F /* UIView+FindUIViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = "UIView+FindUIViewController.m"; sourceTree = "<group>"; };
|
||||
31CAFB2D1CADC77F009BCF6F /* RCTVideoPlayerViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RCTVideoPlayerViewController.h; sourceTree = "<group>"; };
|
||||
31CAFB2E1CADC77F009BCF6F /* RCTVideoPlayerViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = RCTVideoPlayerViewController.m; sourceTree = "<group>"; };
|
||||
31CAFB301CAE6B5F009BCF6F /* RCTVideoPlayerViewControllerDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RCTVideoPlayerViewControllerDelegate.h; sourceTree = "<group>"; };
|
||||
641E28441F0EEC8500443AF6 /* libRCTVideo.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libRCTVideo.a; sourceTree = BUILT_PRODUCTS_DIR; };
|
||||
BBD49E391AC8DEF000610F8E /* RCTVideo.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RCTVideo.h; sourceTree = "<group>"; };
|
||||
BBD49E3A1AC8DEF000610F8E /* RCTVideo.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = RCTVideo.m; sourceTree = "<group>"; };
|
||||
BBD49E3B1AC8DEF000610F8E /* RCTVideoManager.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RCTVideoManager.h; sourceTree = "<group>"; };
|
||||
BBD49E3C1AC8DEF000610F8E /* RCTVideoManager.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = RCTVideoManager.m; sourceTree = "<group>"; };
|
||||
D1107C012110259000073188 /* RCTVideoPlayerViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = RCTVideoPlayerViewController.h; path = Video/RCTVideoPlayerViewController.h; sourceTree = "<group>"; };
|
||||
D1107C022110259000073188 /* RCTVideoPlayerViewControllerDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = RCTVideoPlayerViewControllerDelegate.h; path = Video/RCTVideoPlayerViewControllerDelegate.h; sourceTree = "<group>"; };
|
||||
D1107C032110259000073188 /* UIView+FindUIViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "UIView+FindUIViewController.m"; path = "Video/UIView+FindUIViewController.m"; sourceTree = "<group>"; };
|
||||
D1107C042110259000073188 /* UIView+FindUIViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "UIView+FindUIViewController.h"; path = "Video/UIView+FindUIViewController.h"; sourceTree = "<group>"; };
|
||||
D1107C052110259000073188 /* RCTVideo.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = RCTVideo.m; path = Video/RCTVideo.m; sourceTree = "<group>"; };
|
||||
D1107C062110259000073188 /* RCTVideoManager.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = RCTVideoManager.m; path = Video/RCTVideoManager.m; sourceTree = "<group>"; };
|
||||
D1107C072110259000073188 /* RCTVideo.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = RCTVideo.h; path = Video/RCTVideo.h; sourceTree = "<group>"; };
|
||||
D1107C082110259000073188 /* RCTVideoPlayerViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = RCTVideoPlayerViewController.m; path = Video/RCTVideoPlayerViewController.m; sourceTree = "<group>"; };
|
||||
D1107C092110259000073188 /* RCTVideoManager.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = RCTVideoManager.h; path = Video/RCTVideoManager.h; sourceTree = "<group>"; };
|
||||
/* End PBXFileReference section */
|
||||
|
||||
/* Begin PBXFrameworksBuildPhase section */
|
||||
@@ -78,20 +78,28 @@
|
||||
name = Products;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
49E995712048B4CE00EA7890 /* Frameworks */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
);
|
||||
name = Frameworks;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
58B511D21A9E6C8500147676 = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
BBD49E391AC8DEF000610F8E /* RCTVideo.h */,
|
||||
BBD49E3A1AC8DEF000610F8E /* RCTVideo.m */,
|
||||
31CAFB301CAE6B5F009BCF6F /* RCTVideoPlayerViewControllerDelegate.h */,
|
||||
31CAFB2D1CADC77F009BCF6F /* RCTVideoPlayerViewController.h */,
|
||||
31CAFB2E1CADC77F009BCF6F /* RCTVideoPlayerViewController.m */,
|
||||
BBD49E3B1AC8DEF000610F8E /* RCTVideoManager.h */,
|
||||
BBD49E3C1AC8DEF000610F8E /* RCTVideoManager.m */,
|
||||
31CAFB1F1CADA8CD009BCF6F /* UIView+FindUIViewController.h */,
|
||||
31CAFB201CADA8CD009BCF6F /* UIView+FindUIViewController.m */,
|
||||
D1107C072110259000073188 /* RCTVideo.h */,
|
||||
D1107C052110259000073188 /* RCTVideo.m */,
|
||||
D1107C092110259000073188 /* RCTVideoManager.h */,
|
||||
D1107C062110259000073188 /* RCTVideoManager.m */,
|
||||
D1107C012110259000073188 /* RCTVideoPlayerViewController.h */,
|
||||
D1107C082110259000073188 /* RCTVideoPlayerViewController.m */,
|
||||
D1107C022110259000073188 /* RCTVideoPlayerViewControllerDelegate.h */,
|
||||
D1107C042110259000073188 /* UIView+FindUIViewController.h */,
|
||||
D1107C032110259000073188 /* UIView+FindUIViewController.m */,
|
||||
134814211AA4EA7D00B7C361 /* Products */,
|
||||
641E28441F0EEC8500443AF6 /* libRCTVideo.a */,
|
||||
49E995712048B4CE00EA7890 /* Frameworks */,
|
||||
);
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
@@ -173,10 +181,10 @@
|
||||
isa = PBXSourcesBuildPhase;
|
||||
buildActionMask = 2147483647;
|
||||
files = (
|
||||
31CAFB211CADA8CD009BCF6F /* UIView+FindUIViewController.m in Sources */,
|
||||
31CAFB2F1CADC77F009BCF6F /* RCTVideoPlayerViewController.m in Sources */,
|
||||
BBD49E3F1AC8DEF000610F8E /* RCTVideo.m in Sources */,
|
||||
BBD49E401AC8DEF000610F8E /* RCTVideoManager.m in Sources */,
|
||||
D1107C0A2110259000073188 /* UIView+FindUIViewController.m in Sources */,
|
||||
D1107C102110259000073188 /* RCTVideoPlayerViewController.m in Sources */,
|
||||
D1107C0E2110259000073188 /* RCTVideoManager.m in Sources */,
|
||||
D1107C0C2110259000073188 /* RCTVideo.m in Sources */,
|
||||
);
|
||||
runOnlyForDeploymentPostprocessing = 0;
|
||||
};
|
||||
@@ -184,10 +192,10 @@
|
||||
isa = PBXSourcesBuildPhase;
|
||||
buildActionMask = 2147483647;
|
||||
files = (
|
||||
641E284D1F0EECF100443AF6 /* RCTVideo.m in Sources */,
|
||||
641E284E1F0EECF100443AF6 /* RCTVideoPlayerViewController.m in Sources */,
|
||||
641E284F1F0EECF100443AF6 /* RCTVideoManager.m in Sources */,
|
||||
641E28501F0EECF100443AF6 /* UIView+FindUIViewController.m in Sources */,
|
||||
D1107C0B2110259000073188 /* UIView+FindUIViewController.m in Sources */,
|
||||
D1107C112110259000073188 /* RCTVideoPlayerViewController.m in Sources */,
|
||||
D1107C0F2110259000073188 /* RCTVideoManager.m in Sources */,
|
||||
D1107C0D2110259000073188 /* RCTVideo.m in Sources */,
|
||||
);
|
||||
runOnlyForDeploymentPostprocessing = 0;
|
||||
};
|
||||
@@ -275,6 +283,7 @@
|
||||
HEADER_SEARCH_PATHS = (
|
||||
"$(inherited)",
|
||||
/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/include,
|
||||
"$(SRCROOT)/Vendor/SPTPersistentCache/include/**",
|
||||
);
|
||||
LIBRARY_SEARCH_PATHS = "$(inherited)";
|
||||
OTHER_LDFLAGS = "-ObjC";
|
||||
@@ -289,6 +298,7 @@
|
||||
HEADER_SEARCH_PATHS = (
|
||||
"$(inherited)",
|
||||
/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/include,
|
||||
"$(SRCROOT)/Vendor/SPTPersistentCache/include/**",
|
||||
);
|
||||
LIBRARY_SEARCH_PATHS = "$(inherited)";
|
||||
OTHER_LDFLAGS = "-ObjC";
|
||||
@@ -362,6 +372,7 @@
|
||||
641E284B1F0EEC8500443AF6 /* Release */,
|
||||
);
|
||||
defaultConfigurationIsVisible = 0;
|
||||
defaultConfigurationName = Release;
|
||||
};
|
||||
/* End XCConfigurationList section */
|
||||
};
|
||||
|
@@ -1,16 +0,0 @@
#import "RCTVideoPlayerViewController.h"

@interface RCTVideoPlayerViewController ()

@end

@implementation RCTVideoPlayerViewController

- (void)viewDidDisappear:(BOOL)animated
{
  [super viewDidDisappear:animated];
  [_rctDelegate videoPlayerViewControllerWillDismiss:self];
  [_rctDelegate videoPlayerViewControllerDidDismiss:self];
}

@end
@@ -1,13 +1,22 @@
#import <React/RCTView.h>
#import <AVFoundation/AVFoundation.h>
#import "AVKit/AVKit.h"
#import "UIView+FindUIViewController.h"
#import "RCTVideoPlayerViewController.h"
#import "RCTVideoPlayerViewControllerDelegate.h"
#import <React/RCTComponent.h>

#if __has_include(<react-native-video/RCTVideoCache.h>)
#import <react-native-video/RCTVideoCache.h>
#import <DVAssetLoaderDelegate/DVURLAsset.h>
#import <DVAssetLoaderDelegate/DVAssetLoaderDelegate.h>
#endif

@class RCTEventDispatcher;

#if __has_include(<react-native-video/RCTVideoCache.h>)
@interface RCTVideo : UIView <RCTVideoPlayerViewControllerDelegate, DVAssetLoaderDelegatesDelegate>
#else
@interface RCTVideo : UIView <RCTVideoPlayerViewControllerDelegate>
#endif

@property (nonatomic, copy) RCTBubblingEventBlock onVideoLoadStart;
@property (nonatomic, copy) RCTBubblingEventBlock onVideoLoad;
@@ -27,6 +36,7 @@
@property (nonatomic, copy) RCTBubblingEventBlock onPlaybackStalled;
@property (nonatomic, copy) RCTBubblingEventBlock onPlaybackResume;
@property (nonatomic, copy) RCTBubblingEventBlock onPlaybackRateChange;
@property (nonatomic, copy) RCTBubblingEventBlock onVideoExternalPlaybackChange;

- (instancetype)initWithEventDispatcher:(RCTEventDispatcher *)eventDispatcher NS_DESIGNATED_INITIALIZER;
@@ -12,9 +12,16 @@ static NSString *const playbackBufferEmptyKeyPath = @"playbackBufferEmpty";
static NSString *const readyForDisplayKeyPath = @"readyForDisplay";
static NSString *const playbackRate = @"rate";
static NSString *const timedMetadata = @"timedMetadata";
static NSString *const externalPlaybackActive = @"externalPlaybackActive";

static int const RCTVideoUnset = -1;

#ifdef DEBUG
#define DebugLog(...) NSLog(__VA_ARGS__)
#else
#define DebugLog(...) (void)0
#endif

@implementation RCTVideo
{
  AVPlayer *_player;
@@ -23,23 +30,24 @@ static int const RCTVideoUnset = -1;
  BOOL _playerBufferEmpty;
  AVPlayerLayer *_playerLayer;
  BOOL _playerLayerObserverSet;
  AVPlayerViewController *_playerViewController;
  RCTVideoPlayerViewController *_playerViewController;
  NSURL *_videoURL;

  /* Required to publish events */
  RCTEventDispatcher *_eventDispatcher;
  BOOL _playbackRateObserverRegistered;
  BOOL _isExternalPlaybackActiveObserverRegistered;
  BOOL _videoLoadStarted;

  bool _pendingSeek;
  float _pendingSeekTime;
  float _lastSeekTime;

  /* For sending videoProgress events */
  Float64 _progressUpdateInterval;
  BOOL _controls;
  id _timeObserver;

  /* Keep track of any modifiers, need to be applied after each play */
  float _volume;
  float _rate;
@@ -55,20 +63,27 @@ static int const RCTVideoUnset = -1;
  BOOL _playWhenInactive;
  NSString * _ignoreSilentSwitch;
  NSString * _resizeMode;
  BOOL _fullscreen;
  NSString * _fullscreenOrientation;
  BOOL _fullscreenPlayerPresented;
  UIViewController * _presentingViewController;
#if __has_include(<react-native-video/RCTVideoCache.h>)
  RCTVideoCache * _videoCache;
#endif
}

- (instancetype)initWithEventDispatcher:(RCTEventDispatcher *)eventDispatcher
{
  if ((self = [super init])) {
    _eventDispatcher = eventDispatcher;

    _playbackRateObserverRegistered = NO;
    _isExternalPlaybackActiveObserverRegistered = NO;
    _playbackStalled = NO;
    _rate = 1.0;
    _volume = 1.0;
    _resizeMode = @"AVLayerVideoGravityResizeAspectFill";
    _fullscreenOrientation = @"all";
    _pendingSeek = false;
    _pendingSeekTime = 0.0f;
    _lastSeekTime = 0.0f;
@@ -79,39 +94,44 @@ static int const RCTVideoUnset = -1;
    _allowsExternalPlayback = YES;
    _playWhenInactive = false;
    _ignoreSilentSwitch = @"inherit"; // inherit, ignore, obey

#if __has_include(<react-native-video/RCTVideoCache.h>)
    _videoCache = [RCTVideoCache sharedInstance];
#endif
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(applicationWillResignActive:)
                                                 name:UIApplicationWillResignActiveNotification
                                               object:nil];

    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(applicationDidEnterBackground:)
                                                 name:UIApplicationDidEnterBackgroundNotification
                                               object:nil];

    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(applicationWillEnterForeground:)
                                                 name:UIApplicationWillEnterForegroundNotification
                                               object:nil];

    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(audioRouteChanged:)
                                                 name:AVAudioSessionRouteChangeNotification
                                               object:nil];
  }

  return self;
}

- (AVPlayerViewController*)createPlayerViewController:(AVPlayer*)player withPlayerItem:(AVPlayerItem*)playerItem {
  RCTVideoPlayerViewController* playerLayer= [[RCTVideoPlayerViewController alloc] init];
  playerLayer.showsPlaybackControls = YES;
  playerLayer.rctDelegate = self;
  playerLayer.view.frame = self.bounds;
  playerLayer.player = player;
  playerLayer.view.frame = self.bounds;
  return playerLayer;
- (RCTVideoPlayerViewController*)createPlayerViewController:(AVPlayer*)player
                                              withPlayerItem:(AVPlayerItem*)playerItem {
  RCTVideoPlayerViewController* viewController = [[RCTVideoPlayerViewController alloc] init];
  viewController.showsPlaybackControls = YES;
  viewController.rctDelegate = self;
  viewController.preferredOrientation = _fullscreenOrientation;

  viewController.view.frame = self.bounds;
  viewController.player = player;
  viewController.view.frame = self.bounds;
  return viewController;
}

/* ---------------------------------------------------------
@@ -120,24 +140,24 @@ static int const RCTVideoUnset = -1;

- (CMTime)playerItemDuration
{
  AVPlayerItem *playerItem = [_player currentItem];
  if (playerItem.status == AVPlayerItemStatusReadyToPlay)
  {
    return([playerItem duration]);
  }

  return(kCMTimeInvalid);
}

- (CMTimeRange)playerItemSeekableTimeRange
{
  AVPlayerItem *playerItem = [_player currentItem];
  if (playerItem.status == AVPlayerItemStatusReadyToPlay)
  {
    return [playerItem seekableTimeRanges].firstObject.CMTimeRangeValue;
  }

  return (kCMTimeRangeZero);
}

-(void)addPlayerTimeObserver
@@ -155,11 +175,11 @@ static int const RCTVideoUnset = -1;
/* Cancels the previously registered time observer. */
-(void)removePlayerTimeObserver
{
  if (_timeObserver)
  {
    [_player removeTimeObserver:_timeObserver];
    _timeObserver = nil;
  }
}

#pragma mark - Progress
@@ -177,7 +197,7 @@ static int const RCTVideoUnset = -1;
- (void)applicationWillResignActive:(NSNotification *)notification
{
  if (_playInBackground || _playWhenInactive || _paused) return;

  [_player pause];
  [_player setRate:0.0];
}
@@ -205,7 +225,7 @@ static int const RCTVideoUnset = -1;
  NSNumber *reason = [[notification userInfo] objectForKey:AVAudioSessionRouteChangeReasonKey];
  NSNumber *previousRoute = [[notification userInfo] objectForKey:AVAudioSessionRouteChangePreviousRouteKey];
  if (reason.unsignedIntValue == AVAudioSessionRouteChangeReasonOldDeviceUnavailable) {
    self.onVideoAudioBecomingNoisy(@{@"target": self.reactTag});
  }
}
@@ -213,32 +233,32 @@ static int const RCTVideoUnset = -1;

- (void)sendProgressUpdate
{
  AVPlayerItem *video = [_player currentItem];
  if (video == nil || video.status != AVPlayerItemStatusReadyToPlay) {
    return;
  }

  CMTime playerDuration = [self playerItemDuration];
  if (CMTIME_IS_INVALID(playerDuration)) {
    return;
  }

  CMTime currentTime = _player.currentTime;
  const Float64 duration = CMTimeGetSeconds(playerDuration);
  const Float64 currentTimeSecs = CMTimeGetSeconds(currentTime);

  [[NSNotificationCenter defaultCenter] postNotificationName:@"RCTVideo_progress" object:nil userInfo:@{@"progress": [NSNumber numberWithDouble: currentTimeSecs / duration]}];

  if( currentTimeSecs >= 0 && self.onVideoProgress) {
    self.onVideoProgress(@{
      @"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(currentTime)],
      @"playableDuration": [self calculatePlayableDuration],
      @"atValue": [NSNumber numberWithLongLong:currentTime.value],
      @"atTimescale": [NSNumber numberWithInt:currentTime.timescale],
      @"target": self.reactTag,
      @"seekableDuration": [self calculateSeekableDuration],
    });
  }
}

/*!
@@ -268,12 +288,12 @@ static int const RCTVideoUnset = -1;

- (NSNumber *)calculateSeekableDuration
{
  CMTimeRange timeRange = [self playerItemSeekableTimeRange];
  if (CMTIME_IS_NUMERIC(timeRange.duration))
  {
    return [NSNumber numberWithFloat:CMTimeGetSeconds(timeRange.duration)];
  }
  return [NSNumber numberWithInteger:0];
}

- (void)addPlayerItemObservers
@@ -306,42 +326,50 @@ static int const RCTVideoUnset = -1;
|
||||
[self removePlayerLayer];
|
||||
[self removePlayerTimeObserver];
|
||||
[self removePlayerItemObservers];
|
||||
|
||||
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
|
||||
|
||||
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t) 0), dispatch_get_main_queue(), ^{
|
||||
|
||||
// perform on next run loop, otherwise other passed react-props may not be set
|
||||
_playerItem = [self playerItemForSource:source];
|
||||
[self addPlayerItemObservers];
|
||||
[self playerItemForSource:source withCallback:^(AVPlayerItem * playerItem) {
|
||||
_playerItem = playerItem;
|
||||
[self addPlayerItemObservers];
|
||||
|
||||
[_player pause];
|
||||
[_playerViewController.view removeFromSuperview];
|
||||
_playerViewController = nil;
|
||||
|
||||
if (_playbackRateObserverRegistered) {
|
||||
[_player removeObserver:self forKeyPath:playbackRate context:nil];
|
||||
_playbackRateObserverRegistered = NO;
|
||||
}
|
||||
if (_isExternalPlaybackActiveObserverRegistered) {
|
||||
[_player removeObserver:self forKeyPath:externalPlaybackActive context:nil];
|
||||
_isExternalPlaybackActiveObserverRegistered = NO;
|
||||
}
|
||||
|
||||
_player = [AVPlayer playerWithPlayerItem:_playerItem];
|
||||
_player.actionAtItemEnd = AVPlayerActionAtItemEndNone;
|
||||
|
||||
[_player addObserver:self forKeyPath:playbackRate options:0 context:nil];
|
||||
_playbackRateObserverRegistered = YES;
|
||||
|
||||
[_player addObserver:self forKeyPath:externalPlaybackActive options:0 context:nil];
|
||||
_isExternalPlaybackActiveObserverRegistered = YES;
|
||||
|
||||
[self addPlayerTimeObserver];
|
||||
|
||||
[_player pause];
|
||||
[_playerViewController.view removeFromSuperview];
|
||||
_playerViewController = nil;
|
||||
|
||||
if (_playbackRateObserverRegistered) {
|
||||
[_player removeObserver:self forKeyPath:playbackRate context:nil];
|
||||
_playbackRateObserverRegistered = NO;
|
||||
}
|
||||
|
||||
_player = [AVPlayer playerWithPlayerItem:_playerItem];
|
||||
_player.actionAtItemEnd = AVPlayerActionAtItemEndNone;
|
||||
|
||||
[_player addObserver:self forKeyPath:playbackRate options:0 context:nil];
|
||||
_playbackRateObserverRegistered = YES;
|
||||
|
||||
[self addPlayerTimeObserver];
|
||||
|
||||
//Perform on next run loop, otherwise onVideoLoadStart is nil
|
||||
if(self.onVideoLoadStart) {
|
||||
id uri = [source objectForKey:@"uri"];
|
||||
id type = [source objectForKey:@"type"];
|
||||
self.onVideoLoadStart(@{@"src": @{
|
||||
//Perform on next run loop, otherwise onVideoLoadStart is nil
|
||||
if (self.onVideoLoadStart) {
|
||||
id uri = [source objectForKey:@"uri"];
|
||||
id type = [source objectForKey:@"type"];
|
||||
self.onVideoLoadStart(@{@"src": @{
|
||||
@"uri": uri ? uri : [NSNull null],
|
||||
@"type": type ? type : [NSNull null],
|
||||
@"isNetwork": [NSNumber numberWithBool:(bool)[source objectForKey:@"isNetwork"]]},
|
||||
@"target": self.reactTag
|
||||
});
|
||||
}
|
||||
|
||||
@"target": self.reactTag
|
||||
});
|
||||
}
|
||||
}];
|
||||
});
|
||||
_videoLoadStarted = YES;
|
||||
}
|
||||
@@ -351,12 +379,12 @@ static int const RCTVideoUnset = -1;
|
||||
return [NSURL URLWithString:filepath];
|
||||
}
|
||||
|
||||
// code to support local caching
|
||||
// if no file found, check if the file exists in the Document directory
|
||||
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
|
||||
NSString* relativeFilePath = [filepath lastPathComponent];
|
||||
// the file may be multiple levels below the documents directory
|
||||
NSArray* fileComponents = [filepath componentsSeparatedByString:@"Documents/"];
|
||||
if (fileComponents.count>1) {
|
||||
if (fileComponents.count > 1) {
|
||||
relativeFilePath = [fileComponents objectAtIndex:1];
|
||||
}
|
||||
|
||||
@@ -367,37 +395,13 @@ static int const RCTVideoUnset = -1;
|
||||
return nil;
|
||||
}
|
||||
|
||||
- (AVPlayerItem*)playerItemForSource:(NSDictionary *)source
|
||||
- (void)playerItemPrepareText:(AVAsset *)asset assetOptions:(NSDictionary * __nullable)assetOptions withCallback:(void(^)(AVPlayerItem *))handler
|
||||
{
|
||||
bool isNetwork = [RCTConvert BOOL:[source objectForKey:@"isNetwork"]];
|
||||
bool isAsset = [RCTConvert BOOL:[source objectForKey:@"isAsset"]];
|
||||
NSString *uri = [source objectForKey:@"uri"];
|
||||
NSString *type = [source objectForKey:@"type"];
|
||||
|
||||
AVURLAsset *asset;
|
||||
NSMutableDictionary *assetOptions = [[NSMutableDictionary alloc] init];
|
||||
|
||||
if (isNetwork) {
|
||||
/* Per #1091, this is not a public API. We need to either get approval from Apple to use this
|
||||
* or use a different approach.
|
||||
NSDictionary *headers = [source objectForKey:@"requestHeaders"];
|
||||
if ([headers count] > 0) {
|
||||
[assetOptions setObject:headers forKey:@"AVURLAssetHTTPHeaderFieldsKey"];
|
||||
}
|
||||
*/
|
||||
NSArray *cookies = [[NSHTTPCookieStorage sharedHTTPCookieStorage] cookies];
|
||||
[assetOptions setObject:cookies forKey:AVURLAssetHTTPCookiesKey];
|
||||
asset = [AVURLAsset URLAssetWithURL:[NSURL URLWithString:uri] options:assetOptions];
|
||||
} else if (isAsset) { // assets on iOS can be in the Bundle or Documents folder
|
||||
asset = [AVURLAsset URLAssetWithURL:[self urlFilePath:uri] options:nil];
|
||||
} else { // file passed in through JS, or an asset in the Xcode project
|
||||
asset = [AVURLAsset URLAssetWithURL:[[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]] options:nil];
|
||||
}
|
||||
|
||||
if (!_textTracks) {
|
||||
return [AVPlayerItem playerItemWithAsset:asset];
|
||||
handler([AVPlayerItem playerItemWithAsset:asset]);
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
// sideload text tracks
|
||||
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
|
||||
|
||||
@@ -407,14 +411,14 @@ static int const RCTVideoUnset = -1;
|
||||
ofTrack:videoAsset
|
||||
atTime:kCMTimeZero
|
||||
error:nil];
|
||||
|
||||
|
||||
AVAssetTrack *audioAsset = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
|
||||
AVMutableCompositionTrack *audioCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
|
||||
[audioCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration)
|
||||
ofTrack:audioAsset
|
||||
atTime:kCMTimeZero
|
||||
error:nil];
|
||||
|
||||
|
||||
NSMutableArray* validTextTracks = [NSMutableArray array];
|
||||
for (int i = 0; i < _textTracks.count; ++i) {
|
||||
AVURLAsset *textURLAsset;
|
||||
@@ -431,17 +435,120 @@ static int const RCTVideoUnset = -1;
|
||||
addMutableTrackWithMediaType:AVMediaTypeText
|
||||
preferredTrackID:kCMPersistentTrackID_Invalid];
|
||||
[textCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration)
|
||||
ofTrack:textTrackAsset
|
||||
atTime:kCMTimeZero
|
||||
error:nil];
|
||||
ofTrack:textTrackAsset
|
||||
atTime:kCMTimeZero
|
||||
error:nil];
|
||||
}
|
||||
if (validTextTracks.count != _textTracks.count) {
|
||||
[self setTextTracks:validTextTracks];
|
||||
}
|
||||
|
||||
return [AVPlayerItem playerItemWithAsset:mixComposition];
|
||||
handler([AVPlayerItem playerItemWithAsset:mixComposition]);
|
||||
}
|
||||
|
||||
- (void)playerItemForSource:(NSDictionary *)source withCallback:(void(^)(AVPlayerItem *))handler
|
||||
{
|
||||
bool isNetwork = [RCTConvert BOOL:[source objectForKey:@"isNetwork"]];
|
||||
bool isAsset = [RCTConvert BOOL:[source objectForKey:@"isAsset"]];
|
||||
NSString *uri = [source objectForKey:@"uri"];
|
||||
NSString *type = [source objectForKey:@"type"];
|
||||
|
||||
NSURL *url = isNetwork || isAsset
|
||||
? [NSURL URLWithString:uri]
|
||||
: [[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]];
|
||||
NSMutableDictionary *assetOptions = [[NSMutableDictionary alloc] init];
|
||||
|
||||
if (isNetwork) {
|
||||
/* Per #1091, this is not a public API.
|
||||
* We need to either get approval from Apple to use this or use a different approach.
|
||||
NSDictionary *headers = [source objectForKey:@"requestHeaders"];
|
||||
if ([headers count] > 0) {
|
||||
[assetOptions setObject:headers forKey:@"AVURLAssetHTTPHeaderFieldsKey"];
|
||||
}
|
||||
*/
|
||||
NSArray *cookies = [[NSHTTPCookieStorage sharedHTTPCookieStorage] cookies];
|
||||
[assetOptions setObject:cookies forKey:AVURLAssetHTTPCookiesKey];
|
||||
|
||||
#if __has_include(<react-native-video/RCTVideoCache.h>)
|
||||
if (!_textTracks) {
|
||||
/* The DVURLAsset created by cache doesn't have a tracksWithMediaType property, so trying
|
||||
* to bring in the text track code will crash. I suspect this is because the asset hasn't fully loaded.
|
||||
* Until this is fixed, we need to bypass caching when text tracks are specified.
|
||||
*/
|
||||
DebugLog(@"Caching is not supported for uri '%@' because text tracks are not compatible with the cache. Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md", uri);
|
||||
[self playerItemForSourceUsingCache:uri assetOptions:assetOptions withCallback:handler];
|
||||
return;
|
||||
}
|
||||
#endif
|
||||
|
||||
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:assetOptions];
|
||||
[self playerItemPrepareText:asset assetOptions:assetOptions withCallback:handler];
|
||||
return;
|
||||
} else if (isAsset) {
|
||||
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
|
||||
[self playerItemPrepareText:asset assetOptions:assetOptions withCallback:handler];
|
||||
return;
|
||||
}
|
||||
|
||||
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:[[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]] options:nil];
|
||||
[self playerItemPrepareText:asset assetOptions:assetOptions withCallback:handler];
|
||||
}
|
||||
|
||||
#if __has_include(<react-native-video/RCTVideoCache.h>)
|
||||
|
||||
- (void)playerItemForSourceUsingCache:(NSString *)uri assetOptions:(NSDictionary *)options withCallback:(void(^)(AVPlayerItem *))handler {
|
||||
NSURL *url = [NSURL URLWithString:uri];
|
||||
[_videoCache getItemForUri:uri withCallback:^(RCTVideoCacheStatus videoCacheStatus, AVAsset * _Nullable cachedAsset) {
|
||||
switch (videoCacheStatus) {
|
||||
case RCTVideoCacheStatusMissingFileExtension: {
|
||||
DebugLog(@"Could not generate cache key for uri '%@'. It is currently not supported to cache urls that do not include a file extension. The video file will not be cached. Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md", uri);
|
||||
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:options];
|
||||
[self playerItemPrepareText:asset assetOptions:options withCallback:handler];
|
||||
return;
|
||||
}
|
||||
case RCTVideoCacheStatusUnsupportedFileExtension: {
|
||||
DebugLog(@"Could not generate cache key for uri '%@'. The file extension of that uri is currently not supported. The video file will not be cached. Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md", uri);
|
||||
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:options];
|
||||
[self playerItemPrepareText:asset assetOptions:options withCallback:handler];
|
||||
return;
|
||||
}
|
||||
default:
|
||||
if (cachedAsset) {
|
||||
DebugLog(@"Playing back uri '%@' from cache", uri);
|
||||
// See note in playerItemForSource about not being able to support text tracks & caching
|
||||
handler([AVPlayerItem playerItemWithAsset:cachedAsset]);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
DVURLAsset *asset = [[DVURLAsset alloc] initWithURL:url options:options networkTimeout:10000];
|
||||
asset.loaderDelegate = self;
|
||||
|
||||
/* More granular code to have control over the DVURLAsset
|
||||
DVAssetLoaderDelegate *resourceLoaderDelegate = [[DVAssetLoaderDelegate alloc] initWithURL:url];
|
||||
resourceLoaderDelegate.delegate = self;
|
||||
NSURLComponents *components = [[NSURLComponents alloc] initWithURL:url resolvingAgainstBaseURL:NO];
|
||||
components.scheme = [DVAssetLoaderDelegate scheme];
|
||||
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:[components URL] options:options];
|
||||
[asset.resourceLoader setDelegate:resourceLoaderDelegate queue:dispatch_get_main_queue()];
|
||||
*/
|
||||
|
||||
handler([AVPlayerItem playerItemWithAsset:asset]);
|
||||
}];
|
||||
}
|
||||
|
||||
#pragma mark - DVAssetLoaderDelegate
|
||||
|
||||
- (void)dvAssetLoaderDelegate:(DVAssetLoaderDelegate *)loaderDelegate
|
||||
didLoadData:(NSData *)data
|
||||
forURL:(NSURL *)url {
|
||||
[_videoCache storeItem:data forUri:[url absoluteString] withCallback:^(BOOL success) {
|
||||
DebugLog(@"Cache data stored successfully 🎉");
|
||||
}];
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
|
||||
{
|
||||
if (object == _playerItem) {
|
||||
@@ -555,6 +662,12 @@ static int const RCTVideoUnset = -1;
|
||||
_playbackStalled = NO;
|
||||
}
|
||||
}
|
||||
else if([keyPath isEqualToString:externalPlaybackActive]) {
|
||||
if(self.onVideoExternalPlaybackChange) {
|
||||
self.onVideoExternalPlaybackChange(@{@"isExternalPlaybackActive": [NSNumber numberWithBool:_player.isExternalPlaybackActive],
|
||||
@"target": self.reactTag});
|
||||
}
|
||||
}
|
||||
} else {
|
||||
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
|
||||
}
|
||||
@@ -570,7 +683,7 @@ static int const RCTVideoUnset = -1;
|
||||
selector:@selector(playerItemDidReachEnd:)
|
||||
name:AVPlayerItemDidPlayToEndTimeNotification
|
||||
object:[_player currentItem]];
|
||||
|
||||
|
||||
[[NSNotificationCenter defaultCenter] removeObserver:self
|
||||
name:AVPlayerItemPlaybackStalledNotification
|
||||
object:nil];
|
||||
@@ -609,9 +722,9 @@ static int const RCTVideoUnset = -1;
|
||||
- (void)playerItemDidReachEnd:(NSNotification *)notification
|
||||
{
|
||||
if(self.onVideoEnd) {
|
||||
self.onVideoEnd(@{@"target": self.reactTag});
|
||||
self.onVideoEnd(@{@"target": self.reactTag});
|
||||
}
|
||||
|
||||
|
||||
if (_repeat) {
|
||||
AVPlayerItem *item = [notification object];
|
||||
[item seekToTime:kCMTimeZero];
|
||||
@@ -672,7 +785,7 @@ static int const RCTVideoUnset = -1;
|
||||
[_player play];
|
||||
[_player setRate:_rate];
|
||||
}
|
||||
|
||||
|
||||
_paused = paused;
|
||||
}
|
||||
|
||||
@@ -760,7 +873,7 @@ static int const RCTVideoUnset = -1;
|
||||
[_player setVolume:_volume];
|
||||
[_player setMuted:NO];
|
||||
}
|
||||
|
||||
|
||||
[self setSelectedAudioTrack:_selectedAudioTrack];
|
||||
[self setSelectedTextTrack:_selectedTextTrack];
|
||||
[self setResizeMode:_resizeMode];
|
||||
@@ -781,40 +894,40 @@ static int const RCTVideoUnset = -1;
|
||||
AVMediaSelectionGroup *group = [_player.currentItem.asset
|
||||
mediaSelectionGroupForMediaCharacteristic:characteristic];
|
||||
AVMediaSelectionOption *mediaOption;
|
||||
|
||||
|
||||
if ([type isEqualToString:@"disabled"]) {
|
||||
// Do nothing. We want to ensure option is nil
|
||||
// Do nothing. We want to ensure option is nil
|
||||
} else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) {
|
||||
NSString *value = criteria[@"value"];
|
||||
for (int i = 0; i < group.options.count; ++i) {
|
||||
AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i];
|
||||
NSString *optionValue;
|
||||
if ([type isEqualToString:@"language"]) {
|
||||
optionValue = [currentOption extendedLanguageTag];
|
||||
optionValue = [currentOption extendedLanguageTag];
|
||||
} else {
|
||||
optionValue = [[[currentOption commonMetadata]
|
||||
valueForKey:@"value"]
|
||||
objectAtIndex:0];
|
||||
optionValue = [[[currentOption commonMetadata]
|
||||
valueForKey:@"value"]
|
||||
objectAtIndex:0];
|
||||
}
|
||||
if ([value isEqualToString:optionValue]) {
|
||||
mediaOption = currentOption;
|
||||
break;
|
||||
mediaOption = currentOption;
|
||||
break;
|
||||
}
|
||||
}
|
||||
//} else if ([type isEqualToString:@"default"]) {
|
||||
// option = group.defaultOption; */
|
||||
}
|
||||
//} else if ([type isEqualToString:@"default"]) {
|
||||
// option = group.defaultOption; */
|
||||
} else if ([type isEqualToString:@"index"]) {
|
||||
if ([criteria[@"value"] isKindOfClass:[NSNumber class]]) {
|
||||
int index = [criteria[@"value"] intValue];
|
||||
if (group.options.count > index) {
|
||||
mediaOption = [group.options objectAtIndex:index];
|
||||
}
|
||||
int index = [criteria[@"value"] intValue];
|
||||
if (group.options.count > index) {
|
||||
mediaOption = [group.options objectAtIndex:index];
|
||||
}
|
||||
}
|
||||
} else { // default. invalid type or "system"
|
||||
[_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group];
|
||||
return;
|
||||
[_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group];
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
// If a match isn't found, option will be nil and text tracks will be disabled
|
||||
[_player.currentItem selectMediaOption:mediaOption inMediaSelectionGroup:group];
|
||||
}
|
||||
@@ -822,7 +935,7 @@ static int const RCTVideoUnset = -1;
|
||||
- (void)setSelectedAudioTrack:(NSDictionary *)selectedAudioTrack {
|
||||
_selectedAudioTrack = selectedAudioTrack;
|
||||
[self setMediaSelectionTrackForCharacteristic:AVMediaCharacteristicAudible
|
||||
withCriteria:_selectedAudioTrack];
|
||||
withCriteria:_selectedAudioTrack];
|
||||
}
|
||||
|
||||
- (void)setSelectedTextTrack:(NSDictionary *)selectedTextTrack {
|
||||
@@ -894,7 +1007,7 @@ static int const RCTVideoUnset = -1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
for (int i = firstTextIndex; i < _player.currentItem.tracks.count; ++i) {
|
||||
BOOL isEnabled = NO;
|
||||
if (selectedTrackIndex != RCTVideoUnset) {
|
||||
@@ -950,7 +1063,7 @@ static int const RCTVideoUnset = -1;
|
||||
- (void)setTextTracks:(NSArray*) textTracks;
|
||||
{
|
||||
_textTracks = textTracks;
|
||||
|
||||
|
||||
// in case textTracks was set after selectedTextTrack
|
||||
if (_selectedTextTrack) [self setSelectedTextTrack:_selectedTextTrack];
|
||||
}
|
||||
@@ -965,14 +1078,14 @@ static int const RCTVideoUnset = -1;
|
||||
NSString *title = @"";
|
||||
NSArray *values = [[currentOption commonMetadata] valueForKey:@"value"];
|
||||
if (values.count > 0) {
|
||||
title = [values objectAtIndex:0];
|
||||
title = [values objectAtIndex:0];
|
||||
}
|
||||
NSString *language = [currentOption extendedLanguageTag] ? [currentOption extendedLanguageTag] : @"";
|
||||
NSDictionary *audioTrack = @{
|
||||
@"index": [NSNumber numberWithInt:i],
|
||||
@"title": title,
|
||||
@"language": language
|
||||
};
|
||||
@"index": [NSNumber numberWithInt:i],
|
||||
@"title": title,
|
||||
@"language": language
|
||||
};
|
||||
[audioTracks addObject:audioTrack];
|
||||
}
|
||||
return audioTracks;
|
||||
@@ -1007,66 +1120,72 @@ static int const RCTVideoUnset = -1;
|
||||
|
||||
- (BOOL)getFullscreen
|
||||
{
|
||||
return _fullscreenPlayerPresented;
|
||||
return _fullscreenPlayerPresented;
|
||||
}
|
||||
|
||||
- (void)setFullscreen:(BOOL)fullscreen
|
||||
{
|
||||
if( fullscreen && !_fullscreenPlayerPresented )
|
||||
- (void)setFullscreen:(BOOL) fullscreen {
|
||||
if( fullscreen && !_fullscreenPlayerPresented && _player )
|
||||
{
|
||||
// Ensure player view controller is not null
|
||||
if( !_playerViewController )
|
||||
{
|
||||
// Ensure player view controller is not null
|
||||
if( !_playerViewController )
|
||||
{
|
||||
[self usePlayerViewController];
|
||||
[self usePlayerViewController];
|
||||
}
|
||||
// Set presentation style to fullscreen
|
||||
[_playerViewController setModalPresentationStyle:UIModalPresentationFullScreen];
|
||||
|
||||
// Find the nearest view controller
|
||||
UIViewController *viewController = [self firstAvailableUIViewController];
|
||||
if( !viewController )
|
||||
{
|
||||
UIWindow *keyWindow = [[UIApplication sharedApplication] keyWindow];
|
||||
viewController = keyWindow.rootViewController;
|
||||
if( viewController.childViewControllers.count > 0 )
|
||||
{
|
||||
viewController = viewController.childViewControllers.lastObject;
|
||||
}
|
||||
}
|
||||
if( viewController )
|
||||
{
|
||||
_presentingViewController = viewController;
|
||||
if(self.onVideoFullscreenPlayerWillPresent) {
|
||||
self.onVideoFullscreenPlayerWillPresent(@{@"target": self.reactTag});
|
||||
}
|
||||
[viewController presentViewController:_playerViewController animated:true completion:^{
|
||||
_playerViewController.showsPlaybackControls = YES;
|
||||
_fullscreenPlayerPresented = fullscreen;
|
||||
if(self.onVideoFullscreenPlayerDidPresent) {
|
||||
self.onVideoFullscreenPlayerDidPresent(@{@"target": self.reactTag});
|
||||
}
|
||||
// Set presentation style to fullscreen
|
||||
[_playerViewController setModalPresentationStyle:UIModalPresentationFullScreen];
|
||||
}];
|
||||
}
|
||||
}
|
||||
else if ( !fullscreen && _fullscreenPlayerPresented )
|
||||
{
|
||||
[self videoPlayerViewControllerWillDismiss:_playerViewController];
|
||||
[_presentingViewController dismissViewControllerAnimated:true completion:^{
|
||||
[self videoPlayerViewControllerDidDismiss:_playerViewController];
|
||||
}];
|
||||
}
|
||||
}
|
||||
|
||||
// Find the nearest view controller
|
||||
UIViewController *viewController = [self firstAvailableUIViewController];
|
||||
if( !viewController )
|
||||
{
|
||||
UIWindow *keyWindow = [[UIApplication sharedApplication] keyWindow];
|
||||
viewController = keyWindow.rootViewController;
|
||||
if( viewController.childViewControllers.count > 0 )
|
||||
{
|
||||
viewController = viewController.childViewControllers.lastObject;
|
||||
}
|
||||
}
|
||||
if( viewController )
|
||||
{
|
||||
_presentingViewController = viewController;
|
||||
if(self.onVideoFullscreenPlayerWillPresent) {
|
||||
self.onVideoFullscreenPlayerWillPresent(@{@"target": self.reactTag});
|
||||
}
|
||||
[viewController presentViewController:_playerViewController animated:true completion:^{
|
||||
_playerViewController.showsPlaybackControls = YES;
|
||||
_fullscreenPlayerPresented = fullscreen;
|
||||
if(self.onVideoFullscreenPlayerDidPresent) {
|
||||
self.onVideoFullscreenPlayerDidPresent(@{@"target": self.reactTag});
|
||||
}
|
||||
}];
|
||||
}
|
||||
}
|
||||
else if ( !fullscreen && _fullscreenPlayerPresented )
|
||||
{
|
||||
[self videoPlayerViewControllerWillDismiss:_playerViewController];
|
||||
[_presentingViewController dismissViewControllerAnimated:true completion:^{
|
||||
[self videoPlayerViewControllerDidDismiss:_playerViewController];
|
||||
}];
|
||||
}
|
||||
- (void)setFullscreenOrientation:(NSString *)orientation {
|
||||
_fullscreenOrientation = orientation;
|
||||
if (_fullscreenPlayerPresented) {
|
||||
_playerViewController.preferredOrientation = orientation;
|
||||
}
|
||||
}
|
||||
|
||||
- (void)usePlayerViewController
|
||||
{
|
||||
if( _player )
|
||||
{
|
||||
_playerViewController = [self createPlayerViewController:_player withPlayerItem:_playerItem];
|
||||
// to prevent video from being animated when resizeMode is 'cover'
|
||||
// resize mode must be set before subview is added
|
||||
[self setResizeMode:_resizeMode];
|
||||
[self addSubview:_playerViewController.view];
|
||||
}
|
||||
if( _player )
|
||||
{
|
||||
_playerViewController = [self createPlayerViewController:_player withPlayerItem:_playerItem];
|
||||
// to prevent video from being animated when resizeMode is 'cover'
|
||||
// resize mode must be set before subview is added
|
||||
[self setResizeMode:_resizeMode];
|
||||
[self addSubview:_playerViewController.view];
|
||||
}
|
||||
}
|
||||
|
||||
- (void)usePlayerLayer
|
||||
@@ -1090,27 +1209,27 @@ static int const RCTVideoUnset = -1;
|
||||
|
||||
- (void)setControls:(BOOL)controls
|
||||
{
|
||||
if( _controls != controls || (!_playerLayer && !_playerViewController) )
|
||||
if( _controls != controls || (!_playerLayer && !_playerViewController) )
|
||||
{
|
||||
_controls = controls;
|
||||
if( _controls )
|
||||
{
|
||||
_controls = controls;
|
||||
if( _controls )
|
||||
{
|
||||
[self removePlayerLayer];
|
||||
[self usePlayerViewController];
|
||||
}
|
||||
else
|
||||
{
|
||||
[_playerViewController.view removeFromSuperview];
|
||||
_playerViewController = nil;
|
||||
[self usePlayerLayer];
|
||||
}
|
||||
[self removePlayerLayer];
|
||||
[self usePlayerViewController];
|
||||
}
|
||||
else
|
||||
{
|
||||
[_playerViewController.view removeFromSuperview];
|
||||
_playerViewController = nil;
|
||||
[self usePlayerLayer];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
- (void)setProgressUpdateInterval:(float)progressUpdateInterval
|
||||
{
|
||||
_progressUpdateInterval = progressUpdateInterval;
|
||||
|
||||
|
||||
if (_timeObserver) {
|
||||
[self removePlayerTimeObserver];
|
||||
[self addPlayerTimeObserver];
|
||||
@@ -1131,24 +1250,24 @@ static int const RCTVideoUnset = -1;
|
||||
|
||||
- (void)videoPlayerViewControllerWillDismiss:(AVPlayerViewController *)playerViewController
|
||||
{
|
||||
if (_playerViewController == playerViewController && _fullscreenPlayerPresented && self.onVideoFullscreenPlayerWillDismiss)
|
||||
{
|
||||
self.onVideoFullscreenPlayerWillDismiss(@{@"target": self.reactTag});
|
||||
}
|
||||
if (_playerViewController == playerViewController && _fullscreenPlayerPresented && self.onVideoFullscreenPlayerWillDismiss)
|
||||
{
|
||||
self.onVideoFullscreenPlayerWillDismiss(@{@"target": self.reactTag});
|
||||
}
|
||||
}
|
||||
|
||||
- (void)videoPlayerViewControllerDidDismiss:(AVPlayerViewController *)playerViewController
|
||||
{
|
||||
if (_playerViewController == playerViewController && _fullscreenPlayerPresented)
|
||||
{
|
||||
_fullscreenPlayerPresented = false;
|
||||
_presentingViewController = nil;
|
||||
_playerViewController = nil;
|
||||
[self applyModifiers];
|
||||
if(self.onVideoFullscreenPlayerDidDismiss) {
|
||||
self.onVideoFullscreenPlayerDidDismiss(@{@"target": self.reactTag});
|
||||
}
|
||||
if (_playerViewController == playerViewController && _fullscreenPlayerPresented)
|
||||
{
|
||||
_fullscreenPlayerPresented = false;
|
||||
_presentingViewController = nil;
|
||||
_playerViewController = nil;
|
||||
[self applyModifiers];
|
||||
if(self.onVideoFullscreenPlayerDidDismiss) {
|
||||
self.onVideoFullscreenPlayerDidDismiss(@{@"target": self.reactTag});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#pragma mark - React View Management
|
||||
@@ -1161,15 +1280,15 @@ static int const RCTVideoUnset = -1;
|
||||
{
|
||||
[self setControls:true];
|
||||
}
|
||||
|
||||
|
||||
if( _controls )
|
||||
{
|
||||
view.frame = self.bounds;
|
||||
[_playerViewController.contentOverlayView insertSubview:view atIndex:atIndex];
|
||||
view.frame = self.bounds;
|
||||
[_playerViewController.contentOverlayView insertSubview:view atIndex:atIndex];
|
||||
}
|
||||
else
|
||||
{
|
||||
RCTLogError(@"video cannot have any subviews");
|
||||
RCTLogError(@"video cannot have any subviews");
|
||||
}
|
||||
return;
|
||||
}
|
||||
@@ -1178,7 +1297,7 @@ static int const RCTVideoUnset = -1;
|
||||
{
|
||||
if( _controls )
|
||||
{
|
||||
[subview removeFromSuperview];
|
||||
[subview removeFromSuperview];
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -1193,7 +1312,7 @@ static int const RCTVideoUnset = -1;
|
||||
if( _controls )
|
||||
{
|
||||
_playerViewController.view.frame = self.bounds;
|
||||
|
||||
|
||||
// also adjust all subviews of contentOverlayView
|
||||
for (UIView* subview in _playerViewController.contentOverlayView.subviews) {
|
||||
subview.frame = self.bounds;
|
||||
@@ -1201,10 +1320,10 @@ static int const RCTVideoUnset = -1;
|
||||
}
|
||||
else
|
||||
{
|
||||
[CATransaction begin];
|
||||
[CATransaction setAnimationDuration:0];
|
||||
_playerLayer.frame = self.bounds;
|
||||
[CATransaction commit];
|
||||
[CATransaction begin];
|
||||
[CATransaction setAnimationDuration:0];
|
||||
_playerLayer.frame = self.bounds;
|
||||
[CATransaction commit];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1217,19 +1336,23 @@ static int const RCTVideoUnset = -1;
|
||||
[_player removeObserver:self forKeyPath:playbackRate context:nil];
|
||||
_playbackRateObserverRegistered = NO;
|
||||
}
|
||||
if (_isExternalPlaybackActiveObserverRegistered) {
|
||||
[_player removeObserver:self forKeyPath:externalPlaybackActive context:nil];
|
||||
_isExternalPlaybackActiveObserverRegistered = NO;
|
||||
}
|
||||
_player = nil;
|
||||
|
||||
|
||||
[self removePlayerLayer];
|
||||
|
||||
|
||||
[_playerViewController.view removeFromSuperview];
|
||||
_playerViewController = nil;
|
||||
|
||||
|
||||
[self removePlayerTimeObserver];
|
||||
[self removePlayerItemObservers];
|
||||
|
||||
|
||||
_eventDispatcher = nil;
|
||||
[[NSNotificationCenter defaultCenter] removeObserver:self];
|
||||
|
||||
|
||||
[super removeFromSuperview];
|
||||
}
|
||||
|
@@ -37,6 +37,7 @@ RCT_EXPORT_VIEW_PROPERTY(rate, float);
RCT_EXPORT_VIEW_PROPERTY(seek, NSDictionary);
RCT_EXPORT_VIEW_PROPERTY(currentTime, float);
RCT_EXPORT_VIEW_PROPERTY(fullscreen, BOOL);
RCT_EXPORT_VIEW_PROPERTY(fullscreenOrientation, NSString);
RCT_EXPORT_VIEW_PROPERTY(progressUpdateInterval, float);
/* Should support: onLoadStart, onLoad, and onError to stay consistent with Image */
RCT_EXPORT_VIEW_PROPERTY(onVideoLoadStart, RCTBubblingEventBlock);
@@ -57,6 +58,7 @@ RCT_EXPORT_VIEW_PROPERTY(onReadyForDisplay, RCTBubblingEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onPlaybackStalled, RCTBubblingEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onPlaybackResume, RCTBubblingEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onPlaybackRateChange, RCTBubblingEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onVideoExternalPlaybackChange, RCTBubblingEventBlock);

- (NSDictionary *)constantsToExport
{
@@ -12,4 +12,8 @@

@interface RCTVideoPlayerViewController : AVPlayerViewController
@property (nonatomic, weak) id<RCTVideoPlayerViewControllerDelegate> rctDelegate;

// Optional parameters
@property (nonatomic, weak) NSString* preferredOrientation;

@end
ios/Video/RCTVideoPlayerViewController.m (new file, 42 lines)
@@ -0,0 +1,42 @@
#import "RCTVideoPlayerViewController.h"
|
||||
|
||||
@interface RCTVideoPlayerViewController ()
|
||||
|
||||
@end
|
||||
|
||||
@implementation RCTVideoPlayerViewController
|
||||
|
||||
- (BOOL)shouldAutorotate {
|
||||
if (self.preferredOrientation.lowercaseString == nil || [self.preferredOrientation.lowercaseString isEqualToString:@"all"])
|
||||
return YES;
|
||||
|
||||
return NO;
|
||||
}
|
||||
|
||||
- (void)viewDidDisappear:(BOOL)animated
|
||||
{
|
||||
[super viewDidDisappear:animated];
|
||||
[_rctDelegate videoPlayerViewControllerWillDismiss:self];
|
||||
[_rctDelegate videoPlayerViewControllerDidDismiss:self];
|
||||
}
|
||||
|
||||
#if !TARGET_OS_TV
|
||||
- (UIInterfaceOrientationMask)supportedInterfaceOrientations {
|
||||
return UIInterfaceOrientationMaskAll;
|
||||
}
|
||||
|
||||
- (UIInterfaceOrientation)preferredInterfaceOrientationForPresentation {
|
||||
if ([self.preferredOrientation.lowercaseString isEqualToString:@"landscape"]) {
|
||||
return UIInterfaceOrientationLandscapeRight;
|
||||
}
|
||||
else if ([self.preferredOrientation.lowercaseString isEqualToString:@"portrait"]) {
|
||||
return UIInterfaceOrientationPortrait;
|
||||
}
|
||||
else { // default case
|
||||
UIInterfaceOrientation orientation = [UIApplication sharedApplication].statusBarOrientation;
|
||||
return orientation;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
@end
|
ios/VideoCaching/RCTVideoCache.h (new file, 38 lines)
@@ -0,0 +1,38 @@
#import <Foundation/Foundation.h>
|
||||
#import <AVFoundation/AVFoundation.h>
|
||||
#import <SPTPersistentCache/SPTPersistentCache.h>
|
||||
#import <SPTPersistentCache/SPTPersistentCacheOptions.h>
|
||||
#import <CommonCrypto/CommonDigest.h>
|
||||
|
||||
typedef NS_ENUM(NSUInteger, RCTVideoCacheStatus) {
|
||||
RCTVideoCacheStatusMissingFileExtension,
|
||||
RCTVideoCacheStatusUnsupportedFileExtension,
|
||||
RCTVideoCacheStatusNotAvailable,
|
||||
RCTVideoCacheStatusAvailable
|
||||
};
|
||||
|
||||
@class SPTPersistentCache;
|
||||
@class SPTPersistentCacheOptions;
|
||||
|
||||
@interface RCTVideoCache : NSObject
|
||||
{
|
||||
SPTPersistentCache *videoCache;
|
||||
NSString * _Nullable cachePath;
|
||||
NSString * temporaryCachePath;
|
||||
NSString * _Nullable cacheIdentifier;
|
||||
}
|
||||
|
||||
@property(nonatomic, strong) SPTPersistentCache * _Nullable videoCache;
|
||||
@property(nonatomic, strong) NSString * cachePath;
|
||||
@property(nonatomic, strong) NSString * cacheIdentifier;
|
||||
@property(nonatomic, strong) NSString * temporaryCachePath;
|
||||
|
||||
+ (RCTVideoCache *)sharedInstance;
|
||||
- (void)storeItem:(NSData *)data forUri:(NSString *)uri withCallback:(void(^)(BOOL))handler;
|
||||
- (void)getItemForUri:(NSString *)url withCallback:(void(^)(RCTVideoCacheStatus, AVAsset * _Nullable)) handler;
|
||||
- (NSURL *)createUniqueTemporaryFileUrl:(NSString * _Nonnull)url withExtension:(NSString * _Nonnull) extension;
|
||||
- (AVURLAsset *)getItemFromTemporaryStorage:(NSString *)key;
|
||||
- (BOOL)saveDataToTemporaryStorage:(NSData *)data key:(NSString *)key;
|
||||
- (void) createTemporaryPath;
|
||||
|
||||
@end
|
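A sketch of how a caller might consume this cache API, checking for a cached asset before falling back to the network; the function name and completion block are illustrative, not part of this diff.

#import "RCTVideoCache.h"

// Hypothetical consumer: resolve a URI to a playable AVAsset, or nil if not cached.
static void ExampleResolveAsset(NSString *uri, void (^completion)(AVAsset * _Nullable asset))
{
  [[RCTVideoCache sharedInstance] getItemForUri:uri withCallback:^(RCTVideoCacheStatus status, AVAsset * _Nullable cachedAsset) {
    switch (status) {
      case RCTVideoCacheStatusAvailable:
        completion(cachedAsset); // already present on disk
        break;
      case RCTVideoCacheStatusNotAvailable:
      case RCTVideoCacheStatusMissingFileExtension:
      case RCTVideoCacheStatusUnsupportedFileExtension:
        completion(nil); // stream from the network instead
        break;
    }
  }];
}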
174
ios/VideoCaching/RCTVideoCache.m
Normal file
@@ -0,0 +1,174 @@
#import "RCTVideoCache.h"

@implementation RCTVideoCache

@synthesize videoCache;
@synthesize cachePath;
@synthesize cacheIdentifier;
@synthesize temporaryCachePath;

+ (RCTVideoCache *)sharedInstance {
  static RCTVideoCache *sharedInstance = nil;
  static dispatch_once_t onceToken;
  dispatch_once(&onceToken, ^{
    sharedInstance = [[self alloc] init];
  });
  return sharedInstance;
}

- (id)init {
  if (self = [super init]) {
    self.cacheIdentifier = @"rct.video.cache";
    self.temporaryCachePath = [NSTemporaryDirectory() stringByAppendingPathComponent:self.cacheIdentifier];
    self.cachePath = [NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES).firstObject stringByAppendingPathComponent:self.cacheIdentifier];
    SPTPersistentCacheOptions *options = [SPTPersistentCacheOptions new];
    options.cachePath = self.cachePath;
    options.cacheIdentifier = self.cacheIdentifier;
    options.defaultExpirationPeriod = 60 * 60 * 24 * 30;
    options.garbageCollectionInterval = (NSUInteger)(1.5 * SPTPersistentCacheDefaultGCIntervalSec);
    options.sizeConstraintBytes = 1024 * 1024 * 100;
    options.useDirectorySeparation = NO;
#ifdef DEBUG
    options.debugOutput = ^(NSString *string) {
      NSLog(@"Video Cache: %@", string);
    };
#endif
    [self createTemporaryPath];
    self.videoCache = [[SPTPersistentCache alloc] initWithOptions:options];
    [self.videoCache scheduleGarbageCollector];
  }
  return self;
}

- (void) createTemporaryPath {
  NSError *error = nil;
  BOOL success = [[NSFileManager defaultManager] createDirectoryAtPath:self.temporaryCachePath
                                           withIntermediateDirectories:YES
                                                            attributes:nil
                                                                 error:&error];
#ifdef DEBUG
  if (!success || error) {
    NSLog(@"Error while creating the temporary video cache directory: %@", error);
  }
#endif
}

- (void)storeItem:(NSData *)data forUri:(NSString *)uri withCallback:(void(^)(BOOL))handler;
{
  NSString *key = [self generateCacheKeyForUri:uri];
  if (key == nil) {
    handler(NO);
    return;
  }
  [self saveDataToTemporaryStorage:data key:key];
  [self.videoCache storeData:data forKey:key locked:NO withCallback:^(SPTPersistentCacheResponse * _Nonnull response) {
    if (response.error) {
#ifdef DEBUG
      NSLog(@"An error occurred while saving the video into the cache: %@", [response.error localizedDescription]);
#endif
      handler(NO);
      return;
    }
    handler(YES);
  } onQueue:dispatch_get_main_queue()];
  return;
}

- (AVURLAsset *)getItemFromTemporaryStorage:(NSString *)key {
  NSString * temporaryFilePath = [self.temporaryCachePath stringByAppendingPathComponent:key];

  BOOL fileExists = [[NSFileManager defaultManager] fileExistsAtPath:temporaryFilePath];
  if (!fileExists) {
    return nil;
  }
  NSURL *assetUrl = [[NSURL alloc] initFileURLWithPath:temporaryFilePath];
  AVURLAsset *asset = [AVURLAsset URLAssetWithURL:assetUrl options:nil];
  return asset;
}

- (BOOL)saveDataToTemporaryStorage:(NSData *)data key:(NSString *)key {
  NSString *temporaryFilePath = [self.temporaryCachePath stringByAppendingPathComponent:key];
  [data writeToFile:temporaryFilePath atomically:YES];
  return YES;
}

- (NSString *)generateCacheKeyForUri:(NSString *)uri {
  NSString *uriWithoutQueryParams = uri;

  // parse file extension
  if ([uri rangeOfString:@"?"].location != NSNotFound) {
    NSArray<NSString*> * components = [uri componentsSeparatedByString:@"?"];
    uriWithoutQueryParams = [components objectAtIndex:0];
  }

  NSString * pathExtension = [uriWithoutQueryParams pathExtension];
  NSArray * supportedExtensions = @[@"m4v", @"mp4", @"mov"];
  if ([pathExtension isEqualToString:@""]) {
    NSDictionary *userInfo = @{
      NSLocalizedDescriptionKey: NSLocalizedString(@"Missing file extension.", nil),
      NSLocalizedFailureReasonErrorKey: NSLocalizedString(@"Missing file extension.", nil),
      NSLocalizedRecoverySuggestionErrorKey: NSLocalizedString(@"Missing file extension.", nil)
    };
    NSError *error = [NSError errorWithDomain:@"RCTVideoCache"
                                         code:RCTVideoCacheStatusMissingFileExtension userInfo:userInfo];
    @throw error;
  } else if (![supportedExtensions containsObject:pathExtension]) {
    // Notably, we don't currently support m3u8 (HLS playlists)
    NSDictionary *userInfo = @{
      NSLocalizedDescriptionKey: NSLocalizedString(@"Unsupported file extension.", nil),
      NSLocalizedFailureReasonErrorKey: NSLocalizedString(@"Unsupported file extension.", nil),
      NSLocalizedRecoverySuggestionErrorKey: NSLocalizedString(@"Unsupported file extension.", nil)
    };
    NSError *error = [NSError errorWithDomain:@"RCTVideoCache"
                                         code:RCTVideoCacheStatusUnsupportedFileExtension userInfo:userInfo];
    @throw error;
  }
  return [[self generateHashForUrl:uri] stringByAppendingPathExtension:pathExtension];
}

- (void)getItemForUri:(NSString *)uri withCallback:(void(^)(RCTVideoCacheStatus, AVAsset * _Nullable)) handler {
  @try {
    NSString *key = [self generateCacheKeyForUri:uri];
    AVURLAsset * temporaryAsset = [self getItemFromTemporaryStorage:key];
    if (temporaryAsset != nil) {
      handler(RCTVideoCacheStatusAvailable, temporaryAsset);
      return;
    }

    [self.videoCache loadDataForKey:key withCallback:^(SPTPersistentCacheResponse * _Nonnull response) {
      if (response.record == nil || response.record.data == nil) {
        handler(RCTVideoCacheStatusNotAvailable, nil);
        return;
      }
      [self saveDataToTemporaryStorage:response.record.data key:key];
      handler(RCTVideoCacheStatusAvailable, [self getItemFromTemporaryStorage:key]);
    } onQueue:dispatch_get_main_queue()];
  } @catch (NSError * err) {
    switch (err.code) {
      case RCTVideoCacheStatusMissingFileExtension:
        handler(RCTVideoCacheStatusMissingFileExtension, nil);
        return;
      case RCTVideoCacheStatusUnsupportedFileExtension:
        handler(RCTVideoCacheStatusUnsupportedFileExtension, nil);
        return;
      default:
        @throw err;
    }
  }
}

- (NSString *)generateHashForUrl:(NSString *)string {
  const char *cStr = [string UTF8String];
  unsigned char result[CC_MD5_DIGEST_LENGTH];
  CC_MD5( cStr, (CC_LONG)strlen(cStr), result );

  return [NSString stringWithFormat:
          @"%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X",
          result[0], result[1], result[2], result[3],
          result[4], result[5], result[6], result[7],
          result[8], result[9], result[10], result[11],
          result[12], result[13], result[14], result[15]
          ];
}

@end
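Cache keys are the MD5 hash of the full URI with the original file extension appended, so the same URI always maps to the same on-disk entry. Once the bytes of a supported container have been fully downloaded, they could be handed to the cache as sketched below; the surrounding download code and function name are assumed, not part of this diff.

// Hypothetical call site: persist fully downloaded video data for reuse.
static void ExampleCacheDownloadedVideo(NSData *videoData, NSString *uri)
{
  [[RCTVideoCache sharedInstance] storeItem:videoData forUri:uri withCallback:^(BOOL success) {
    if (!success) {
      NSLog(@"Video for %@ was not cached (unsupported extension or cache error).", uri);
    }
  }];
}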