added ability to export video

Nicolas Gonzalez 2018-10-26 08:21:41 -05:00
parent 18e8895712
commit 16e45fc9f1
5 changed files with 1040 additions and 1029 deletions

Video.js

@@ -1,6 +1,6 @@
import React, {Component} from 'react';
import PropTypes from 'prop-types';
import {StyleSheet, requireNativeComponent, NativeModules, View, ViewPropTypes, Image, Platform} from 'react-native';
import {StyleSheet, requireNativeComponent, NativeModules, View, ViewPropTypes, Image, Platform, findNodeHandle} from 'react-native';
import resolveAssetSource from 'react-native/Libraries/Image/resolveAssetSource';
import TextTrackType from './TextTrackType';
import VideoResizeMode from './VideoResizeMode.js';
@@ -71,6 +71,10 @@ export default class Video extends Component {
this.setNativeProps({ fullscreen: false });
};
saveAsync = async (options?) => {
return await NativeModules.VideoManager.save(options, findNodeHandle(this._root));
}
_assignRoot = (component) => {
this._root = component;
};
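For reference, a minimal consumer-side sketch of the new saveAsync API. The component name, source URL, style values, and empty options object below are illustrative assumptions; only saveAsync itself and the resolved { uri } shape come from this change.

import React, { Component } from 'react';
import { Button, View } from 'react-native';
import Video from 'react-native-video';

class ExportExample extends Component {
  render() {
    return (
      <View>
        <Video
          source={{ uri: 'https://example.com/clip.mp4' }}
          style={{ width: 320, height: 180 }}
          ref={(ref) => { this.player = ref; }}
        />
        <Button
          title="Export"
          onPress={async () => {
            // saveAsync forwards the options object to the native save and
            // resolves with { uri } pointing at the exported .mp4 on success.
            const result = await this.player.saveAsync({});
            console.log('Exported video to', result.uri);
          }}
        />
      </View>
    );
  }
}

export default ExportExample;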

RCTVideo.h

@@ -4,6 +4,7 @@
#import "RCTVideoPlayerViewController.h"
#import "RCTVideoPlayerViewControllerDelegate.h"
#import <React/RCTComponent.h>
#import <React/RCTBridgeModule.h>
#if __has_include(<react-native-video/RCTVideoCache.h>)
#import <react-native-video/RCTVideoCache.h>
@@ -41,4 +42,6 @@
- (AVPlayerViewController*)createPlayerViewController:(AVPlayer*)player withPlayerItem:(AVPlayerItem*)playerItem;
- (void)save:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject;
@end

RCTVideo.m

@@ -23,8 +23,7 @@ static int const RCTVideoUnset = -1;
#define DebugLog(...) (void)0
#endif
@implementation RCTVideo
{
@implementation RCTVideo {
AVPlayer *_player;
AVPlayerItem *_playerItem;
BOOL _playerItemObserversSet;
@@ -74,8 +73,7 @@ static int const RCTVideoUnset = -1;
#endif
}
- (instancetype)initWithEventDispatcher:(RCTEventDispatcher *)eventDispatcher
{
- (instancetype)initWithEventDispatcher:(RCTEventDispatcher *)eventDispatcher {
if ((self = [super init])) {
filters = @{
@@ -150,45 +148,40 @@ static int const RCTVideoUnset = -1;
** Get the duration for a AVPlayerItem.
** ------------------------------------------------------- */
- (CMTime)playerItemDuration
{
- (CMTime)playerItemDuration {
AVPlayerItem *playerItem = [_player currentItem];
if (playerItem.status == AVPlayerItemStatusReadyToPlay)
{
if (playerItem.status == AVPlayerItemStatusReadyToPlay) {
return ([playerItem duration]);
}
return (kCMTimeInvalid);
}
- (CMTimeRange)playerItemSeekableTimeRange
{
- (CMTimeRange)playerItemSeekableTimeRange {
AVPlayerItem *playerItem = [_player currentItem];
if (playerItem.status == AVPlayerItemStatusReadyToPlay)
{
if (playerItem.status == AVPlayerItemStatusReadyToPlay) {
return [playerItem seekableTimeRanges].firstObject.CMTimeRangeValue;
}
return (kCMTimeRangeZero);
}
-(void)addPlayerTimeObserver
{
- (void)addPlayerTimeObserver {
const Float64 progressUpdateIntervalMS = _progressUpdateInterval / 1000;
// @see endScrubbing in AVPlayerDemoPlaybackViewController.m
// of https://developer.apple.com/library/ios/samplecode/AVPlayerDemo/Introduction/Intro.html
__weak RCTVideo *weakSelf = self;
_timeObserver = [_player addPeriodicTimeObserverForInterval:CMTimeMakeWithSeconds(progressUpdateIntervalMS, NSEC_PER_SEC)
queue:NULL
usingBlock:^(CMTime time) { [weakSelf sendProgressUpdate]; }
usingBlock:^(CMTime time) {
[weakSelf sendProgressUpdate];
}
];
}
/* Cancels the previously registered time observer. */
-(void)removePlayerTimeObserver
{
if (_timeObserver)
{
- (void)removePlayerTimeObserver {
if (_timeObserver) {
[_player removeTimeObserver:_timeObserver];
_timeObserver = nil;
}
@@ -196,8 +189,7 @@ static int const RCTVideoUnset = -1;
#pragma mark - Progress
- (void)dealloc
{
- (void)dealloc {
[[NSNotificationCenter defaultCenter] removeObserver:self];
[self removePlayerLayer];
[self removePlayerItemObservers];
@@ -206,24 +198,21 @@ static int const RCTVideoUnset = -1;
#pragma mark - App lifecycle handlers
- (void)applicationWillResignActive:(NSNotification *)notification
{
- (void)applicationWillResignActive:(NSNotification *)notification {
if (_playInBackground || _playWhenInactive || _paused) return;
[_player pause];
[_player setRate:0.0];
}
- (void)applicationDidEnterBackground:(NSNotification *)notification
{
- (void)applicationDidEnterBackground:(NSNotification *)notification {
if (_playInBackground) {
// Needed to play sound in background. See https://developer.apple.com/library/ios/qa/qa1668/_index.html
[_playerLayer setPlayer:nil];
}
}
- (void)applicationWillEnterForeground:(NSNotification *)notification
{
- (void)applicationWillEnterForeground:(NSNotification *)notification {
[self applyModifiers];
if (_playInBackground) {
[_playerLayer setPlayer:_player];
@@ -232,8 +221,7 @@ static int const RCTVideoUnset = -1;
#pragma mark - Audio events
- (void)audioRouteChanged:(NSNotification *)notification
{
- (void)audioRouteChanged:(NSNotification *)notification {
NSNumber *reason = [[notification userInfo] objectForKey:AVAudioSessionRouteChangeReasonKey];
NSNumber *previousRoute = [[notification userInfo] objectForKey:AVAudioSessionRouteChangePreviousRouteKey];
if (reason.unsignedIntValue == AVAudioSessionRouteChangeReasonOldDeviceUnavailable) {
@@ -243,8 +231,7 @@ static int const RCTVideoUnset = -1;
#pragma mark - Progress
- (void)sendProgressUpdate
{
- (void)sendProgressUpdate {
AVPlayerItem *video = [_player currentItem];
if (video == nil || video.status != AVPlayerItemStatusReadyToPlay) {
return;
@@ -278,8 +265,7 @@ static int const RCTVideoUnset = -1;
*
* \returns The playable duration of the current player item in seconds.
*/
- (NSNumber *)calculatePlayableDuration
{
- (NSNumber *)calculatePlayableDuration {
AVPlayerItem *video = _player.currentItem;
if (video.status == AVPlayerItemStatusReadyToPlay) {
__block CMTimeRange effectiveTimeRange;
@@ -298,18 +284,15 @@ static int const RCTVideoUnset = -1;
return [NSNumber numberWithInteger:0];
}
- (NSNumber *)calculateSeekableDuration
{
- (NSNumber *)calculateSeekableDuration {
CMTimeRange timeRange = [self playerItemSeekableTimeRange];
if (CMTIME_IS_NUMERIC(timeRange.duration))
{
if (CMTIME_IS_NUMERIC(timeRange.duration)) {
return [NSNumber numberWithFloat:CMTimeGetSeconds(timeRange.duration)];
}
return [NSNumber numberWithInteger:0];
}
- (void)addPlayerItemObservers
{
- (void)addPlayerItemObservers {
[_playerItem addObserver:self forKeyPath:statusKeyPath options:0 context:nil];
[_playerItem addObserver:self forKeyPath:playbackBufferEmptyKeyPath options:0 context:nil];
[_playerItem addObserver:self forKeyPath:playbackLikelyToKeepUpKeyPath options:0 context:nil];
@@ -320,8 +303,7 @@ static int const RCTVideoUnset = -1;
/* Fixes https://github.com/brentvatne/react-native-video/issues/43
* Crashes caused when trying to remove the observer when there is no
* observer set */
- (void)removePlayerItemObservers
{
- (void)removePlayerItemObservers {
if (_playerItemObserversSet) {
[_playerItem removeObserver:self forKeyPath:statusKeyPath];
[_playerItem removeObserver:self forKeyPath:playbackBufferEmptyKeyPath];
@@ -333,8 +315,7 @@ static int const RCTVideoUnset = -1;
#pragma mark - Player and source
- (void)setSrc:(NSDictionary *)source
{
- (void)setSrc:(NSDictionary *)source {
[self removePlayerLayer];
[self removePlayerTimeObserver];
[self removePlayerItemObservers];
@@ -407,8 +388,7 @@ static int const RCTVideoUnset = -1;
return nil;
}
- (void)playerItemPrepareText:(AVAsset *)asset assetOptions:(NSDictionary * __nullable)assetOptions withCallback:(void(^)(AVPlayerItem *))handler
{
- (void)playerItemPrepareText:(AVAsset *)asset assetOptions:(NSDictionary *__nullable)assetOptions withCallback:(void (^)(AVPlayerItem *))handler {
if (!_textTracks) {
handler([AVPlayerItem playerItemWithAsset:asset]);
return;
@@ -458,8 +438,7 @@ static int const RCTVideoUnset = -1;
handler([AVPlayerItem playerItemWithAsset:mixComposition]);
}
- (void)playerItemForSource:(NSDictionary *)source withCallback:(void(^)(AVPlayerItem *))handler
{
- (void)playerItemForSource:(NSDictionary *)source withCallback:(void (^)(AVPlayerItem *))handler {
bool isNetwork = [RCTConvert BOOL:[source objectForKey:@"isNetwork"]];
bool isAsset = [RCTConvert BOOL:[source objectForKey:@"isAsset"]];
NSString *uri = [source objectForKey:@"uri"];
@@ -561,8 +540,7 @@ static int const RCTVideoUnset = -1;
#endif
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
if (object == _playerItem) {
// When timeMetadata is read the event onTimedMetadata is triggered
if ([keyPath isEqualToString:timedMetadata]) {
@@ -608,8 +586,7 @@ static int const RCTVideoUnset = -1;
if ((videoTrack.naturalSize.width == preferredTransform.tx
&& videoTrack.naturalSize.height == preferredTransform.ty)
|| (preferredTransform.tx == 0 && preferredTransform.ty == 0))
{
|| (preferredTransform.tx == 0 && preferredTransform.ty == 0)) {
orientation = @"landscape";
} else {
orientation = @"portrait";
@@ -673,8 +650,7 @@ static int const RCTVideoUnset = -1;
}
_playbackStalled = NO;
}
}
else if([keyPath isEqualToString:externalPlaybackActive]) {
} else if ([keyPath isEqualToString:externalPlaybackActive]) {
if (self.onVideoExternalPlaybackChange) {
self.onVideoExternalPlaybackChange(@{@"isExternalPlaybackActive": [NSNumber numberWithBool:_player.isExternalPlaybackActive],
@"target": self.reactTag});
@@ -685,8 +661,7 @@ static int const RCTVideoUnset = -1;
}
}
- (void)attachListeners
{
- (void)attachListeners {
// listen for end of file
[[NSNotificationCenter defaultCenter] removeObserver:self
name:AVPlayerItemDidPlayToEndTimeNotification
@@ -705,16 +680,14 @@ static int const RCTVideoUnset = -1;
object:nil];
}
- (void)playbackStalled:(NSNotification *)notification
{
- (void)playbackStalled:(NSNotification *)notification {
if (self.onPlaybackStalled) {
self.onPlaybackStalled(@{@"target": self.reactTag});
}
_playbackStalled = YES;
}
- (void)playerItemDidReachEnd:(NSNotification *)notification
{
- (void)playerItemDidReachEnd:(NSNotification *)notification {
if (self.onVideoEnd) {
self.onVideoEnd(@{@"target": self.reactTag});
}
@@ -730,43 +703,34 @@ static int const RCTVideoUnset = -1;
#pragma mark - Prop setters
- (void)setResizeMode:(NSString*)mode
{
if( _controls )
{
- (void)setResizeMode:(NSString *)mode {
if (_controls) {
_playerViewController.videoGravity = mode;
}
else
{
} else {
_playerLayer.videoGravity = mode;
}
_resizeMode = mode;
}
- (void)setPlayInBackground:(BOOL)playInBackground
{
- (void)setPlayInBackground:(BOOL)playInBackground {
_playInBackground = playInBackground;
}
- (void)setAllowsExternalPlayback:(BOOL)allowsExternalPlayback
{
- (void)setAllowsExternalPlayback:(BOOL)allowsExternalPlayback {
_allowsExternalPlayback = allowsExternalPlayback;
_player.allowsExternalPlayback = _allowsExternalPlayback;
}
- (void)setPlayWhenInactive:(BOOL)playWhenInactive
{
- (void)setPlayWhenInactive:(BOOL)playWhenInactive {
_playWhenInactive = playWhenInactive;
}
- (void)setIgnoreSilentSwitch:(NSString *)ignoreSilentSwitch
{
- (void)setIgnoreSilentSwitch:(NSString *)ignoreSilentSwitch {
_ignoreSilentSwitch = ignoreSilentSwitch;
[self applyModifiers];
}
- (void)setPaused:(BOOL)paused
{
- (void)setPaused:(BOOL)paused {
if (paused) {
[_player pause];
[_player setRate:0.0];
@@ -783,13 +747,11 @@ static int const RCTVideoUnset = -1;
_paused = paused;
}
- (float)getCurrentTime
{
- (float)getCurrentTime {
return _playerItem != NULL ? CMTimeGetSeconds(_playerItem.currentTime) : 0;
}
- (void)setCurrentTime:(float)currentTime
{
- (void)setCurrentTime:(float)currentTime {
NSDictionary *info = @{
@"time": [NSNumber numberWithFloat:currentTime],
@"tolerance": [NSNumber numberWithInt:100]
@@ -797,8 +759,7 @@ static int const RCTVideoUnset = -1;
[self setSeek:info];
}
- (void)setSeek:(NSDictionary *)info
{
- (void)setSeek:(NSDictionary *)info {
NSNumber *seekTime = info[@"time"];
NSNumber *seekTolerance = info[@"tolerance"];
@@ -840,26 +801,22 @@ static int const RCTVideoUnset = -1;
}
}
- (void)setRate:(float)rate
{
- (void)setRate:(float)rate {
_rate = rate;
[self applyModifiers];
}
- (void)setMuted:(BOOL)muted
{
- (void)setMuted:(BOOL)muted {
_muted = muted;
[self applyModifiers];
}
- (void)setVolume:(float)volume
{
- (void)setVolume:(float)volume {
_volume = volume;
[self applyModifiers];
}
- (void)applyModifiers
{
- (void)applyModifiers {
if (_muted) {
[_player setVolume:0];
[_player setMuted:YES];
@@ -883,8 +840,7 @@ static int const RCTVideoUnset = -1;
}
- (void)setMediaSelectionTrackForCharacteristic:(AVMediaCharacteristic)characteristic
withCriteria:(NSDictionary *)criteria
{
withCriteria:(NSDictionary *)criteria {
NSString *type = criteria[@"type"];
AVMediaSelectionGroup *group = [_player.currentItem.asset
mediaSelectionGroupForMediaCharacteristic:characteristic];
@@ -1055,16 +1011,14 @@ static int const RCTVideoUnset = -1;
[_player.currentItem selectMediaOption:mediaOption inMediaSelectionGroup:group];
}
- (void)setTextTracks:(NSArray*) textTracks;
{
- (void)setTextTracks:(NSArray *)textTracks {
_textTracks = textTracks;
// in case textTracks was set after selectedTextTrack
if (_selectedTextTrack) [self setSelectedTextTrack:_selectedTextTrack];
}
- (NSArray *)getAudioTrackInfo
{
- (NSArray *)getAudioTrackInfo {
NSMutableArray *audioTracks = [[NSMutableArray alloc] init];
AVMediaSelectionGroup *group = [_player.currentItem.asset
mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
@@ -1086,8 +1040,7 @@ static int const RCTVideoUnset = -1;
return audioTracks;
}
- (NSArray *)getTextTrackInfo
{
- (NSArray *)getTextTrackInfo {
// if sideloaded, textTracks will already be set
if (_textTracks) return _textTracks;
@@ -1113,17 +1066,14 @@ static int const RCTVideoUnset = -1;
return textTracks;
}
- (BOOL)getFullscreen
{
- (BOOL)getFullscreen {
return _fullscreenPlayerPresented;
}
- (void)setFullscreen:(BOOL)fullscreen {
if( fullscreen && !_fullscreenPlayerPresented && _player )
{
if (fullscreen && !_fullscreenPlayerPresented && _player) {
// Ensure player view controller is not null
if( !_playerViewController )
{
if (!_playerViewController) {
[self usePlayerViewController];
}
// Set presentation style to fullscreen
@@ -1131,17 +1081,14 @@ static int const RCTVideoUnset = -1;
// Find the nearest view controller
UIViewController *viewController = [self firstAvailableUIViewController];
if( !viewController )
{
if (!viewController) {
UIWindow *keyWindow = [[UIApplication sharedApplication] keyWindow];
viewController = keyWindow.rootViewController;
if( viewController.childViewControllers.count > 0 )
{
if (viewController.childViewControllers.count > 0) {
viewController = viewController.childViewControllers.lastObject;
}
}
if( viewController )
{
if (viewController) {
_presentingViewController = viewController;
if (self.onVideoFullscreenPlayerWillPresent) {
self.onVideoFullscreenPlayerWillPresent(@{@"target": self.reactTag});
@@ -1154,9 +1101,7 @@ static int const RCTVideoUnset = -1;
}
}];
}
}
else if ( !fullscreen && _fullscreenPlayerPresented )
{
} else if (!fullscreen && _fullscreenPlayerPresented) {
[self videoPlayerViewControllerWillDismiss:_playerViewController];
[_presentingViewController dismissViewControllerAnimated:true completion:^{
[self videoPlayerViewControllerDidDismiss:_playerViewController];
@@ -1208,10 +1153,8 @@ static int const RCTVideoUnset = -1;
}
}
- (void)usePlayerViewController
{
if( _player )
{
- (void)usePlayerViewController {
if (_player) {
_playerViewController = [self createPlayerViewController:_player withPlayerItem:_playerItem];
// to prevent video from being animated when resizeMode is 'cover'
// resize mode must be set before subview is added
@@ -1220,10 +1163,8 @@ static int const RCTVideoUnset = -1;
}
}
- (void)usePlayerLayer
{
if( _player )
{
- (void)usePlayerLayer {
if (_player) {
_playerLayer = [AVPlayerLayer playerLayerWithPlayer:_player];
_playerLayer.frame = self.bounds;
_playerLayer.needsDisplayOnBoundsChange = YES;
@@ -1239,18 +1180,13 @@ static int const RCTVideoUnset = -1;
}
}
- (void)setControls:(BOOL)controls
{
if( _controls != controls || (!_playerLayer && !_playerViewController) )
{
- (void)setControls:(BOOL)controls {
if (_controls != controls || (!_playerLayer && !_playerViewController)) {
_controls = controls;
if( _controls )
{
if (_controls) {
[self removePlayerLayer];
[self usePlayerViewController];
}
else
{
} else {
[_playerViewController.view removeFromSuperview];
_playerViewController = nil;
[self usePlayerLayer];
@@ -1258,8 +1194,7 @@ static int const RCTVideoUnset = -1;
}
}
- (void)setProgressUpdateInterval:(float)progressUpdateInterval
{
- (void)setProgressUpdateInterval:(float)progressUpdateInterval {
_progressUpdateInterval = progressUpdateInterval;
if (_timeObserver) {
@@ -1268,8 +1203,7 @@ static int const RCTVideoUnset = -1;
}
}
- (void)removePlayerLayer
{
- (void)removePlayerLayer {
[_playerLayer removeFromSuperlayer];
if (_playerLayerObserverSet) {
[_playerLayer removeObserver:self forKeyPath:readyForDisplayKeyPath];
@@ -1280,18 +1214,14 @@ static int const RCTVideoUnset = -1;
#pragma mark - RCTVideoPlayerViewControllerDelegate
- (void)videoPlayerViewControllerWillDismiss:(AVPlayerViewController *)playerViewController
{
if (_playerViewController == playerViewController && _fullscreenPlayerPresented && self.onVideoFullscreenPlayerWillDismiss)
{
- (void)videoPlayerViewControllerWillDismiss:(AVPlayerViewController *)playerViewController {
if (_playerViewController == playerViewController && _fullscreenPlayerPresented && self.onVideoFullscreenPlayerWillDismiss) {
self.onVideoFullscreenPlayerWillDismiss(@{@"target": self.reactTag});
}
}
- (void)videoPlayerViewControllerDidDismiss:(AVPlayerViewController *)playerViewController
{
if (_playerViewController == playerViewController && _fullscreenPlayerPresented)
{
- (void)videoPlayerViewControllerDidDismiss:(AVPlayerViewController *)playerViewController {
if (_playerViewController == playerViewController && _fullscreenPlayerPresented) {
_fullscreenPlayerPresented = false;
_presentingViewController = nil;
_playerViewController = nil;
@@ -1304,54 +1234,41 @@ static int const RCTVideoUnset = -1;
#pragma mark - React View Management
- (void)insertReactSubview:(UIView *)view atIndex:(NSInteger)atIndex
{
- (void)insertReactSubview:(UIView *)view atIndex:(NSInteger)atIndex {
// We are early in the game and somebody wants to set a subview.
// That can only be in the context of playerViewController.
if( !_controls && !_playerLayer && !_playerViewController )
{
if (!_controls && !_playerLayer && !_playerViewController) {
[self setControls:true];
}
if( _controls )
{
if (_controls) {
view.frame = self.bounds;
[_playerViewController.contentOverlayView insertSubview:view atIndex:atIndex];
}
else
{
} else {
RCTLogError(@"video cannot have any subviews");
}
return;
}
- (void)removeReactSubview:(UIView *)subview
{
if( _controls )
{
- (void)removeReactSubview:(UIView *)subview {
if (_controls) {
[subview removeFromSuperview];
}
else
{
} else {
RCTLogError(@"video cannot have any subviews");
}
return;
}
- (void)layoutSubviews
{
- (void)layoutSubviews {
[super layoutSubviews];
if( _controls )
{
if (_controls) {
_playerViewController.view.frame = self.bounds;
// also adjust all subviews of contentOverlayView
for (UIView *subview in _playerViewController.contentOverlayView.subviews) {
subview.frame = self.bounds;
}
}
else
{
} else {
[CATransaction begin];
[CATransaction setAnimationDuration:0];
_playerLayer.frame = self.bounds;
@@ -1361,8 +1278,7 @@ static int const RCTVideoUnset = -1;
#pragma mark - Lifecycle
- (void)removeFromSuperview
{
- (void)removeFromSuperview {
[_player pause];
if (_playbackRateObserverRegistered) {
[_player removeObserver:self forKeyPath:playbackRate context:nil];
@@ -1388,4 +1304,76 @@ static int const RCTVideoUnset = -1;
[super removeFromSuperview];
}
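// Exports the current player item (including any applied videoComposition) to a unique .mp4
// in the caches directory via AVAssetExportSession; resolves the promise with the file URL,
// or rejects if the export fails, is cancelled, or no asset / export session is available.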
- (void)save:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject {
AVAsset *asset = _playerItem.asset;
if (asset != nil) {
AVAssetExportSession *exportSession = [AVAssetExportSession
exportSessionWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
if (exportSession != nil) {
// Build a unique output path inside the app's caches directory
// (see generatePathInDirectory / cacheDirectoryPath below).
NSString *path = [self generatePathInDirectory:[[self cacheDirectoryPath] stringByAppendingPathComponent:@"Filter"]
withExtension:@".mp4"];
NSURL *url = [NSURL fileURLWithPath:path];
exportSession.outputFileType = AVFileTypeMPEG4;
exportSession.outputURL = url;
exportSession.videoComposition = _playerItem.videoComposition;
exportSession.shouldOptimizeForNetworkUse = true;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([exportSession status]) {
case AVAssetExportSessionStatusFailed:
reject(@"ERROR_COULD_NOT_EXPORT_VIDEO", @"Could not export video", exportSession.error);
break;
case AVAssetExportSessionStatusCancelled:
reject(@"ERROR_EXPORT_SESSION_CANCELLED", @"Export session was cancelled", exportSession.error);
break;
default:
resolve(@{@"uri": url.absoluteString});
break;
}
}];
} else {
reject(@"ERROR_COULD_NOT_CREATE_EXPORT_SESSION", @"Could not create export session", nil);
}
} else {
reject(@"ERROR_ASSET_NIL", @"Asset is nil", nil);
}
}
- (BOOL)ensureDirExistsWithPath:(NSString *)path {
BOOL isDir = NO;
BOOL exists = [[NSFileManager defaultManager] fileExistsAtPath:path isDirectory:&isDir];
if (!(exists && isDir)) {
// Create intermediate directories as needed; the return value signals failure.
BOOL created = [[NSFileManager defaultManager] createDirectoryAtPath:path withIntermediateDirectories:YES attributes:nil error:nil];
if (!created) {
return NO;
}
}
return YES;
}
- (NSString *)generatePathInDirectory:(NSString *)directory withExtension:(NSString *)extension {
NSString *fileName = [[[NSUUID UUID] UUIDString] stringByAppendingString:extension];
[self ensureDirExistsWithPath:directory];
return [directory stringByAppendingPathComponent:fileName];
}
- (NSString *)cacheDirectoryPath {
NSArray *array = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
return array[0];
}
@end

RCTVideoManager.h

@@ -1,5 +1,6 @@
#import <React/RCTViewManager.h>
#import <React/RCTBridgeModule.h>
@interface RCTVideoManager : RCTViewManager
@interface RCTVideoManager : RCTViewManager <RCTBridgeModule>
@end

RCTVideoManager.m

@@ -1,14 +1,13 @@
#import "RCTVideoManager.h"
#import "RCTVideo.h"
#import <React/RCTBridge.h>
#import <React/RCTUIManager.h>
#import <AVFoundation/AVFoundation.h>
@implementation RCTVideoManager
RCT_EXPORT_MODULE();
@synthesize bridge = _bridge;
- (UIView *)view
{
return [[RCTVideo alloc] initWithEventDispatcher:self.bridge.eventDispatcher];
@@ -16,7 +15,7 @@ RCT_EXPORT_MODULE();
- (dispatch_queue_t)methodQueue
{
return dispatch_get_main_queue();
return self.bridge.uiManager.methodQueue;
}
RCT_EXPORT_VIEW_PROPERTY(src, NSDictionary);
@@ -59,6 +58,22 @@ RCT_EXPORT_VIEW_PROPERTY(onPlaybackStalled, RCTBubblingEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onPlaybackResume, RCTBubblingEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onPlaybackRateChange, RCTBubblingEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onVideoExternalPlaybackChange, RCTBubblingEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onVideoSaved, RCTBubblingEventBlock);
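// Bridges Video.js saveAsync(options): resolves the RCTVideo instance for the given reactTag
// through the UIManager's view registry and forwards the promise to save:resolve:reject:.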
RCT_REMAP_METHOD(save,
options:(NSDictionary *)options
reactTag:(nonnull NSNumber *)reactTag
resolver:(RCTPromiseResolveBlock)resolve
rejecter:(RCTPromiseRejectBlock)reject)
{
[self.bridge.uiManager prependUIBlock:^(__unused RCTUIManager *uiManager, NSDictionary<NSNumber *, RCTVideo *> *viewRegistry) {
RCTVideo *view = viewRegistry[reactTag];
if (![view isKindOfClass:[RCTVideo class]]) {
RCTLogError(@"Invalid view returned from registry, expecting RCTVideo, got: %@", view);
} else {
[view save:options resolve:resolve reject:reject];
}
}];
}
- (NSDictionary *)constantsToExport
{