Added custom skin support for iOS. Added an onReceiveAdEvent prop to catch ad events on iOS.

This commit is contained in:
sdkcy 2021-07-26 16:11:37 +03:00
parent 96c717d6df
commit 0f94a3d956
4 changed files with 206 additions and 114 deletions

View File

@ -253,6 +253,13 @@ export default class Video extends Component {
}
}
}
// Forwards native IMA ad events to the consumer's onReceiveAdEvent callback,
// unwrapping the React Native synthetic event to its nativeEvent payload.
_onReceiveAdEvent = (event) => {
  const { onReceiveAdEvent } = this.props;
  if (onReceiveAdEvent) {
    onReceiveAdEvent(event.nativeEvent);
  }
};
getViewManagerConfig = viewManagerName => {
if (!NativeModules.UIManager.getViewManagerConfig) {
return NativeModules.UIManager[viewManagerName];
@ -328,6 +335,7 @@ export default class Video extends Component {
onGetLicense: nativeProps.drm && nativeProps.drm.getLicense && this._onGetLicense,
onPictureInPictureStatusChanged: this._onPictureInPictureStatusChanged,
onRestoreUserInterfaceForPictureInPictureStop: this._onRestoreUserInterfaceForPictureInPictureStop,
onReceiveAdEvent: this._onReceiveAdEvent,
});
const posterStyle = {
@ -499,6 +507,7 @@ Video.propTypes = {
onPictureInPictureStatusChanged: PropTypes.func,
needsToRestoreUserInterfaceForPictureInPictureStop: PropTypes.func,
onExternalPlaybackChange: PropTypes.func,
onReceiveAdEvent: PropTypes.func,
/* Required by react-native */
scaleX: PropTypes.number,

View File

@ -44,6 +44,7 @@
@property (nonatomic, copy) RCTDirectEventBlock onPictureInPictureStatusChanged;
@property (nonatomic, copy) RCTDirectEventBlock onRestoreUserInterfaceForPictureInPictureStop;
@property (nonatomic, copy) RCTDirectEventBlock onGetLicense;
@property (nonatomic, copy) RCTDirectEventBlock onReceiveAdEvent;
typedef NS_ENUM(NSInteger, RCTVideoError) {
RCTVideoErrorFromJSPart,
@ -71,4 +72,6 @@ typedef NS_ENUM(NSInteger, RCTVideoError) {
- (void)setLicenseResult:(NSString * )license;
- (BOOL)setLicenseResultError:(NSString * )error;
+ (NSString *)convertEventToString:(IMAAdEventType)event;
@end

View File

@ -35,27 +35,27 @@ static int const RCTVideoUnset = -1;
NSURL *_videoURL;
BOOL _requestingCertificate;
BOOL _requestingCertificateErrored;
/* DRM */
NSDictionary *_drm;
AVAssetResourceLoadingRequest *_loadingRequest;
/* Required to publish events */
RCTEventDispatcher *_eventDispatcher;
BOOL _playbackRateObserverRegistered;
BOOL _isExternalPlaybackActiveObserverRegistered;
BOOL _videoLoadStarted;
BOOL _isRequestAds;
bool _pendingSeek;
float _pendingSeekTime;
float _lastSeekTime;
/* For sending videoProgress events */
Float64 _progressUpdateInterval;
BOOL _controls;
id _timeObserver;
/* Keep track of any modifiers, need to be applied after each play */
float _volume;
float _rate;
@ -133,23 +133,23 @@ static int const RCTVideoUnset = -1;
selector:@selector(applicationWillResignActive:)
name:UIApplicationWillResignActiveNotification
object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(applicationDidEnterBackground:)
name:UIApplicationDidEnterBackgroundNotification
object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(applicationWillEnterForeground:)
name:UIApplicationWillEnterForegroundNotification
object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(audioRouteChanged:)
name:AVAudioSessionRouteChangeNotification
object:nil];
}
return self;
}
@ -159,9 +159,7 @@ static int const RCTVideoUnset = -1;
viewController.showsPlaybackControls = YES;
viewController.rctDelegate = self;
viewController.preferredOrientation = _fullscreenOrientation;
self.contentPlayhead = [[IMAAVPlayerContentPlayhead alloc] initWithAVPlayer:player];
[self setupAdsLoader];
viewController.view.frame = self.bounds;
viewController.player = player;
return viewController;
@ -178,7 +176,7 @@ static int const RCTVideoUnset = -1;
{
return([playerItem duration]);
}
return(kCMTimeInvalid);
}
@ -189,7 +187,7 @@ static int const RCTVideoUnset = -1;
{
return [playerItem seekableTimeRanges].firstObject.CMTimeRangeValue;
}
return (kCMTimeRangeZero);
}
@ -231,7 +229,7 @@ static int const RCTVideoUnset = -1;
- (void)applicationWillResignActive:(NSNotification *)notification
{
if (_playInBackground || _playWhenInactive || _paused) return;
[_player pause];
[_player setRate:0.0];
}
@ -273,19 +271,19 @@ static int const RCTVideoUnset = -1;
if (video == nil || video.status != AVPlayerItemStatusReadyToPlay) {
return;
}
CMTime playerDuration = [self playerItemDuration];
if (CMTIME_IS_INVALID(playerDuration)) {
return;
}
CMTime currentTime = _player.currentTime;
NSDate *currentPlaybackTime = _player.currentItem.currentDate;
const Float64 duration = CMTimeGetSeconds(playerDuration);
const Float64 currentTimeSecs = CMTimeGetSeconds(currentTime);
[[NSNotificationCenter defaultCenter] postNotificationName:@"RCTVideo_progress" object:nil userInfo:@{@"progress": [NSNumber numberWithDouble: currentTimeSecs / duration]}];
if( currentTimeSecs >= 0 && self.onVideoProgress) {
if(!_isRequestAds && currentTimeSecs >= 0.0001) {
[self requestAds];
@ -370,7 +368,7 @@ static int const RCTVideoUnset = -1;
[self removePlayerTimeObserver];
[self removePlayerItemObservers];
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t) 0), dispatch_get_main_queue(), ^{
// perform on next run loop, otherwise other passed react-props may not be set
[self playerItemForSource:self->_source withCallback:^(AVPlayerItem * playerItem) {
self->_playerItem = playerItem;
@ -379,9 +377,9 @@ static int const RCTVideoUnset = -1;
[self addPlayerItemObservers];
[self setFilter:self->_filterName];
[self setMaxBitRate:self->_maxBitRate];
[_player pause];
if (_playbackRateObserverRegistered) {
[_player removeObserver:self forKeyPath:playbackRate context:nil];
_playbackRateObserverRegistered = NO;
@ -390,16 +388,19 @@ static int const RCTVideoUnset = -1;
[self->_player removeObserver:self forKeyPath:externalPlaybackActive context:nil];
self->_isExternalPlaybackActiveObserverRegistered = NO;
}
self->_player = [AVPlayer playerWithPlayerItem:self->_playerItem];
self->_player.actionAtItemEnd = AVPlayerActionAtItemEndNone;
self.contentPlayhead = [[IMAAVPlayerContentPlayhead alloc] initWithAVPlayer:_player];
[self setupAdsLoader];
[self->_player addObserver:self forKeyPath:playbackRate options:0 context:nil];
self->_playbackRateObserverRegistered = YES;
[self->_player addObserver:self forKeyPath:externalPlaybackActive options:0 context:nil];
self->_isExternalPlaybackActiveObserverRegistered = YES;
[self addPlayerTimeObserver];
if (@available(iOS 10.0, *)) {
[self setAutomaticallyWaitsToMinimizeStalling:_automaticallyWaitsToMinimizeStalling];
@ -430,7 +431,7 @@ static int const RCTVideoUnset = -1;
if ([filepath containsString:@"file://"]) {
return [NSURL URLWithString:filepath];
}
// if no file found, check if the file exists in the Document directory
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString* relativeFilePath = [filepath lastPathComponent];
@ -439,7 +440,7 @@ static int const RCTVideoUnset = -1;
if (fileComponents.count > 1) {
relativeFilePath = [fileComponents objectAtIndex:1];
}
NSString *path = [paths.firstObject stringByAppendingPathComponent:relativeFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:path]) {
return [NSURL fileURLWithPath:path];
@ -453,27 +454,27 @@ static int const RCTVideoUnset = -1;
handler([AVPlayerItem playerItemWithAsset:asset]);
return;
}
// AVPlayer can't airplay AVMutableCompositions
_allowsExternalPlayback = NO;
// sideload text tracks
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
AVAssetTrack *videoAsset = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
AVMutableCompositionTrack *videoCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[videoCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration)
ofTrack:videoAsset
atTime:kCMTimeZero
error:nil];
AVAssetTrack *audioAsset = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
AVMutableCompositionTrack *audioCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[audioCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration)
ofTrack:audioAsset
atTime:kCMTimeZero
error:nil];
NSMutableArray* validTextTracks = [NSMutableArray array];
for (int i = 0; i < _textTracks.count; ++i) {
AVURLAsset *textURLAsset;
@ -497,7 +498,7 @@ static int const RCTVideoUnset = -1;
if (validTextTracks.count != _textTracks.count) {
[self setTextTracks:validTextTracks];
}
handler([AVPlayerItem playerItemWithAsset:mixComposition]);
}
@ -513,12 +514,12 @@ static int const RCTVideoUnset = -1;
DebugLog(@"Could not find video URL in source '%@'", source);
return;
}
NSURL *url = isNetwork || isAsset
? [NSURL URLWithString:uri]
: [[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]];
NSMutableDictionary *assetOptions = [[NSMutableDictionary alloc] init];
if (isNetwork) {
NSDictionary *headers = [source objectForKey:@"requestHeaders"];
if ([headers count] > 0) {
@ -526,7 +527,7 @@ static int const RCTVideoUnset = -1;
}
NSArray *cookies = [[NSHTTPCookieStorage sharedHTTPCookieStorage] cookies];
[assetOptions setObject:cookies forKey:AVURLAssetHTTPCookiesKey];
#if __has_include(<react-native-video/RCTVideoCache.h>)
if (shouldCache && (!_textTracks || !_textTracks.count)) {
/* The DVURLAsset created by cache doesn't have a tracksWithMediaType property, so trying
@ -538,7 +539,7 @@ static int const RCTVideoUnset = -1;
return;
}
#endif
asset = [AVURLAsset URLAssetWithURL:url options:assetOptions];
} else if (isAsset) {
asset = [AVURLAsset URLAssetWithURL:url options:nil];
@ -556,7 +557,7 @@ static int const RCTVideoUnset = -1;
dispatch_queue_t queue = dispatch_queue_create("assetQueue", nil);
[asset.resourceLoader setDelegate:self queue:queue];
}
[self playerItemPrepareText:asset assetOptions:assetOptions withCallback:handler];
}
@ -586,10 +587,10 @@ static int const RCTVideoUnset = -1;
return;
}
}
DVURLAsset *asset = [[DVURLAsset alloc] initWithURL:url options:options networkTimeout:10000];
asset.loaderDelegate = self;
/* More granular code to have control over the DVURLAsset
DVAssetLoaderDelegate *resourceLoaderDelegate = [[DVAssetLoaderDelegate alloc] initWithURL:url];
resourceLoaderDelegate.delegate = self;
@ -598,7 +599,7 @@ static int const RCTVideoUnset = -1;
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:[components URL] options:options];
[asset.resourceLoader setDelegate:resourceLoaderDelegate queue:dispatch_get_main_queue()];
*/
handler([AVPlayerItem playerItemWithAsset:asset]);
}];
}
@ -631,40 +632,40 @@ static int const RCTVideoUnset = -1;
for (AVMetadataItem *item in items) {
NSString *value = (NSString *)item.value;
NSString *identifier = item.identifier;
if (![value isEqual: [NSNull null]]) {
NSDictionary *dictionary = [[NSDictionary alloc] initWithObjects:@[value, identifier] forKeys:@[@"value", @"identifier"]];
[array addObject:dictionary];
}
}
self.onTimedMetadata(@{
@"target": self.reactTag,
@"metadata": array
});
}
}
if ([keyPath isEqualToString:statusKeyPath]) {
// Handle player item status change.
if (_playerItem.status == AVPlayerItemStatusReadyToPlay) {
float duration = CMTimeGetSeconds(_playerItem.asset.duration);
if (isnan(duration)) {
duration = 0.0;
}
NSObject *width = @"undefined";
NSObject *height = @"undefined";
NSString *orientation = @"undefined";
if ([_playerItem.asset tracksWithMediaType:AVMediaTypeVideo].count > 0) {
AVAssetTrack *videoTrack = [[_playerItem.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
width = [NSNumber numberWithFloat:videoTrack.naturalSize.width];
height = [NSNumber numberWithFloat:videoTrack.naturalSize.height];
CGAffineTransform preferredTransform = [videoTrack preferredTransform];
if ((videoTrack.naturalSize.width == preferredTransform.tx
&& videoTrack.naturalSize.height == preferredTransform.ty)
|| (preferredTransform.tx == 0 && preferredTransform.ty == 0))
@ -683,7 +684,7 @@ static int const RCTVideoUnset = -1;
[self setCurrentTime:_pendingSeekTime];
_pendingSeek = false;
}
if (self.onVideoLoad && _videoLoadStarted) {
self.onVideoLoad(@{@"duration": [NSNumber numberWithFloat:duration],
@"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(_playerItem.currentTime)],
@ -814,6 +815,12 @@ static int const RCTVideoUnset = -1;
// When the SDK notifies us that ads have been loaded, play them.
[adsManager start];
}
if (self.onReceiveAdEvent) {
NSString *type = [RCTVideo convertEventToString: event.type];
self.onReceiveAdEvent(@{@"event": type,
@"target": self.reactTag
});
}
}
- (void)adsManager:(IMAAdsManager *)adsManager didReceiveAdError:(IMAAdError *)error {
@ -843,7 +850,7 @@ static int const RCTVideoUnset = -1;
selector:@selector(playerItemDidReachEnd:)
name:AVPlayerItemDidPlayToEndTimeNotification
object:[_player currentItem]];
[[NSNotificationCenter defaultCenter] removeObserver:self
name:AVPlayerItemPlaybackStalledNotification
object:nil];
@ -851,7 +858,7 @@ static int const RCTVideoUnset = -1;
selector:@selector(playbackStalled:)
name:AVPlayerItemPlaybackStalledNotification
object:nil];
[[NSNotificationCenter defaultCenter] removeObserver:self
name:AVPlayerItemNewAccessLogEntryNotification
object:nil];
@ -866,13 +873,13 @@ static int const RCTVideoUnset = -1;
selector:@selector(didFailToFinishPlaying:)
name: AVPlayerItemFailedToPlayToEndTimeNotification
object:nil];
}
- (void)handleAVPlayerAccess:(NSNotification *)notification {
AVPlayerItemAccessLog *accessLog = [((AVPlayerItem *)notification.object) accessLog];
AVPlayerItemAccessLogEvent *lastEvent = accessLog.events.lastObject;
/* TODO: get this working
if (self.onBandwidthUpdate) {
self.onBandwidthUpdate(@{@"bitrate": [NSNumber numberWithFloat:lastEvent.observedBitrate]});
@ -906,7 +913,7 @@ static int const RCTVideoUnset = -1;
if(self.onVideoEnd) {
self.onVideoEnd(@{@"target": self.reactTag});
}
if (_repeat) {
AVPlayerItem *item = [notification object];
[item seekToTime:kCMTimeZero];
@ -959,7 +966,7 @@ static int const RCTVideoUnset = -1;
if (_pictureInPicture == pictureInPicture) {
return;
}
_pictureInPicture = pictureInPicture;
if (_pipController && _pictureInPicture && ![_pipController isPictureInPictureActive]) {
dispatch_async(dispatch_get_main_queue(), ^{
@ -1041,7 +1048,7 @@ static int const RCTVideoUnset = -1;
}
[_player setRate:_rate];
}
_paused = paused;
}
@ -1063,19 +1070,19 @@ static int const RCTVideoUnset = -1;
{
NSNumber *seekTime = info[@"time"];
NSNumber *seekTolerance = info[@"tolerance"];
int timeScale = 1000;
AVPlayerItem *item = _player.currentItem;
if (item && item.status == AVPlayerItemStatusReadyToPlay) {
// TODO check loadedTimeRanges
CMTime cmSeekTime = CMTimeMakeWithSeconds([seekTime floatValue], timeScale);
CMTime current = item.currentTime;
// TODO figure out a good tolerance level
CMTime tolerance = CMTimeMake([seekTolerance floatValue], timeScale);
BOOL wasPaused = _paused;
if (CMTimeCompare(current, cmSeekTime) != 0) {
if (!wasPaused) [_player pause];
[_player seekToTime:cmSeekTime toleranceBefore:tolerance toleranceAfter:tolerance completionHandler:^(BOOL finished) {
@ -1091,10 +1098,10 @@ static int const RCTVideoUnset = -1;
@"target": self.reactTag});
}
}];
_pendingSeek = false;
}
} else {
_pendingSeek = true;
_pendingSeekTime = [seekTime floatValue];
@ -1154,7 +1161,7 @@ static int const RCTVideoUnset = -1;
} else {
// Fallback on earlier versions
}
[self setMaxBitRate:_maxBitRate];
[self setSelectedAudioTrack:_selectedAudioTrack];
[self setSelectedTextTrack:_selectedTextTrack];
@ -1176,7 +1183,7 @@ static int const RCTVideoUnset = -1;
AVMediaSelectionGroup *group = [_player.currentItem.asset
mediaSelectionGroupForMediaCharacteristic:characteristic];
AVMediaSelectionOption *mediaOption;
if ([type isEqualToString:@"disabled"]) {
// Do nothing. We want to ensure option is nil
} else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) {
@ -1209,7 +1216,7 @@ static int const RCTVideoUnset = -1;
[_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group];
return;
}
// If a match isn't found, option will be nil and text tracks will be disabled
[_player.currentItem selectMediaOption:mediaOption inMediaSelectionGroup:group];
}
@ -1233,7 +1240,7 @@ static int const RCTVideoUnset = -1;
- (void) setSideloadedText {
NSString *type = _selectedTextTrack[@"type"];
NSArray *textTracks = [self getTextTrackInfo];
// The first few tracks will be audio & video track
int firstTextIndex = 0;
for (firstTextIndex = 0; firstTextIndex < _player.currentItem.tracks.count; ++firstTextIndex) {
@ -1241,9 +1248,9 @@ static int const RCTVideoUnset = -1;
break;
}
}
int selectedTrackIndex = RCTVideoUnset;
if ([type isEqualToString:@"disabled"]) {
// Do nothing. We want to ensure option is nil
} else if ([type isEqualToString:@"language"]) {
@ -1272,7 +1279,7 @@ static int const RCTVideoUnset = -1;
}
}
}
// in the situation that a selected text track is not available (eg. specifies a textTrack not available)
if (![type isEqualToString:@"disabled"] && selectedTrackIndex == RCTVideoUnset) {
CFArrayRef captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(kMACaptionAppearanceDomainUser);
@ -1289,7 +1296,7 @@ static int const RCTVideoUnset = -1;
}
}
}
for (int i = firstTextIndex; i < _player.currentItem.tracks.count; ++i) {
BOOL isEnabled = NO;
if (selectedTrackIndex != RCTVideoUnset) {
@ -1304,7 +1311,7 @@ static int const RCTVideoUnset = -1;
AVMediaSelectionGroup *group = [_player.currentItem.asset
mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
AVMediaSelectionOption *mediaOption;
if ([type isEqualToString:@"disabled"]) {
// Do nothing. We want to ensure option is nil
} else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) {
@ -1337,7 +1344,7 @@ static int const RCTVideoUnset = -1;
[_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group];
return;
}
// If a match isn't found, option will be nil and text tracks will be disabled
[_player.currentItem selectMediaOption:mediaOption inMediaSelectionGroup:group];
}
@ -1345,7 +1352,7 @@ static int const RCTVideoUnset = -1;
- (void)setTextTracks:(NSArray*) textTracks;
{
_textTracks = textTracks;
// in case textTracks was set after selectedTextTrack
if (_selectedTextTrack) [self setSelectedTextTrack:_selectedTextTrack];
}
@ -1377,7 +1384,7 @@ static int const RCTVideoUnset = -1;
{
// if sideloaded, textTracks will already be set
if (_textTracks) return _textTracks;
// if streaming video, we extract the text tracks
NSMutableArray *textTracks = [[NSMutableArray alloc] init];
AVMediaSelectionGroup *group = [_player.currentItem.asset
@ -1415,7 +1422,7 @@ static int const RCTVideoUnset = -1;
}
// Set presentation style to fullscreen
[_playerViewController setModalPresentationStyle:UIModalPresentationFullScreen];
// Find the nearest view controller
UIViewController *viewController = [self firstAvailableUIViewController];
if( !viewController )
@ -1476,15 +1483,15 @@ static int const RCTVideoUnset = -1;
// to prevent video from being animated when resizeMode is 'cover'
// resize mode must be set before subview is added
[self setResizeMode:_resizeMode];
if (_controls) {
UIViewController *viewController = [self reactViewController];
[viewController addChildViewController:_playerViewController];
[self addSubview:_playerViewController.view];
}
[_playerViewController addObserver:self forKeyPath:readyForDisplayKeyPath options:NSKeyValueObservingOptionNew context:nil];
[_playerViewController.contentOverlayView addObserver:self forKeyPath:@"frame" options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld context:NULL];
}
}
@ -1496,13 +1503,13 @@ static int const RCTVideoUnset = -1;
_playerLayer = [AVPlayerLayer playerLayerWithPlayer:_player];
_playerLayer.frame = self.bounds;
_playerLayer.needsDisplayOnBoundsChange = YES;
// to prevent video from being animated when resizeMode is 'cover'
// resize mode must be set before layer is added
[self setResizeMode:_resizeMode];
[_playerLayer addObserver:self forKeyPath:readyForDisplayKeyPath options:NSKeyValueObservingOptionNew context:nil];
_playerLayerObserverSet = YES;
[self.layer addSublayer:_playerLayer];
self.layer.needsDisplayOnBoundsChange = YES;
#if TARGET_OS_IOS
@ -1533,7 +1540,7 @@ static int const RCTVideoUnset = -1;
- (void)setProgressUpdateInterval:(float)progressUpdateInterval
{
_progressUpdateInterval = progressUpdateInterval;
if (_timeObserver) {
[self removePlayerTimeObserver];
[self addPlayerTimeObserver];
@ -1586,7 +1593,7 @@ static int const RCTVideoUnset = -1;
- (void)setFilter:(NSString *)filterName {
_filterName = filterName;
if (!_filterEnabled) {
return;
} else if ([[_source objectForKey:@"uri"] rangeOfString:@"m3u8"].location != NSNotFound) {
@ -1594,7 +1601,7 @@ static int const RCTVideoUnset = -1;
} else if (!_playerItem.asset) {
return;
}
CIFilter *filter = [CIFilter filterWithName:filterName];
_playerItem.videoComposition = [AVVideoComposition
videoCompositionWithAsset:_playerItem.asset
@ -1628,7 +1635,7 @@ static int const RCTVideoUnset = -1;
{
[self setControls:true];
}
if( _controls )
{
view.frame = self.bounds;
@ -1660,7 +1667,7 @@ static int const RCTVideoUnset = -1;
if( _controls )
{
_playerViewController.view.frame = self.bounds;
// also adjust all subviews of contentOverlayView
for (UIView* subview in _playerViewController.contentOverlayView.subviews) {
subview.frame = self.bounds;
@ -1689,36 +1696,36 @@ static int const RCTVideoUnset = -1;
_isExternalPlaybackActiveObserverRegistered = NO;
}
_player = nil;
[self removePlayerLayer];
[_playerViewController.contentOverlayView removeObserver:self forKeyPath:@"frame"];
[_playerViewController removeObserver:self forKeyPath:readyForDisplayKeyPath];
[_playerViewController.view removeFromSuperview];
_playerViewController.rctDelegate = nil;
_playerViewController.player = nil;
_playerViewController = nil;
[self removePlayerTimeObserver];
[self removePlayerItemObservers];
_eventDispatcher = nil;
[[NSNotificationCenter defaultCenter] removeObserver:self];
[super removeFromSuperview];
}
#pragma mark - Export
- (void)save:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject {
AVAsset *asset = _playerItem.asset;
if (asset != nil) {
AVAssetExportSession *exportSession = [AVAssetExportSession
exportSessionWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
if (exportSession != nil) {
NSString *path = nil;
NSArray *array = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
@ -1730,7 +1737,7 @@ static int const RCTVideoUnset = -1;
exportSession.videoComposition = _playerItem.videoComposition;
exportSession.shouldOptimizeForNetworkUse = true;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([exportSession status]) {
case AVAssetExportSessionStatusFailed:
reject(@"ERROR_COULD_NOT_EXPORT_VIDEO", @"Could not export video", exportSession.error);
@ -1742,19 +1749,19 @@ static int const RCTVideoUnset = -1;
resolve(@{@"uri": url.absoluteString});
break;
}
}];
} else {
reject(@"ERROR_COULD_NOT_CREATE_EXPORT_SESSION", @"Could not create export session", nil);
}
} else {
reject(@"ERROR_ASSET_NIL", @"Asset is nil", nil);
}
}
@ -1867,7 +1874,7 @@ didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest {
if ([self->_drm objectForKey:@"base64Certificate"]) {
certificateData = [[NSData alloc] initWithBase64EncodedData:certificateData options:NSDataBase64DecodingIgnoreUnknownCharacters];
}
if (certificateData != nil) {
NSData *contentIdData;
if(self.onGetLicense) {
@ -1919,7 +1926,7 @@ didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest {
NSData *postData = [post dataUsingEncoding:NSUTF8StringEncoding allowLossyConversion:YES];
[request setHTTPBody: postData];
}
NSURLSessionConfiguration *configuration = [NSURLSessionConfiguration defaultSessionConfiguration];
NSURLSession *session = [NSURLSession sessionWithConfiguration:configuration delegate:self delegateQueue:nil];
NSURLSessionDataTask *postDataTask = [session dataTaskWithRequest:request completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
@ -1966,7 +1973,7 @@ didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest {
}];
[postDataTask resume];
}
} else {
NSError *licenseError = [NSError errorWithDomain: @"RCTVideo"
code: RCTVideoErrorNoSPC
@ -1979,7 +1986,7 @@ didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest {
[self finishLoadingWithError:licenseError];
self->_requestingCertificateErrored = YES;
}
} else {
NSError *licenseError = [NSError errorWithDomain: @"RCTVideo"
code: RCTVideoErrorNoDataRequest
@ -2028,7 +2035,7 @@ didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest {
];
return [self finishLoadingWithError:licenseError];
}
} else {
NSError *licenseError = [NSError errorWithDomain: @"RCTVideo"
code: RCTVideoErrorNoDRMData
@ -2040,8 +2047,8 @@ didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest {
];
return [self finishLoadingWithError:licenseError];
}
return NO;
}
@ -2073,15 +2080,15 @@ didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest {
}
- (void)pictureInPictureControllerWillStopPictureInPicture:(AVPictureInPictureController *)pictureInPictureController {
}
- (void)pictureInPictureControllerWillStartPictureInPicture:(AVPictureInPictureController *)pictureInPictureController {
}
- (void)pictureInPictureController:(AVPictureInPictureController *)pictureInPictureController failedToStartPictureInPictureWithError:(NSError *)error {
}
- (void)pictureInPictureController:(AVPictureInPictureController *)pictureInPictureController restoreUserInterfaceForPictureInPictureStopWithCompletionHandler:(void (^)(BOOL))completionHandler {
@ -2093,4 +2100,75 @@ didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest {
}
#endif
#pragma mark - helpers
/// Maps an IMA SDK ad event type to the string name surfaced to JS through the
/// onReceiveAdEvent callback. Any event type not listed below (including types
/// added by future SDK versions) is reported as @"UNKNOWN".
+ (NSString *)convertEventToString:(IMAAdEventType)event {
  switch (event) {
    case kIMAAdEvent_AD_BREAK_READY:    return @"AD_BREAK_READY";
    case kIMAAdEvent_AD_BREAK_ENDED:    return @"AD_BREAK_ENDED";
    case kIMAAdEvent_AD_BREAK_STARTED:  return @"AD_BREAK_STARTED";
    case kIMAAdEvent_AD_PERIOD_ENDED:   return @"AD_PERIOD_ENDED";
    case kIMAAdEvent_AD_PERIOD_STARTED: return @"AD_PERIOD_STARTED";
    case kIMAAdEvent_ALL_ADS_COMPLETED: return @"ALL_ADS_COMPLETED";
    case kIMAAdEvent_CLICKED:           return @"CLICKED";
    case kIMAAdEvent_COMPLETE:          return @"COMPLETE";
    case kIMAAdEvent_CUEPOINTS_CHANGED: return @"CUEPOINTS_CHANGED";
    case kIMAAdEvent_FIRST_QUARTILE:    return @"FIRST_QUARTILE";
    case kIMAAdEvent_LOADED:            return @"LOADED";
    case kIMAAdEvent_LOG:               return @"LOG";
    case kIMAAdEvent_MIDPOINT:          return @"MIDPOINT";
    case kIMAAdEvent_PAUSE:             return @"PAUSE";
    case kIMAAdEvent_RESUME:            return @"RESUME";
    case kIMAAdEvent_SKIPPED:           return @"SKIPPED";
    case kIMAAdEvent_STARTED:           return @"STARTED";
    case kIMAAdEvent_STREAM_LOADED:     return @"STREAM_LOADED";
    case kIMAAdEvent_TAPPED:            return @"TAPPED";
    case kIMAAdEvent_THIRD_QUARTILE:    return @"THIRD_QUARTILE";
    default:                            return @"UNKNOWN";
  }
}
@end

View File

@ -71,6 +71,8 @@ RCT_EXPORT_VIEW_PROPERTY(onPlaybackResume, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onPlaybackRateChange, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onVideoExternalPlaybackChange, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onGetLicense, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onReceiveAdEvent, RCTDirectEventBlock);
RCT_REMAP_METHOD(save,
options:(NSDictionary *)options
reactTag:(nonnull NSNumber *)reactTag