Merge pull request #1109 from nfb-onf/master

iOS Side loading for captions and offline support
Hampton Maxwell 2018-07-10 20:03:35 -07:00 committed by GitHub
commit 91ba07c9e9
3 changed files with 247 additions and 79 deletions
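
For orientation, here is a minimal usage sketch of the new props from the JavaScript side. It assumes the react-native-video `<Video>` component; the track fields and `selectedTextTrack` values shown are inferred from the diff below and are illustrative, not authoritative.

```jsx
// Hypothetical usage sketch — not part of this commit.
// textTracks side-loads caption files next to the video; selectedTextTrack picks
// which one is displayed (by language, title, index, or disabled).
import React from 'react';
import Video from 'react-native-video';

const CaptionedPlayer = () => (
  <Video
    source={{ uri: 'https://example.com/video.mp4' }}
    textTracks={[
      {
        title: 'English CC',
        language: 'en',
        type: 'text/vtt', // assumed field; the native code below only reads `uri`
        uri: 'https://example.com/captions/en.vtt',
      },
    ]}
    selectedTextTrack={{ type: 'language', value: 'en' }}
    style={{ width: '100%', aspectRatio: 16 / 9 }}
  />
);

export default CaptionedPlayer;
```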

View File

@@ -11,6 +11,7 @@ import android.text.TextUtils;
 import android.util.Log;
 import android.view.View;
 import android.view.Window;
+import android.view.accessibility.CaptioningManager;
 import android.widget.FrameLayout;
 import com.brentvatne.react.R;
@@ -68,6 +69,7 @@ import java.lang.Math;
 import java.util.Map;
 import java.lang.Object;
 import java.util.ArrayList;
+import java.util.Locale;
 @SuppressLint("ViewConstructor")
 class ReactExoplayerView extends FrameLayout implements
@@ -737,10 +739,11 @@ class ReactExoplayerView extends FrameLayout implements
         TrackGroupArray groups = info.getTrackGroups(index);
         int trackIndex = C.INDEX_UNSET;
+        trackSelector.setSelectionOverride(index, groups, null);
         if (TextUtils.isEmpty(type)) {
             // Do nothing
         } else if (type.equals("disabled")) {
-            trackSelector.setSelectionOverride(index, groups, null);
             return;
         } else if (type.equals("language")) {
             for (int i = 0; i < groups.length; ++i) {
@@ -760,9 +763,25 @@ class ReactExoplayerView extends FrameLayout implements
             }
         } else if (type.equals("index")) {
             trackIndex = value.asInt();
-        } else { // default. invalid type or "system"
-            trackSelector.clearSelectionOverrides(index);
-            return;
+        } else { // default. Use system settings if possible
+            int sdk = android.os.Build.VERSION.SDK_INT;
+            if (sdk>18 && groups.length>0) {
+                CaptioningManager captioningManager = (CaptioningManager) themedReactContext.getSystemService(Context.CAPTIONING_SERVICE);
+                if (captioningManager.isEnabled()) {
+                    // default is to take the first object
+                    trackIndex = 0;
+                    String locale = Locale.getDefault().getDisplayLanguage();
+                    for (int i = 0; i < groups.length; ++i) {
+                        Format format = groups.get(i).getFormat(0);
+                        if (format.language != null && format.language.equals(locale)) {
+                            trackIndex = i;
+                            break;
+                        }
+                    }
+                }
+            } else return;
         }
         if (trackIndex == C.INDEX_UNSET) {
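
With the Android change above, a `selectedTextTrack` type that is none of `language`/`title`/`index`/`disabled` (for example `"system"`, per the old comment) no longer just clears the override: on API 19+ it consults `CaptioningManager` and, when system captions are enabled, picks the track matching the device locale, falling back to the first text track. A hedged sketch of driving that path from JS (prop names follow react-native-video; the value is illustrative):

```jsx
// Sketch only: defer the captions on/off decision and language to device settings.
// Any unrecognized type (such as 'system') falls through to the default branch above.
import React from 'react';
import Video from 'react-native-video';

export const SystemCaptionsPlayer = () => (
  <Video
    source={{ uri: 'https://example.com/video.mp4' }}
    selectedTextTrack={{ type: 'system' }}
  />
);
```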

View File

@@ -3,6 +3,8 @@
 #import <React/RCTBridgeModule.h>
 #import <React/RCTEventDispatcher.h>
 #import <React/UIView+React.h>
+#include <MediaAccessibility/MediaAccessibility.h>
+#include <AVFoundation/AVFoundation.h>
 static NSString *const statusKeyPath = @"status";
 static NSString *const playbackLikelyToKeepUpKeyPath = @"playbackLikelyToKeepUp";
@@ -43,6 +45,7 @@ static NSString *const timedMetadata = @"timedMetadata";
   BOOL _paused;
   BOOL _repeat;
   BOOL _allowsExternalPlayback;
+  NSArray * _textTracks;
   NSDictionary * _selectedTextTrack;
   BOOL _playbackStalled;
   BOOL _playInBackground;
@@ -284,6 +287,10 @@ static NSString *const timedMetadata = @"timedMetadata";
   [self removePlayerLayer];
   [self removePlayerTimeObserver];
   [self removePlayerItemObservers];
+  dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
+    // perform on next run loop, otherwise other passed react-props may not be set
   _playerItem = [self playerItemForSource:source];
   [self addPlayerItemObservers];
@@ -304,9 +311,8 @@ static NSString *const timedMetadata = @"timedMetadata";
   [self addPlayerTimeObserver];
-  dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
-    // Perform on next run loop, otherwise onVideoLoadStart is nil
-    if (self.onVideoLoadStart) {
+    //Perform on next run loop, otherwise onVideoLoadStart is nil
+    if(self.onVideoLoadStart) {
       id uri = [source objectForKey:@"uri"];
       id type = [source objectForKey:@"type"];
       self.onVideoLoadStart(@{@"src": @{
@@ -316,42 +322,95 @@ static NSString *const timedMetadata = @"timedMetadata";
           @"target": self.reactTag
         });
       }
   });
   _videoLoadStarted = YES;
 }
+- (NSURL*) urlFilePath:(NSString*) filepath {
+  NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
+  NSString* relativeFilePath = [filepath lastPathComponent];
+  // the file may be multiple levels below the documents directory
+  NSArray* fileComponents = [filepath componentsSeparatedByString:@"Documents/"];
+  if (fileComponents.count>1) {
+    relativeFilePath = [fileComponents objectAtIndex:1];
+  }
+  NSString *path = [paths.firstObject stringByAppendingPathComponent:relativeFilePath];
+  if ([[NSFileManager defaultManager] fileExistsAtPath:path]) {
+    return [NSURL fileURLWithPath:path];
+  }
+  return nil;
+}
 - (AVPlayerItem*)playerItemForSource:(NSDictionary *)source
 {
   bool isNetwork = [RCTConvert BOOL:[source objectForKey:@"isNetwork"]];
   bool isAsset = [RCTConvert BOOL:[source objectForKey:@"isAsset"]];
   NSString *uri = [source objectForKey:@"uri"];
   NSString *type = [source objectForKey:@"type"];
-  NSDictionary *headers = [source objectForKey:@"requestHeaders"];
-  NSURL *url = (isNetwork || isAsset) ?
-    [NSURL URLWithString:uri] :
-    [[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]];
+  AVURLAsset *asset;
+  NSMutableDictionary *assetOptions = [[NSMutableDictionary alloc] init];
   if (isNetwork) {
-    NSMutableDictionary *assetOptions = [[NSMutableDictionary alloc]init];
     /* Per #1091, this is not a public API. We need to either get approval from Apple to use this
      * or use a different approach.
+    NSDictionary *headers = [source objectForKey:@"requestHeaders"];
     if ([headers count] > 0) {
       [assetOptions setObject:headers forKey:@"AVURLAssetHTTPHeaderFieldsKey"];
     }
     */
     NSArray *cookies = [[NSHTTPCookieStorage sharedHTTPCookieStorage] cookies];
     [assetOptions setObject:cookies forKey:AVURLAssetHTTPCookiesKey];
-    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:assetOptions];
-    return [AVPlayerItem playerItemWithAsset:asset];
+    asset = [AVURLAsset URLAssetWithURL:[NSURL URLWithString:uri] options:assetOptions];
+  } else if (isAsset) { // assets on iOS have to be in the Documents folder
+    asset = [AVURLAsset URLAssetWithURL:[self urlFilePath:uri] options:nil];
+  } else { // file passed in through JS, or an asset in the Xcode project
+    asset = [AVURLAsset URLAssetWithURL:[[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]] options:nil];
   }
-  else if (isAsset) {
-    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
-    return [AVPlayerItem playerItemWithAsset:asset];
-  }
-  return [AVPlayerItem playerItemWithURL:url];
+  if (!_textTracks) {
+    return [AVPlayerItem playerItemWithAsset:asset];
+  }
+  // sideload text tracks
+  AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
+  AVAssetTrack *videoAsset = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
+  AVMutableCompositionTrack *videoCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
+  [videoCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration)
+                          ofTrack:videoAsset
+                           atTime:kCMTimeZero
+                            error:nil];
+  AVAssetTrack *audioAsset = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
+  AVMutableCompositionTrack *audioCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
+  [audioCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration)
+                          ofTrack:audioAsset
+                           atTime:kCMTimeZero
+                            error:nil];
+  for (int i = 0; i < _textTracks.count; ++i) {
+    AVURLAsset *textURLAsset;
+    NSString *textUri = [_textTracks objectAtIndex:i][@"uri"];
+    if ([[textUri lowercaseString] hasPrefix:@"http"]) {
+      textURLAsset = [AVURLAsset URLAssetWithURL:[NSURL URLWithString:textUri] options:assetOptions];
+    } else {
+      textURLAsset = [AVURLAsset URLAssetWithURL:[self urlFilePath:textUri] options:nil];
+    }
+    AVAssetTrack *textTrackAsset = [textURLAsset tracksWithMediaType:AVMediaTypeText].firstObject;
+    AVMutableCompositionTrack *textCompTrack = [mixComposition
+                                                addMutableTrackWithMediaType:AVMediaTypeText
+                                                preferredTrackID:kCMPersistentTrackID_Invalid];
+    [textCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration)
+                           ofTrack:textTrackAsset
+                            atTime:kCMTimeZero
+                             error:nil];
+  }
+  return [AVPlayerItem playerItemWithAsset:mixComposition];
 }
 - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
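
The `urlFilePath:` helper above resolves any path under the app's Documents/ directory, so side-loaded captions can come from previously downloaded files for offline playback. A hedged sketch, with hypothetical local paths and `react-native-fs` assumed only to obtain the Documents directory:

```jsx
// Sketch only: a caption file that was downloaded for offline use.
// The caption uri does not start with "http", so the native code routes it
// through urlFilePath: and loads it from Documents/.
import React from 'react';
import Video from 'react-native-video';
import RNFS from 'react-native-fs'; // assumed dependency for DocumentDirectoryPath

export const OfflineCaptionedPlayer = () => (
  <Video
    source={{ uri: 'https://example.com/video.mp4' }}
    textTracks={[
      {
        title: 'English (downloaded)',
        language: 'en',
        type: 'text/vtt',
        uri: `${RNFS.DocumentDirectoryPath}/captions/video.en.vtt`,
      },
    ]}
    selectedTextTrack={{ type: 'title', value: 'English (downloaded)' }}
  />
);
```
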
@@ -363,7 +422,7 @@ static NSString *const timedMetadata = @"timedMetadata";
     if (items && ![items isEqual:[NSNull null]] && items.count > 0) {
       NSMutableArray *array = [NSMutableArray new];
       for (AVMetadataItem *item in items) {
-        NSString *value = item.value;
+        NSString *value = (NSString *)item.value;
         NSString *identifier = item.identifier;
         if (![value isEqual: [NSNull null]]) {
@@ -668,15 +727,91 @@ static NSString *const timedMetadata = @"timedMetadata";
 - (void)setSelectedTextTrack:(NSDictionary *)selectedTextTrack {
   _selectedTextTrack = selectedTextTrack;
-  NSString *type = selectedTextTrack[@"type"];
+  if (_textTracks) {
+    [self setSideloadedText];
+  } else {
+    [self setStreamingText];
+  }
+}
+- (void) setSideloadedText {
+  NSString *type = _selectedTextTrack[@"type"];
+  NSArray* textTracks = [self getTextTrackInfo];
+  // The first few tracks will be audio & video track
+  int firstTextIndex = 0;
+  for (firstTextIndex = 0; firstTextIndex < _player.currentItem.tracks.count; ++firstTextIndex) {
+    if ([_player.currentItem.tracks[firstTextIndex].assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible]) {
+      break;
+    }
+  }
+  int selectedTrackIndex = -1;
+  if ([type isEqualToString:@"disabled"]) {
+    // Do nothing. We want to ensure option is nil
+  } else if ([type isEqualToString:@"language"]) {
+    NSString *selectedValue = _selectedTextTrack[@"value"];
+    for (int i = 0; i < textTracks.count; ++i) {
+      NSDictionary *currentTextTrack = [textTracks objectAtIndex:i];
+      if ([selectedValue isEqualToString:currentTextTrack[@"language"]]) {
+        selectedTrackIndex = i;
+        break;
+      }
+    }
+  } else if ([type isEqualToString:@"title"]) {
+    NSString *selectedValue = _selectedTextTrack[@"value"];
+    for (int i = 0; i < textTracks.count; ++i) {
+      NSDictionary *currentTextTrack = [textTracks objectAtIndex:i];
+      if ([selectedValue isEqualToString:currentTextTrack[@"title"]]) {
+        selectedTrackIndex = i;
+        break;
+      }
+    }
+  } else if ([type isEqualToString:@"index"]) {
+    if ([_selectedTextTrack[@"value"] isKindOfClass:[NSNumber class]]) {
+      int index = [_selectedTextTrack[@"value"] intValue];
+      if (textTracks.count > index) {
+        selectedTrackIndex = index;
+      }
+    }
+  }
+  // user's selected language might not be available, or system defaults have captions enabled
+  if (selectedTrackIndex == -1 || [type isEqualToString:@"default"]) {
+    CFArrayRef captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(kMACaptionAppearanceDomainUser);
+    NSArray *captionSettings = (__bridge NSArray*)captioningMediaCharacteristics;
+    if ([captionSettings containsObject: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility]) {
+      // iterate through the textTracks to find a matching option, or default to the first object.
+      selectedTrackIndex = 0;
+      NSString * systemLanguage = [[NSLocale preferredLanguages] firstObject];
+      for (int i = 0; i < textTracks.count; ++i) {
+        NSDictionary *currentTextTrack = [textTracks objectAtIndex:i];
+        if ([systemLanguage isEqualToString:currentTextTrack[@"language"]]) {
+          selectedTrackIndex = i;
+          break;
+        }
+      }
+    }
+  }
+  for (int i = firstTextIndex; i < _player.currentItem.tracks.count; ++i) {
+    BOOL isEnabled = i == selectedTrackIndex + firstTextIndex;
+    [_player.currentItem.tracks[i] setEnabled:isEnabled];
+  }
+}
+-(void) setStreamingText {
+  NSString *type = _selectedTextTrack[@"type"];
   AVMediaSelectionGroup *group = [_player.currentItem.asset
     mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
-  AVMediaSelectionOption *option;
+  AVMediaSelectionOption *mediaOption;
   if ([type isEqualToString:@"disabled"]) {
     // Do nothing. We want to ensure option is nil
   } else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) {
-    NSString *value = selectedTextTrack[@"value"];
+    NSString *value = _selectedTextTrack[@"value"];
     for (int i = 0; i < group.options.count; ++i) {
       AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i];
       NSString *optionValue;
@@ -688,17 +823,17 @@ static NSString *const timedMetadata = @"timedMetadata";
             objectAtIndex:0];
       }
       if ([value isEqualToString:optionValue]) {
-        option = currentOption;
+        mediaOption = currentOption;
         break;
       }
     }
   //} else if ([type isEqualToString:@"default"]) {
   //  option = group.defaultOption; */
   } else if ([type isEqualToString:@"index"]) {
-    if ([selectedTextTrack[@"value"] isKindOfClass:[NSNumber class]]) {
-      int index = [selectedTextTrack[@"value"] intValue];
+    if ([_selectedTextTrack[@"value"] isKindOfClass:[NSNumber class]]) {
+      int index = [_selectedTextTrack[@"value"] intValue];
       if (group.options.count > index) {
-        option = [group.options objectAtIndex:index];
+        mediaOption = [group.options objectAtIndex:index];
       }
     }
   } else { // default. invalid type or "system"
@@ -707,11 +842,24 @@ static NSString *const timedMetadata = @"timedMetadata";
   }
   // If a match isn't found, option will be nil and text tracks will be disabled
-  [_player.currentItem selectMediaOption:option inMediaSelectionGroup:group];
+  [_player.currentItem selectMediaOption:mediaOption inMediaSelectionGroup:group];
+}
+- (void)setTextTracks:(NSArray*) textTracks;
+{
+  _textTracks = textTracks;
+  // in case textTracks was set after selectedTextTrack
+  if (_selectedTextTrack) [self setSelectedTextTrack:_selectedTextTrack];
 }
 - (NSArray *)getTextTrackInfo
 {
+  // if sideloaded, textTracks will already be set
+  if (_textTracks) return _textTracks;
+  // if streaming video, we extract the text tracks
   NSMutableArray *textTracks = [[NSMutableArray alloc] init];
   AVMediaSelectionGroup *group = [_player.currentItem.asset
     mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];

View File

@@ -23,6 +23,7 @@ RCT_EXPORT_VIEW_PROPERTY(src, NSDictionary);
 RCT_EXPORT_VIEW_PROPERTY(resizeMode, NSString);
 RCT_EXPORT_VIEW_PROPERTY(repeat, BOOL);
 RCT_EXPORT_VIEW_PROPERTY(allowsExternalPlayback, BOOL);
+RCT_EXPORT_VIEW_PROPERTY(textTracks, NSArray);
 RCT_EXPORT_VIEW_PROPERTY(selectedTextTrack, NSDictionary);
 RCT_EXPORT_VIEW_PROPERTY(paused, BOOL);
 RCT_EXPORT_VIEW_PROPERTY(muted, BOOL);