diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5c6d5078..13455d3f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,13 @@
## Changelog
+### Next Version
+* Basic fullscreen support for Android MediaPlayer [#1138](https://github.com/react-native-community/react-native-video/pull/1138)
+* Simplify default Android SDK code [#1145](https://github.com/react-native-community/react-native-video/pull/1145) [#1146](https://github.com/react-native-community/react-native-video/pull/1146)
+
+### Version 3.1.0
+* Support sidecar text tracks on iOS [#1109](https://github.com/react-native-community/react-native-video/pull/1109)
+* Support onAudioBecomingNoisy on iOS [#1131](https://github.com/react-native-community/react-native-video/pull/1131)
+
### Version 3.0
* Inherit Android buildtools and SDK version from the root project [#1081](https://github.com/react-native-community/react-native-video/pull/1081)
* Automatically play on ExoPlayer when the paused prop is not set [#1083](https://github.com/react-native-community/react-native-video/pull/1083)
diff --git a/README.md b/README.md
index 76301dff..6d519a40 100644
--- a/README.md
+++ b/README.md
@@ -180,6 +180,10 @@ using System.Collections.Generic;
## Usage
```javascript
+// Load the module
+
+import Video from 'react-native-video';
+
// Within your render function, assuming you have a file called
// "background.mp4" in your project. You can include multiple videos
// on a single screen if you like.
@@ -191,21 +195,8 @@ using System.Collections.Generic;
onBuffer={this.onBuffer} // Callback when remote video is buffering
onEnd={this.onEnd} // Callback when playback finishes
onError={this.videoError} // Callback when video cannot be loaded
- onFullscreenPlayerWillPresent={this.fullScreenPlayerWillPresent} // Callback before fullscreen starts
- onFullscreenPlayerDidPresent={this.fullScreenPlayerDidPresent} // Callback after fullscreen started
- onFullscreenPlayerWillDismiss={this.fullScreenPlayerWillDismiss} // Callback before fullscreen stops
- onFullscreenPlayerDidDismiss={this.fullScreenPlayerDidDismiss} // Callback after fullscreen stopped
style={styles.backgroundVideo} />
-// Later to trigger fullscreen
-this.player.presentFullscreenPlayer()
-
-// Disable fullscreen
-this.player.dismissFullscreenPlayer()
-
-// To set video position in seconds (seek)
-this.player.seek(0)
-
// Later on in your styles..
var styles = StyleSheet.create({
backgroundVideo: {
@@ -232,6 +223,7 @@ var styles = StyleSheet.create({
* [rate](#rate)
* [repeat](#repeat)
* [resizeMode](#resizemode)
+* [selectedAudioTrack](#selectedaudiotrack)
* [selectedTextTrack](#selectedtexttrack)
* [stereoPan](#stereopan)
* [textTracks](#texttracks)
@@ -239,12 +231,19 @@ var styles = StyleSheet.create({
* [volume](#volume)
### Event props
+* [onAudioBecomingNoisy](#onaudiobecomingnoisy)
+* [onFullscreenPlayerWillPresent](#onfullscreenplayerwillpresent)
+* [onFullscreenPlayerDidPresent](#onfullscreenplayerdidpresent)
+* [onFullscreenPlayerWillDismiss](#onfullscreenplayerwilldismiss)
+* [onFullscreenPlayerDidDismiss](#onfullscreenplayerdiddismiss)
* [onLoad](#onload)
* [onLoadStart](#onloadstart)
* [onProgress](#onprogress)
* [onTimedMetadata](#ontimedmetadata)
### Methods
+* [dismissFullscreenPlayer](#dismissfullscreenplayer)
+* [presentFullscreenPlayer](#presentfullscreenplayer)
* [seek](#seek)
### Configurable props
@@ -355,6 +354,36 @@ Determines how to resize the video when the frame doesn't match the raw video di
Platforms: Android ExoPlayer, Android MediaPlayer, iOS, Windows UWP
+#### selectedAudioTrack
+Configure which audio track, if any, is played.
+
+```
+selectedAudioTrack={{
+ type: Type,
+ value: Value
+}}
+```
+
+Example:
+```
+selectedAudioTrack={{
+ type: "title",
+ value: "Dubbing"
+}}
+```
+
+Type | Value | Description
+--- | --- | ---
+"system" (default) | N/A | Play the audio track that matches the system language. If none match, play the first track.
+"disabled" | N/A | Turn off audio
+"title" | string | Play the audio track with the title specified as the Value, e.g. "French"
+"language" | string | Play the audio track with the language specified as the Value, e.g. "fr"
+"index" | number | Play the audio track with the index specified as the Value, e.g. 0
+
+If a track matching the specified Type (and Value if appropriate) is unavailable, the first audio track will be played. If multiple tracks match the criteria, the first match will be used.
+
+Platforms: Android ExoPlayer, iOS
+
#### selectedTextTrack
Configure which text track (caption or subtitle), if any, is shown.
@@ -402,9 +431,11 @@ Property | Description
--- | ---
title | Descriptive name for the track
language | 2 letter [ISO 639-1 code](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) representing the language
-type | Mime type of the track
* TextTrackType.SRT - .srt SubRip Subtitle
* TextTrackType.TTML - .ttml TTML
* TextTrackType.VTT - .vtt WebVTT
+type | Mime type of the track
* TextTrackType.SRT - SubRip (.srt)
* TextTrackType.TTML - TTML (.ttml)
* TextTrackType.VTT - WebVTT (.vtt)
iOS only supports VTT, Android ExoPlayer supports all 3
uri | URL for the text track. Currently, only tracks hosted on a webserver are supported
+On iOS, sidecar text tracks are only supported for individual files, not HLS playlists. For HLS, you should include the text tracks as part of the playlist.
+
Example:
```
import { TextTrackType }, Video from 'react-native-video';
@@ -413,21 +444,20 @@ textTracks={[
{
title: "English CC",
language: "en",
- type: "text/vtt", TextTrackType.VTT,
+ type: TextTrackType.VTT, // "text/vtt"
uri: "https://bitdash-a.akamaihd.net/content/sintel/subtitles/subtitles_en.vtt"
},
{
title: "Spanish Subtitles",
language: "es",
- type: "application/x-subrip", TextTrackType.SRT,
+ type: TextTrackType.SRT, // "application/x-subrip"
uri: "https://durian.blender.org/wp-content/content/subtitles/sintel_es.srt"
}
]}
```
-This isn't support on iOS because AVPlayer doesn't support it. Text tracks must be loaded as part of an HLS playlist.
-Platforms: Android ExoPlayer
+Platforms: Android ExoPlayer, iOS
#### useTextureView
Output to a TextureView instead of the default SurfaceView. In general, you will want to use SurfaceView because it is more efficient and provides better performance. However, SurfaceViews has two limitations:
@@ -451,6 +481,41 @@ Platforms: all
### Event props
+#### onAudioBecomingNoisy
+Callback function that is called when the audio is about to become 'noisy' due to a change in audio outputs. Typically this is called when audio output is being switched from an external source like headphones back to the internal speaker. It's a good idea to pause the media when this happens so the speaker doesn't start blasting sound.
+
+Payload: none
+
+Platforms: Android ExoPlayer, iOS
+
+#### onFullscreenPlayerWillPresent
+Callback function that is called when the player is about to enter fullscreen mode.
+
+Payload: none
+
+Platforms: Android ExoPlayer, Android MediaPlayer, iOS
+
+#### onFullscreenPlayerDidPresent
+Callback function that is called when the player has entered fullscreen mode.
+
+Payload: none
+
+Platforms: Android ExoPlayer, Android MediaPlayer, iOS
+
+#### onFullscreenPlayerWillDismiss
+Callback function that is called when the player is about to exit fullscreen mode.
+
+Payload: none
+
+Platforms: Android ExoPlayer, Android MediaPlayer, iOS
+
+#### onFullscreenPlayerDidDismiss
+Callback function that is called when the player has exited fullscreen mode.
+
+Payload: none
+
+Platforms: Android ExoPlayer, Android MediaPlayer, iOS
+
#### onLoad
Callback function that is called when the media is loaded and ready to play.
@@ -461,7 +526,8 @@ Property | Type | Description
currentPosition | number | Time in seconds where the media will start
duration | number | Length of the media in seconds
naturalSize | object | Properties:
* width - Width in pixels that the video was encoded at
* height - Height in pixels that the video was encoded at
* orientation - "portrait" or "landscape"
-textTracks | array | An array of text track info objects with the following properties:
* index - Index number
* title - Description of the track
* language - 2 letter [ISO 639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) language code
* type - Mime type of track
+audioTracks | array | An array of audio track info objects with the following properties:
* index - Index number
* title - Description of the track
* language - 2 letter [ISO 639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) or 3 letter [ISO 639-2](https://en.wikipedia.org/wiki/List_of_ISO_639-2_codes) language code
* type - Mime type of track
+textTracks | array | An array of text track info objects with the following properties:
* index - Index number
* title - Description of the track
* language - 2 letter [ISO 639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) or 3 letter [ISO 639-2](https://en.wikipedia.org/wiki/List_of_ISO_639-2_codes) language code
* type - Mime type of track
Example:
```
@@ -479,6 +545,10 @@ Example:
orientation: 'landscape'
width: '1920'
},
+      audioTracks: [
+        { language: 'es', title: 'Spanish', type: 'audio/mpeg', index: 0 },
+        { language: 'en', title: 'English', type: 'audio/mpeg', index: 1 },
+      ],
textTracks: [
{ title: '#1 French', language: 'fr', index: 0, type: 'text/vtt' },
{ title: '#2 English CC', language: 'en', index: 1, type: 'text/vtt' },
@@ -560,6 +630,34 @@ return (
);
```
+#### dismissFullscreenPlayer
+`dismissFullscreenPlayer()`
+
+Take the player out of fullscreen mode.
+
+Example:
+```
+this.player.dismissFullscreenPlayer();
+```
+
+Platforms: Android ExoPlayer, Android MediaPlayer, iOS
+
+#### presentFullscreenPlayer
+`presentFullscreenPlayer()`
+
+Put the player in fullscreen mode.
+
+On iOS, this displays the video in a fullscreen view controller with controls.
+
+On Android ExoPlayer & MediaPlayer, this puts the navigation controls in fullscreen mode. It is not a complete fullscreen implementation, so you will still need to apply a style that makes the width and height match your screen dimensions to get a fullscreen video.
+
+Example:
+```
+this.player.presentFullscreenPlayer();
+```
+
+Platforms: Android ExoPlayer, Android MediaPlayer, iOS
+
#### seek()
`seek(seconds)`
diff --git a/Video.js b/Video.js
index 7a0d7a39..f32725f3 100644
--- a/Video.js
+++ b/Video.js
@@ -235,6 +235,7 @@ export default class Video extends Component {
onVideoEnd: this._onEnd,
onVideoBuffer: this._onBuffer,
onTimedMetadata: this._onTimedMetadata,
+ onVideoAudioBecomingNoisy: this._onAudioBecomingNoisy,
onVideoFullscreenPlayerWillPresent: this._onFullscreenPlayerWillPresent,
onVideoFullscreenPlayerDidPresent: this._onFullscreenPlayerDidPresent,
onVideoFullscreenPlayerWillDismiss: this._onFullscreenPlayerWillDismiss,
@@ -296,6 +297,7 @@ Video.propTypes = {
onVideoSeek: PropTypes.func,
onVideoEnd: PropTypes.func,
onTimedMetadata: PropTypes.func,
+ onVideoAudioBecomingNoisy: PropTypes.func,
onVideoFullscreenPlayerWillPresent: PropTypes.func,
onVideoFullscreenPlayerDidPresent: PropTypes.func,
onVideoFullscreenPlayerWillDismiss: PropTypes.func,
@@ -314,6 +316,13 @@ Video.propTypes = {
posterResizeMode: Image.propTypes.resizeMode,
repeat: PropTypes.bool,
allowsExternalPlayback: PropTypes.bool,
+ selectedAudioTrack: PropTypes.shape({
+ type: PropTypes.string.isRequired,
+ value: PropTypes.oneOfType([
+ PropTypes.string,
+ PropTypes.number
+ ])
+ }),
selectedTextTrack: PropTypes.shape({
type: PropTypes.string.isRequired,
value: PropTypes.oneOfType([
diff --git a/android-exoplayer/build.gradle b/android-exoplayer/build.gradle
index a957dd62..4c1e9c32 100644
--- a/android-exoplayer/build.gradle
+++ b/android-exoplayer/build.gradle
@@ -1,20 +1,16 @@
apply plugin: 'com.android.library'
-def _ext = rootProject.ext
-
-def _reactNativeVersion = _ext.has('reactNative') ? _ext.reactNative : '+'
-def _compileSdkVersion = _ext.has('compileSdkVersion') ? _ext.compileSdkVersion : 27
-def _buildToolsVersion = _ext.has('buildToolsVersion') ? _ext.buildToolsVersion : '27.0.3'
-def _minSdkVersion = _ext.has('minSdkVersion') ? _ext.minSdkVersion : 16
-def _targetSdkVersion = _ext.has('targetSdkVersion') ? _ext.targetSdkVersion : 27
+def safeExtGet(prop, fallback) {
+ rootProject.ext.has(prop) ? rootProject.ext.get(prop) : fallback
+}
android {
- compileSdkVersion _compileSdkVersion
- buildToolsVersion _buildToolsVersion
+ compileSdkVersion safeExtGet('compileSdkVersion', 27)
+ buildToolsVersion safeExtGet('buildToolsVersion', '27.0.3')
defaultConfig {
- minSdkVersion _minSdkVersion
- targetSdkVersion _targetSdkVersion
+ minSdkVersion safeExtGet('minSdkVersion', 16)
+ targetSdkVersion safeExtGet('targetSdkVersion', 27)
versionCode 1
versionName "1.0"
}
@@ -22,7 +18,7 @@ android {
dependencies {
//noinspection GradleDynamicVersion
- provided "com.facebook.react:react-native:${_reactNativeVersion}"
+ provided "com.facebook.react:react-native:${safeExtGet('reactNativeVersion', '+')}"
compile 'com.google.android.exoplayer:exoplayer:2.7.3'
compile('com.google.android.exoplayer:extension-okhttp:2.7.3') {
exclude group: 'com.squareup.okhttp3', module: 'okhttp'
diff --git a/android-exoplayer/src/main/java/com/brentvatne/exoplayer/ReactExoplayerView.java b/android-exoplayer/src/main/java/com/brentvatne/exoplayer/ReactExoplayerView.java
index 614da57e..487da343 100644
--- a/android-exoplayer/src/main/java/com/brentvatne/exoplayer/ReactExoplayerView.java
+++ b/android-exoplayer/src/main/java/com/brentvatne/exoplayer/ReactExoplayerView.java
@@ -11,6 +11,7 @@ import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import android.view.Window;
+import android.view.accessibility.CaptioningManager;
import android.widget.FrameLayout;
import com.brentvatne.react.R;
@@ -68,6 +69,7 @@ import java.lang.Math;
import java.util.Map;
import java.lang.Object;
import java.util.ArrayList;
+import java.util.Locale;
@SuppressLint("ViewConstructor")
class ReactExoplayerView extends FrameLayout implements
@@ -111,6 +113,9 @@ class ReactExoplayerView extends FrameLayout implements
private Uri srcUri;
private String extension;
private boolean repeat;
+ private String audioTrackType;
+ private Dynamic audioTrackValue;
+ private ReadableArray audioTracks;
private String textTrackType;
private Dynamic textTrackValue;
private ReadableArray textTracks;
@@ -499,20 +504,43 @@ class ReactExoplayerView extends FrameLayout implements
private void videoLoaded() {
if (loadVideoStarted) {
loadVideoStarted = false;
+ setSelectedAudioTrack(audioTrackType, audioTrackValue);
setSelectedTextTrack(textTrackType, textTrackValue);
Format videoFormat = player.getVideoFormat();
int width = videoFormat != null ? videoFormat.width : 0;
int height = videoFormat != null ? videoFormat.height : 0;
eventEmitter.load(player.getDuration(), player.getCurrentPosition(), width, height,
- getTextTrackInfo());
+ getAudioTrackInfo(), getTextTrackInfo());
}
}
+ private WritableArray getAudioTrackInfo() {
+ WritableArray audioTracks = Arguments.createArray();
+
+ MappingTrackSelector.MappedTrackInfo info = trackSelector.getCurrentMappedTrackInfo();
+ int index = getTrackRendererIndex(C.TRACK_TYPE_AUDIO);
+ if (info == null || index == C.INDEX_UNSET) {
+ return audioTracks;
+ }
+
+ TrackGroupArray groups = info.getTrackGroups(index);
+ for (int i = 0; i < groups.length; ++i) {
+ Format format = groups.get(i).getFormat(0);
+ WritableMap textTrack = Arguments.createMap();
+ textTrack.putInt("index", i);
+ textTrack.putString("title", format.id != null ? format.id : "");
+ textTrack.putString("type", format.sampleMimeType);
+ textTrack.putString("language", format.language != null ? format.language : "");
+ audioTracks.pushMap(textTrack);
+ }
+ return audioTracks;
+ }
+
private WritableArray getTextTrackInfo() {
WritableArray textTracks = Arguments.createArray();
MappingTrackSelector.MappedTrackInfo info = trackSelector.getCurrentMappedTrackInfo();
- int index = getTextTrackRendererIndex();
+ int index = getTrackRendererIndex(C.TRACK_TYPE_TEXT);
if (info == null || index == C.INDEX_UNSET) {
return textTracks;
}
@@ -645,10 +673,10 @@ class ReactExoplayerView extends FrameLayout implements
return false;
}
- public int getTextTrackRendererIndex() {
+ public int getTrackRendererIndex(int trackType) {
int rendererCount = player.getRendererCount();
for (int rendererIndex = 0; rendererIndex < rendererCount; rendererIndex++) {
- if (player.getRendererType(rendererIndex) == C.TRACK_TYPE_TEXT) {
+ if (player.getRendererType(rendererIndex) == trackType) {
return rendererIndex;
}
}
@@ -722,12 +750,9 @@ class ReactExoplayerView extends FrameLayout implements
this.repeat = repeat;
}
- public void setSelectedTextTrack(String type, Dynamic value) {
- textTrackType = type;
- textTrackValue = value;
-
- int index = getTextTrackRendererIndex();
- if (index == C.INDEX_UNSET) {
+ public void setSelectedTrack(int trackType, String type, Dynamic value) {
+ int rendererIndex = getTrackRendererIndex(trackType);
+ if (rendererIndex == C.INDEX_UNSET) {
return;
}
MappingTrackSelector.MappedTrackInfo info = trackSelector.getCurrentMappedTrackInfo();
@@ -735,12 +760,15 @@ class ReactExoplayerView extends FrameLayout implements
return;
}
- TrackGroupArray groups = info.getTrackGroups(index);
+ TrackGroupArray groups = info.getTrackGroups(rendererIndex);
int trackIndex = C.INDEX_UNSET;
+
if (TextUtils.isEmpty(type)) {
- // Do nothing
- } else if (type.equals("disabled")) {
- trackSelector.setSelectionOverride(index, groups, null);
+ type = "default";
+ }
+
+ if (type.equals("disabled")) {
+ trackSelector.setSelectionOverride(rendererIndex, groups, null);
return;
} else if (type.equals("language")) {
for (int i = 0; i < groups.length; ++i) {
@@ -759,10 +787,25 @@ class ReactExoplayerView extends FrameLayout implements
}
}
} else if (type.equals("index")) {
- trackIndex = value.asInt();
- } else { // default. invalid type or "system"
- trackSelector.clearSelectionOverrides(index);
- return;
+ if (value.asInt() < groups.length) {
+ trackIndex = value.asInt();
+ }
+ } else { // default
+ if (rendererIndex == C.TRACK_TYPE_TEXT) { // Use system settings if possible
+ int sdk = android.os.Build.VERSION.SDK_INT;
+ if (sdk > 18 && groups.length > 0) {
+ CaptioningManager captioningManager
+ = (CaptioningManager)themedReactContext.getSystemService(Context.CAPTIONING_SERVICE);
+ if (captioningManager != null && captioningManager.isEnabled()) {
+ trackIndex = getTrackIndexForDefaultLocale(groups);
+ }
+ } else {
+ trackSelector.setSelectionOverride(rendererIndex, groups, null);
+ return;
+ }
+ } else if (rendererIndex == C.TRACK_TYPE_AUDIO) {
+ trackIndex = getTrackIndexForDefaultLocale(groups);
+ }
}
if (trackIndex == C.INDEX_UNSET) {
@@ -772,8 +815,35 @@ class ReactExoplayerView extends FrameLayout implements
MappingTrackSelector.SelectionOverride override
= new MappingTrackSelector.SelectionOverride(
- new FixedTrackSelection.Factory(), trackIndex, 0);
- trackSelector.setSelectionOverride(index, groups, override);
+ new FixedTrackSelection.Factory(), trackIndex, 0);
+ trackSelector.setSelectionOverride(rendererIndex, groups, override);
+ }
+
+ private int getTrackIndexForDefaultLocale(TrackGroupArray groups) {
+ int trackIndex = 0; // default if no match
+ String locale2 = Locale.getDefault().getLanguage(); // 2 letter code
+ String locale3 = Locale.getDefault().getISO3Language(); // 3 letter code
+ for (int i = 0; i < groups.length; ++i) {
+ Format format = groups.get(i).getFormat(0);
+ String language = format.language;
+ if (language != null && (language.equals(locale2) || language.equals(locale3))) {
+ trackIndex = i;
+ break;
+ }
+ }
+ return trackIndex;
+ }
+
+ public void setSelectedAudioTrack(String type, Dynamic value) {
+ audioTrackType = type;
+ audioTrackValue = value;
+ setSelectedTrack(C.TRACK_TYPE_AUDIO, audioTrackType, audioTrackValue);
+ }
+
+ public void setSelectedTextTrack(String type, Dynamic value) {
+ textTrackType = type;
+ textTrackValue = value;
+ setSelectedTrack(C.TRACK_TYPE_TEXT, textTrackType, textTrackValue);
}
public void setPausedModifier(boolean paused) {
diff --git a/android-exoplayer/src/main/java/com/brentvatne/exoplayer/ReactExoplayerViewManager.java b/android-exoplayer/src/main/java/com/brentvatne/exoplayer/ReactExoplayerViewManager.java
index e3775a6e..fbc8a9ad 100644
--- a/android-exoplayer/src/main/java/com/brentvatne/exoplayer/ReactExoplayerViewManager.java
+++ b/android-exoplayer/src/main/java/com/brentvatne/exoplayer/ReactExoplayerViewManager.java
@@ -28,6 +28,9 @@ public class ReactExoplayerViewManager extends ViewGroupManager= 19) { // 4.4+
+ uiOptions = SYSTEM_UI_FLAG_HIDE_NAVIGATION
+ | SYSTEM_UI_FLAG_IMMERSIVE_STICKY
+ | SYSTEM_UI_FLAG_FULLSCREEN;
+ } else {
+ uiOptions = SYSTEM_UI_FLAG_HIDE_NAVIGATION
+ | SYSTEM_UI_FLAG_FULLSCREEN;
+ }
+ mEventEmitter.receiveEvent(getId(), Events.EVENT_FULLSCREEN_WILL_PRESENT.toString(), null);
+ decorView.setSystemUiVisibility(uiOptions);
+ mEventEmitter.receiveEvent(getId(), Events.EVENT_FULLSCREEN_DID_PRESENT.toString(), null);
+ } else {
+ uiOptions = View.SYSTEM_UI_FLAG_VISIBLE;
+ mEventEmitter.receiveEvent(getId(), Events.EVENT_FULLSCREEN_WILL_DISMISS.toString(), null);
+ decorView.setSystemUiVisibility(uiOptions);
+ mEventEmitter.receiveEvent(getId(), Events.EVENT_FULLSCREEN_DID_DISMISS.toString(), null);
+ }
+ }
+
public void applyModifiers() {
setResizeModeModifier(mResizeMode);
setRepeatModifier(mRepeat);
diff --git a/android/src/main/java/com/brentvatne/react/ReactVideoViewManager.java b/android/src/main/java/com/brentvatne/react/ReactVideoViewManager.java
index 7973121a..dbf152a8 100644
--- a/android/src/main/java/com/brentvatne/react/ReactVideoViewManager.java
+++ b/android/src/main/java/com/brentvatne/react/ReactVideoViewManager.java
@@ -35,6 +35,7 @@ public class ReactVideoViewManager extends SimpleViewManager {
public static final String PROP_PROGRESS_UPDATE_INTERVAL = "progressUpdateInterval";
public static final String PROP_SEEK = "seek";
public static final String PROP_RATE = "rate";
+ public static final String PROP_FULLSCREEN = "fullscreen";
public static final String PROP_PLAY_IN_BACKGROUND = "playInBackground";
public static final String PROP_CONTROLS = "controls";
@@ -148,6 +149,11 @@ public class ReactVideoViewManager extends SimpleViewManager {
videoView.setRateModifier(rate);
}
+ @ReactProp(name = PROP_FULLSCREEN, defaultBoolean = false)
+ public void setFullscreen(final ReactVideoView videoView, final boolean fullscreen) {
+ videoView.setFullscreen(fullscreen);
+ }
+
@ReactProp(name = PROP_PLAY_IN_BACKGROUND, defaultBoolean = false)
public void setPlayInBackground(final ReactVideoView videoView, final boolean playInBackground) {
videoView.setPlayInBackground(playInBackground);
diff --git a/ios/RCTVideo.h b/ios/RCTVideo.h
index 38d2ab4e..1c471236 100644
--- a/ios/RCTVideo.h
+++ b/ios/RCTVideo.h
@@ -17,6 +17,7 @@
@property (nonatomic, copy) RCTBubblingEventBlock onVideoSeek;
@property (nonatomic, copy) RCTBubblingEventBlock onVideoEnd;
@property (nonatomic, copy) RCTBubblingEventBlock onTimedMetadata;
+@property (nonatomic, copy) RCTBubblingEventBlock onVideoAudioBecomingNoisy;
@property (nonatomic, copy) RCTBubblingEventBlock onVideoFullscreenPlayerWillPresent;
@property (nonatomic, copy) RCTBubblingEventBlock onVideoFullscreenPlayerDidPresent;
@property (nonatomic, copy) RCTBubblingEventBlock onVideoFullscreenPlayerWillDismiss;
diff --git a/ios/RCTVideo.m b/ios/RCTVideo.m
index 91673ea5..2c2097d0 100644
--- a/ios/RCTVideo.m
+++ b/ios/RCTVideo.m
@@ -3,6 +3,8 @@
#import
#import
#import
+#include
+#include
static NSString *const statusKeyPath = @"status";
static NSString *const playbackLikelyToKeepUpKeyPath = @"playbackLikelyToKeepUp";
@@ -43,7 +45,9 @@ static NSString *const timedMetadata = @"timedMetadata";
BOOL _paused;
BOOL _repeat;
BOOL _allowsExternalPlayback;
+ NSArray * _textTracks;
NSDictionary * _selectedTextTrack;
+ NSDictionary * _selectedAudioTrack;
BOOL _playbackStalled;
BOOL _playInBackground;
BOOL _playWhenInactive;
@@ -88,6 +92,11 @@ static NSString *const timedMetadata = @"timedMetadata";
selector:@selector(applicationWillEnterForeground:)
name:UIApplicationWillEnterForegroundNotification
object:nil];
+
+ [[NSNotificationCenter defaultCenter] addObserver:self
+ selector:@selector(audioRouteChanged:)
+ name:AVAudioSessionRouteChangeNotification
+ object:nil];
}
return self;
@@ -187,6 +196,17 @@ static NSString *const timedMetadata = @"timedMetadata";
}
}
+#pragma mark - Audio events
+
+- (void)audioRouteChanged:(NSNotification *)notification
+{
+ NSNumber *reason = [[notification userInfo] objectForKey:AVAudioSessionRouteChangeReasonKey];
+ NSNumber *previousRoute = [[notification userInfo] objectForKey:AVAudioSessionRouteChangePreviousRouteKey];
+ if (reason.unsignedIntValue == AVAudioSessionRouteChangeReasonOldDeviceUnavailable) {
+ self.onVideoAudioBecomingNoisy(@{@"target": self.reactTag});
+ }
+}
+
#pragma mark - Progress
- (void)sendProgressUpdate
@@ -284,29 +304,32 @@ static NSString *const timedMetadata = @"timedMetadata";
[self removePlayerLayer];
[self removePlayerTimeObserver];
[self removePlayerItemObservers];
- _playerItem = [self playerItemForSource:source];
- [self addPlayerItemObservers];
-
- [_player pause];
- [_playerViewController.view removeFromSuperview];
- _playerViewController = nil;
-
- if (_playbackRateObserverRegistered) {
- [_player removeObserver:self forKeyPath:playbackRate context:nil];
- _playbackRateObserverRegistered = NO;
- }
-
- _player = [AVPlayer playerWithPlayerItem:_playerItem];
- _player.actionAtItemEnd = AVPlayerActionAtItemEndNone;
-
- [_player addObserver:self forKeyPath:playbackRate options:0 context:nil];
- _playbackRateObserverRegistered = YES;
-
- [self addPlayerTimeObserver];
-
+
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
- // Perform on next run loop, otherwise onVideoLoadStart is nil
- if (self.onVideoLoadStart) {
+
+ // perform on next run loop, otherwise other passed react-props may not be set
+ _playerItem = [self playerItemForSource:source];
+ [self addPlayerItemObservers];
+
+ [_player pause];
+ [_playerViewController.view removeFromSuperview];
+ _playerViewController = nil;
+
+ if (_playbackRateObserverRegistered) {
+ [_player removeObserver:self forKeyPath:playbackRate context:nil];
+ _playbackRateObserverRegistered = NO;
+ }
+
+ _player = [AVPlayer playerWithPlayerItem:_playerItem];
+ _player.actionAtItemEnd = AVPlayerActionAtItemEndNone;
+
+ [_player addObserver:self forKeyPath:playbackRate options:0 context:nil];
+ _playbackRateObserverRegistered = YES;
+
+ [self addPlayerTimeObserver];
+
+ //Perform on next run loop, otherwise onVideoLoadStart is nil
+ if(self.onVideoLoadStart) {
id uri = [source objectForKey:@"uri"];
id type = [source objectForKey:@"type"];
self.onVideoLoadStart(@{@"src": @{
@@ -316,42 +339,95 @@ static NSString *const timedMetadata = @"timedMetadata";
@"target": self.reactTag
});
}
+
});
_videoLoadStarted = YES;
}
+- (NSURL*) urlFilePath:(NSString*) filepath {
+ NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
+
+ NSString* relativeFilePath = [filepath lastPathComponent];
+ // the file may be multiple levels below the documents directory
+ NSArray* fileComponents = [filepath componentsSeparatedByString:@"Documents/"];
+ if (fileComponents.count>1) {
+ relativeFilePath = [fileComponents objectAtIndex:1];
+ }
+
+ NSString *path = [paths.firstObject stringByAppendingPathComponent:relativeFilePath];
+ if ([[NSFileManager defaultManager] fileExistsAtPath:path]) {
+ return [NSURL fileURLWithPath:path];
+ }
+ return nil;
+}
+
- (AVPlayerItem*)playerItemForSource:(NSDictionary *)source
{
bool isNetwork = [RCTConvert BOOL:[source objectForKey:@"isNetwork"]];
bool isAsset = [RCTConvert BOOL:[source objectForKey:@"isAsset"]];
NSString *uri = [source objectForKey:@"uri"];
NSString *type = [source objectForKey:@"type"];
- NSDictionary *headers = [source objectForKey:@"requestHeaders"];
- NSURL *url = (isNetwork || isAsset) ?
- [NSURL URLWithString:uri] :
- [[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]];
+ AVURLAsset *asset;
+ NSMutableDictionary *assetOptions = [[NSMutableDictionary alloc] init];
if (isNetwork) {
- NSMutableDictionary *assetOptions = [[NSMutableDictionary alloc]init];
/* Per #1091, this is not a public API. We need to either get approval from Apple to use this
* or use a different approach.
+ NSDictionary *headers = [source objectForKey:@"requestHeaders"];
if ([headers count] > 0) {
[assetOptions setObject:headers forKey:@"AVURLAssetHTTPHeaderFieldsKey"];
}
*/
NSArray *cookies = [[NSHTTPCookieStorage sharedHTTPCookieStorage] cookies];
[assetOptions setObject:cookies forKey:AVURLAssetHTTPCookiesKey];
-
- AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:assetOptions];
+ asset = [AVURLAsset URLAssetWithURL:[NSURL URLWithString:uri] options:assetOptions];
+ } else if (isAsset) { // assets on iOS have to be in the Documents folder
+ asset = [AVURLAsset URLAssetWithURL:[self urlFilePath:uri] options:nil];
+ } else { // file passed in through JS, or an asset in the Xcode project
+ asset = [AVURLAsset URLAssetWithURL:[[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]] options:nil];
+ }
+
+ if (!_textTracks) {
return [AVPlayerItem playerItemWithAsset:asset];
}
- else if (isAsset) {
- AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
- return [AVPlayerItem playerItemWithAsset:asset];
+
+ // sideload text tracks
+ AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
+
+ AVAssetTrack *videoAsset = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
+ AVMutableCompositionTrack *videoCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
+ [videoCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration)
+ ofTrack:videoAsset
+ atTime:kCMTimeZero
+ error:nil];
+
+ AVAssetTrack *audioAsset = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
+ AVMutableCompositionTrack *audioCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
+ [audioCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration)
+ ofTrack:audioAsset
+ atTime:kCMTimeZero
+ error:nil];
+
+ for (int i = 0; i < _textTracks.count; ++i) {
+ AVURLAsset *textURLAsset;
+ NSString *textUri = [_textTracks objectAtIndex:i][@"uri"];
+ if ([[textUri lowercaseString] hasPrefix:@"http"]) {
+ textURLAsset = [AVURLAsset URLAssetWithURL:[NSURL URLWithString:textUri] options:assetOptions];
+ } else {
+ textURLAsset = [AVURLAsset URLAssetWithURL:[self urlFilePath:textUri] options:nil];
+ }
+ AVAssetTrack *textTrackAsset = [textURLAsset tracksWithMediaType:AVMediaTypeText].firstObject;
+ AVMutableCompositionTrack *textCompTrack = [mixComposition
+ addMutableTrackWithMediaType:AVMediaTypeText
+ preferredTrackID:kCMPersistentTrackID_Invalid];
+ [textCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration)
+ ofTrack:textTrackAsset
+ atTime:kCMTimeZero
+ error:nil];
}
- return [AVPlayerItem playerItemWithURL:url];
+ return [AVPlayerItem playerItemWithAsset:mixComposition];
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
@@ -363,7 +439,7 @@ static NSString *const timedMetadata = @"timedMetadata";
if (items && ![items isEqual:[NSNull null]] && items.count > 0) {
NSMutableArray *array = [NSMutableArray new];
for (AVMetadataItem *item in items) {
- NSString *value = item.value;
+ NSString *value = (NSString *)item.value;
NSString *identifier = item.identifier;
if (![value isEqual: [NSNull null]]) {
@@ -423,6 +499,7 @@ static NSString *const timedMetadata = @"timedMetadata";
@"height": height,
@"orientation": orientation
},
+ @"audioTracks": [self getAudioTrackInfo],
@"textTracks": [self getTextTrackInfo],
@"target": self.reactTag});
}
@@ -587,19 +664,19 @@ static NSString *const timedMetadata = @"timedMetadata";
{
NSNumber *seekTime = info[@"time"];
NSNumber *seekTolerance = info[@"tolerance"];
-
+
int timeScale = 1000;
-
+
AVPlayerItem *item = _player.currentItem;
if (item && item.status == AVPlayerItemStatusReadyToPlay) {
// TODO check loadedTimeRanges
-
+
CMTime cmSeekTime = CMTimeMakeWithSeconds([seekTime floatValue], timeScale);
CMTime current = item.currentTime;
// TODO figure out a good tolerance level
CMTime tolerance = CMTimeMake([seekTolerance floatValue], timeScale);
BOOL wasPaused = _paused;
-
+
if (CMTimeCompare(current, cmSeekTime) != 0) {
if (!wasPaused) [_player pause];
[_player seekToTime:cmSeekTime toleranceBefore:tolerance toleranceAfter:tolerance completionHandler:^(BOOL finished) {
@@ -610,15 +687,15 @@ static NSString *const timedMetadata = @"timedMetadata";
[self setPaused:false];
}
if(self.onVideoSeek) {
- self.onVideoSeek(@{@"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(item.currentTime)],
- @"seekTime": seekTime,
- @"target": self.reactTag});
+ self.onVideoSeek(@{@"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(item.currentTime)],
+ @"seekTime": seekTime,
+ @"target": self.reactTag});
}
}];
-
+
_pendingSeek = false;
}
-
+
} else {
// TODO: See if this makes sense and if so, actually implement it
_pendingSeek = true;
@@ -654,6 +731,7 @@ static NSString *const timedMetadata = @"timedMetadata";
[_player setMuted:NO];
}
+ [self setSelectedAudioTrack:_selectedAudioTrack];
[self setSelectedTextTrack:_selectedTextTrack];
[self setResizeMode:_resizeMode];
[self setRepeat:_repeat];
@@ -666,17 +744,145 @@ static NSString *const timedMetadata = @"timedMetadata";
_repeat = repeat;
}
+- (void)setMediaSelectionTrackForCharacteristic:(AVMediaCharacteristic)characteristic
+ withCriteria:(NSDictionary *)criteria
+{
+ NSString *type = criteria[@"type"];
+ AVMediaSelectionGroup *group = [_player.currentItem.asset
+ mediaSelectionGroupForMediaCharacteristic:characteristic];
+ AVMediaSelectionOption *mediaOption;
+
+ if ([type isEqualToString:@"disabled"]) {
+ // Do nothing. We want to ensure option is nil
+ } else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) {
+ NSString *value = criteria[@"value"];
+ for (int i = 0; i < group.options.count; ++i) {
+ AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i];
+ NSString *optionValue;
+ if ([type isEqualToString:@"language"]) {
+ optionValue = [currentOption extendedLanguageTag];
+ } else {
+ optionValue = [[[currentOption commonMetadata]
+ valueForKey:@"value"]
+ objectAtIndex:0];
+ }
+ if ([value isEqualToString:optionValue]) {
+ mediaOption = currentOption;
+ break;
+ }
+ }
+ //} else if ([type isEqualToString:@"default"]) {
+    //  option = group.defaultOption;
+ } else if ([type isEqualToString:@"index"]) {
+ if ([criteria[@"value"] isKindOfClass:[NSNumber class]]) {
+ int index = [criteria[@"value"] intValue];
+ if (group.options.count > index) {
+ mediaOption = [group.options objectAtIndex:index];
+ }
+ }
+ } else { // default. invalid type or "system"
+ [_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group];
+ return;
+ }
+
+ // If a match isn't found, option will be nil and text tracks will be disabled
+ [_player.currentItem selectMediaOption:mediaOption inMediaSelectionGroup:group];
+}
+
+- (void)setSelectedAudioTrack:(NSDictionary *)selectedAudioTrack {
+ _selectedAudioTrack = selectedAudioTrack;
+ [self setMediaSelectionTrackForCharacteristic:AVMediaCharacteristicAudible
+ withCriteria:_selectedAudioTrack];
+}
+
- (void)setSelectedTextTrack:(NSDictionary *)selectedTextTrack {
_selectedTextTrack = selectedTextTrack;
- NSString *type = selectedTextTrack[@"type"];
+ if (_textTracks) { // sideloaded text tracks
+ [self setSideloadedText];
+ } else { // text tracks included in the HLS playlist
+ [self setMediaSelectionTrackForCharacteristic:AVMediaCharacteristicLegible
+ withCriteria:_selectedTextTrack];
+ }
+}
+
+- (void) setSideloadedText {
+ NSString *type = _selectedTextTrack[@"type"];
+ NSArray* textTracks = [self getTextTrackInfo];
+
+ // The first few tracks will be audio & video track
+ int firstTextIndex = 0;
+ for (firstTextIndex = 0; firstTextIndex < _player.currentItem.tracks.count; ++firstTextIndex) {
+ if ([_player.currentItem.tracks[firstTextIndex].assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible]) {
+ break;
+ }
+ }
+
+ int selectedTrackIndex = -1;
+
+ if ([type isEqualToString:@"disabled"]) {
+ // Do nothing. We want to ensure option is nil
+ } else if ([type isEqualToString:@"language"]) {
+ NSString *selectedValue = _selectedTextTrack[@"value"];
+ for (int i = 0; i < textTracks.count; ++i) {
+ NSDictionary *currentTextTrack = [textTracks objectAtIndex:i];
+ if ([selectedValue isEqualToString:currentTextTrack[@"language"]]) {
+ selectedTrackIndex = i;
+ break;
+ }
+ }
+ } else if ([type isEqualToString:@"title"]) {
+ NSString *selectedValue = _selectedTextTrack[@"value"];
+ for (int i = 0; i < textTracks.count; ++i) {
+ NSDictionary *currentTextTrack = [textTracks objectAtIndex:i];
+ if ([selectedValue isEqualToString:currentTextTrack[@"title"]]) {
+ selectedTrackIndex = i;
+ break;
+ }
+ }
+ } else if ([type isEqualToString:@"index"]) {
+ if ([_selectedTextTrack[@"value"] isKindOfClass:[NSNumber class]]) {
+ int index = [_selectedTextTrack[@"value"] intValue];
+ if (textTracks.count > index) {
+ selectedTrackIndex = index;
+ }
+ }
+ }
+
+ // user's selected language might not be available, or system defaults have captions enabled
+ if (selectedTrackIndex == -1 || [type isEqualToString:@"default"]) {
+ CFArrayRef captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(kMACaptionAppearanceDomainUser);
+    NSArray *captionSettings = (__bridge_transfer NSArray*)captioningMediaCharacteristics;
+ if ([captionSettings containsObject: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility]) {
+ // iterate through the textTracks to find a matching option, or default to the first object.
+ selectedTrackIndex = 0;
+
+ NSString * systemLanguage = [[NSLocale preferredLanguages] firstObject];
+ for (int i = 0; i < textTracks.count; ++i) {
+ NSDictionary *currentTextTrack = [textTracks objectAtIndex:i];
+ if ([systemLanguage isEqualToString:currentTextTrack[@"language"]]) {
+ selectedTrackIndex = i;
+ break;
+ }
+ }
+ }
+ }
+
+ for (int i = firstTextIndex; i < _player.currentItem.tracks.count; ++i) {
+ BOOL isEnabled = i == selectedTrackIndex + firstTextIndex;
+ [_player.currentItem.tracks[i] setEnabled:isEnabled];
+ }
+}
+
+-(void) setStreamingText {
+ NSString *type = _selectedTextTrack[@"type"];
AVMediaSelectionGroup *group = [_player.currentItem.asset
mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
- AVMediaSelectionOption *option;
-
+ AVMediaSelectionOption *mediaOption;
+
if ([type isEqualToString:@"disabled"]) {
// Do nothing. We want to ensure option is nil
} else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) {
- NSString *value = selectedTextTrack[@"value"];
+ NSString *value = _selectedTextTrack[@"value"];
for (int i = 0; i < group.options.count; ++i) {
AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i];
NSString *optionValue;
@@ -688,17 +894,17 @@ static NSString *const timedMetadata = @"timedMetadata";
objectAtIndex:0];
}
if ([value isEqualToString:optionValue]) {
- option = currentOption;
+ mediaOption = currentOption;
break;
}
}
- //} else if ([type isEqualToString:@"default"]) {
- // option = group.defaultOption; */
+ //} else if ([type isEqualToString:@"default"]) {
+  //  option = group.defaultOption;
} else if ([type isEqualToString:@"index"]) {
- if ([selectedTextTrack[@"value"] isKindOfClass:[NSNumber class]]) {
- int index = [selectedTextTrack[@"value"] intValue];
+ if ([_selectedTextTrack[@"value"] isKindOfClass:[NSNumber class]]) {
+ int index = [_selectedTextTrack[@"value"] intValue];
if (group.options.count > index) {
- option = [group.options objectAtIndex:index];
+ mediaOption = [group.options objectAtIndex:index];
}
}
} else { // default. invalid type or "system"
@@ -707,11 +913,46 @@ static NSString *const timedMetadata = @"timedMetadata";
}
// If a match isn't found, option will be nil and text tracks will be disabled
- [_player.currentItem selectMediaOption:option inMediaSelectionGroup:group];
+ [_player.currentItem selectMediaOption:mediaOption inMediaSelectionGroup:group];
+}
+
+- (void)setTextTracks:(NSArray*) textTracks;
+{
+ _textTracks = textTracks;
+
+ // in case textTracks was set after selectedTextTrack
+ if (_selectedTextTrack) [self setSelectedTextTrack:_selectedTextTrack];
+}
+
+- (NSArray *)getAudioTrackInfo
+{
+ NSMutableArray *audioTracks = [[NSMutableArray alloc] init];
+ AVMediaSelectionGroup *group = [_player.currentItem.asset
+ mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
+ for (int i = 0; i < group.options.count; ++i) {
+ AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i];
+ NSString *title = @"";
+ NSArray *values = [[currentOption commonMetadata] valueForKey:@"value"];
+ if (values.count > 0) {
+ title = [values objectAtIndex:0];
+ }
+ NSString *language = [currentOption extendedLanguageTag] ? [currentOption extendedLanguageTag] : @"";
+ NSDictionary *audioTrack = @{
+ @"index": [NSNumber numberWithInt:i],
+ @"title": title,
+ @"language": language
+ };
+ [audioTracks addObject:audioTrack];
+ }
+ return audioTracks;
}
- (NSArray *)getTextTrackInfo
{
+ // if sideloaded, textTracks will already be set
+ if (_textTracks) return _textTracks;
+
+ // if streaming video, we extract the text tracks
NSMutableArray *textTracks = [[NSMutableArray alloc] init];
AVMediaSelectionGroup *group = [_player.currentItem.asset
mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
@@ -799,21 +1040,21 @@ static NSString *const timedMetadata = @"timedMetadata";
- (void)usePlayerLayer
{
- if( _player )
- {
- _playerLayer = [AVPlayerLayer playerLayerWithPlayer:_player];
- _playerLayer.frame = self.bounds;
- _playerLayer.needsDisplayOnBoundsChange = YES;
-
- // to prevent video from being animated when resizeMode is 'cover'
- // resize mode must be set before layer is added
- [self setResizeMode:_resizeMode];
- [_playerLayer addObserver:self forKeyPath:readyForDisplayKeyPath options:NSKeyValueObservingOptionNew context:nil];
- _playerLayerObserverSet = YES;
-
- [self.layer addSublayer:_playerLayer];
- self.layer.needsDisplayOnBoundsChange = YES;
- }
+ if( _player )
+ {
+ _playerLayer = [AVPlayerLayer playerLayerWithPlayer:_player];
+ _playerLayer.frame = self.bounds;
+ _playerLayer.needsDisplayOnBoundsChange = YES;
+
+ // to prevent video from being animated when resizeMode is 'cover'
+ // resize mode must be set before layer is added
+ [self setResizeMode:_resizeMode];
+ [_playerLayer addObserver:self forKeyPath:readyForDisplayKeyPath options:NSKeyValueObservingOptionNew context:nil];
+ _playerLayerObserverSet = YES;
+
+ [self.layer addSublayer:_playerLayer];
+ self.layer.needsDisplayOnBoundsChange = YES;
+ }
}
- (void)setControls:(BOOL)controls
@@ -847,12 +1088,12 @@ static NSString *const timedMetadata = @"timedMetadata";
- (void)removePlayerLayer
{
- [_playerLayer removeFromSuperlayer];
- if (_playerLayerObserverSet) {
- [_playerLayer removeObserver:self forKeyPath:readyForDisplayKeyPath];
- _playerLayerObserverSet = NO;
- }
- _playerLayer = nil;
+ [_playerLayer removeFromSuperlayer];
+ if (_playerLayerObserverSet) {
+ [_playerLayer removeObserver:self forKeyPath:readyForDisplayKeyPath];
+ _playerLayerObserverSet = NO;
+ }
+ _playerLayer = nil;
}
#pragma mark - RCTVideoPlayerViewControllerDelegate
diff --git a/ios/RCTVideoManager.m b/ios/RCTVideoManager.m
index 3f12e71c..e0e0162e 100644
--- a/ios/RCTVideoManager.m
+++ b/ios/RCTVideoManager.m
@@ -23,7 +23,9 @@ RCT_EXPORT_VIEW_PROPERTY(src, NSDictionary);
RCT_EXPORT_VIEW_PROPERTY(resizeMode, NSString);
RCT_EXPORT_VIEW_PROPERTY(repeat, BOOL);
RCT_EXPORT_VIEW_PROPERTY(allowsExternalPlayback, BOOL);
+RCT_EXPORT_VIEW_PROPERTY(textTracks, NSArray);
RCT_EXPORT_VIEW_PROPERTY(selectedTextTrack, NSDictionary);
+RCT_EXPORT_VIEW_PROPERTY(selectedAudioTrack, NSDictionary);
RCT_EXPORT_VIEW_PROPERTY(paused, BOOL);
RCT_EXPORT_VIEW_PROPERTY(muted, BOOL);
RCT_EXPORT_VIEW_PROPERTY(controls, BOOL);
@@ -45,6 +47,7 @@ RCT_EXPORT_VIEW_PROPERTY(onVideoProgress, RCTBubblingEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onVideoSeek, RCTBubblingEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onVideoEnd, RCTBubblingEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onTimedMetadata, RCTBubblingEventBlock);
+RCT_EXPORT_VIEW_PROPERTY(onVideoAudioBecomingNoisy, RCTBubblingEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onVideoFullscreenPlayerWillPresent, RCTBubblingEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onVideoFullscreenPlayerDidPresent, RCTBubblingEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onVideoFullscreenPlayerWillDismiss, RCTBubblingEventBlock);
diff --git a/package.json b/package.json
index 806d24d5..7d633256 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "react-native-video",
- "version": "3.0.0",
+ "version": "3.1.0",
"description": "A element for react-native",
"main": "Video.js",
"license": "MIT",