commit 5435e5d545
Merge branch 'master' of https://github.com/react-native-community/react-native-video

# Conflicts:
#	android/src/main/java/com/brentvatne/react/ReactVideoView.java
CHANGELOG.md
@@ -1,5 +1,13 @@
 ## Changelog

+### Next Version
+* Basic fullscreen support for Android MediaPlayer [#1138](https://github.com/react-native-community/react-native-video/pull/1138)
+* Simplify default Android SDK code [#1145](https://github.com/react-native-community/react-native-video/pull/1145) [#1146](https://github.com/react-native-community/react-native-video/pull/1146)
+
+### Version 3.1.0
+* Support sidecar text tracks on iOS [#1109](https://github.com/react-native-community/react-native-video/pull/1109)
+* Support onAudioBecomingNoisy on iOS [#1131](https://github.com/react-native-community/react-native-video/pull/1131)
+
 ### Version 3.0
 * Inherit Android buildtools and SDK version from the root project [#1081](https://github.com/react-native-community/react-native-video/pull/1081)
 * Automatically play on ExoPlayer when the paused prop is not set [#1083](https://github.com/react-native-community/react-native-video/pull/1083)
README.md
@@ -180,6 +180,10 @@ using System.Collections.Generic;
 ## Usage

 ```javascript
+// Load the module
+
+import Video from 'react-native-video';
+
 // Within your render function, assuming you have a file called
 // "background.mp4" in your project. You can include multiple videos
 // on a single screen if you like.
@@ -191,21 +195,8 @@ using System.Collections.Generic;
        onBuffer={this.onBuffer}                // Callback when remote video is buffering
        onEnd={this.onEnd}                      // Callback when playback finishes
        onError={this.videoError}               // Callback when video cannot be loaded
-       onFullscreenPlayerWillPresent={this.fullScreenPlayerWillPresent} // Callback before fullscreen starts
-       onFullscreenPlayerDidPresent={this.fullScreenPlayerDidPresent}   // Callback after fullscreen started
-       onFullscreenPlayerWillDismiss={this.fullScreenPlayerWillDismiss} // Callback before fullscreen stops
-       onFullscreenPlayerDidDismiss={this.fullScreenPlayerDidDismiss}   // Callback after fullscreen stopped
        style={styles.backgroundVideo} />

-// Later to trigger fullscreen
-this.player.presentFullscreenPlayer()
-
-// Disable fullscreen
-this.player.dismissFullscreenPlayer()
-
-// To set video position in seconds (seek)
-this.player.seek(0)
-
 // Later on in your styles..
 var styles = StyleSheet.create({
   backgroundVideo: {
@@ -232,6 +223,7 @@ var styles = StyleSheet.create({
 * [rate](#rate)
 * [repeat](#repeat)
 * [resizeMode](#resizemode)
+* [selectedAudioTrack](#selectedaudiotrack)
 * [selectedTextTrack](#selectedtexttrack)
 * [stereoPan](#stereopan)
 * [textTracks](#texttracks)
@@ -239,12 +231,19 @@ var styles = StyleSheet.create({
 * [volume](#volume)

 ### Event props
+* [onAudioBecomingNoisy](#onaudiobecomingnoisy)
+* [onFullscreenPlayerWillPresent](#onfullscreenplayerwillpresent)
+* [onFullscreenPlayerDidPresent](#onfullscreenplayerdidpresent)
+* [onFullscreenPlayerWillDismiss](#onfullscreenplayerwilldismiss)
+* [onFullscreenPlayerDidDismiss](#onfullscreenplayerdiddismiss)
 * [onLoad](#onload)
 * [onLoadStart](#onloadstart)
 * [onProgress](#onprogress)
 * [onTimedMetadata](#ontimedmetadata)

 ### Methods
+* [dismissFullscreenPlayer](#dismissfullscreenplayer)
+* [presentFullscreenPlayer](#presentfullscreenplayer)
 * [seek](#seek)

 ### Configurable props
@@ -355,6 +354,36 @@ Determines how to resize the video when the frame doesn't match the raw video di

 Platforms: Android ExoPlayer, Android MediaPlayer, iOS, Windows UWP

+#### selectedAudioTrack
+Configure which audio track, if any, is played.
+
+```
+selectedAudioTrack={{
+  type: Type,
+  value: Value
+}}
+```
+
+Example:
+```
+selectedAudioTrack={{
+  type: "title",
+  value: "Dubbing"
+}}
+```
+
+Type | Value | Description
+--- | --- | ---
+"system" (default) | N/A | Play the audio track that matches the system language. If none match, play the first track.
+"disabled" | N/A | Turn off audio
+"title" | string | Play the audio track with the title specified as the Value, e.g. "French"
+"language" | string | Play the audio track with the language specified as the Value, e.g. "fr"
+"index" | number | Play the audio track with the index specified as the value, e.g. 0
+
+If a track matching the specified Type (and Value if appropriate) is unavailable, the first audio track will be played. If multiple tracks match the criteria, the first match will be used.
+
+Platforms: Android ExoPlayer, iOS
+
 #### selectedTextTrack
 Configure which text track (caption or subtitle), if any, is shown.

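To make the new selectedAudioTrack prop documented above concrete, here is a minimal sketch of a component that picks the audio track by language; the component name, source URL and language value are illustrative assumptions, not part of the library.

```javascript
// Minimal sketch (illustrative names/URL): select the audio track by language.
import React, { Component } from 'react';
import Video from 'react-native-video';

class DubbedVideo extends Component {
  state = { audioLanguage: 'fr' }; // e.g. switch to 'en' to change tracks

  render() {
    return (
      <Video
        source={{ uri: 'https://example.com/movie.mp4' }} // placeholder URL
        // Plays the track whose language matches; if none match,
        // the first audio track is used (see the table above).
        selectedAudioTrack={{ type: 'language', value: this.state.audioLanguage }}
        style={{ width: 320, height: 180 }}
      />
    );
  }
}

export default DubbedVideo;
```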
@@ -402,9 +431,11 @@ Property | Description
 --- | ---
 title | Descriptive name for the track
 language | 2 letter [ISO 639-1 code](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) representing the language
-type | Mime type of the track<br> * TextTrackType.SRT - .srt SubRip Subtitle<br> * TextTrackType.TTML - .ttml TTML<br> * TextTrackType.VTT - .vtt WebVTT
+type | Mime type of the track<br> * TextTrackType.SRT - SubRip (.srt)<br> * TextTrackType.TTML - TTML (.ttml)<br> * TextTrackType.VTT - WebVTT (.vtt)<br>iOS only supports VTT, Android ExoPlayer supports all 3
 uri | URL for the text track. Currently, only tracks hosted on a webserver are supported

+On iOS, sidecar text tracks are only supported for individual files, not HLS playlists. For HLS, you should include the text tracks as part of the playlist.
+
 Example:
 ```
 import { TextTrackType }, Video from 'react-native-video';
@@ -413,21 +444,20 @@ textTracks={[
 {
   title: "English CC",
   language: "en",
-  type: "text/vtt", TextTrackType.VTT,
+  type: TextTrackType.VTT, // "text/vtt"
   uri: "https://bitdash-a.akamaihd.net/content/sintel/subtitles/subtitles_en.vtt"
 },
 {
   title: "Spanish Subtitles",
   language: "es",
-  type: "application/x-subrip", TextTrackType.SRT,
+  type: TextTrackType.SRT, // "application/x-subrip"
   uri: "https://durian.blender.org/wp-content/content/subtitles/sintel_es.srt"
 }
 ]}
 ```

-This isn't support on iOS because AVPlayer doesn't support it. Text tracks must be loaded as part of an HLS playlist.

-Platforms: Android ExoPlayer
+Platforms: Android ExoPlayer, iOS

 #### useTextureView
 Output to a TextureView instead of the default SurfaceView. In general, you will want to use SurfaceView because it is more efficient and provides better performance. However, SurfaceViews has two limitations:
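Since sidecar text tracks now also work on iOS (for individual files, not HLS), here is a short sketch combining textTracks with selectedTextTrack; the MP4 URL is a placeholder, while the subtitle URL reuses the sample file from the example above.

```javascript
// Sketch: sideload a WebVTT subtitle file and display it by language.
import React from 'react';
import Video, { TextTrackType } from 'react-native-video';

const SubtitledVideo = () => (
  <Video
    source={{ uri: 'https://example.com/sintel.mp4' }} // placeholder; iOS sidecar tracks need a plain file, not HLS
    textTracks={[
      {
        title: 'English CC',
        language: 'en',
        type: TextTrackType.VTT, // iOS only supports VTT; ExoPlayer also takes SRT/TTML
        uri: 'https://bitdash-a.akamaihd.net/content/sintel/subtitles/subtitles_en.vtt',
      },
    ]}
    selectedTextTrack={{ type: 'language', value: 'en' }}
    style={{ width: 320, height: 180 }}
  />
);

export default SubtitledVideo;
```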
@@ -451,6 +481,41 @@ Platforms: all

 ### Event props

+#### onAudioBecomingNoisy
+Callback function that is called when the audio is about to become 'noisy' due to a change in audio outputs. Typically this is called when audio output is being switched from an external source like headphones back to the internal speaker. It's a good idea to pause the media when this happens so the speaker doesn't start blasting sound.
+
+Payload: none
+
+Platforms: Android ExoPlayer, iOS
+
+#### onFullscreenPlayerWillPresent
+Callback function that is called when the player is about to enter fullscreen mode.
+
+Payload: none
+
+Platforms: Android ExoPlayer, Android MediaPlayer, iOS
+
+#### onFullscreenPlayerDidPresent
+Callback function that is called when the player has entered fullscreen mode.
+
+Payload: none
+
+Platforms: Android ExoPlayer, Android MediaPlayer, iOS
+
+#### onFullscreenPlayerWillDismiss
+Callback function that is called when the player is about to exit fullscreen mode.
+
+Payload: none
+
+Platforms: Android ExoPlayer, Android MediaPlayer, iOS
+
+#### onFullscreenPlayerDidDismiss
+Callback function that is called when the player has exited fullscreen mode.
+
+Payload: none
+
+Platforms: Android ExoPlayer, Android MediaPlayer, iOS
+
 #### onLoad
 Callback function that is called when the media is loaded and ready to play.

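A minimal sketch of the pause-on-noisy behaviour the onAudioBecomingNoisy description above recommends; the component name and source URI are placeholders.

```javascript
// Sketch: pause playback when audio output falls back to the built-in speaker.
import React, { Component } from 'react';
import Video from 'react-native-video';

class PoliteVideo extends Component {
  state = { paused: false };

  // Fired by the native side (Android ExoPlayer, iOS) when e.g. headphones are unplugged.
  handleAudioBecomingNoisy = () => {
    this.setState({ paused: true });
  };

  render() {
    return (
      <Video
        source={{ uri: 'https://example.com/episode.mp4' }} // placeholder URI
        paused={this.state.paused}
        onAudioBecomingNoisy={this.handleAudioBecomingNoisy}
        style={{ width: 320, height: 180 }}
      />
    );
  }
}

export default PoliteVideo;
```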
@@ -461,7 +526,8 @@ Property | Type | Description
 currentPosition | number | Time in seconds where the media will start
 duration | number | Length of the media in seconds
 naturalSize | object | Properties:<br> * width - Width in pixels that the video was encoded at<br> * height - Height in pixels that the video was encoded at<br> * orientation - "portrait" or "landscape"
-textTracks | array | An array of text track info objects with the following properties:<br> * index - Index number<br> * title - Description of the track<br> * language - 2 letter [ISO 639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) language code<br> * type - Mime type of track
+audioTracks | array | An array of audio track info objects with the following properties:<br> * index - Index number<br> * title - Description of the track<br> * language - 2 letter [ISO 639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) or 3 letter [ISO 639-2](https://en.wikipedia.org/wiki/List_of_ISO_639-2_codes) language code<br> * type - Mime type of track
+textTracks | array | An array of text track info objects with the following properties:<br> * index - Index number<br> * title - Description of the track<br> * language - 2 letter [ISO 639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) or 3 letter [ISO 639-2](https://en.wikipedia.org/wiki/List_of_ISO_639-2_codes) language code<br> * type - Mime type of track

 Example:
 ```
@@ -479,6 +545,10 @@ Example:
   orientation: 'landscape'
   width: '1920'
 },
+audioTracks: [
+  { language: 'es', title: 'Spanish', type: 'audio/mpeg', index: 0 },
+  { language: 'en', title: 'English', type: 'audio/mpeg', index: 1 }
+],
 textTracks: [
   { title: '#1 French', language: 'fr', index: 0, type: 'text/vtt' },
   { title: '#2 English CC', language: 'en', index: 1, type: 'text/vtt' },
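A sketch of reading the new audioTracks array from the onLoad payload and feeding a choice back through selectedAudioTrack by index; the stream URL is illustrative and the track-picker UI is left out.

```javascript
// Sketch: remember the audio tracks reported by onLoad and select one by index.
import React, { Component } from 'react';
import Video from 'react-native-video';

class MultiAudioVideo extends Component {
  state = { audioTracks: [], selectedIndex: 0 };

  handleLoad = (data) => {
    // data.audioTracks: [{ index, title, language, type }, ...] as described above.
    this.setState({ audioTracks: data.audioTracks });
  };

  render() {
    return (
      <Video
        source={{ uri: 'https://example.com/multi-audio.mp4' }} // illustrative URL
        onLoad={this.handleLoad}
        selectedAudioTrack={{ type: 'index', value: this.state.selectedIndex }}
        style={{ width: 320, height: 180 }}
      />
    );
  }
}

export default MultiAudioVideo;
```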
@@ -560,6 +630,34 @@ return (
 );
 ```

+#### dismissFullscreenPlayer
+`dismissFullscreenPlayer()`
+
+Take the player out of fullscreen mode.
+
+Example:
+```
+this.player.dismissFullscreenPlayer();
+```
+
+Platforms: Android ExoPlayer, Android MediaPlayer, iOS
+
+#### presentFullscreenPlayer
+`presentFullscreenPlayer()`
+
+Put the player in fullscreen mode.
+
+On iOS, this displays the video in a fullscreen view controller with controls.
+
+On Android ExoPlayer & MediaPlayer, this puts the navigation controls in fullscreen mode. It is not a complete fullscreen implementation, so you will still need to apply a style that makes the width and height match your screen dimensions to get a fullscreen video.
+
+Example:
+```
+this.player.presentFullscreenPlayer();
+```
+
+Platforms: Android ExoPlayer, Android MediaPlayer, iOS
+
 #### seek()
 `seek(seconds)`

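A sketch tying presentFullscreenPlayer/dismissFullscreenPlayer to the new fullscreen events; as noted above, on Android you still size the view yourself, so the style here is only a placeholder.

```javascript
// Sketch: toggle fullscreen from a button and track it via the Did* events.
import React, { Component } from 'react';
import { Button, View } from 'react-native';
import Video from 'react-native-video';

class FullscreenDemo extends Component {
  state = { fullscreen: false };

  toggleFullscreen = () => {
    if (this.state.fullscreen) {
      this.player.dismissFullscreenPlayer();
    } else {
      this.player.presentFullscreenPlayer();
    }
  };

  render() {
    return (
      <View>
        <Video
          ref={(ref) => { this.player = ref; }}
          source={{ uri: 'https://example.com/clip.mp4' }} // placeholder URI
          onFullscreenPlayerDidPresent={() => this.setState({ fullscreen: true })}
          onFullscreenPlayerDidDismiss={() => this.setState({ fullscreen: false })}
          style={{ width: 320, height: 180 }}
        />
        <Button
          title={this.state.fullscreen ? 'Exit fullscreen' : 'Go fullscreen'}
          onPress={this.toggleFullscreen}
        />
      </View>
    );
  }
}

export default FullscreenDemo;
```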
Video.js
@@ -235,6 +235,7 @@ export default class Video extends Component {
       onVideoEnd: this._onEnd,
       onVideoBuffer: this._onBuffer,
       onTimedMetadata: this._onTimedMetadata,
+      onVideoAudioBecomingNoisy: this._onAudioBecomingNoisy,
       onVideoFullscreenPlayerWillPresent: this._onFullscreenPlayerWillPresent,
       onVideoFullscreenPlayerDidPresent: this._onFullscreenPlayerDidPresent,
       onVideoFullscreenPlayerWillDismiss: this._onFullscreenPlayerWillDismiss,
@@ -296,6 +297,7 @@ Video.propTypes = {
   onVideoSeek: PropTypes.func,
   onVideoEnd: PropTypes.func,
   onTimedMetadata: PropTypes.func,
+  onVideoAudioBecomingNoisy: PropTypes.func,
   onVideoFullscreenPlayerWillPresent: PropTypes.func,
   onVideoFullscreenPlayerDidPresent: PropTypes.func,
   onVideoFullscreenPlayerWillDismiss: PropTypes.func,
@@ -314,6 +316,13 @@ Video.propTypes = {
   posterResizeMode: Image.propTypes.resizeMode,
   repeat: PropTypes.bool,
   allowsExternalPlayback: PropTypes.bool,
+  selectedAudioTrack: PropTypes.shape({
+    type: PropTypes.string.isRequired,
+    value: PropTypes.oneOfType([
+      PropTypes.string,
+      PropTypes.number
+    ])
+  }),
   selectedTextTrack: PropTypes.shape({
     type: PropTypes.string.isRequired,
     value: PropTypes.oneOfType([
@@ -1,20 +1,16 @@
 apply plugin: 'com.android.library'

-def _ext = rootProject.ext
-
-def _reactNativeVersion = _ext.has('reactNative') ? _ext.reactNative : '+'
-def _compileSdkVersion = _ext.has('compileSdkVersion') ? _ext.compileSdkVersion : 27
-def _buildToolsVersion = _ext.has('buildToolsVersion') ? _ext.buildToolsVersion : '27.0.3'
-def _minSdkVersion = _ext.has('minSdkVersion') ? _ext.minSdkVersion : 16
-def _targetSdkVersion = _ext.has('targetSdkVersion') ? _ext.targetSdkVersion : 27
+def safeExtGet(prop, fallback) {
+  rootProject.ext.has(prop) ? rootProject.ext.get(prop) : fallback
+}

 android {
-  compileSdkVersion _compileSdkVersion
-  buildToolsVersion _buildToolsVersion
+  compileSdkVersion safeExtGet('compileSdkVersion', 27)
+  buildToolsVersion safeExtGet('buildToolsVersion', '27.0.3')

   defaultConfig {
-    minSdkVersion _minSdkVersion
-    targetSdkVersion _targetSdkVersion
+    minSdkVersion safeExtGet('minSdkVersion', 16)
+    targetSdkVersion safeExtGet('targetSdkVersion', 27)
     versionCode 1
     versionName "1.0"
   }
@@ -22,7 +18,7 @@ android {

 dependencies {
   //noinspection GradleDynamicVersion
-  provided "com.facebook.react:react-native:${_reactNativeVersion}"
+  provided "com.facebook.react:react-native:${safeExtGet('reactNativeVersion', '+')}"
   compile 'com.google.android.exoplayer:exoplayer:2.7.3'
   compile('com.google.android.exoplayer:extension-okhttp:2.7.3') {
     exclude group: 'com.squareup.okhttp3', module: 'okhttp'
@@ -11,6 +11,7 @@ import android.text.TextUtils;
 import android.util.Log;
 import android.view.View;
 import android.view.Window;
+import android.view.accessibility.CaptioningManager;
 import android.widget.FrameLayout;

 import com.brentvatne.react.R;
@@ -68,6 +69,7 @@ import java.lang.Math;
 import java.util.Map;
 import java.lang.Object;
 import java.util.ArrayList;
+import java.util.Locale;

 @SuppressLint("ViewConstructor")
 class ReactExoplayerView extends FrameLayout implements
@@ -111,6 +113,9 @@ class ReactExoplayerView extends FrameLayout implements
     private Uri srcUri;
     private String extension;
     private boolean repeat;
+    private String audioTrackType;
+    private Dynamic audioTrackValue;
+    private ReadableArray audioTracks;
     private String textTrackType;
     private Dynamic textTrackValue;
     private ReadableArray textTracks;
@@ -499,20 +504,43 @@ class ReactExoplayerView extends FrameLayout implements
     private void videoLoaded() {
         if (loadVideoStarted) {
             loadVideoStarted = false;
+            setSelectedAudioTrack(audioTrackType, audioTrackValue);
             setSelectedTextTrack(textTrackType, textTrackValue);
             Format videoFormat = player.getVideoFormat();
             int width = videoFormat != null ? videoFormat.width : 0;
             int height = videoFormat != null ? videoFormat.height : 0;
             eventEmitter.load(player.getDuration(), player.getCurrentPosition(), width, height,
-                    getTextTrackInfo());
+                    getAudioTrackInfo(), getTextTrackInfo());
         }
     }

+    private WritableArray getAudioTrackInfo() {
+        WritableArray audioTracks = Arguments.createArray();
+
+        MappingTrackSelector.MappedTrackInfo info = trackSelector.getCurrentMappedTrackInfo();
+        int index = getTrackRendererIndex(C.TRACK_TYPE_AUDIO);
+        if (info == null || index == C.INDEX_UNSET) {
+            return audioTracks;
+        }
+
+        TrackGroupArray groups = info.getTrackGroups(index);
+        for (int i = 0; i < groups.length; ++i) {
+            Format format = groups.get(i).getFormat(0);
+            WritableMap textTrack = Arguments.createMap();
+            textTrack.putInt("index", i);
+            textTrack.putString("title", format.id != null ? format.id : "");
+            textTrack.putString("type", format.sampleMimeType);
+            textTrack.putString("language", format.language != null ? format.language : "");
+            audioTracks.pushMap(textTrack);
+        }
+        return audioTracks;
+    }
+
     private WritableArray getTextTrackInfo() {
         WritableArray textTracks = Arguments.createArray();

         MappingTrackSelector.MappedTrackInfo info = trackSelector.getCurrentMappedTrackInfo();
-        int index = getTextTrackRendererIndex();
+        int index = getTrackRendererIndex(C.TRACK_TYPE_TEXT);
         if (info == null || index == C.INDEX_UNSET) {
             return textTracks;
         }
@@ -645,10 +673,10 @@ class ReactExoplayerView extends FrameLayout implements
         return false;
     }

-    public int getTextTrackRendererIndex() {
+    public int getTrackRendererIndex(int trackType) {
         int rendererCount = player.getRendererCount();
         for (int rendererIndex = 0; rendererIndex < rendererCount; rendererIndex++) {
-            if (player.getRendererType(rendererIndex) == C.TRACK_TYPE_TEXT) {
+            if (player.getRendererType(rendererIndex) == trackType) {
                 return rendererIndex;
             }
         }
@@ -722,12 +750,9 @@ class ReactExoplayerView extends FrameLayout implements
         this.repeat = repeat;
     }

-    public void setSelectedTextTrack(String type, Dynamic value) {
-        textTrackType = type;
-        textTrackValue = value;
-
-        int index = getTextTrackRendererIndex();
-        if (index == C.INDEX_UNSET) {
+    public void setSelectedTrack(int trackType, String type, Dynamic value) {
+        int rendererIndex = getTrackRendererIndex(trackType);
+        if (rendererIndex == C.INDEX_UNSET) {
             return;
         }
         MappingTrackSelector.MappedTrackInfo info = trackSelector.getCurrentMappedTrackInfo();
@@ -735,12 +760,15 @@ class ReactExoplayerView extends FrameLayout implements
             return;
         }

-        TrackGroupArray groups = info.getTrackGroups(index);
+        TrackGroupArray groups = info.getTrackGroups(rendererIndex);
         int trackIndex = C.INDEX_UNSET;

         if (TextUtils.isEmpty(type)) {
-            // Do nothing
-        } else if (type.equals("disabled")) {
-            trackSelector.setSelectionOverride(index, groups, null);
+            type = "default";
+        }
+
+        if (type.equals("disabled")) {
+            trackSelector.setSelectionOverride(rendererIndex, groups, null);
             return;
         } else if (type.equals("language")) {
             for (int i = 0; i < groups.length; ++i) {
@@ -759,11 +787,26 @@ class ReactExoplayerView extends FrameLayout implements
                 }
             }
         } else if (type.equals("index")) {
+            if (value.asInt() < groups.length) {
                 trackIndex = value.asInt();
-        } else { // default. invalid type or "system"
-            trackSelector.clearSelectionOverrides(index);
+            }
+        } else { // default
+            if (rendererIndex == C.TRACK_TYPE_TEXT) { // Use system settings if possible
+                int sdk = android.os.Build.VERSION.SDK_INT;
+                if (sdk > 18 && groups.length > 0) {
+                    CaptioningManager captioningManager
+                            = (CaptioningManager)themedReactContext.getSystemService(Context.CAPTIONING_SERVICE);
+                    if (captioningManager != null && captioningManager.isEnabled()) {
+                        trackIndex = getTrackIndexForDefaultLocale(groups);
+                    }
+                } else {
+                    trackSelector.setSelectionOverride(rendererIndex, groups, null);
                     return;
                 }
+            } else if (rendererIndex == C.TRACK_TYPE_AUDIO) {
+                trackIndex = getTrackIndexForDefaultLocale(groups);
+            }
+        }

         if (trackIndex == C.INDEX_UNSET) {
             trackSelector.clearSelectionOverrides(trackIndex);
@@ -773,7 +816,34 @@ class ReactExoplayerView extends FrameLayout implements
         MappingTrackSelector.SelectionOverride override
                 = new MappingTrackSelector.SelectionOverride(
                         new FixedTrackSelection.Factory(), trackIndex, 0);
-        trackSelector.setSelectionOverride(index, groups, override);
+        trackSelector.setSelectionOverride(rendererIndex, groups, override);
+    }
+
+    private int getTrackIndexForDefaultLocale(TrackGroupArray groups) {
+        int trackIndex = 0; // default if no match
+        String locale2 = Locale.getDefault().getLanguage(); // 2 letter code
+        String locale3 = Locale.getDefault().getISO3Language(); // 3 letter code
+        for (int i = 0; i < groups.length; ++i) {
+            Format format = groups.get(i).getFormat(0);
+            String language = format.language;
+            if (language != null && (language.equals(locale2) || language.equals(locale3))) {
+                trackIndex = i;
+                break;
+            }
+        }
+        return trackIndex;
+    }
+
+    public void setSelectedAudioTrack(String type, Dynamic value) {
+        audioTrackType = type;
+        audioTrackValue = value;
+        setSelectedTrack(C.TRACK_TYPE_AUDIO, audioTrackType, audioTrackValue);
+    }
+
+    public void setSelectedTextTrack(String type, Dynamic value) {
+        textTrackType = type;
+        textTrackValue = value;
+        setSelectedTrack(C.TRACK_TYPE_TEXT, textTrackType, textTrackValue);
     }

     public void setPausedModifier(boolean paused) {
@@ -28,6 +28,9 @@ public class ReactExoplayerViewManager extends ViewGroupManager<ReactExoplayerVi
     private static final String PROP_SRC_HEADERS = "requestHeaders";
     private static final String PROP_RESIZE_MODE = "resizeMode";
     private static final String PROP_REPEAT = "repeat";
+    private static final String PROP_SELECTED_AUDIO_TRACK = "selectedAudioTrack";
+    private static final String PROP_SELECTED_AUDIO_TRACK_TYPE = "type";
+    private static final String PROP_SELECTED_AUDIO_TRACK_VALUE = "value";
     private static final String PROP_SELECTED_TEXT_TRACK = "selectedTextTrack";
     private static final String PROP_SELECTED_TEXT_TRACK_TYPE = "type";
     private static final String PROP_SELECTED_TEXT_TRACK_VALUE = "value";
@@ -127,6 +130,20 @@ public class ReactExoplayerViewManager extends ViewGroupManager<ReactExoplayerVi
         videoView.setRepeatModifier(repeat);
     }

+    @ReactProp(name = PROP_SELECTED_AUDIO_TRACK)
+    public void setSelectedAudioTrack(final ReactExoplayerView videoView,
+                                      @Nullable ReadableMap selectedAudioTrack) {
+        String typeString = null;
+        Dynamic value = null;
+        if (selectedAudioTrack != null) {
+            typeString = selectedAudioTrack.hasKey(PROP_SELECTED_AUDIO_TRACK_TYPE)
+                    ? selectedAudioTrack.getString(PROP_SELECTED_AUDIO_TRACK_TYPE) : null;
+            value = selectedAudioTrack.hasKey(PROP_SELECTED_AUDIO_TRACK_VALUE)
+                    ? selectedAudioTrack.getDynamic(PROP_SELECTED_AUDIO_TRACK_VALUE) : null;
+        }
+        videoView.setSelectedAudioTrack(typeString, value);
+    }
+
     @ReactProp(name = PROP_SELECTED_TEXT_TRACK)
     public void setSelectedTextTrack(final ReactExoplayerView videoView,
                                      @Nullable ReadableMap selectedTextTrack) {
@@ -42,7 +42,7 @@ class VideoEventEmitter {
     private static final String EVENT_BUFFER = "onVideoBuffer";
     private static final String EVENT_IDLE = "onVideoIdle";
     private static final String EVENT_TIMED_METADATA = "onTimedMetadata";
-    private static final String EVENT_AUDIO_BECOMING_NOISY = "onAudioBecomingNoisy";
+    private static final String EVENT_AUDIO_BECOMING_NOISY = "onVideoAudioBecomingNoisy";
     private static final String EVENT_AUDIO_FOCUS_CHANGE = "onAudioFocusChanged";
     private static final String EVENT_PLAYBACK_RATE_CHANGE = "onPlaybackRateChange";

@@ -109,6 +109,7 @@ class VideoEventEmitter {
     private static final String EVENT_PROP_WIDTH = "width";
     private static final String EVENT_PROP_HEIGHT = "height";
     private static final String EVENT_PROP_ORIENTATION = "orientation";
+    private static final String EVENT_PROP_AUDIO_TRACKS = "audioTracks";
     private static final String EVENT_PROP_TEXT_TRACKS = "textTracks";
     private static final String EVENT_PROP_HAS_AUDIO_FOCUS = "hasAudioFocus";
     private static final String EVENT_PROP_IS_BUFFERING = "isBuffering";
@@ -130,7 +131,7 @@ class VideoEventEmitter {
     }

     void load(double duration, double currentPosition, int videoWidth, int videoHeight,
-              WritableArray textTracks) {
+              WritableArray audioTracks, WritableArray textTracks) {
         WritableMap event = Arguments.createMap();
         event.putDouble(EVENT_PROP_DURATION, duration / 1000D);
         event.putDouble(EVENT_PROP_CURRENT_TIME, currentPosition / 1000D);
@@ -145,6 +146,7 @@ class VideoEventEmitter {
         }
         event.putMap(EVENT_PROP_NATURAL_SIZE, naturalSize);

+        event.putArray(EVENT_PROP_AUDIO_TRACKS, audioTracks);
         event.putArray(EVENT_PROP_TEXT_TRACKS, textTracks);

         // TODO: Actually check if you can.
@@ -1,20 +1,16 @@
 apply plugin: 'com.android.library'

-def _ext = rootProject.ext
-
-def _reactNativeVersion = _ext.has('reactNative') ? _ext.reactNative : '+'
-def _compileSdkVersion = _ext.has('compileSdkVersion') ? _ext.compileSdkVersion : 27
-def _buildToolsVersion = _ext.has('buildToolsVersion') ? _ext.buildToolsVersion : '27.0.3'
-def _minSdkVersion = _ext.has('minSdkVersion') ? _ext.minSdkVersion : 16
-def _targetSdkVersion = _ext.has('targetSdkVersion') ? _ext.targetSdkVersion : 27
+def safeExtGet(prop, fallback) {
+  rootProject.ext.has(prop) ? rootProject.ext.get(prop) : fallback
+}

 android {
-  compileSdkVersion _compileSdkVersion
-  buildToolsVersion _buildToolsVersion
+  compileSdkVersion safeExtGet('compileSdkVersion', 27)
+  buildToolsVersion safeExtGet('buildToolsVersion', '27.0.3')

   defaultConfig {
-    minSdkVersion _minSdkVersion
-    targetSdkVersion _targetSdkVersion
+    minSdkVersion safeExtGet('minSdkVersion', 16)
+    targetSdkVersion safeExtGet('targetSdkVersion', 27)
     versionCode 1
     versionName "1.0"
     ndk {
@@ -25,6 +21,6 @@ android {

 dependencies {
   //noinspection GradleDynamicVersion
-  provided "com.facebook.react:react-native:${_reactNativeVersion}"
+  provided "com.facebook.react:react-native:${safeExtGet('reactNativeVersion', '+')}"
   compile 'com.yqritc:android-scalablevideoview:1.0.4'
 }
@@ -1,6 +1,7 @@
 package com.brentvatne.react;

 import android.annotation.SuppressLint;
+import android.app.Activity;
 import android.content.res.AssetFileDescriptor;
 import android.graphics.Matrix;
 import android.media.MediaPlayer;
@@ -10,6 +11,8 @@ import android.os.Handler;
 import android.util.Log;
 import android.view.MotionEvent;
 import android.view.WindowManager;
+import android.view.View;
+import android.view.Window;
 import android.webkit.CookieManager;
 import android.widget.MediaController;

@@ -47,7 +50,11 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP
         EVENT_END("onVideoEnd"),
         EVENT_STALLED("onPlaybackStalled"),
         EVENT_RESUME("onPlaybackResume"),
-        EVENT_READY_FOR_DISPLAY("onReadyForDisplay");
+        EVENT_READY_FOR_DISPLAY("onReadyForDisplay"),
+        EVENT_FULLSCREEN_WILL_PRESENT("onVideoFullscreenPlayerWillPresent"),
+        EVENT_FULLSCREEN_DID_PRESENT("onVideoFullscreenPlayerDidPresent"),
+        EVENT_FULLSCREEN_WILL_DISMISS("onVideoFullscreenPlayerWillDismiss"),
+        EVENT_FULLSCREEN_DID_DISMISS("onVideoFullscreenPlayerDidDismiss");

         private final String mName;

@@ -106,6 +113,7 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP
     private float mActiveRate = 1.0f;
     private boolean mPlayInBackground = false;
     private boolean mBackgroundPaused = false;
+    private boolean mIsFullscreen = false;

     private int mMainVer = 0;
     private int mPatchVer = 0;
@@ -208,6 +216,9 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP
             mMediaPlayerValid = false;
             release();
         }
+        if (mIsFullscreen) {
+            setFullscreen(false);
+        }
     }

     public void setSrc(final String uriString, final String type, final boolean isNetwork, final boolean isAsset, final ReadableMap requestHeaders) {
@@ -441,6 +452,39 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP
         }
     }

+    public void setFullscreen(boolean isFullscreen) {
+        if (isFullscreen == mIsFullscreen) {
+            return; // Avoid generating events when nothing is changing
+        }
+        mIsFullscreen = isFullscreen;
+
+        Activity activity = mThemedReactContext.getCurrentActivity();
+        if (activity == null) {
+            return;
+        }
+        Window window = activity.getWindow();
+        View decorView = window.getDecorView();
+        int uiOptions;
+        if (mIsFullscreen) {
+            if (Build.VERSION.SDK_INT >= 19) { // 4.4+
+                uiOptions = SYSTEM_UI_FLAG_HIDE_NAVIGATION
+                        | SYSTEM_UI_FLAG_IMMERSIVE_STICKY
+                        | SYSTEM_UI_FLAG_FULLSCREEN;
+            } else {
+                uiOptions = SYSTEM_UI_FLAG_HIDE_NAVIGATION
+                        | SYSTEM_UI_FLAG_FULLSCREEN;
+            }
+            mEventEmitter.receiveEvent(getId(), Events.EVENT_FULLSCREEN_WILL_PRESENT.toString(), null);
+            decorView.setSystemUiVisibility(uiOptions);
+            mEventEmitter.receiveEvent(getId(), Events.EVENT_FULLSCREEN_DID_PRESENT.toString(), null);
+        } else {
+            uiOptions = View.SYSTEM_UI_FLAG_VISIBLE;
+            mEventEmitter.receiveEvent(getId(), Events.EVENT_FULLSCREEN_WILL_DISMISS.toString(), null);
+            decorView.setSystemUiVisibility(uiOptions);
+            mEventEmitter.receiveEvent(getId(), Events.EVENT_FULLSCREEN_DID_DISMISS.toString(), null);
+        }
+    }
+
     public void applyModifiers() {
         setResizeModeModifier(mResizeMode);
         setRepeatModifier(mRepeat);
@@ -35,6 +35,7 @@ public class ReactVideoViewManager extends SimpleViewManager<ReactVideoView> {
     public static final String PROP_PROGRESS_UPDATE_INTERVAL = "progressUpdateInterval";
     public static final String PROP_SEEK = "seek";
     public static final String PROP_RATE = "rate";
+    public static final String PROP_FULLSCREEN = "fullscreen";
     public static final String PROP_PLAY_IN_BACKGROUND = "playInBackground";
     public static final String PROP_CONTROLS = "controls";

@@ -148,6 +149,11 @@ public class ReactVideoViewManager extends SimpleViewManager<ReactVideoView> {
         videoView.setRateModifier(rate);
     }

+    @ReactProp(name = PROP_FULLSCREEN, defaultBoolean = false)
+    public void setFullscreen(final ReactVideoView videoView, final boolean fullscreen) {
+        videoView.setFullscreen(fullscreen);
+    }
+
     @ReactProp(name = PROP_PLAY_IN_BACKGROUND, defaultBoolean = false)
     public void setPlayInBackground(final ReactVideoView videoView, final boolean playInBackground) {
         videoView.setPlayInBackground(playInBackground);
@@ -17,6 +17,7 @@
 @property (nonatomic, copy) RCTBubblingEventBlock onVideoSeek;
 @property (nonatomic, copy) RCTBubblingEventBlock onVideoEnd;
 @property (nonatomic, copy) RCTBubblingEventBlock onTimedMetadata;
+@property (nonatomic, copy) RCTBubblingEventBlock onVideoAudioBecomingNoisy;
 @property (nonatomic, copy) RCTBubblingEventBlock onVideoFullscreenPlayerWillPresent;
 @property (nonatomic, copy) RCTBubblingEventBlock onVideoFullscreenPlayerDidPresent;
 @property (nonatomic, copy) RCTBubblingEventBlock onVideoFullscreenPlayerWillDismiss;
ios/RCTVideo.m
@@ -3,6 +3,8 @@
 #import <React/RCTBridgeModule.h>
 #import <React/RCTEventDispatcher.h>
 #import <React/UIView+React.h>
+#include <MediaAccessibility/MediaAccessibility.h>
+#include <AVFoundation/AVFoundation.h>

 static NSString *const statusKeyPath = @"status";
 static NSString *const playbackLikelyToKeepUpKeyPath = @"playbackLikelyToKeepUp";
@@ -43,7 +45,9 @@ static NSString *const timedMetadata = @"timedMetadata";
   BOOL _paused;
   BOOL _repeat;
   BOOL _allowsExternalPlayback;
+  NSArray * _textTracks;
   NSDictionary * _selectedTextTrack;
+  NSDictionary * _selectedAudioTrack;
   BOOL _playbackStalled;
   BOOL _playInBackground;
   BOOL _playWhenInactive;
@@ -88,6 +92,11 @@ static NSString *const timedMetadata = @"timedMetadata";
                                              selector:@selector(applicationWillEnterForeground:)
                                                  name:UIApplicationWillEnterForegroundNotification
                                                object:nil];
+
+    [[NSNotificationCenter defaultCenter] addObserver:self
+                                             selector:@selector(audioRouteChanged:)
+                                                 name:AVAudioSessionRouteChangeNotification
+                                               object:nil];
   }

   return self;
@@ -187,6 +196,17 @@ static NSString *const timedMetadata = @"timedMetadata";
   }
 }

+#pragma mark - Audio events
+
+- (void)audioRouteChanged:(NSNotification *)notification
+{
+  NSNumber *reason = [[notification userInfo] objectForKey:AVAudioSessionRouteChangeReasonKey];
+  NSNumber *previousRoute = [[notification userInfo] objectForKey:AVAudioSessionRouteChangePreviousRouteKey];
+  if (reason.unsignedIntValue == AVAudioSessionRouteChangeReasonOldDeviceUnavailable) {
+    self.onVideoAudioBecomingNoisy(@{@"target": self.reactTag});
+  }
+}
+
 #pragma mark - Progress

 - (void)sendProgressUpdate
@@ -284,6 +304,10 @@ static NSString *const timedMetadata = @"timedMetadata";
   [self removePlayerLayer];
   [self removePlayerTimeObserver];
   [self removePlayerItemObservers];
+
+  dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
+
+    // perform on next run loop, otherwise other passed react-props may not be set
   _playerItem = [self playerItemForSource:source];
   [self addPlayerItemObservers];

@@ -304,7 +328,6 @@ static NSString *const timedMetadata = @"timedMetadata";

   [self addPlayerTimeObserver];

-  dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
   //Perform on next run loop, otherwise onVideoLoadStart is nil
   if(self.onVideoLoadStart) {
     id uri = [source objectForKey:@"uri"];
@@ -316,42 +339,95 @@ static NSString *const timedMetadata = @"timedMetadata";
         @"target": self.reactTag
       });
     }
   });
   _videoLoadStarted = YES;
 }

+- (NSURL*) urlFilePath:(NSString*) filepath {
+  NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
+
+  NSString* relativeFilePath = [filepath lastPathComponent];
+  // the file may be multiple levels below the documents directory
+  NSArray* fileComponents = [filepath componentsSeparatedByString:@"Documents/"];
+  if (fileComponents.count>1) {
+    relativeFilePath = [fileComponents objectAtIndex:1];
+  }
+
+  NSString *path = [paths.firstObject stringByAppendingPathComponent:relativeFilePath];
+  if ([[NSFileManager defaultManager] fileExistsAtPath:path]) {
+    return [NSURL fileURLWithPath:path];
+  }
+  return nil;
+}
+
 - (AVPlayerItem*)playerItemForSource:(NSDictionary *)source
 {
   bool isNetwork = [RCTConvert BOOL:[source objectForKey:@"isNetwork"]];
   bool isAsset = [RCTConvert BOOL:[source objectForKey:@"isAsset"]];
   NSString *uri = [source objectForKey:@"uri"];
   NSString *type = [source objectForKey:@"type"];
-  NSDictionary *headers = [source objectForKey:@"requestHeaders"];

-  NSURL *url = (isNetwork || isAsset) ?
-    [NSURL URLWithString:uri] :
-    [[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]];
+  AVURLAsset *asset;
+  NSMutableDictionary *assetOptions = [[NSMutableDictionary alloc] init];

   if (isNetwork) {
-    NSMutableDictionary *assetOptions = [[NSMutableDictionary alloc]init];
     /* Per #1091, this is not a public API. We need to either get approval from Apple to use this
      * or use a different approach.
+    NSDictionary *headers = [source objectForKey:@"requestHeaders"];
     if ([headers count] > 0) {
       [assetOptions setObject:headers forKey:@"AVURLAssetHTTPHeaderFieldsKey"];
     }
     */
     NSArray *cookies = [[NSHTTPCookieStorage sharedHTTPCookieStorage] cookies];
     [assetOptions setObject:cookies forKey:AVURLAssetHTTPCookiesKey];
-    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:assetOptions];
-    return [AVPlayerItem playerItemWithAsset:asset];
+    asset = [AVURLAsset URLAssetWithURL:[NSURL URLWithString:uri] options:assetOptions];
+  } else if (isAsset) { // assets on iOS have to be in the Documents folder
+    asset = [AVURLAsset URLAssetWithURL:[self urlFilePath:uri] options:nil];
+  } else { // file passed in through JS, or an asset in the Xcode project
+    asset = [AVURLAsset URLAssetWithURL:[[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]] options:nil];
   }
-  else if (isAsset) {
-    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
+
+  if (!_textTracks) {
     return [AVPlayerItem playerItemWithAsset:asset];
   }

-  return [AVPlayerItem playerItemWithURL:url];
+  // sideload text tracks
+  AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
+
+  AVAssetTrack *videoAsset = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
+  AVMutableCompositionTrack *videoCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
+  [videoCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration)
+                          ofTrack:videoAsset
+                           atTime:kCMTimeZero
+                            error:nil];
+
+  AVAssetTrack *audioAsset = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
+  AVMutableCompositionTrack *audioCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
+  [audioCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration)
+                          ofTrack:audioAsset
+                           atTime:kCMTimeZero
+                            error:nil];
+
+  for (int i = 0; i < _textTracks.count; ++i) {
+    AVURLAsset *textURLAsset;
+    NSString *textUri = [_textTracks objectAtIndex:i][@"uri"];
+    if ([[textUri lowercaseString] hasPrefix:@"http"]) {
+      textURLAsset = [AVURLAsset URLAssetWithURL:[NSURL URLWithString:textUri] options:assetOptions];
+    } else {
+      textURLAsset = [AVURLAsset URLAssetWithURL:[self urlFilePath:textUri] options:nil];
+    }
+    AVAssetTrack *textTrackAsset = [textURLAsset tracksWithMediaType:AVMediaTypeText].firstObject;
+    AVMutableCompositionTrack *textCompTrack = [mixComposition
+                                                addMutableTrackWithMediaType:AVMediaTypeText
+                                                preferredTrackID:kCMPersistentTrackID_Invalid];
+    [textCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration)
+                           ofTrack:textTrackAsset
+                            atTime:kCMTimeZero
+                             error:nil];
+  }
+
+  return [AVPlayerItem playerItemWithAsset:mixComposition];
 }

 - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
@ -363,7 +439,7 @@ static NSString *const timedMetadata = @"timedMetadata";
|
|||||||
if (items && ![items isEqual:[NSNull null]] && items.count > 0) {
|
if (items && ![items isEqual:[NSNull null]] && items.count > 0) {
|
||||||
NSMutableArray *array = [NSMutableArray new];
|
NSMutableArray *array = [NSMutableArray new];
|
||||||
for (AVMetadataItem *item in items) {
|
for (AVMetadataItem *item in items) {
|
||||||
NSString *value = item.value;
|
NSString *value = (NSString *)item.value;
|
||||||
NSString *identifier = item.identifier;
|
NSString *identifier = item.identifier;
|
||||||
|
|
||||||
if (![value isEqual: [NSNull null]]) {
|
if (![value isEqual: [NSNull null]]) {
|
||||||
@@ -423,6 +499,7 @@ static NSString *const timedMetadata = @"timedMetadata";
                       @"height": height,
                       @"orientation": orientation
                     },
                     @"audioTracks": [self getAudioTrackInfo],
                     @"textTracks": [self getTextTrackInfo],
                     @"target": self.reactTag});
}
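The load event payload now carries `audioTracks` (built by `getAudioTrackInfo` later in this diff) alongside `textTracks`. A hedged sketch of consuming it from JavaScript; the `onLoad` prop name and field names are assumed from this diff rather than quoted from the docs:

```javascript
// Hypothetical onLoad handler reading the new track arrays from the event payload.
onLoad = (data) => {
  // e.g. [{ index: 0, title: 'Main', language: 'en' }, ...]
  console.log('audioTracks', data.audioTracks);
  console.log('textTracks', data.textTracks);
};

// In render():
<Video
  source={{ uri: 'https://example.com/stream.m3u8' }}
  onLoad={this.onLoad}
  style={{ width: 320, height: 180 }}
/>
```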
@@ -654,6 +731,7 @@ static NSString *const timedMetadata = @"timedMetadata";
    [_player setMuted:NO];
  }

  [self setSelectedAudioTrack:_selectedAudioTrack];
  [self setSelectedTextTrack:_selectedTextTrack];
  [self setResizeMode:_resizeMode];
  [self setRepeat:_repeat];
@@ -666,17 +744,18 @@ static NSString *const timedMetadata = @"timedMetadata";
  _repeat = repeat;
}

- (void)setMediaSelectionTrackForCharacteristic:(AVMediaCharacteristic)characteristic
                                   withCriteria:(NSDictionary *)criteria
{
  NSString *type = criteria[@"type"];
  AVMediaSelectionGroup *group = [_player.currentItem.asset
                                  mediaSelectionGroupForMediaCharacteristic:characteristic];
  AVMediaSelectionOption *mediaOption;

  if ([type isEqualToString:@"disabled"]) {
    // Do nothing. We want to ensure option is nil
  } else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) {
    NSString *value = criteria[@"value"];
    for (int i = 0; i < group.options.count; ++i) {
      AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i];
      NSString *optionValue;
@@ -688,17 +767,17 @@ static NSString *const timedMetadata = @"timedMetadata";
                       objectAtIndex:0];
      }
      if ([value isEqualToString:optionValue]) {
        mediaOption = currentOption;
        break;
      }
    }
  //} else if ([type isEqualToString:@"default"]) {
  //  option = group.defaultOption; */
  } else if ([type isEqualToString:@"index"]) {
    if ([criteria[@"value"] isKindOfClass:[NSNumber class]]) {
      int index = [criteria[@"value"] intValue];
      if (group.options.count > index) {
        mediaOption = [group.options objectAtIndex:index];
      }
    }
  } else { // default. invalid type or "system"
@@ -707,11 +786,173 @@ static NSString *const timedMetadata = @"timedMetadata";
  }

  // If a match isn't found, option will be nil and text tracks will be disabled
  [_player.currentItem selectMediaOption:mediaOption inMediaSelectionGroup:group];
}
- (void)setSelectedAudioTrack:(NSDictionary *)selectedAudioTrack {
  _selectedAudioTrack = selectedAudioTrack;
  [self setMediaSelectionTrackForCharacteristic:AVMediaCharacteristicAudible
                                   withCriteria:_selectedAudioTrack];
}

- (void)setSelectedTextTrack:(NSDictionary *)selectedTextTrack {
  _selectedTextTrack = selectedTextTrack;
  if (_textTracks) { // sideloaded text tracks
    [self setSideloadedText];
  } else { // text tracks included in the HLS playlist
    [self setMediaSelectionTrackForCharacteristic:AVMediaCharacteristicLegible
                                     withCriteria:_selectedTextTrack];
  }
}
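Both setters take the same `{ type, value }` criteria dictionary from JavaScript. A hedged sketch of what those props might look like, inferred from the `disabled` / `language` / `title` / `index` branches handled in this diff (not copied from the docs):

```javascript
// Hypothetical selection examples matching the type/value branches above.
<Video
  source={{ uri: 'https://example.com/stream.m3u8' }}
  selectedAudioTrack={{ type: 'language', value: 'fr' }}   // or { type: 'index', value: 0 }
  selectedTextTrack={{ type: 'title', value: 'English CC' }} // or { type: 'disabled' }
  style={{ width: 320, height: 180 }}
/>
```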
- (void) setSideloadedText {
  NSString *type = _selectedTextTrack[@"type"];
  NSArray *textTracks = [self getTextTrackInfo];

  // The first few tracks will be audio & video track
  int firstTextIndex = 0;
  for (firstTextIndex = 0; firstTextIndex < _player.currentItem.tracks.count; ++firstTextIndex) {
    if ([_player.currentItem.tracks[firstTextIndex].assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible]) {
      break;
    }
  }

  int selectedTrackIndex = -1;

  if ([type isEqualToString:@"disabled"]) {
    // Do nothing. We want to ensure option is nil
  } else if ([type isEqualToString:@"language"]) {
    NSString *selectedValue = _selectedTextTrack[@"value"];
    for (int i = 0; i < textTracks.count; ++i) {
      NSDictionary *currentTextTrack = [textTracks objectAtIndex:i];
      if ([selectedValue isEqualToString:currentTextTrack[@"language"]]) {
        selectedTrackIndex = i;
        break;
      }
    }
  } else if ([type isEqualToString:@"title"]) {
    NSString *selectedValue = _selectedTextTrack[@"value"];
    for (int i = 0; i < textTracks.count; ++i) {
      NSDictionary *currentTextTrack = [textTracks objectAtIndex:i];
      if ([selectedValue isEqualToString:currentTextTrack[@"title"]]) {
        selectedTrackIndex = i;
        break;
      }
    }
  } else if ([type isEqualToString:@"index"]) {
    if ([_selectedTextTrack[@"value"] isKindOfClass:[NSNumber class]]) {
      int index = [_selectedTextTrack[@"value"] intValue];
      if (textTracks.count > index) {
        selectedTrackIndex = index;
      }
    }
  }

  // user's selected language might not be available, or system defaults have captions enabled
  if (selectedTrackIndex == -1 || [type isEqualToString:@"default"]) {
    CFArrayRef captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(kMACaptionAppearanceDomainUser);
    NSArray *captionSettings = (__bridge NSArray*)captioningMediaCharacteristics;
    if ([captionSettings containsObject: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility]) {
      // iterate through the textTracks to find a matching option, or default to the first object.
      selectedTrackIndex = 0;

      NSString * systemLanguage = [[NSLocale preferredLanguages] firstObject];
      for (int i = 0; i < textTracks.count; ++i) {
        NSDictionary *currentTextTrack = [textTracks objectAtIndex:i];
        if ([systemLanguage isEqualToString:currentTextTrack[@"language"]]) {
          selectedTrackIndex = i;
          break;
        }
      }
    }
  }

  for (int i = firstTextIndex; i < _player.currentItem.tracks.count; ++i) {
    BOOL isEnabled = i == selectedTrackIndex + firstTextIndex;
    [_player.currentItem.tracks[i] setEnabled:isEnabled];
  }
}
-(void) setStreamingText {
  NSString *type = _selectedTextTrack[@"type"];
  AVMediaSelectionGroup *group = [_player.currentItem.asset
                                  mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
  AVMediaSelectionOption *mediaOption;

  if ([type isEqualToString:@"disabled"]) {
    // Do nothing. We want to ensure option is nil
  } else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) {
    NSString *value = _selectedTextTrack[@"value"];
    for (int i = 0; i < group.options.count; ++i) {
      AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i];
      NSString *optionValue;
      if ([type isEqualToString:@"language"]) {
        optionValue = [currentOption extendedLanguageTag];
      } else {
        optionValue = [[[currentOption commonMetadata]
                        valueForKey:@"value"]
                       objectAtIndex:0];
      }
      if ([value isEqualToString:optionValue]) {
        mediaOption = currentOption;
        break;
      }
    }
  //} else if ([type isEqualToString:@"default"]) {
  //  option = group.defaultOption; */
  } else if ([type isEqualToString:@"index"]) {
    if ([_selectedTextTrack[@"value"] isKindOfClass:[NSNumber class]]) {
      int index = [_selectedTextTrack[@"value"] intValue];
      if (group.options.count > index) {
        mediaOption = [group.options objectAtIndex:index];
      }
    }
  } else { // default. invalid type or "system"
    [_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group];
    return;
  }

  // If a match isn't found, option will be nil and text tracks will be disabled
  [_player.currentItem selectMediaOption:mediaOption inMediaSelectionGroup:group];
}
- (void)setTextTracks:(NSArray*) textTracks;
{
  _textTracks = textTracks;

  // in case textTracks was set after selectedTextTrack
  if (_selectedTextTrack) [self setSelectedTextTrack:_selectedTextTrack];
}
- (NSArray *)getAudioTrackInfo
{
  NSMutableArray *audioTracks = [[NSMutableArray alloc] init];
  AVMediaSelectionGroup *group = [_player.currentItem.asset
                                  mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
  for (int i = 0; i < group.options.count; ++i) {
    AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i];
    NSString *title = @"";
    NSArray *values = [[currentOption commonMetadata] valueForKey:@"value"];
    if (values.count > 0) {
      title = [values objectAtIndex:0];
    }
    NSString *language = [currentOption extendedLanguageTag] ? [currentOption extendedLanguageTag] : @"";
    NSDictionary *audioTrack = @{
      @"index": [NSNumber numberWithInt:i],
      @"title": title,
      @"language": language
    };
    [audioTracks addObject:audioTrack];
  }
  return audioTracks;
}
- (NSArray *)getTextTrackInfo
{
  // if sideloaded, textTracks will already be set
  if (_textTracks) return _textTracks;

  // if streaming video, we extract the text tracks
  NSMutableArray *textTracks = [[NSMutableArray alloc] init];
  AVMediaSelectionGroup *group = [_player.currentItem.asset
                                  mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
@@ -23,7 +23,9 @@ RCT_EXPORT_VIEW_PROPERTY(src, NSDictionary);
RCT_EXPORT_VIEW_PROPERTY(resizeMode, NSString);
RCT_EXPORT_VIEW_PROPERTY(repeat, BOOL);
RCT_EXPORT_VIEW_PROPERTY(allowsExternalPlayback, BOOL);
RCT_EXPORT_VIEW_PROPERTY(textTracks, NSArray);
RCT_EXPORT_VIEW_PROPERTY(selectedTextTrack, NSDictionary);
RCT_EXPORT_VIEW_PROPERTY(selectedAudioTrack, NSDictionary);
RCT_EXPORT_VIEW_PROPERTY(paused, BOOL);
RCT_EXPORT_VIEW_PROPERTY(muted, BOOL);
RCT_EXPORT_VIEW_PROPERTY(controls, BOOL);
@@ -45,6 +47,7 @@ RCT_EXPORT_VIEW_PROPERTY(onVideoProgress, RCTBubblingEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onVideoSeek, RCTBubblingEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onVideoEnd, RCTBubblingEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onTimedMetadata, RCTBubblingEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onVideoAudioBecomingNoisy, RCTBubblingEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onVideoFullscreenPlayerWillPresent, RCTBubblingEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onVideoFullscreenPlayerDidPresent, RCTBubblingEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onVideoFullscreenPlayerWillDismiss, RCTBubblingEventBlock);
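The new `onVideoAudioBecomingNoisy` event block backs the onAudioBecomingNoisy support mentioned in the changelog (#1131). A hedged sketch of reacting to it from JavaScript, assuming the JS-side prop is named `onAudioBecomingNoisy` as it is on Android; pausing is only an illustrative response:

```javascript
// Hypothetical: pause when the audio route becomes "noisy" (e.g. headphones unplugged).
onAudioBecomingNoisy = () => {
  this.setState({ paused: true });
};

// In render():
<Video
  source={{ uri: 'https://example.com/video.mp4' }}
  paused={this.state.paused}
  onAudioBecomingNoisy={this.onAudioBecomingNoisy}
  style={{ width: 320, height: 180 }}
/>
```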
@@ -1,6 +1,6 @@
{
  "name": "react-native-video",
  "version": "3.1.0",
  "description": "A <Video /> element for react-native",
  "main": "Video.js",
  "license": "MIT",