Merge branch 'master' into master
commit c67dd7b8ef

CHANGELOG.md | 16
@@ -1,5 +1,21 @@
## Changelog

### Next Version
* Inherit Android buildtools and SDK version from the root project [#1081](https://github.com/react-native-community/react-native-video/pull/1081)
* Automatically play on ExoPlayer when the paused prop is not set [#1083](https://github.com/react-native-community/react-native-video/pull/1083)
* Preserve Android MediaPlayer paused prop when backgrounding [#1082](https://github.com/react-native-community/react-native-video/pull/1082)

### Version 2.3.1
* Revert PR to inherit Android SDK versions from root project. Re-add in 3.0 [#1080](https://github.com/react-native-community/react-native-video/pull/1080)

### Version 2.3.0
* Support allowsExternalPlayback on iOS [#1057](https://github.com/react-native-community/react-native-video/pull/1057)
* Inherit Android buildtools and SDK version from the root project [#999](https://github.com/react-native-community/react-native-video/pull/999)
* Fix bug that caused ExoPlayer to start paused if playInBackground was set [#833](https://github.com/react-native-community/react-native-video/pull/833)
* Fix crash if clearing an observer on iOS that was already cleared [#1075](https://github.com/react-native-community/react-native-video/pull/1075)
* Add audioOnly prop for music files [#1039](https://github.com/react-native-community/react-native-video/pull/1039)
* Support seeking with more exact tolerance on iOS [#1076](https://github.com/react-native-community/react-native-video/pull/1076)

### Version 2.2.0
* Text track selection support for iOS & ExoPlayer [#1049](https://github.com/react-native-community/react-native-video/pull/1049)
* Support outputting to a TextureView on Android ExoPlayer [#1058](https://github.com/react-native-community/react-native-video/pull/1058)

README.md | 211
@@ -5,10 +5,14 @@ A `<Video>` component for react-native, as seen in

Requires react-native >= 0.40.0; for RN 0.19.0 - 0.39.0 support, please use a pre-1.0 version.

### Version 3.0 breaking changes
Version 3.0 features a number of changes to existing behavior. See [Updating](#updating) for changes.

## TOC

* [Installation](#installation)
* [Usage](#usage)
* [Updating](#updating)

## Installation

@@ -191,8 +195,6 @@ using System.Collections.Generic;
       onFullscreenPlayerDidPresent={this.fullScreenPlayerDidPresent} // Callback after fullscreen started
       onFullscreenPlayerWillDismiss={this.fullScreenPlayerWillDismiss} // Callback before fullscreen stops
       onFullscreenPlayerDidDismiss={this.fullScreenPlayerDidDismiss} // Callback after fullscreen stopped
       onLoadStart={this.loadStart} // Callback when video starts to load
       onLoad={this.setDuration} // Callback when video loads
       onProgress={this.setTime} // Callback every ~250ms with currentTime
       onTimedMetadata={this.onTimedMetadata} // Callback when the stream receives some metadata
       style={styles.backgroundVideo} />
@@ -219,6 +221,8 @@ var styles = StyleSheet.create({
```

### Configurable props
* [allowsExternalPlayback](#allowsexternalplayback)
* [audioOnly](#audioonly)
* [ignoreSilentSwitch](#ignoresilentswitch)
* [muted](#muted)
* [paused](#paused)
@@ -232,9 +236,35 @@ var styles = StyleSheet.create({
* [resizeMode](#resizemode)
* [selectedTextTrack](#selectedtexttrack)
* [stereoPan](#stereopan)
* [textTracks](#texttracks)
* [useTextureView](#usetextureview)
* [volume](#volume)

### Event props
* [onLoad](#onload)
* [onLoadStart](#onloadstart)

### Methods
* [seek](#seek)

### Configurable props

#### allowsExternalPlayback
Indicates whether the player allows switching to external playback mode such as AirPlay or HDMI.
* **true (default)** - allow switching to external playback mode
* **false** - Don't allow switching to external playback mode

Platforms: iOS
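
For example, to keep playback on the device and not offer AirPlay/HDMI handoff (a minimal sketch; the source URI and style are placeholders):
```
<Video
  source={{ uri: "https://example.com/video.mp4" }} // placeholder URI
  allowsExternalPlayback={false} // playback stays on the device
  style={styles.backgroundVideo}
/>
```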

#### audioOnly
Indicates whether the player should only play the audio track and, instead of displaying the video track, show the poster.
* **false (default)** - Display the video as normal
* **true** - Show the poster and play the audio

For this to work, the poster prop must be set.

Platforms: all
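
For example, to play a music file while showing its cover art (a sketch; the URIs are placeholders):
```
<Video
  source={{ uri: "https://example.com/track.mp3" }} // placeholder audio URI
  poster="https://example.com/cover.png"            // shown instead of a video track
  audioOnly={true}                                  // only the audio is played
  style={styles.audioPlayer}
/>
```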

#### ignoreSilentSwitch
Controls the iOS silent switch behavior
* **"inherit" (default)** - Use the default AVPlayer behavior
@@ -351,7 +381,7 @@ Type | Value | Description
"language" | string | Display the text track with the language specified as the Value, e.g. "fr"
"index" | number | Display the text track with the index specified as the value, e.g. 0

Both iOS & Android offer Settings to enable Captions for hearing impaired people. If "system" is selected and the Captions Setting is enabled, iOS/Android will look for a caption that matches that customer's language and display it.
Both iOS & Android (only 4.4 and higher) offer Settings to enable Captions for hearing impaired people. If "system" is selected and the Captions Setting is enabled, iOS/Android will look for a caption that matches that customer's language and display it.

If a track matching the specified Type (and Value if appropriate) is unavailable, no text track will be displayed. If multiple tracks match the criteria, the first match will be used.
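
For example, to request French captions by language code (a sketch based on the table above; the source URI is a placeholder):
```
<Video
  source={{ uri: "https://example.com/video.m3u8" }} // placeholder URI
  selectedTextTrack={{
    type: "language", // see the table above for the other accepted types
    value: "fr"       // 2 letter language code
  }}
/>
```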

@@ -365,6 +395,40 @@ Adjust the balance of the left and right audio channels. Any value between –1

Platforms: Android MediaPlayer
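
A quick sketch, assuming the usual -1.0 (full left) to 1.0 (full right) range with 0.0 centered:
```
<Video
  source={{ uri: "https://example.com/video.mp4" }} // placeholder URI
  stereoPan={-1.0} // pan the audio fully to the left channel
/>
```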

#### textTracks
Load one or more "sidecar" text tracks. This takes an array of objects representing each track. Each object should have the format:

Property | Description
--- | ---
title | Descriptive name for the track
language | 2 letter [ISO 639-1 code](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) representing the language
type | Mime type of the track<br> * TextTrackType.SRT - .srt SubRip Subtitle<br> * TextTrackType.TTML - .ttml TTML<br> * TextTrackType.VTT - .vtt WebVTT
uri | URL for the text track. Currently, only tracks hosted on a webserver are supported

Example:
```
import Video, { TextTrackType } from 'react-native-video';

textTracks={[
  {
    title: "English CC",
    language: "en",
    type: TextTrackType.VTT, // "text/vtt"
    uri: "https://bitdash-a.akamaihd.net/content/sintel/subtitles/subtitles_en.vtt"
  },
  {
    title: "Spanish Subtitles",
    language: "es",
    type: TextTrackType.SRT, // "application/x-subrip"
    uri: "https://durian.blender.org/wp-content/content/subtitles/sintel_es.srt"
  }
]}
```

This isn't supported on iOS because AVPlayer doesn't support it. Text tracks must be loaded as part of an HLS playlist.

Platforms: Android ExoPlayer

#### useTextureView
Output to a TextureView instead of the default SurfaceView. In general, you will want to use SurfaceView because it is more efficient and provides better performance. However, SurfaceView has two limitations:
* It can't be animated, transformed or scaled
@@ -385,6 +449,107 @@ Adjust the volume.

Platforms: all

### Event props

#### onLoad
Callback function that is called when the media is loaded and ready to play.

Payload:

Property | Type | Description
--- | --- | ---
currentTime | number | Time in seconds where the media will start
duration | number | Length of the media in seconds
naturalSize | object | Properties:<br> * width - Width in pixels that the video was encoded at<br> * height - Height in pixels that the video was encoded at<br> * orientation - "portrait" or "landscape"
textTracks | array | An array of text track info objects with the following properties:<br> * index - Index number<br> * title - Description of the track<br> * language - 2 letter [ISO 639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) language code<br> * type - Mime type of track

Example:
```
{
  canPlaySlowForward: true,
  canPlayReverse: false,
  canPlaySlowReverse: false,
  canPlayFastForward: false,
  canStepForward: false,
  canStepBackward: false,
  currentTime: 0,
  duration: 5910.208984375,
  naturalSize: {
    height: 1080,
    orientation: 'landscape',
    width: 1920
  },
  textTracks: [
    { title: '#1 French', language: 'fr', index: 0, type: 'text/vtt' },
    { title: '#2 English CC', language: 'en', index: 1, type: 'text/vtt' },
    { title: '#3 English Director Commentary', language: 'en', index: 2, type: 'text/vtt' }
  ]
}
```

Platforms: all
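
A handler typically pulls a few fields out of that payload, for example (a sketch; `onVideoLoad` is just an illustrative name):
```
onVideoLoad = (data) => {
  this.setState({
    duration: data.duration,     // length of the media in seconds
    textTracks: data.textTracks  // text track info objects, if any
  });
};

// ...then pass it to the component:
// <Video source={...} onLoad={this.onVideoLoad} />
```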

#### onLoadStart
Callback function that is called when the media starts loading.

Payload:

Property | Description
--- | ---
isNetwork | Boolean indicating if the media is being loaded from the network
type | Type of the media. Not available on Windows
uri | URI for the media source. Not available on Windows

Example:
```
{
  isNetwork: true,
  type: '',
  uri: 'https://bitdash-a.akamaihd.net/content/sintel/hls/playlist.m3u8'
}
```

Platforms: all

### Methods
Methods operate on a ref to the Video element. You can create a ref using code like:
```
return (
  <Video source={...}
    ref={(ref) => { this.player = ref }} />
);
```

#### seek()
`seek(seconds)`

Seek to the specified position represented by seconds. seconds is a float value.

`seek()` can only be called after the `onLoad` event has fired.

Example:
```
this.player.seek(200); // Seek to 3 minutes, 20 seconds
```

Platforms: all

##### Exact seek

By default iOS seeks within 100 milliseconds of the target position. If you need more accuracy, you can use the seek with tolerance method:

`seek(seconds, tolerance)`

tolerance is the max distance in milliseconds from the seconds position that's allowed. Using a more exact tolerance can cause seeks to take longer. If you want to seek exactly, set tolerance to 0.

Example:
```
this.player.seek(120, 50); // Seek to 2 minutes with +/- 50 milliseconds accuracy
```

Platforms: iOS


### Additional props

To see the full list of available props, you can check the [propTypes](https://github.com/react-native-community/react-native-video/blob/master/Video.js#L246) of the Video.js component.
@@ -437,9 +602,47 @@ To enable audio to play in background on iOS the audio session needs to be set t

- [Lumpen Radio](https://github.com/jhabdas/lumpen-radio) contains another example integration using local files and full screen background video.

## Updating

### Version 3.0

#### All platforms now auto-play
Previously, on Android ExoPlayer, if the paused prop was not set, the media would not automatically start playing. The only way it would work was if you set `paused={false}`. This has been changed to automatically play if paused is not set so that the behavior is consistent across platforms.
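
In practice this just means being explicit about the prop if you don't want auto-play (sketch; the URI is a placeholder):
```
// In 3.0 this auto-plays on all platforms, including Android ExoPlayer:
<Video source={{ uri: "https://example.com/video.mp4" }} />

// To start paused, say so explicitly:
<Video source={{ uri: "https://example.com/video.mp4" }} paused={true} />
```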

#### All platforms now keep their paused state when returning from the background
Previously, on Android MediaPlayer, if you set up an AppState handler that set the paused prop when the app went into the background (so the video would be paused when you returned to the app), the prop change was ignored. Now the paused state you set is respected when the app resumes.

Note, Windows does not have a concept of an app going into the background, so this doesn't apply there.
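
If you do drive the paused prop from an AppState listener, the value you set while backgrounded is now honored on resume. A rough sketch (AppState comes from react-native; the state shape is up to your app):
```
import { AppState } from 'react-native';

// Inside your component:
componentDidMount() {
  AppState.addEventListener('change', this._handleAppStateChange);
}

_handleAppStateChange = (nextAppState) => {
  // Whatever you set here is no longer overwritten when the app comes back
  this.setState({ paused: nextAppState !== 'active' });
};

// render: <Video source={...} paused={this.state.paused} />
```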

#### Use Android SDK 27 by default
Version 3.0 updates the Android build tools and SDK to version 27. React Native is in the process of [switching over](https://github.com/facebook/react-native/issues/18095#issuecomment-395596130) to SDK 27 in preparation for Google's requirement that new Android apps [use SDK 26](https://android-developers.googleblog.com/2017/12/improving-app-security-and-performance.html) by August 2018.

You will either need to install the version 27 SDK and version 27.0.3 buildtools, or modify your build.gradle file to configure react-native-video to use the same build settings as the rest of your app, as described below.

##### Using app build settings
You will need to create a `project.ext` section in the top-level build.gradle file (not app/build.gradle). Fill in the values from the example below using the values found in your app/build.gradle file.
```
// Top-level build file where you can add configuration options common to all sub-projects/modules.

buildscript {
    ... // Various other settings go here
}

allprojects {
    ... // Various other settings go here

    project.ext {
        compileSdkVersion = 23
        buildToolsVersion = "23.0.1"

        minSdkVersion = 16
        targetSdkVersion = 22
    }
}
```

## TODOS

- [ ] Add support for captions
- [ ] Add support for playing multiple videos in a sequence (will interfere with current `repeat` implementation)
- [x] Callback to get buffering progress for remote videos
- [ ] Bring API closer to HTML5 `<Video>` [reference](http://devdocs.io/html/element/video)

TextTrackType.js | 7 (new file)
@@ -0,0 +1,7 @@
import keyMirror from 'keymirror';

export default {
  SRT: 'application/x-subrip',
  TTML: 'application/ttml+xml',
  VTT: 'text/vtt'
};

Video.js | 41
@@ -1,7 +1,8 @@
import React, {Component} from 'react';
import PropTypes from 'prop-types';
import {StyleSheet, requireNativeComponent, NativeModules, View, ViewPropTypes, Image} from 'react-native';
import {StyleSheet, requireNativeComponent, NativeModules, View, ViewPropTypes, Image, Platform} from 'react-native';
import resolveAssetSource from 'react-native/Libraries/Image/resolveAssetSource';
import TextTrackType from './TextTrackType';
import VideoResizeMode from './VideoResizeMode.js';

const styles = StyleSheet.create({
@@ -10,6 +11,8 @@ const styles = StyleSheet.create({
  },
});

export { TextTrackType };

export default class Video extends Component {

  constructor(props) {
@@ -47,8 +50,17 @@ export default class Video extends Component {
    return strObj;
  }

  seek = (time) => {
    this.setNativeProps({ seek: time });
  seek = (time, tolerance = 100) => {
    if (Platform.OS === 'ios') {
      this.setNativeProps({
        seek: {
          time,
          tolerance
        }
      });
    } else {
      this.setNativeProps({ seek: time });
    }
  };

  presentFullscreenPlayer = () => {
@@ -88,7 +100,7 @@ export default class Video extends Component {
  };

  _onSeek = (event) => {
    if (this.state.showPoster) {
    if (this.state.showPoster && !this.props.audioOnly) {
      this.setState({showPoster: false});
    }

@@ -152,7 +164,7 @@ export default class Video extends Component {
  };

  _onPlaybackRateChange = (event) => {
    if (this.state.showPoster && (event.nativeEvent.playbackRate !== 0)) {
    if (this.state.showPoster && event.nativeEvent.playbackRate !== 0 && !this.props.audioOnly) {
      this.setState({showPoster: false});
    }

@@ -271,7 +283,10 @@ export default class Video extends Component {
Video.propTypes = {
  /* Native only */
  src: PropTypes.object,
  seek: PropTypes.number,
  seek: PropTypes.oneOfType([
    PropTypes.number,
    PropTypes.object
  ]),
  fullscreen: PropTypes.bool,
  onVideoLoadStart: PropTypes.func,
  onVideoLoad: PropTypes.func,
@@ -298,6 +313,7 @@ Video.propTypes = {
  poster: PropTypes.string,
  posterResizeMode: Image.propTypes.resizeMode,
  repeat: PropTypes.bool,
  allowsExternalPlayback: PropTypes.bool,
  selectedTextTrack: PropTypes.shape({
    type: PropTypes.string.isRequired,
    value: PropTypes.oneOfType([
@@ -305,6 +321,18 @@ Video.propTypes = {
      PropTypes.number
    ])
  }),
  textTracks: PropTypes.arrayOf(
    PropTypes.shape({
      title: PropTypes.string,
      uri: PropTypes.string.isRequired,
      type: PropTypes.oneOf([
        TextTrackType.SRT,
        TextTrackType.TTML,
        TextTrackType.VTT,
      ]),
      language: PropTypes.string.isRequired
    })
  ),
  paused: PropTypes.bool,
  muted: PropTypes.bool,
  volume: PropTypes.number,
@@ -315,6 +343,7 @@ Video.propTypes = {
  ignoreSilentSwitch: PropTypes.oneOf(['ignore', 'obey']),
  disableFocus: PropTypes.bool,
  controls: PropTypes.bool,
  audioOnly: PropTypes.bool,
  currentTime: PropTypes.number,
  progressUpdateInterval: PropTypes.number,
  useTextureView: PropTypes.bool,

@@ -42,6 +42,5 @@ https://github.com/google/ExoPlayer

## Unimplemented props

- `playInBackground={true}`
- `rate={1.0}`
- Expansion file - `source={{ mainVer: 1, patchVer: 0 }}`

@@ -1,17 +1,28 @@
apply plugin: 'com.android.library'

def _ext = rootProject.ext

def _reactNativeVersion = _ext.has('reactNative') ? _ext.reactNative : '+'
def _compileSdkVersion = _ext.has('compileSdkVersion') ? _ext.compileSdkVersion : 27
def _buildToolsVersion = _ext.has('buildToolsVersion') ? _ext.buildToolsVersion : '27.0.3'
def _minSdkVersion = _ext.has('minSdkVersion') ? _ext.minSdkVersion : 16
def _targetSdkVersion = _ext.has('targetSdkVersion') ? _ext.targetSdkVersion : 27

android {
    compileSdkVersion 23
    buildToolsVersion "25.0.2"
    compileSdkVersion _compileSdkVersion
    buildToolsVersion _buildToolsVersion

    defaultConfig {
        minSdkVersion 16
        targetSdkVersion 23
        minSdkVersion _minSdkVersion
        targetSdkVersion _targetSdkVersion
        versionCode 1
        versionName "1.0"
    }
}

dependencies {
    provided 'com.facebook.react:react-native:+'
    //noinspection GradleDynamicVersion
    provided "com.facebook.react:react-native:${_reactNativeVersion}"
    compile 'com.google.android.exoplayer:exoplayer:2.7.3'
    compile('com.google.android.exoplayer:extension-okhttp:2.7.3') {
        exclude group: 'com.squareup.okhttp3', module: 'okhttp'

@@ -16,9 +16,13 @@ import android.widget.FrameLayout;
import com.brentvatne.react.R;
import com.brentvatne.receiver.AudioBecomingNoisyReceiver;
import com.brentvatne.receiver.BecomingNoisyListener;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.Dynamic;
import com.facebook.react.bridge.LifecycleEventListener;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.ThemedReactContext;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.DefaultLoadControl;
@@ -37,8 +41,9 @@ import com.google.android.exoplayer2.metadata.Metadata;
import com.google.android.exoplayer2.metadata.MetadataRenderer;
import com.google.android.exoplayer2.source.BehindLiveWindowException;
import com.google.android.exoplayer2.source.ExtractorMediaSource;
import com.google.android.exoplayer2.source.LoopingMediaSource;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.MergingMediaSource;
import com.google.android.exoplayer2.source.SingleSampleMediaSource;
import com.google.android.exoplayer2.source.TrackGroupArray;
import com.google.android.exoplayer2.source.dash.DashMediaSource;
import com.google.android.exoplayer2.source.dash.DefaultDashChunkSource;
@@ -53,6 +58,7 @@ import com.google.android.exoplayer2.trackselection.TrackSelection;
import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
import com.google.android.exoplayer2.upstream.DataSource;
import com.google.android.exoplayer2.upstream.DefaultBandwidthMeter;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;

import java.net.CookieHandler;
@@ -61,6 +67,7 @@ import java.net.CookiePolicy;
import java.lang.Math;
import java.util.Map;
import java.lang.Object;
import java.util.ArrayList;

@SuppressLint("ViewConstructor")
class ReactExoplayerView extends FrameLayout implements
@@ -95,7 +102,8 @@ class ReactExoplayerView extends FrameLayout implements
    private long resumePosition;
    private boolean loadVideoStarted;
    private boolean isFullscreen;
    private boolean isPaused = true;
    private boolean isInBackground;
    private boolean isPaused;
    private boolean isBuffering;
    private float rate = 1f;

@@ -105,6 +113,7 @@ class ReactExoplayerView extends FrameLayout implements
    private boolean repeat;
    private String textTrackType;
    private Dynamic textTrackValue;
    private ReadableArray textTracks;
    private boolean disableFocus;
    private float mProgressUpdateInterval = 250.0f;
    private boolean playInBackground = false;
@@ -189,14 +198,15 @@ class ReactExoplayerView extends FrameLayout implements

    @Override
    public void onHostResume() {
        if (playInBackground) {
            return;
        if (!playInBackground || !isInBackground) {
            setPlayWhenReady(!isPaused);
        }
        setPlayWhenReady(!isPaused);
        isInBackground = false;
    }

    @Override
    public void onHostPause() {
        isInBackground = true;
        if (playInBackground) {
            return;
        }
@@ -231,8 +241,19 @@ class ReactExoplayerView extends FrameLayout implements
            player.setPlaybackParameters(params);
        }
        if (playerNeedsSource && srcUri != null) {
            MediaSource mediaSource = buildMediaSource(srcUri, extension);
            mediaSource = repeat ? new LoopingMediaSource(mediaSource) : mediaSource;
            ArrayList<MediaSource> mediaSourceList = buildTextSources();
            MediaSource videoSource = buildMediaSource(srcUri, extension);
            MediaSource mediaSource;
            if (mediaSourceList.size() == 0) {
                mediaSource = videoSource;
            } else {
                mediaSourceList.add(0, videoSource);
                MediaSource[] textSourceArray = mediaSourceList.toArray(
                        new MediaSource[mediaSourceList.size()]
                );
                mediaSource = new MergingMediaSource(textSourceArray);
            }

            boolean haveResumePosition = resumeWindow != C.INDEX_UNSET;
            if (haveResumePosition) {
                player.seekTo(resumeWindow, resumePosition);
@@ -266,6 +287,32 @@ class ReactExoplayerView extends FrameLayout implements
        }
    }

    private ArrayList<MediaSource> buildTextSources() {
        ArrayList<MediaSource> textSources = new ArrayList<>();
        if (textTracks == null) {
            return textSources;
        }

        for (int i = 0; i < textTracks.size(); ++i) {
            ReadableMap textTrack = textTracks.getMap(i);
            String language = textTrack.getString("language");
            String title = textTrack.hasKey("title")
                    ? textTrack.getString("title") : language + " " + i;
            Uri uri = Uri.parse(textTrack.getString("uri"));
            MediaSource textSource = buildTextSource(title, uri, textTrack.getString("type"),
                    language);
            if (textSource != null) {
                textSources.add(textSource);
            }
        }
        return textSources;
    }

    private MediaSource buildTextSource(String title, Uri uri, String mimeType, String language) {
        Format textFormat = Format.createTextSampleFormat(title, mimeType, Format.NO_VALUE, language);
        return new SingleSampleMediaSource(uri, mediaDataSourceFactory, textFormat, C.TIME_UNSET);
    }

    private void releasePlayer() {
        if (player != null) {
            isPaused = player.getPlayWhenReady();
@@ -456,10 +503,33 @@ class ReactExoplayerView extends FrameLayout implements
            Format videoFormat = player.getVideoFormat();
            int width = videoFormat != null ? videoFormat.width : 0;
            int height = videoFormat != null ? videoFormat.height : 0;
            eventEmitter.load(player.getDuration(), player.getCurrentPosition(), width, height);
            eventEmitter.load(player.getDuration(), player.getCurrentPosition(), width, height,
                    getTextTrackInfo());
        }
    }

    private WritableArray getTextTrackInfo() {
        WritableArray textTracks = Arguments.createArray();

        MappingTrackSelector.MappedTrackInfo info = trackSelector.getCurrentMappedTrackInfo();
        int index = getTextTrackRendererIndex();
        if (info == null || index == C.INDEX_UNSET) {
            return textTracks;
        }

        TrackGroupArray groups = info.getTrackGroups(index);
        for (int i = 0; i < groups.length; ++i) {
            Format format = groups.get(i).getFormat(0);
            WritableMap textTrack = Arguments.createMap();
            textTrack.putInt("index", i);
            textTrack.putString("title", format.id != null ? format.id : "");
            textTrack.putString("type", format.sampleMimeType);
            textTrack.putString("language", format.language != null ? format.language : "");
            textTracks.pushMap(textTrack);
        }
        return textTracks;
    }

    private void onBuffering(boolean buffering) {
        if (isBuffering == buffering) {
            return;
@@ -627,6 +697,11 @@ class ReactExoplayerView extends FrameLayout implements
        }
    }

    public void setTextTracks(ReadableArray textTracks) {
        this.textTracks = textTracks;
        reloadSource();
    }

    private void reloadSource() {
        playerNeedsSource = true;
        initializePlayer();

@@ -5,6 +5,7 @@ import android.net.Uri;
import android.text.TextUtils;

import com.facebook.react.bridge.Dynamic;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.common.MapBuilder;
import com.facebook.react.uimanager.ThemedReactContext;
@@ -30,6 +31,7 @@ public class ReactExoplayerViewManager extends ViewGroupManager<ReactExoplayerVi
    private static final String PROP_SELECTED_TEXT_TRACK = "selectedTextTrack";
    private static final String PROP_SELECTED_TEXT_TRACK_TYPE = "type";
    private static final String PROP_SELECTED_TEXT_TRACK_VALUE = "value";
    private static final String PROP_TEXT_TRACKS = "textTracks";
    private static final String PROP_PAUSED = "paused";
    private static final String PROP_MUTED = "muted";
    private static final String PROP_VOLUME = "volume";
@@ -128,13 +130,23 @@ public class ReactExoplayerViewManager extends ViewGroupManager<ReactExoplayerVi
    @ReactProp(name = PROP_SELECTED_TEXT_TRACK)
    public void setSelectedTextTrack(final ReactExoplayerView videoView,
                                     @Nullable ReadableMap selectedTextTrack) {
        String typeString = selectedTextTrack.hasKey(PROP_SELECTED_TEXT_TRACK_TYPE)
                ? selectedTextTrack.getString(PROP_SELECTED_TEXT_TRACK_TYPE) : null;
        Dynamic value = selectedTextTrack.hasKey(PROP_SELECTED_TEXT_TRACK_VALUE)
                ? selectedTextTrack.getDynamic(PROP_SELECTED_TEXT_TRACK_VALUE) : null;
        String typeString = null;
        Dynamic value = null;
        if (selectedTextTrack != null) {
            typeString = selectedTextTrack.hasKey(PROP_SELECTED_TEXT_TRACK_TYPE)
                    ? selectedTextTrack.getString(PROP_SELECTED_TEXT_TRACK_TYPE) : null;
            value = selectedTextTrack.hasKey(PROP_SELECTED_TEXT_TRACK_VALUE)
                    ? selectedTextTrack.getDynamic(PROP_SELECTED_TEXT_TRACK_VALUE) : null;
        }
        videoView.setSelectedTextTrack(typeString, value);
    }

    @ReactProp(name = PROP_TEXT_TRACKS)
    public void setPropTextTracks(final ReactExoplayerView videoView,
                                  @Nullable ReadableArray textTracks) {
        videoView.setTextTracks(textTracks);
    }

    @ReactProp(name = PROP_PAUSED, defaultBoolean = false)
    public void setPaused(final ReactExoplayerView videoView, final boolean paused) {
        videoView.setPausedModifier(paused);

@@ -109,6 +109,7 @@ class VideoEventEmitter {
    private static final String EVENT_PROP_WIDTH = "width";
    private static final String EVENT_PROP_HEIGHT = "height";
    private static final String EVENT_PROP_ORIENTATION = "orientation";
    private static final String EVENT_PROP_TEXT_TRACKS = "textTracks";
    private static final String EVENT_PROP_HAS_AUDIO_FOCUS = "hasAudioFocus";
    private static final String EVENT_PROP_IS_BUFFERING = "isBuffering";
    private static final String EVENT_PROP_PLAYBACK_RATE = "playbackRate";
@@ -128,7 +129,8 @@ class VideoEventEmitter {
        receiveEvent(EVENT_LOAD_START, null);
    }

    void load(double duration, double currentPosition, int videoWidth, int videoHeight) {
    void load(double duration, double currentPosition, int videoWidth, int videoHeight,
              WritableArray textTracks) {
        WritableMap event = Arguments.createMap();
        event.putDouble(EVENT_PROP_DURATION, duration / 1000D);
        event.putDouble(EVENT_PROP_CURRENT_TIME, currentPosition / 1000D);
@@ -143,6 +145,8 @@ class VideoEventEmitter {
        }
        event.putMap(EVENT_PROP_NATURAL_SIZE, naturalSize);

        event.putArray(EVENT_PROP_TEXT_TRACKS, textTracks);

        // TODO: Actually check if you can.
        event.putBoolean(EVENT_PROP_FAST_FORWARD, true);
        event.putBoolean(EVENT_PROP_SLOW_FORWARD, true);

@@ -1,12 +1,20 @@
apply plugin: 'com.android.library'

def _ext = rootProject.ext

def _reactNativeVersion = _ext.has('reactNative') ? _ext.reactNative : '+'
def _compileSdkVersion = _ext.has('compileSdkVersion') ? _ext.compileSdkVersion : 27
def _buildToolsVersion = _ext.has('buildToolsVersion') ? _ext.buildToolsVersion : '27.0.3'
def _minSdkVersion = _ext.has('minSdkVersion') ? _ext.minSdkVersion : 16
def _targetSdkVersion = _ext.has('targetSdkVersion') ? _ext.targetSdkVersion : 27

android {
    compileSdkVersion 25
    buildToolsVersion "25.0.2"
    compileSdkVersion _compileSdkVersion
    buildToolsVersion _buildToolsVersion

    defaultConfig {
        minSdkVersion 16
        targetSdkVersion 25
        minSdkVersion _minSdkVersion
        targetSdkVersion _targetSdkVersion
        versionCode 1
        versionName "1.0"
        ndk {
@@ -17,6 +25,6 @@ android {

dependencies {
    //noinspection GradleDynamicVersion
    provided 'com.facebook.react:react-native:+'
    provided "com.facebook.react:react-native:${_reactNativeVersion}"
    compile 'com.yqritc:android-scalablevideoview:1.0.4'
}

@@ -105,8 +105,7 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP
    private float mRate = 1.0f;
    private float mActiveRate = 1.0f;
    private boolean mPlayInBackground = false;
    private boolean mActiveStatePauseStatus = false;
    private boolean mActiveStatePauseStatusInitialized = false;
    private boolean mBackgroundPaused = false;

    private int mMainVer = 0;
    private int mPatchVer = 0;
@@ -132,7 +131,7 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP
        @Override
        public void run() {

            if (mMediaPlayerValid && !isCompleted &&!mPaused) {
            if (mMediaPlayerValid && !isCompleted && !mPaused && !mBackgroundPaused) {
                WritableMap event = Arguments.createMap();
                event.putDouble(EVENT_PROP_CURRENT_TIME, mMediaPlayer.getCurrentPosition() / 1000.0);
                event.putDouble(EVENT_PROP_PLAYABLE_DURATION, mVideoBufferedDuration / 1000.0); //TODO:mBufferUpdateRunnable
@@ -348,11 +347,6 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP

        mPaused = paused;

        if ( !mActiveStatePauseStatusInitialized ) {
            mActiveStatePauseStatus = mPaused;
            mActiveStatePauseStatusInitialized = true;
        }

        if (!mMediaPlayerValid) {
            return;
        }
@@ -424,8 +418,16 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP
        if (mMediaPlayerValid) {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
                if (!mPaused) { // Applying the rate while paused will cause the video to start
                    mMediaPlayer.setPlaybackParams(mMediaPlayer.getPlaybackParams().setSpeed(rate));
                    mActiveRate = rate;
                    /* Per https://stackoverflow.com/questions/39442522/setplaybackparams-causes-illegalstateexception
                     * Some devices throw an IllegalStateException if you set the rate without first calling reset()
                     * TODO: Call reset() then reinitialize the player
                     */
                    try {
                        mMediaPlayer.setPlaybackParams(mMediaPlayer.getPlaybackParams().setSpeed(rate));
                        mActiveRate = rate;
                    } catch (Exception e) {
                        Log.e(ReactVideoViewManager.REACT_CLASS, "Unable to set rate, unsupported on this device");
                    }
                }
            } else {
                Log.e(ReactVideoViewManager.REACT_CLASS, "Setting playback rate is not yet supported on Android versions below 6.0");
@@ -603,25 +605,27 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP

    @Override
    public void onHostPause() {
        if (mMediaPlayer != null && !mPlayInBackground) {
            mActiveStatePauseStatus = mPaused;

            // Pause the video in background
            setPausedModifier(true);
        if (mMediaPlayerValid && !mPaused && !mPlayInBackground) {
            /* Pause the video in background
             * Don't update the paused prop, developers should be able to update it on background
             * so that when you return to the app the video is paused
             */
            mBackgroundPaused = true;
            mMediaPlayer.pause();
        }
    }

    @Override
    public void onHostResume() {
        if (mMediaPlayer != null && !mPlayInBackground) {
        mBackgroundPaused = false;
        if (mMediaPlayerValid && !mPlayInBackground && !mPaused) {
            new Handler().post(new Runnable() {
                @Override
                public void run() {
                    // Restore original state
                    setPausedModifier(mActiveStatePauseStatus);
                    setPausedModifier(false);
                }
            });

        }
    }

@@ -18,6 +18,7 @@ static NSString *const timedMetadata = @"timedMetadata";
  BOOL _playerItemObserversSet;
  BOOL _playerBufferEmpty;
  AVPlayerLayer *_playerLayer;
  BOOL _playerLayerObserverSet;
  AVPlayerViewController *_playerViewController;
  NSURL *_videoURL;

@@ -40,6 +41,7 @@ static NSString *const timedMetadata = @"timedMetadata";
  BOOL _muted;
  BOOL _paused;
  BOOL _repeat;
  BOOL _allowsExternalPlayback;
  NSDictionary * _selectedTextTrack;
  BOOL _playbackStalled;
  BOOL _playInBackground;
@@ -67,6 +69,7 @@ static NSString *const timedMetadata = @"timedMetadata";
    _controls = NO;
    _playerBufferEmpty = YES;
    _playInBackground = false;
    _allowsExternalPlayback = YES;
    _playWhenInactive = false;
    _ignoreSilentSwitch = @"inherit"; // inherit, ignore, obey

@@ -420,6 +423,7 @@ static NSString *const timedMetadata = @"timedMetadata";
      @"height": height,
      @"orientation": orientation
    },
    @"textTracks": [self getTextTrackInfo],
    @"target": self.reactTag});
}

@@ -530,6 +534,12 @@ static NSString *const timedMetadata = @"timedMetadata";
  _playInBackground = playInBackground;
}

- (void)setAllowsExternalPlayback:(BOOL)allowsExternalPlayback
{
  _allowsExternalPlayback = allowsExternalPlayback;
  _player.allowsExternalPlayback = _allowsExternalPlayback;
}

- (void)setPlayWhenInactive:(BOOL)playWhenInactive
{
  _playWhenInactive = playWhenInactive;
@@ -566,21 +576,28 @@ static NSString *const timedMetadata = @"timedMetadata";

- (void)setCurrentTime:(float)currentTime
{
  [self setSeek: currentTime];
  NSDictionary *info = @{
    @"time": [NSNumber numberWithFloat:currentTime],
    @"tolerance": [NSNumber numberWithInt:100]
  };
  [self setSeek:info];
}

- (void)setSeek:(float)seekTime
- (void)setSeek:(NSDictionary *)info
{
  int timeScale = 10000;
  NSNumber *seekTime = info[@"time"];
  NSNumber *seekTolerance = info[@"tolerance"];

  int timeScale = 1000;

  AVPlayerItem *item = _player.currentItem;
  if (item && item.status == AVPlayerItemStatusReadyToPlay) {
    // TODO check loadedTimeRanges

    CMTime cmSeekTime = CMTimeMakeWithSeconds(seekTime, timeScale);
    CMTime cmSeekTime = CMTimeMakeWithSeconds([seekTime floatValue], timeScale);
    CMTime current = item.currentTime;
    // TODO figure out a good tolerance level
    CMTime tolerance = CMTimeMake(1000, timeScale);
    CMTime tolerance = CMTimeMake([seekTolerance floatValue], timeScale);
    BOOL wasPaused = _paused;

    if (CMTimeCompare(current, cmSeekTime) != 0) {
@@ -590,11 +607,11 @@ static NSString *const timedMetadata = @"timedMetadata";
          [self addPlayerTimeObserver];
        }
        if (!wasPaused) {
          [self setPaused:false];
          [self setPaused:false];
        }
        if(self.onVideoSeek) {
          self.onVideoSeek(@{@"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(item.currentTime)],
                             @"seekTime": [NSNumber numberWithFloat:seekTime],
                             @"seekTime": seekTime,
                             @"target": self.reactTag});
        }
      }];
@@ -605,7 +622,7 @@ static NSString *const timedMetadata = @"timedMetadata";
  } else {
    // TODO: See if this makes sense and if so, actually implement it
    _pendingSeek = true;
    _pendingSeekTime = seekTime;
    _pendingSeekTime = [seekTime floatValue];
  }
}

@@ -642,6 +659,7 @@ static NSString *const timedMetadata = @"timedMetadata";
  [self setRepeat:_repeat];
  [self setPaused:_paused];
  [self setControls:_controls];
  [self setAllowsExternalPlayback:_allowsExternalPlayback];
}

- (void)setRepeat:(BOOL)repeat {
@@ -692,6 +710,29 @@ static NSString *const timedMetadata = @"timedMetadata";
  [_player.currentItem selectMediaOption:option inMediaSelectionGroup:group];
}

- (NSArray *)getTextTrackInfo
{
  NSMutableArray *textTracks = [[NSMutableArray alloc] init];
  AVMediaSelectionGroup *group = [_player.currentItem.asset
                                  mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
  for (int i = 0; i < group.options.count; ++i) {
    AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i];
    NSString *title = @"";
    NSArray *values = [[currentOption commonMetadata] valueForKey:@"value"];
    if (values.count > 0) {
      title = [values objectAtIndex:0];
    }
    NSString *language = [currentOption extendedLanguageTag] ? [currentOption extendedLanguageTag] : @"";
    NSDictionary *textTrack = @{
      @"index": [NSNumber numberWithInt:i],
      @"title": title,
      @"language": language
    };
    [textTracks addObject:textTrack];
  }
  return textTracks;
}

- (BOOL)getFullscreen
{
  return _fullscreenPlayerPresented;
@@ -768,6 +809,7 @@ static NSString *const timedMetadata = @"timedMetadata";
  // resize mode must be set before layer is added
  [self setResizeMode:_resizeMode];
  [_playerLayer addObserver:self forKeyPath:readyForDisplayKeyPath options:NSKeyValueObservingOptionNew context:nil];
  _playerLayerObserverSet = YES;

  [self.layer addSublayer:_playerLayer];
  self.layer.needsDisplayOnBoundsChange = YES;
@@ -806,7 +848,10 @@ static NSString *const timedMetadata = @"timedMetadata";
- (void)removePlayerLayer
{
  [_playerLayer removeFromSuperlayer];
  [_playerLayer removeObserver:self forKeyPath:readyForDisplayKeyPath];
  if (_playerLayerObserverSet) {
    [_playerLayer removeObserver:self forKeyPath:readyForDisplayKeyPath];
    _playerLayerObserverSet = NO;
  }
  _playerLayer = nil;
}

@@ -22,6 +22,7 @@ RCT_EXPORT_MODULE();
RCT_EXPORT_VIEW_PROPERTY(src, NSDictionary);
RCT_EXPORT_VIEW_PROPERTY(resizeMode, NSString);
RCT_EXPORT_VIEW_PROPERTY(repeat, BOOL);
RCT_EXPORT_VIEW_PROPERTY(allowsExternalPlayback, BOOL);
RCT_EXPORT_VIEW_PROPERTY(selectedTextTrack, NSDictionary);
RCT_EXPORT_VIEW_PROPERTY(paused, BOOL);
RCT_EXPORT_VIEW_PROPERTY(muted, BOOL);
@@ -31,7 +32,7 @@ RCT_EXPORT_VIEW_PROPERTY(playInBackground, BOOL);
RCT_EXPORT_VIEW_PROPERTY(playWhenInactive, BOOL);
RCT_EXPORT_VIEW_PROPERTY(ignoreSilentSwitch, NSString);
RCT_EXPORT_VIEW_PROPERTY(rate, float);
RCT_EXPORT_VIEW_PROPERTY(seek, float);
RCT_EXPORT_VIEW_PROPERTY(seek, NSDictionary);
RCT_EXPORT_VIEW_PROPERTY(currentTime, float);
RCT_EXPORT_VIEW_PROPERTY(fullscreen, BOOL);
RCT_EXPORT_VIEW_PROPERTY(progressUpdateInterval, float);

@@ -1,12 +1,12 @@
{
  "name": "react-native-video",
  "version": "2.2.0",
  "version": "2.3.1",
  "description": "A <Video /> element for react-native",
  "main": "Video.js",
  "license": "MIT",
  "author": "Brent Vatne <brentvatne@gmail.com> (https://github.com/brentvatne)",
  "contributors": [
    {
    {
      "name": "Isaiah Grey",
      "email": "isaiahgrey@gmail.com"
    },
@@ -40,5 +40,10 @@
  },
  "scripts": {
    "test": "node_modules/.bin/eslint *.js"
  },
  "rnpm": {
    "android": {
      "sourceDir": "./android-exoplayer"
    }
  }
}