Merge branch 'master' into implement-ios-caching

Commit: 6cfea3aacb

.github/ISSUE_TEMPLATE.md (new file, 19 lines, vendored)
@@ -0,0 +1,19 @@

### Current behavior
Describe what happens when you encounter this issue.

### Reproduction steps
A 1, 2, 3, etc. list of what's needed to see the issue happen.

### Expected behavior
Describe what you wanted to happen.

### Platform
Which player are you experiencing the problem on:
* iOS
* Android ExoPlayer
* Android MediaPlayer
* Windows UWP
* Windows WPF

### Video sample
If possible, include a link to the video that has the problem, one that can be streamed or downloaded.
CHANGELOG.md (new file, 36 lines)

@@ -0,0 +1,36 @@
## Changelog

### Version 3.1.0
* Support sidecar text tracks on iOS [#1109](https://github.com/react-native-community/react-native-video/pull/1109)
* Support onAudioBecomingNoisy on iOS [#1131](https://github.com/react-native-community/react-native-video/pull/1131)

### Version 3.0
* Inherit Android buildtools and SDK version from the root project [#1081](https://github.com/react-native-community/react-native-video/pull/1081)
* Automatically play on ExoPlayer when the paused prop is not set [#1083](https://github.com/react-native-community/react-native-video/pull/1083)
* Preserve Android MediaPlayer paused prop when backgrounding [#1082](https://github.com/react-native-community/react-native-video/pull/1082)
* Support specifying headers on ExoPlayer as part of the source [#805](https://github.com/react-native-community/react-native-video/pull/805)
* Prevent iOS onLoad event during seeking [#1088](https://github.com/react-native-community/react-native-video/pull/1088)
* ExoPlayer playableDuration incorrect [#1089](https://github.com/react-native-community/react-native-video/pull/1089)

### Version 2.3.1
* Revert PR to inherit Android SDK versions from root project. Re-add in 3.0 [#1080](https://github.com/react-native-community/react-native-video/pull/1080)

### Version 2.3.0
* Support allowsExternalPlayback on iOS [#1057](https://github.com/react-native-community/react-native-video/pull/1057)
* Inherit Android buildtools and SDK version from the root project [#999](https://github.com/react-native-community/react-native-video/pull/999)
* Fix bug that caused ExoPlayer to start paused if playInBackground was set [#833](https://github.com/react-native-community/react-native-video/pull/833)
* Fix crash if clearing an observer on iOS that was already cleared [#1075](https://github.com/react-native-community/react-native-video/pull/1075)
* Add audioOnly prop for music files [#1039](https://github.com/react-native-community/react-native-video/pull/1039)
* Support seeking with more exact tolerance on iOS [#1076](https://github.com/react-native-community/react-native-video/pull/1076)

### Version 2.2.0
* Text track selection support for iOS & ExoPlayer [#1049](https://github.com/react-native-community/react-native-video/pull/1049)
* Support outputting to a TextureView on Android ExoPlayer [#1058](https://github.com/react-native-community/react-native-video/pull/1058)
* Support changing the left/right balance on Android MediaPlayer [#1051](https://github.com/react-native-community/react-native-video/pull/1051)
* Prevent multiple onEnd notifications on iOS [#832](https://github.com/react-native-community/react-native-video/pull/832)
* Fix doing a partial swipe on iOS causing a black screen [#1048](https://github.com/react-native-community/react-native-video/pull/1048)
* Fix crash when switching to a new source on iOS [#974](https://github.com/react-native-community/react-native-video/pull/974)
* Add cookie support for ExoPlayer [#922](https://github.com/react-native-community/react-native-video/pull/922)
* Remove ExoPlayer onMetadata that wasn't being used [#1040](https://github.com/react-native-community/react-native-video/pull/1040)
* Fix bug where setting the progress interval on iOS didn't work [#800](https://github.com/react-native-community/react-native-video/pull/800)
* Support setting the poster resize mode [#595](https://github.com/react-native-community/react-native-video/pull/595)
README.md (565 changed lines)

@@ -5,11 +5,31 @@ A `<Video>` component for react-native, as seen in
Requires react-native >= 0.40.0; for RN support of 0.19.0 - 0.39.0, please use a pre-1.0 version.

### Version 3.0 breaking changes
Version 3.0 features a number of changes to existing behavior. See [Updating](#updating) for changes.

## TOC

* [Installation](#installation)
* [Usage](#usage)
* [Updating](#updating)

## Installation

Using npm:

```shell
npm install --save react-native-video
```

or using yarn:

```shell
yarn add react-native-video
```

<details>
  <summary>iOS</summary>

Run `react-native link` to link the react-native-video library.

@@ -28,12 +48,14 @@ If you would like to allow other apps to play music over your video component, a
}
```
Note: you can also use the `ignoreSilentSwitch` prop, shown below.

</details>

<details>
  <summary>tvOS</summary>

Run `react-native link` to link the react-native-video library.

`react-native link` doesn't work properly with the tvOS target so we need to add the library manually.

First select your project in Xcode.

@@ -50,10 +72,10 @@ Scroll to « Linked Frameworks and Libraries » and tap on the + button
Select RCTVideo-tvOS

<img src="./docs/tvOS-step-4.jpg" width="40%">

</details>

<details>
  <summary>Android</summary>

Run `react-native link` to link the react-native-video library.

@@ -61,11 +83,21 @@ Or if you have trouble, make the following additions to the given files manually

**android/settings.gradle**

The newer ExoPlayer library will work for most people.

```gradle
include ':react-native-video'
project(':react-native-video').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-video/android-exoplayer')
```

If you need to use the old Android MediaPlayer based player, use the following instead:

```gradle
include ':react-native-video'
project(':react-native-video').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-video/android')
```

**android/app/build.gradle**

```gradle
@@ -94,8 +126,10 @@ protected List<ReactPackage> getPackages() {
  );
}
```

</details>

<details>
  <summary>Windows</summary>

Make the following additions to the given files manually:

@@ -104,19 +138,17 @@ Make the following additions to the given files manually:
Add the `ReactNativeVideo` project to your solution.

1. Open the solution in Visual Studio 2015
2. Right-click Solution icon in Solution Explorer > Add > Existing Project
   * UWP: Select `node_modules\react-native-video\windows\ReactNativeVideo\ReactNativeVideo.csproj`
   * WPF: Select `node_modules\react-native-video\windows\ReactNativeVideo.Net46\ReactNativeVideo.Net46.csproj`

**windows/myapp/myapp.csproj**

Add a reference to `ReactNativeVideo` to your main application project. From Visual Studio 2015:

1. Right-click main application project > Add > Reference...
   * UWP: Check `ReactNativeVideo` from Solution Projects.
   * WPF: Check `ReactNativeVideo.Net46` from Solution Projects.

**MainPage.cs**

@@ -143,6 +175,7 @@ using System.Collections.Generic;
...
```

</details>

## Usage

@@ -155,28 +188,21 @@
       ref={(ref) => {
         this.player = ref
       }}                                      // Store reference
       onBuffer={this.onBuffer}                // Callback when remote video is buffering
       onEnd={this.onEnd}                      // Callback when playback finishes
       onError={this.videoError}               // Callback when video cannot be loaded
       onFullscreenPlayerWillPresent={this.fullScreenPlayerWillPresent} // Callback before fullscreen starts
       onFullscreenPlayerDidPresent={this.fullScreenPlayerDidPresent}   // Callback after fullscreen started
       onFullscreenPlayerWillDismiss={this.fullScreenPlayerWillDismiss} // Callback before fullscreen stops
       onFullscreenPlayerDidDismiss={this.fullScreenPlayerDidDismiss}   // Callback after fullscreen stopped
       style={styles.backgroundVideo} />

// Later to trigger fullscreen
this.player.presentFullscreenPlayer()

// Disable fullscreen
this.player.dismissFullscreenPlayer()

// To set video position in seconds (seek)
this.player.seek(0)

@@ -192,49 +218,420 @@ var styles = StyleSheet.create({
});
```

### Configurable props
* [allowsExternalPlayback](#allowsexternalplayback)
* [audioOnly](#audioonly)
* [ignoreSilentSwitch](#ignoresilentswitch)
* [muted](#muted)
* [paused](#paused)
* [playInBackground](#playinbackground)
* [playWhenInactive](#playwheninactive)
* [poster](#poster)
* [posterResizeMode](#posterresizemode)
* [progressUpdateInterval](#progressupdateinterval)
* [rate](#rate)
* [repeat](#repeat)
* [resizeMode](#resizemode)
* [selectedTextTrack](#selectedtexttrack)
* [stereoPan](#stereopan)
* [textTracks](#texttracks)
* [useTextureView](#usetextureview)
* [volume](#volume)

### Event props
* [onAudioBecomingNoisy](#onaudiobecomingnoisy)
* [onLoad](#onload)
* [onLoadStart](#onloadstart)
* [onProgress](#onprogress)
* [onTimedMetadata](#ontimedmetadata)

### Methods
* [seek](#seek)

### Configurable props

#### allowsExternalPlayback
Indicates whether the player allows switching to external playback mode such as AirPlay or HDMI.
* **true (default)** - Allow switching to external playback mode
* **false** - Don't allow switching to external playback mode

Platforms: iOS

#### audioOnly
Indicates whether the player should only play the audio track and show the poster instead of displaying the video track.
* **false (default)** - Display the video as normal
* **true** - Show the poster and play the audio

For this to work, the poster prop must be set.

Platforms: all

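For illustration, a minimal sketch of an audio-only player; the stream URL and artwork URL are hypothetical placeholders:
```
<Video source={{ uri: 'https://example.com/episode.mp3' }} // hypothetical audio URL
       audioOnly={true}                                     // play just the audio track
       poster="https://example.com/artwork.png"             // hypothetical artwork; required for audioOnly
       style={styles.backgroundVideo} />
```
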
#### ignoreSilentSwitch
Controls the iOS silent switch behavior
* **"inherit" (default)** - Use the default AVPlayer behavior
* **"ignore"** - Play audio even if the silent switch is set
* **"obey"** - Don't play audio if the silent switch is set

Platforms: iOS

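As a hedged example, keeping audio playing even when the hardware silent switch is on (the URL is a placeholder):
```
<Video source={{ uri: 'https://example.com/video.mp4' }} // hypothetical URL
       ignoreSilentSwitch="ignore"                        // keep playing audio with the switch set to silent
       style={styles.backgroundVideo} />
```
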
#### muted
Controls whether the audio is muted
* **false (default)** - Don't mute audio
* **true** - Mute audio

Platforms: all

#### paused
Controls whether the media is paused
* **false (default)** - Don't pause the media
* **true** - Pause the media

Platforms: all

#### playInBackground
Determine whether the media should continue playing while the app is in the background. This allows customers to continue listening to the audio.
* **false (default)** - Don't continue playing the media
* **true** - Continue playing the media

To use this feature on iOS, you must:
* [Enable Background Audio](https://developer.apple.com/library/archive/documentation/Audio/Conceptual/AudioSessionProgrammingGuide/AudioSessionBasics/AudioSessionBasics.html#//apple_ref/doc/uid/TP40007875-CH3-SW3) in your Xcode project
* Set the ignoreSilentSwitch prop to "ignore"

Platforms: Android ExoPlayer, Android MediaPlayer, iOS

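A sketch of background playback, assuming Background Audio has already been enabled in the Xcode project; the stream URL is a placeholder:
```
<Video source={{ uri: 'https://example.com/radio-stream.m3u8' }} // hypothetical stream URL
       playInBackground={true}      // keep playing when the app is backgrounded
       ignoreSilentSwitch="ignore"  // required on iOS for background playback
       playWhenInactive={true}      // [iOS] keep playing under the Control Center
       style={styles.backgroundVideo} />
```
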
#### playWhenInactive
Determine whether the media should continue playing when notifications or the Control Center are in front of the video.
* **false (default)** - Don't continue playing the media
* **true** - Continue playing the media

Platforms: iOS

#### poster
An image to display while the video is loading
<br>Value: string with a URL for the poster, e.g. "https://baconmockup.com/300/200/"

Platforms: all

#### posterResizeMode
Determines how to resize the poster image when the frame doesn't match the raw video dimensions.
* **"contain" (default)** - Scale the image uniformly (maintain the image's aspect ratio) so that both dimensions (width and height) of the image will be equal to or less than the corresponding dimension of the view (minus padding).
* **"center"** - Center the image in the view along both dimensions. If the image is larger than the view, scale it down uniformly so that it is contained in the view.
* **"cover"** - Scale the image uniformly (maintain the image's aspect ratio) so that both dimensions (width and height) of the image will be equal to or larger than the corresponding dimension of the view (minus padding).
* **"none"** - Don't apply resize
* **"repeat"** - Repeat the image to cover the frame of the view. The image will keep its size and aspect ratio. (iOS only)
* **"stretch"** - Scale width and height independently. This may change the aspect ratio of the src.

Platforms: all

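For example, showing a poster scaled to fill the frame while the video loads (the video URL is a placeholder; the poster URL is the one used above):
```
<Video source={{ uri: 'https://example.com/video.mp4' }} // hypothetical URL
       poster="https://baconmockup.com/300/200/"          // image shown while loading
       posterResizeMode="cover"                           // scale the poster to fill the view
       style={styles.backgroundVideo} />
```
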
#### progressUpdateInterval
Delay in milliseconds between onProgress events.

Default: 250.0

Platforms: all

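A minimal sketch that fires onProgress roughly once per second instead of the ~250ms default (the URL and handler name are illustrative, matching the usage example earlier):
```
<Video source={{ uri: 'https://example.com/video.mp4' }} // hypothetical URL
       progressUpdateInterval={1000.0}                    // fire onProgress about once per second
       onProgress={this.setTime}                          // illustrative handler, as in the usage example
       style={styles.backgroundVideo} />
```
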
#### rate
Speed at which the media should play.
* **0.0** - Pauses the video
* **1.0** - Play at normal speed
* **Other values** - Slow down or speed up playback

Platforms: all

Note: For Android MediaPlayer, rate is only supported on Android 6.0 and higher devices.

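For example, playing at half speed (the URL is a placeholder):
```
<Video source={{ uri: 'https://example.com/video.mp4' }} // hypothetical URL
       rate={0.5}                                         // half speed; 1.0 is normal, 0.0 pauses
       style={styles.backgroundVideo} />
```
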
#### repeat
Determine whether to repeat the video when the end is reached
* **false (default)** - Don't repeat the video
* **true** - Repeat the video

Platforms: all

#### resizeMode
Determines how to resize the video when the frame doesn't match the raw video dimensions.
* **"none" (default)** - Don't apply resize
* **"contain"** - Scale the video uniformly (maintain the video's aspect ratio) so that both dimensions (width and height) of the video will be equal to or less than the corresponding dimension of the view (minus padding).
* **"cover"** - Scale the video uniformly (maintain the video's aspect ratio) so that both dimensions (width and height) of the video will be equal to or larger than the corresponding dimension of the view (minus padding).
* **"stretch"** - Scale width and height independently. This may change the aspect ratio of the src.

Platforms: Android ExoPlayer, Android MediaPlayer, iOS, Windows UWP

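For example, letterboxing the video so it is fully visible inside the view (the URL is a placeholder):
```
<Video source={{ uri: 'https://example.com/video.mp4' }} // hypothetical URL
       resizeMode="contain"                               // scale uniformly so the whole video fits
       style={styles.backgroundVideo} />
```
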
#### selectedTextTrack
Configure which text track (caption or subtitle), if any, is shown.

```
selectedTextTrack={{
  type: Type,
  value: Value
}}
```

Example:
```
selectedTextTrack={{
  type: "title",
  value: "English Subtitles"
}}
```

Type | Value | Description
--- | --- | ---
"system" (default) | N/A | Display captions only if the system preference for captions is enabled
"disabled" | N/A | Don't display a text track
"title" | string | Display the text track with the title specified as the Value, e.g. "French 1"
"language" | string | Display the text track with the language specified as the Value, e.g. "fr"
"index" | number | Display the text track with the index specified as the value, e.g. 0

Both iOS & Android (only 4.4 and higher) offer Settings to enable Captions for hearing impaired people. If "system" is selected and the Captions Setting is enabled, iOS/Android will look for a caption that matches that customer's language and display it.

If a track matching the specified Type (and Value if appropriate) is unavailable, no text track will be displayed. If multiple tracks match the criteria, the first match will be used.

Platforms: Android ExoPlayer, iOS

#### stereoPan
Adjust the balance of the left and right audio channels. Any value between -1.0 and 1.0 is accepted.
* **-1.0** - Full left
* **0.0 (default)** - Center
* **1.0** - Full right

Platforms: Android MediaPlayer

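A sketch that shifts the audio most of the way to the right channel (the URL is a placeholder):
```
<Video source={{ uri: 'https://example.com/video.mp4' }} // hypothetical URL
       stereoPan={0.75}                                   // mostly right; -1.0 is full left, 1.0 is full right
       style={styles.backgroundVideo} />
```
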
#### textTracks
Load one or more "sidecar" text tracks. This takes an array of objects representing each track. Each object should have the format:

Property | Description
--- | ---
title | Descriptive name for the track
language | 2 letter [ISO 639-1 code](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) representing the language
type | Mime type of the track<br> * TextTrackType.SRT - SubRip (.srt)<br> * TextTrackType.TTML - TTML (.ttml)<br> * TextTrackType.VTT - WebVTT (.vtt)<br>iOS only supports VTT, Android ExoPlayer supports all 3
uri | URL for the text track. Currently, only tracks hosted on a webserver are supported

On iOS, sidecar text tracks are only supported for individual files, not HLS playlists. For HLS, you should include the text tracks as part of the playlist.

Example:
```
import Video, { TextTrackType } from 'react-native-video';

textTracks={[
  {
    title: "English CC",
    language: "en",
    type: TextTrackType.VTT, // "text/vtt"
    uri: "https://bitdash-a.akamaihd.net/content/sintel/subtitles/subtitles_en.vtt"
  },
  {
    title: "Spanish Subtitles",
    language: "es",
    type: TextTrackType.SRT, // "application/x-subrip"
    uri: "https://durian.blender.org/wp-content/content/subtitles/sintel_es.srt"
  }
]}
```

Platforms: Android ExoPlayer, iOS

#### useTextureView
Output to a TextureView instead of the default SurfaceView. In general, you will want to use SurfaceView because it is more efficient and provides better performance. However, SurfaceView has two limitations:
* It can't be animated, transformed or scaled
* You can't overlay multiple SurfaceViews

useTextureView can only be set at the same time you're setting the source.

* **false (default)** - Use a SurfaceView
* **true** - Use a TextureView

Platforms: Android ExoPlayer

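For example, switching to a TextureView so the player can sit under an animated overlay (the URL is a placeholder):
```
<Video source={{ uri: 'https://example.com/video.mp4' }} // hypothetical URL
       useTextureView={true}                              // TextureView can be animated/transformed, unlike SurfaceView
       style={styles.backgroundVideo} />
```
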
#### volume
Adjust the volume.
* **1.0 (default)** - Play at full volume
* **0.0** - Mute the audio
* **Other values** - Reduce volume

Platforms: all

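For example, playing at a quarter of full volume (the URL is a placeholder):
```
<Video source={{ uri: 'https://example.com/video.mp4' }} // hypothetical URL
       volume={0.25}                                      // 1.0 is full volume, 0.0 is muted
       muted={false}                                      // set true to silence the audio entirely
       style={styles.backgroundVideo} />
```
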
### Event props

#### onAudioBecomingNoisy
Callback function that is called when the audio is about to become 'noisy' due to a change in audio outputs. Typically this is called when audio output is being switched from an external source like headphones back to the internal speaker. It's a good idea to pause the media when this happens so the speaker doesn't start blasting sound.

Payload: none

Platforms: Android ExoPlayer, iOS

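A minimal sketch of the pattern described above, pausing when headphones are unplugged; the state shape and URL are illustrative:
```
// Illustrative handler: pause playback when audio is about to switch to the speaker
onAudioBecomingNoisy = () => {
  this.setState({ paused: true });
};

<Video source={{ uri: 'https://example.com/video.mp4' }} // hypothetical URL
       paused={this.state.paused}
       onAudioBecomingNoisy={this.onAudioBecomingNoisy}
       style={styles.backgroundVideo} />
```
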
#### onLoad
Callback function that is called when the media is loaded and ready to play.

Payload:

Property | Type | Description
--- | --- | ---
currentPosition | number | Time in seconds where the media will start
duration | number | Length of the media in seconds
naturalSize | object | Properties:<br> * width - Width in pixels that the video was encoded at<br> * height - Height in pixels that the video was encoded at<br> * orientation - "portrait" or "landscape"
textTracks | array | An array of text track info objects with the following properties:<br> * index - Index number<br> * title - Description of the track<br> * language - 2 letter [ISO 639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) language code<br> * type - Mime type of track

Example:
```
{
  canPlaySlowForward: true,
  canPlayReverse: false,
  canPlaySlowReverse: false,
  canPlayFastForward: false,
  canStepForward: false,
  canStepBackward: false,
  currentTime: 0,
  duration: 5910.208984375,
  naturalSize: {
    height: 1080,
    orientation: 'landscape',
    width: 1920
  },
  textTracks: [
    { title: '#1 French', language: 'fr', index: 0, type: 'text/vtt' },
    { title: '#2 English CC', language: 'en', index: 1, type: 'text/vtt' },
    { title: '#3 English Director Commentary', language: 'en', index: 2, type: 'text/vtt' }
  ]
}
```

Platforms: all

#### onLoadStart
Callback function that is called when the media starts loading.

Payload:

Property | Type | Description
--- | --- | ---
isNetwork | boolean | Boolean indicating if the media is being loaded from the network
type | string | Type of the media. Not available on Windows
uri | string | URI for the media source. Not available on Windows

Example:
```
{
  isNetwork: true,
  type: '',
  uri: 'https://bitdash-a.akamaihd.net/content/sintel/hls/playlist.m3u8'
}
```

Platforms: all

#### onProgress
Callback function that is called every progressUpdateInterval milliseconds with info about which position the media is currently playing.

Property | Type | Description
--- | --- | ---
currentTime | number | Current position in seconds
playableDuration | number | Position to where the media can be played to using just the buffer in seconds
seekableDuration | number | Position to where the media can be seeked to in seconds. Typically, the total length of the media

Example:
```
{
  currentTime: 5.2,
  playableDuration: 34.6,
  seekableDuration: 888
}
```

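As a hedged example, using the payload above to compute a playback fraction for a progress bar (the state fields and URL are illustrative):
```
// Illustrative handler: store the current position and the fraction played
onProgress = (data) => {
  this.setState({
    currentTime: data.currentTime,
    progress: data.seekableDuration > 0
      ? data.currentTime / data.seekableDuration
      : 0
  });
};

<Video source={{ uri: 'https://example.com/video.mp4' }} // hypothetical URL
       onProgress={this.onProgress}
       style={styles.backgroundVideo} />
```
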
#### onTimedMetadata
Callback function that is called when timed metadata becomes available

Payload:

Property | Type | Description
--- | --- | ---
metadata | array | Array of metadata objects

Example:
```
{
  metadata: [
    { value: 'Streaming Encoder', identifier: 'TRSN' },
    { value: 'Internet Stream', identifier: 'TRSO' },
    { value: 'Any Time You Like', identifier: 'TIT2' }
  ]
}
```

Platforms: Android ExoPlayer, iOS

### Methods
Methods operate on a ref to the Video element. You can create a ref using code like:
```
return (
  <Video source={...}
    ref={(ref) => (this.player = ref)} />
);
```

#### seek()
`seek(seconds)`

Seek to the specified position represented by seconds. seconds is a float value.

`seek()` can only be called after the `onLoad` event has fired.

Example:
```
this.player.seek(200); // Seek to 3 minutes, 20 seconds
```

Platforms: all

##### Exact seek

By default iOS seeks within 100 milliseconds of the target position. If you need more accuracy, you can use the seek with tolerance method:

`seek(seconds, tolerance)`

tolerance is the max distance in milliseconds from the seconds position that's allowed. Using a more exact tolerance can cause seeks to take longer. If you want to seek exactly, set tolerance to 0.

Example:
```
this.player.seek(120, 50); // Seek to 2 minutes with +/- 50 milliseconds accuracy
```

Platforms: iOS

### Additional props

To see the full list of available props, you can check the [propTypes](https://github.com/react-native-community/react-native-video/blob/master/Video.js#L246) of the Video.js component.

- By default, iOS 9+ will only load encrypted HTTPS urls. If you need to load content from a webserver that only supports HTTP, you will need to modify your Info.plist file and add the following entry:

<img src="./docs/AppTransportSecuritySetting.png" width="50%">

For more detailed info check this [article](https://cocoacasts.com/how-to-add-app-transport-security-exception-domains)

</details>

### Android Expansion File Usage
Expansion files allow you to ship assets that exceed the 100MB apk size limit and don't need to be updated each time you push an app update.

This only supports mp4 files and they must not be compressed. Example command line for preventing compression:
```bash
zip -r -n .mp4 *.mp4 player.video.example.com
```

```javascript
// Within your render function, assuming you have a file called
// "background.mp4" in your expansion file. Just add your main and (if applicable) patch version
<Video source={{uri: "background", mainVer: 1, patchVer: 0}} // Looks for .mp4 file (background.mp4) in the given expansion version.
       resizeMode="cover"           // Fill the whole screen at aspect ratio.
       style={styles.backgroundVideo} />
```

### Load files with the RN Asset System

The asset system [introduced in RN `0.14`](http://www.reactnative.com/react-native-v0-14-0-released/) allows loading image resources shared across iOS and Android without touching native code. As of RN `0.31` [the same is true](https://github.com/facebook/react-native/commit/91ff6868a554c4930fd5fda6ba8044dbd56c8374) of mp4 video assets for Android. As of [RN `0.33`](https://github.com/facebook/react-native/releases/tag/v0.33.0) iOS is also supported. Requires `react-native-video@0.9.0`.

```
<Video
  source={require('../assets/video/turntable.mp4')}
/>
```

@@ -242,16 +639,6 @@ The asset system [introduced in RN `0.14`](http://www.reactnative.com/react-nati

To enable audio to play in background on iOS the audio session needs to be set to `AVAudioSessionCategoryPlayback`. See [Apple documentation][3] for additional details. (NOTE: there is now a ticket to [expose this as a prop](https://github.com/react-native-community/react-native-video/issues/310))

## Examples

- See an [Example integration][1] in `react-native-login` *note that this example uses an older version of this library, before we used `export default` -- if you use `require` you will need to do `require('react-native-video').default` as per instructions above.*

@@ -269,9 +656,49 @@ Toggles a fullscreen player. Access using a ref to the component.

- [Lumpen Radio](https://github.com/jhabdas/lumpen-radio) contains another example integration using local files and full screen background video.

## Updating

### Version 3.0

#### All platforms now auto-play
Previously, on Android ExoPlayer if the paused prop was not set, the media would not automatically start playing. The only way it would work was if you set `paused={false}`. This has been changed to automatically play if paused is not set so that the behavior is consistent across platforms.

#### All platforms now keep their paused state when returning from the background
Previously, on Android MediaPlayer, if you set up an AppState event to set the paused prop when the app went into the background (so that the video would be paused when you returned to the app), the prop was ignored.

Note, Windows does not have a concept of an app going into the background, so this doesn't apply there.

#### Use Android SDK 27 by default
Version 3.0 updates the Android build tools and SDK to version 27. React Native is in the process of [switching over](https://github.com/facebook/react-native/issues/18095#issuecomment-395596130) to SDK 27 in preparation for Google's requirement that new Android apps [use SDK 26](https://android-developers.googleblog.com/2017/12/improving-app-security-and-performance.html) by August 2018.

You will either need to install the version 27 SDK and version 27.0.3 buildtools or modify your build.gradle file to configure react-native-video to use the same build settings as the rest of your app as described below.

##### Using app build settings
You will need to create a `project.ext` section in the top-level build.gradle file (not app/build.gradle). Fill in the values from the example below using the values found in your app/build.gradle file.
```
// Top-level build file where you can add configuration options common to all sub-projects/modules.

buildscript {
    ... // Various other settings go here
}

allprojects {
    ... // Various other settings go here

    project.ext {
        compileSdkVersion = 23
        buildToolsVersion = "23.0.1"

        minSdkVersion = 16
        targetSdkVersion = 22
    }
}
```

If you encounter an error `Could not find com.android.support:support-annotations:27.0.0.` reinstall your Android Support Repository.

## TODOS

- [ ] Add support for playing multiple videos in a sequence (will interfere with current `repeat` implementation)
- [x] Callback to get buffering progress for remote videos
- [ ] Bring API closer to HTML5 `<Video>` [reference](http://devdocs.io/html/element/video)

TextTrackType.js (new file, 7 lines)

@@ -0,0 +1,7 @@
import keyMirror from 'keymirror';

export default {
  SRT: 'application/x-subrip',
  TTML: 'application/ttml+xml',
  VTT: 'text/vtt'
};
Video.js (79 changed lines)

@@ -1,7 +1,8 @@
import React, {Component} from 'react';
import PropTypes from 'prop-types';
import {StyleSheet, requireNativeComponent, NativeModules, View, ViewPropTypes, Image, Platform} from 'react-native';
import resolveAssetSource from 'react-native/Libraries/Image/resolveAssetSource';
import TextTrackType from './TextTrackType';
import VideoResizeMode from './VideoResizeMode.js';

const styles = StyleSheet.create({
@@ -10,6 +11,8 @@ const styles = StyleSheet.create({
  },
});

export { TextTrackType };

export default class Video extends Component {

  constructor(props) {
@@ -24,8 +27,40 @@ export default class Video extends Component {
    this._root.setNativeProps(nativeProps);
  }

  toTypeString(x) {
    switch (typeof x) {
      case "object":
        return x instanceof Date
          ? x.toISOString()
          : JSON.stringify(x); // object, null
      case "undefined":
        return "";
      default: // boolean, number, string
        return x.toString();
    }
  }

  stringsOnlyObject(obj) {
    const strObj = {};

    Object.keys(obj).forEach(x => {
      strObj[x] = this.toTypeString(obj[x]);
    });

    return strObj;
  }

  seek = (time, tolerance = 100) => {
    if (Platform.OS === 'ios') {
      this.setNativeProps({
        seek: {
          time,
          tolerance
        }
      });
    } else {
      this.setNativeProps({ seek: time });
    }
  };

  presentFullscreenPlayer = () => {
@@ -65,7 +100,7 @@ export default class Video extends Component {
  };

  _onSeek = (event) => {
    if (this.state.showPoster && !this.props.audioOnly) {
      this.setState({showPoster: false});
    }
@@ -129,7 +164,7 @@ export default class Video extends Component {
  };

  _onPlaybackRateChange = (event) => {
    if (this.state.showPoster && event.nativeEvent.playbackRate !== 0 && !this.props.audioOnly) {
      this.setState({showPoster: false});
    }
@@ -190,6 +225,7 @@ export default class Video extends Component {
        type: source.type || '',
        mainVer: source.mainVer || 0,
        patchVer: source.patchVer || 0,
        requestHeaders: source.headers ? this.stringsOnlyObject(source.headers) : {}
      },
      onVideoLoadStart: this._onLoadStart,
      onVideoLoad: this._onLoad,
@@ -199,6 +235,7 @@ export default class Video extends Component {
      onVideoEnd: this._onEnd,
      onVideoBuffer: this._onBuffer,
      onTimedMetadata: this._onTimedMetadata,
      onVideoAudioBecomingNoisy: this._onAudioBecomingNoisy,
      onVideoFullscreenPlayerWillPresent: this._onFullscreenPlayerWillPresent,
      onVideoFullscreenPlayerDidPresent: this._onFullscreenPlayerDidPresent,
      onVideoFullscreenPlayerWillDismiss: this._onFullscreenPlayerWillDismiss,
@@ -218,7 +255,7 @@ export default class Video extends Component {
      top: 0,
      right: 0,
      bottom: 0,
      resizeMode: this.props.posterResizeMode || 'contain'
    };

    return (
@@ -248,7 +285,10 @@ Video.propTypes = {
  /* Native only */
  src: PropTypes.object,
  cache: PropTypes.bool,
  seek: PropTypes.oneOfType([
    PropTypes.number,
    PropTypes.object
  ]),
  fullscreen: PropTypes.bool,
  onVideoLoadStart: PropTypes.func,
  onVideoLoad: PropTypes.func,
@@ -258,6 +298,7 @@ Video.propTypes = {
  onVideoSeek: PropTypes.func,
  onVideoEnd: PropTypes.func,
  onTimedMetadata: PropTypes.func,
  onVideoAudioBecomingNoisy: PropTypes.func,
  onVideoFullscreenPlayerWillPresent: PropTypes.func,
  onVideoFullscreenPlayerDidPresent: PropTypes.func,
  onVideoFullscreenPlayerWillDismiss: PropTypes.func,
@@ -273,18 +314,42 @@ Video.propTypes = {
  ]),
  resizeMode: PropTypes.string,
  poster: PropTypes.string,
  posterResizeMode: Image.propTypes.resizeMode,
  repeat: PropTypes.bool,
  allowsExternalPlayback: PropTypes.bool,
  selectedTextTrack: PropTypes.shape({
    type: PropTypes.string.isRequired,
    value: PropTypes.oneOfType([
      PropTypes.string,
      PropTypes.number
    ])
  }),
  textTracks: PropTypes.arrayOf(
    PropTypes.shape({
      title: PropTypes.string,
      uri: PropTypes.string.isRequired,
      type: PropTypes.oneOf([
        TextTrackType.SRT,
        TextTrackType.TTML,
        TextTrackType.VTT,
      ]),
      language: PropTypes.string.isRequired
    })
  ),
  paused: PropTypes.bool,
  muted: PropTypes.bool,
  volume: PropTypes.number,
  stereoPan: PropTypes.number,
  rate: PropTypes.number,
  playInBackground: PropTypes.bool,
  playWhenInactive: PropTypes.bool,
  ignoreSilentSwitch: PropTypes.oneOf(['ignore', 'obey']),
  disableFocus: PropTypes.bool,
  controls: PropTypes.bool,
  audioOnly: PropTypes.bool,
  currentTime: PropTypes.number,
  progressUpdateInterval: PropTypes.number,
  useTextureView: PropTypes.bool,
  onLoadStart: PropTypes.func,
  onLoad: PropTypes.func,
  onBuffer: PropTypes.func,
@@ -42,6 +42,5 @@ https://github.com/google/ExoPlayer

## Unimplemented props

- Expansion file - `source={{ mainVer: 1, patchVer: 0 }}`

@@ -1,20 +1,31 @@
apply plugin: 'com.android.library'

def _ext = rootProject.ext

def _reactNativeVersion = _ext.has('reactNative') ? _ext.reactNative : '+'
def _compileSdkVersion = _ext.has('compileSdkVersion') ? _ext.compileSdkVersion : 27
def _buildToolsVersion = _ext.has('buildToolsVersion') ? _ext.buildToolsVersion : '27.0.3'
def _minSdkVersion = _ext.has('minSdkVersion') ? _ext.minSdkVersion : 16
def _targetSdkVersion = _ext.has('targetSdkVersion') ? _ext.targetSdkVersion : 27

android {
    compileSdkVersion _compileSdkVersion
    buildToolsVersion _buildToolsVersion

    defaultConfig {
        minSdkVersion _minSdkVersion
        targetSdkVersion _targetSdkVersion
        versionCode 1
        versionName "1.0"
    }
}

dependencies {
    //noinspection GradleDynamicVersion
    provided "com.facebook.react:react-native:${_reactNativeVersion}"
    compile 'com.google.android.exoplayer:exoplayer:2.7.3'
    compile('com.google.android.exoplayer:extension-okhttp:2.7.3') {
        exclude group: 'com.squareup.okhttp3', module: 'okhttp'
    }
    compile 'com.squareup.okhttp3:okhttp:3.9.1'
}
@ -1,7 +1,11 @@
|
|||||||
package com.brentvatne.exoplayer;
|
package com.brentvatne.exoplayer;
|
||||||
|
|
||||||
import android.content.Context;
|
import android.content.Context;
|
||||||
|
import android.content.ContextWrapper;
|
||||||
|
|
||||||
|
import com.facebook.react.bridge.ReactContext;
|
||||||
|
import com.facebook.react.modules.network.CookieJarContainer;
|
||||||
|
import com.facebook.react.modules.network.ForwardingCookieHandler;
|
||||||
import com.facebook.react.modules.network.OkHttpClientProvider;
|
import com.facebook.react.modules.network.OkHttpClientProvider;
|
||||||
import com.google.android.exoplayer2.ext.okhttp.OkHttpDataSourceFactory;
|
import com.google.android.exoplayer2.ext.okhttp.OkHttpDataSourceFactory;
|
||||||
import com.google.android.exoplayer2.upstream.DataSource;
|
import com.google.android.exoplayer2.upstream.DataSource;
|
||||||
@ -10,6 +14,12 @@ import com.google.android.exoplayer2.upstream.DefaultDataSourceFactory;
|
|||||||
import com.google.android.exoplayer2.upstream.HttpDataSource;
|
import com.google.android.exoplayer2.upstream.HttpDataSource;
|
||||||
import com.google.android.exoplayer2.util.Util;
|
import com.google.android.exoplayer2.util.Util;
|
||||||
|
|
||||||
|
import okhttp3.Cookie;
|
||||||
|
import okhttp3.JavaNetCookieJar;
|
||||||
|
import okhttp3.OkHttpClient;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
|
||||||
public class DataSourceUtil {
|
public class DataSourceUtil {
|
||||||
|
|
||||||
private DataSourceUtil() {
|
private DataSourceUtil() {
|
||||||
@ -23,14 +33,14 @@ public class DataSourceUtil {
|
|||||||
DataSourceUtil.userAgent = userAgent;
|
DataSourceUtil.userAgent = userAgent;
|
||||||
}
|
}
|
||||||
|
|
||||||
public static String getUserAgent(Context context) {
|
public static String getUserAgent(ReactContext context) {
|
||||||
if (userAgent == null) {
|
if (userAgent == null) {
|
||||||
userAgent = Util.getUserAgent(context.getApplicationContext(), "ReactNativeVideo");
|
userAgent = Util.getUserAgent(context, "ReactNativeVideo");
|
||||||
}
|
}
|
||||||
return userAgent;
|
return userAgent;
|
||||||
}
|
}
|
||||||
|
|
||||||
-    public static DataSource.Factory getRawDataSourceFactory(Context context) {
+    public static DataSource.Factory getRawDataSourceFactory(ReactContext context) {
         if (rawDataSourceFactory == null) {
             rawDataSourceFactory = buildRawDataSourceFactory(context);
         }

@@ -41,9 +51,10 @@ public class DataSourceUtil {
         DataSourceUtil.rawDataSourceFactory = factory;
     }

-    public static DataSource.Factory getDefaultDataSourceFactory(Context context, DefaultBandwidthMeter bandwidthMeter) {
-        if (defaultDataSourceFactory == null) {
-            defaultDataSourceFactory = buildDataSourceFactory(context, bandwidthMeter);
+    public static DataSource.Factory getDefaultDataSourceFactory(ReactContext context, DefaultBandwidthMeter bandwidthMeter, Map<String, String> requestHeaders) {
+        if (defaultDataSourceFactory == null || (requestHeaders != null && !requestHeaders.isEmpty())) {
+            defaultDataSourceFactory = buildDataSourceFactory(context, bandwidthMeter, requestHeaders);
         }
         return defaultDataSourceFactory;
     }

@@ -52,18 +63,25 @@ public class DataSourceUtil {
         DataSourceUtil.defaultDataSourceFactory = factory;
     }

-    private static DataSource.Factory buildRawDataSourceFactory(Context context) {
+    private static DataSource.Factory buildRawDataSourceFactory(ReactContext context) {
         return new RawResourceDataSourceFactory(context.getApplicationContext());
     }

-    private static DataSource.Factory buildDataSourceFactory(Context context, DefaultBandwidthMeter bandwidthMeter) {
-        Context appContext = context.getApplicationContext();
-        return new DefaultDataSourceFactory(appContext, bandwidthMeter,
-                buildHttpDataSourceFactory(appContext, bandwidthMeter));
+    private static DataSource.Factory buildDataSourceFactory(ReactContext context, DefaultBandwidthMeter bandwidthMeter, Map<String, String> requestHeaders) {
+        return new DefaultDataSourceFactory(context, bandwidthMeter,
+                buildHttpDataSourceFactory(context, bandwidthMeter, requestHeaders));
     }

-    private static HttpDataSource.Factory buildHttpDataSourceFactory(Context context, DefaultBandwidthMeter bandwidthMeter) {
-        return new OkHttpDataSourceFactory(OkHttpClientProvider.getOkHttpClient(), getUserAgent(context), bandwidthMeter);
+    private static HttpDataSource.Factory buildHttpDataSourceFactory(ReactContext context, DefaultBandwidthMeter bandwidthMeter, Map<String, String> requestHeaders) {
+        OkHttpClient client = OkHttpClientProvider.getOkHttpClient();
+        CookieJarContainer container = (CookieJarContainer) client.cookieJar();
+        ForwardingCookieHandler handler = new ForwardingCookieHandler(context);
+        container.setCookieJar(new JavaNetCookieJar(handler));
+        OkHttpDataSourceFactory okHttpDataSourceFactory = new OkHttpDataSourceFactory(client, getUserAgent(context), bandwidthMeter);
+
+        if (requestHeaders != null)
+            okHttpDataSourceFactory.getDefaultRequestProperties().set(requestHeaders);
+
+        return okHttpDataSourceFactory;
     }
 }
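The header support in the DataSourceUtil hunks above comes down to one call: setting default request properties on the OkHttp-backed HttpDataSource factory. Below is a minimal standalone sketch of that idea outside the React Native wiring; the client, user-agent string, and header values are placeholders and not part of this commit.

```java
import java.util.HashMap;
import java.util.Map;

import com.google.android.exoplayer2.ext.okhttp.OkHttpDataSourceFactory;
import com.google.android.exoplayer2.upstream.DefaultBandwidthMeter;

import okhttp3.OkHttpClient;

public class HeaderFactorySketch {
    // Mirrors buildHttpDataSourceFactory() above: every data source created by the
    // returned factory will send the supplied headers with its HTTP requests.
    public static OkHttpDataSourceFactory withHeaders(Map<String, String> requestHeaders) {
        OkHttpClient client = new OkHttpClient(); // placeholder; the module reuses OkHttpClientProvider's shared client
        DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();
        OkHttpDataSourceFactory factory =
                new OkHttpDataSourceFactory(client, "react-native-video (sketch)", bandwidthMeter);
        if (requestHeaders != null) {
            factory.getDefaultRequestProperties().set(requestHeaders);
        }
        return factory;
    }

    public static void main(String[] args) {
        Map<String, String> headers = new HashMap<>();
        headers.put("Authorization", "Bearer <token>"); // illustrative value only
        withHeaders(headers);
    }
}
```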
@@ -18,8 +18,6 @@ import com.google.android.exoplayer2.ExoPlayer;
 import com.google.android.exoplayer2.PlaybackParameters;
 import com.google.android.exoplayer2.SimpleExoPlayer;
 import com.google.android.exoplayer2.Timeline;
-import com.google.android.exoplayer2.metadata.Metadata;
-import com.google.android.exoplayer2.metadata.MetadataRenderer;
 import com.google.android.exoplayer2.source.TrackGroupArray;
 import com.google.android.exoplayer2.text.Cue;
 import com.google.android.exoplayer2.text.TextRenderer;

@@ -31,12 +29,16 @@ import java.util.List;
 @TargetApi(16)
 public final class ExoPlayerView extends FrameLayout {

-    private final View surfaceView;
+    private View surfaceView;
     private final View shutterView;
     private final SubtitleView subtitleLayout;
     private final AspectRatioFrameLayout layout;
     private final ComponentListener componentListener;
     private SimpleExoPlayer player;
+    private Context context;
+    private ViewGroup.LayoutParams layoutParams;
+
+    private boolean useTextureView = false;

     public ExoPlayerView(Context context) {
         this(context, null);

@@ -49,9 +51,9 @@ public final class ExoPlayerView extends FrameLayout {
     public ExoPlayerView(Context context, AttributeSet attrs, int defStyleAttr) {
         super(context, attrs, defStyleAttr);

-        boolean useTextureView = false;
+        this.context = context;

-        ViewGroup.LayoutParams params = new ViewGroup.LayoutParams(
+        layoutParams = new ViewGroup.LayoutParams(
                 ViewGroup.LayoutParams.MATCH_PARENT,
                 ViewGroup.LayoutParams.MATCH_PARENT);

@@ -65,25 +67,45 @@ public final class ExoPlayerView extends FrameLayout {
         layout.setLayoutParams(aspectRatioParams);

         shutterView = new View(getContext());
-        shutterView.setLayoutParams(params);
+        shutterView.setLayoutParams(layoutParams);
         shutterView.setBackgroundColor(ContextCompat.getColor(context, android.R.color.black));

         subtitleLayout = new SubtitleView(context);
-        subtitleLayout.setLayoutParams(params);
+        subtitleLayout.setLayoutParams(layoutParams);
         subtitleLayout.setUserDefaultStyle();
         subtitleLayout.setUserDefaultTextSize();

-        View view = useTextureView ? new TextureView(context) : new SurfaceView(context);
-        view.setLayoutParams(params);
-        surfaceView = view;
+        updateSurfaceView();

-        layout.addView(surfaceView, 0, params);
-        layout.addView(shutterView, 1, params);
-        layout.addView(subtitleLayout, 2, params);
+        layout.addView(shutterView, 1, layoutParams);
+        layout.addView(subtitleLayout, 2, layoutParams);

         addViewInLayout(layout, 0, aspectRatioParams);
     }

+    private void setVideoView() {
+        if (surfaceView instanceof TextureView) {
+            player.setVideoTextureView((TextureView) surfaceView);
+        } else if (surfaceView instanceof SurfaceView) {
+            player.setVideoSurfaceView((SurfaceView) surfaceView);
+        }
+    }
+
+    private void updateSurfaceView() {
+        View view = useTextureView ? new TextureView(context) : new SurfaceView(context);
+        view.setLayoutParams(layoutParams);
+
+        surfaceView = view;
+        if (layout.getChildAt(0) != null) {
+            layout.removeViewAt(0);
+        }
+        layout.addView(surfaceView, 0, layoutParams);
+
+        if (this.player != null) {
+            setVideoView();
+        }
+    }
+
     /**
      * Set the {@link SimpleExoPlayer} to use. The {@link SimpleExoPlayer#setTextOutput} and
      * {@link SimpleExoPlayer#setVideoListener} method of the player will be called and previous

@@ -100,20 +122,14 @@ public final class ExoPlayerView extends FrameLayout {
             this.player.setVideoListener(null);
             this.player.removeListener(componentListener);
             this.player.setVideoSurface(null);
-            this.player.setMetadataOutput(componentListener);
         }
         this.player = player;
         shutterView.setVisibility(VISIBLE);
         if (player != null) {
-            if (surfaceView instanceof TextureView) {
-                player.setVideoTextureView((TextureView) surfaceView);
-            } else if (surfaceView instanceof SurfaceView) {
-                player.setVideoSurfaceView((SurfaceView) surfaceView);
-            }
+            setVideoView();
             player.setVideoListener(componentListener);
             player.addListener(componentListener);
             player.setTextOutput(componentListener);
-            player.setMetadataOutput(componentListener);
         }
     }

@@ -140,6 +156,11 @@ public final class ExoPlayerView extends FrameLayout {
         return surfaceView;
     }

+    public void setUseTextureView(boolean useTextureView) {
+        this.useTextureView = useTextureView;
+        updateSurfaceView();
+    }
+
     private final Runnable measureAndLayout = new Runnable() {
         @Override
         public void run() {

@@ -167,7 +188,7 @@ public final class ExoPlayerView extends FrameLayout {
     }

     private final class ComponentListener implements SimpleExoPlayer.VideoListener,
-            TextRenderer.Output, ExoPlayer.EventListener, MetadataRenderer.Output {
+            TextRenderer.Output, ExoPlayer.EventListener {

         // TextRenderer.Output implementation

@@ -212,12 +233,12 @@ public final class ExoPlayerView extends FrameLayout {
         }

         @Override
-        public void onPositionDiscontinuity() {
+        public void onPositionDiscontinuity(int reason) {
             // Do nothing.
         }

         @Override
-        public void onTimelineChanged(Timeline timeline, Object manifest) {
+        public void onTimelineChanged(Timeline timeline, Object manifest, int reason) {
             // Do nothing.
         }

@@ -232,8 +253,18 @@ public final class ExoPlayerView extends FrameLayout {
         }

         @Override
-        public void onMetadata(Metadata metadata) {
-            Log.d("onMetadata", "onMetadata");
+        public void onSeekProcessed() {
+            // Do nothing.
+        }
+
+        @Override
+        public void onShuffleModeEnabledChanged(boolean shuffleModeEnabled) {
+            // Do nothing.
+        }
+
+        @Override
+        public void onRepeatModeChanged(int repeatMode) {
+            // Do nothing.
         }
     }
 }
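The new updateSurfaceView()/setVideoView() pair above lets the output surface be rebuilt at runtime when the useTextureView flag flips. A trimmed sketch of the same pattern follows; the class and field names are illustrative, and only the two player calls come from the hunk above.

```java
import android.content.Context;
import android.view.SurfaceView;
import android.view.TextureView;
import android.view.View;

import com.google.android.exoplayer2.SimpleExoPlayer;

class SurfaceSwapSketch {
    private final Context context;
    private SimpleExoPlayer player;
    private View surfaceView;
    private boolean useTextureView = false;

    SurfaceSwapSketch(Context context) {
        this.context = context;
    }

    void setPlayer(SimpleExoPlayer player) {
        this.player = player;
        attachSurface();
    }

    // Rebuild the surface whenever the flag changes, then hand it to the player if one is attached.
    void setUseTextureView(boolean useTextureView) {
        this.useTextureView = useTextureView;
        surfaceView = useTextureView ? new TextureView(context) : new SurfaceView(context);
        if (player != null) {
            attachSurface();
        }
    }

    // Mirrors setVideoView(): TextureView and SurfaceView need different player calls.
    private void attachSurface() {
        if (surfaceView instanceof TextureView) {
            player.setVideoTextureView((TextureView) surfaceView);
        } else if (surfaceView instanceof SurfaceView) {
            player.setVideoSurfaceView((SurfaceView) surfaceView);
        }
    }
}
```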
@@ -1,6 +1,7 @@
 package com.brentvatne.exoplayer;

 import android.annotation.SuppressLint;
+import android.app.Activity;
 import android.content.Context;
 import android.media.AudioManager;
 import android.net.Uri;

@@ -8,12 +9,21 @@ import android.os.Handler;
 import android.os.Message;
 import android.text.TextUtils;
 import android.util.Log;
+import android.view.View;
+import android.view.Window;
+import android.view.accessibility.CaptioningManager;
 import android.widget.FrameLayout;

 import com.brentvatne.react.R;
 import com.brentvatne.receiver.AudioBecomingNoisyReceiver;
 import com.brentvatne.receiver.BecomingNoisyListener;
+import com.facebook.react.bridge.Arguments;
+import com.facebook.react.bridge.Dynamic;
 import com.facebook.react.bridge.LifecycleEventListener;
+import com.facebook.react.bridge.ReadableArray;
+import com.facebook.react.bridge.ReadableMap;
+import com.facebook.react.bridge.WritableArray;
+import com.facebook.react.bridge.WritableMap;
 import com.facebook.react.uimanager.ThemedReactContext;
 import com.google.android.exoplayer2.C;
 import com.google.android.exoplayer2.DefaultLoadControl;

@@ -22,6 +32,7 @@ import com.google.android.exoplayer2.ExoPlayer;
 import com.google.android.exoplayer2.ExoPlayerFactory;
 import com.google.android.exoplayer2.Format;
 import com.google.android.exoplayer2.PlaybackParameters;
+import com.google.android.exoplayer2.Player;
 import com.google.android.exoplayer2.SimpleExoPlayer;
 import com.google.android.exoplayer2.Timeline;
 import com.google.android.exoplayer2.extractor.DefaultExtractorsFactory;

@@ -31,8 +42,9 @@ import com.google.android.exoplayer2.metadata.Metadata;
 import com.google.android.exoplayer2.metadata.MetadataRenderer;
 import com.google.android.exoplayer2.source.BehindLiveWindowException;
 import com.google.android.exoplayer2.source.ExtractorMediaSource;
-import com.google.android.exoplayer2.source.LoopingMediaSource;
 import com.google.android.exoplayer2.source.MediaSource;
+import com.google.android.exoplayer2.source.MergingMediaSource;
+import com.google.android.exoplayer2.source.SingleSampleMediaSource;
 import com.google.android.exoplayer2.source.TrackGroupArray;
 import com.google.android.exoplayer2.source.dash.DashMediaSource;
 import com.google.android.exoplayer2.source.dash.DefaultDashChunkSource;

@@ -41,17 +53,23 @@ import com.google.android.exoplayer2.source.smoothstreaming.DefaultSsChunkSource
 import com.google.android.exoplayer2.source.smoothstreaming.SsMediaSource;
 import com.google.android.exoplayer2.trackselection.AdaptiveTrackSelection;
 import com.google.android.exoplayer2.trackselection.DefaultTrackSelector;
+import com.google.android.exoplayer2.trackselection.FixedTrackSelection;
 import com.google.android.exoplayer2.trackselection.MappingTrackSelector;
 import com.google.android.exoplayer2.trackselection.TrackSelection;
 import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
 import com.google.android.exoplayer2.upstream.DataSource;
 import com.google.android.exoplayer2.upstream.DefaultBandwidthMeter;
+import com.google.android.exoplayer2.util.MimeTypes;
 import com.google.android.exoplayer2.util.Util;

 import java.net.CookieHandler;
 import java.net.CookieManager;
 import java.net.CookiePolicy;
 import java.lang.Math;
+import java.util.Map;
+import java.lang.Object;
+import java.util.ArrayList;
+import java.util.Locale;

 @SuppressLint("ViewConstructor")
 class ReactExoplayerView extends FrameLayout implements

@@ -85,7 +103,9 @@ class ReactExoplayerView extends FrameLayout implements
     private int resumeWindow;
     private long resumePosition;
     private boolean loadVideoStarted;
-    private boolean isPaused = true;
+    private boolean isFullscreen;
+    private boolean isInBackground;
+    private boolean isPaused;
     private boolean isBuffering;
     private float rate = 1f;

@@ -93,9 +113,14 @@ class ReactExoplayerView extends FrameLayout implements
     private Uri srcUri;
     private String extension;
     private boolean repeat;
+    private String textTrackType;
+    private Dynamic textTrackValue;
+    private ReadableArray textTracks;
     private boolean disableFocus;
     private float mProgressUpdateInterval = 250.0f;
     private boolean playInBackground = false;
+    private boolean useTextureView = false;
+    private Map<String, String> requestHeaders;
     // \ End props

     // React

@@ -113,7 +138,8 @@ class ReactExoplayerView extends FrameLayout implements
                             && player.getPlayWhenReady()
                             ) {
                         long pos = player.getCurrentPosition();
-                        eventEmitter.progressChanged(pos, player.getBufferedPercentage());
+                        long bufferedDuration = player.getBufferedPercentage() * player.getDuration() / 100;
+                        eventEmitter.progressChanged(pos, bufferedDuration, player.getDuration());
                         msg = obtainMessage(SHOW_PROGRESS);
                         sendMessageDelayed(msg, Math.round(mProgressUpdateInterval));
                     }

@@ -124,9 +150,9 @@ class ReactExoplayerView extends FrameLayout implements

     public ReactExoplayerView(ThemedReactContext context) {
         super(context);
+        this.themedReactContext = context;
         createViews();
         this.eventEmitter = new VideoEventEmitter(context);
-        this.themedReactContext = context;
         audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
         themedReactContext.addLifecycleEventListener(this);
         audioBecomingNoisyReceiver = new AudioBecomingNoisyReceiver(themedReactContext);

@@ -174,14 +200,15 @@ class ReactExoplayerView extends FrameLayout implements

     @Override
     public void onHostResume() {
-        if (playInBackground) {
-            return;
+        if (!playInBackground || !isInBackground) {
+            setPlayWhenReady(!isPaused);
         }
-        setPlayWhenReady(!isPaused);
+        isInBackground = false;
     }

     @Override
     public void onHostPause() {
+        isInBackground = true;
         if (playInBackground) {
             return;
         }
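The progress handler above now reports an absolute buffered duration derived from getBufferedPercentage() and getDuration() instead of a raw percentage. A small sketch of that conversion, with made-up numbers for illustration:

```java
public class BufferedDurationExample {
    // Convert ExoPlayer's buffered percentage into milliseconds,
    // as done in the SHOW_PROGRESS handler above.
    static long bufferedDurationMs(int bufferedPercentage, long durationMs) {
        return bufferedPercentage * durationMs / 100;
    }

    public static void main(String[] args) {
        // Example: 40% of a 90-second video is buffered -> 36000 ms.
        System.out.println(bufferedDurationMs(40, 90_000L));
    }
}
```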
@@ -216,8 +243,19 @@ class ReactExoplayerView extends FrameLayout implements
             player.setPlaybackParameters(params);
         }
         if (playerNeedsSource && srcUri != null) {
-            MediaSource mediaSource = buildMediaSource(srcUri, extension);
-            mediaSource = repeat ? new LoopingMediaSource(mediaSource) : mediaSource;
+            ArrayList<MediaSource> mediaSourceList = buildTextSources();
+            MediaSource videoSource = buildMediaSource(srcUri, extension);
+            MediaSource mediaSource;
+            if (mediaSourceList.size() == 0) {
+                mediaSource = videoSource;
+            } else {
+                mediaSourceList.add(0, videoSource);
+                MediaSource[] textSourceArray = mediaSourceList.toArray(
+                        new MediaSource[mediaSourceList.size()]
+                );
+                mediaSource = new MergingMediaSource(textSourceArray);
+            }

             boolean haveResumePosition = resumeWindow != C.INDEX_UNSET;
             if (haveResumePosition) {
                 player.seekTo(resumeWindow, resumePosition);

@@ -251,6 +289,32 @@ class ReactExoplayerView extends FrameLayout implements
         }
     }

+    private ArrayList<MediaSource> buildTextSources() {
+        ArrayList<MediaSource> textSources = new ArrayList<>();
+        if (textTracks == null) {
+            return textSources;
+        }
+
+        for (int i = 0; i < textTracks.size(); ++i) {
+            ReadableMap textTrack = textTracks.getMap(i);
+            String language = textTrack.getString("language");
+            String title = textTrack.hasKey("title")
+                    ? textTrack.getString("title") : language + " " + i;
+            Uri uri = Uri.parse(textTrack.getString("uri"));
+            MediaSource textSource = buildTextSource(title, uri, textTrack.getString("type"),
+                    language);
+            if (textSource != null) {
+                textSources.add(textSource);
+            }
+        }
+        return textSources;
+    }
+
+    private MediaSource buildTextSource(String title, Uri uri, String mimeType, String language) {
+        Format textFormat = Format.createTextSampleFormat(title, mimeType, Format.NO_VALUE, language);
+        return new SingleSampleMediaSource(uri, mediaDataSourceFactory, textFormat, C.TIME_UNSET);
+    }
+
     private void releasePlayer() {
         if (player != null) {
             isPaused = player.getPlayWhenReady();
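buildTextSources()/buildTextSource() above wrap each sidecar subtitle in a SingleSampleMediaSource and merge it with the video through MergingMediaSource. A reduced sketch of that assembly; the MIME type constant and helper names are placeholders.

```java
import android.net.Uri;

import java.util.ArrayList;

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.MergingMediaSource;
import com.google.android.exoplayer2.source.SingleSampleMediaSource;
import com.google.android.exoplayer2.upstream.DataSource;
import com.google.android.exoplayer2.util.MimeTypes;

class TextTrackMergeSketch {
    // Wrap one subtitle file as a text-only MediaSource (same call shape as buildTextSource()).
    static MediaSource subtitleSource(DataSource.Factory dataSourceFactory, Uri uri,
                                      String title, String language) {
        Format textFormat = Format.createTextSampleFormat(
                title, MimeTypes.TEXT_VTT, Format.NO_VALUE, language); // MIME type chosen for illustration
        return new SingleSampleMediaSource(uri, dataSourceFactory, textFormat, C.TIME_UNSET);
    }

    // Merge the video source with any subtitle sources, keeping the video first.
    static MediaSource withSubtitles(MediaSource videoSource, ArrayList<MediaSource> textSources) {
        if (textSources.isEmpty()) {
            return videoSource;
        }
        textSources.add(0, videoSource);
        return new MergingMediaSource(textSources.toArray(new MediaSource[textSources.size()]));
    }
}
```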
@@ -330,6 +394,9 @@ class ReactExoplayerView extends FrameLayout implements
     }

     private void onStopPlayback() {
+        if (isFullscreen) {
+            setFullscreen(false);
+        }
         setKeepScreenOn(false);
         audioManager.abandonAudioFocus(this);
     }

@@ -353,7 +420,7 @@ class ReactExoplayerView extends FrameLayout implements
      * @return A new DataSource factory.
      */
     private DataSource.Factory buildDataSourceFactory(boolean useBandwidthMeter) {
-        return DataSourceUtil.getDefaultDataSourceFactory(getContext(), useBandwidthMeter ? BANDWIDTH_METER : null);
+        return DataSourceUtil.getDefaultDataSourceFactory(this.themedReactContext, useBandwidthMeter ? BANDWIDTH_METER : null, requestHeaders);
     }

     // AudioManager.OnAudioFocusChangeListener implementation

@@ -434,13 +501,37 @@ class ReactExoplayerView extends FrameLayout implements
     private void videoLoaded() {
         if (loadVideoStarted) {
             loadVideoStarted = false;
+            setSelectedTextTrack(textTrackType, textTrackValue);
             Format videoFormat = player.getVideoFormat();
             int width = videoFormat != null ? videoFormat.width : 0;
             int height = videoFormat != null ? videoFormat.height : 0;
-            eventEmitter.load(player.getDuration(), player.getCurrentPosition(), width, height);
+            eventEmitter.load(player.getDuration(), player.getCurrentPosition(), width, height,
+                    getTextTrackInfo());
         }
     }

+    private WritableArray getTextTrackInfo() {
+        WritableArray textTracks = Arguments.createArray();
+
+        MappingTrackSelector.MappedTrackInfo info = trackSelector.getCurrentMappedTrackInfo();
+        int index = getTextTrackRendererIndex();
+        if (info == null || index == C.INDEX_UNSET) {
+            return textTracks;
+        }
+
+        TrackGroupArray groups = info.getTrackGroups(index);
+        for (int i = 0; i < groups.length; ++i) {
+            Format format = groups.get(i).getFormat(0);
+            WritableMap textTrack = Arguments.createMap();
+            textTrack.putInt("index", i);
+            textTrack.putString("title", format.id != null ? format.id : "");
+            textTrack.putString("type", format.sampleMimeType);
+            textTrack.putString("language", format.language != null ? format.language : "");
+            textTracks.pushMap(textTrack);
+        }
+        return textTracks;
+    }
+
     private void onBuffering(boolean buffering) {
         if (isBuffering == buffering) {
             return;
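getTextTrackInfo() above walks the mapped track info for the text renderer and reports index, title, type, and language per group. A hedged sketch of the same enumeration using plain collections instead of the bridge's WritableArray; class and method names are illustrative.

```java
import java.util.ArrayList;
import java.util.List;

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.SimpleExoPlayer;
import com.google.android.exoplayer2.source.TrackGroupArray;
import com.google.android.exoplayer2.trackselection.DefaultTrackSelector;
import com.google.android.exoplayer2.trackselection.MappingTrackSelector;

class TextTrackInfoSketch {
    // Find the renderer that handles text, mirroring getTextTrackRendererIndex().
    static int textRendererIndex(SimpleExoPlayer player) {
        for (int i = 0; i < player.getRendererCount(); i++) {
            if (player.getRendererType(i) == C.TRACK_TYPE_TEXT) {
                return i;
            }
        }
        return C.INDEX_UNSET;
    }

    // Collect "index: language (title)" labels for every text track group, as getTextTrackInfo() does.
    static List<String> describeTextTracks(SimpleExoPlayer player, DefaultTrackSelector trackSelector) {
        List<String> labels = new ArrayList<>();
        MappingTrackSelector.MappedTrackInfo info = trackSelector.getCurrentMappedTrackInfo();
        int index = textRendererIndex(player);
        if (info == null || index == C.INDEX_UNSET) {
            return labels;
        }
        TrackGroupArray groups = info.getTrackGroups(index);
        for (int i = 0; i < groups.length; ++i) {
            Format format = groups.get(i).getFormat(0);
            String language = format.language != null ? format.language : "";
            String title = format.id != null ? format.id : "";
            labels.add(i + ": " + language + " (" + title + ")");
        }
        return labels;
    }
}
```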
@@ -455,17 +546,38 @@ class ReactExoplayerView extends FrameLayout implements
     }

     @Override
-    public void onPositionDiscontinuity() {
+    public void onPositionDiscontinuity(int reason) {
         if (playerNeedsSource) {
             // This will only occur if the user has performed a seek whilst in the error state. Update the
             // resume position so that if the user then retries, playback will resume from the position to
             // which they seeked.
             updateResumePosition();
         }
+        // When repeat is turned on, reaching the end of the video will not cause a state change
+        // so we need to explicitly detect it.
+        if (reason == ExoPlayer.DISCONTINUITY_REASON_PERIOD_TRANSITION
+                && player.getRepeatMode() == Player.REPEAT_MODE_ONE) {
+            eventEmitter.end();
+        }
     }

     @Override
-    public void onTimelineChanged(Timeline timeline, Object manifest) {
+    public void onTimelineChanged(Timeline timeline, Object manifest, int reason) {
+        // Do nothing.
+    }
+
+    @Override
+    public void onSeekProcessed() {
+        // Do nothing.
+    }
+
+    @Override
+    public void onShuffleModeEnabledChanged(boolean shuffleModeEnabled) {
+        // Do nothing.
+    }
+
+    @Override
+    public void onRepeatModeChanged(int repeatMode) {
         // Do nothing.
     }

@@ -535,6 +647,16 @@ class ReactExoplayerView extends FrameLayout implements
         return false;
     }

+    public int getTextTrackRendererIndex() {
+        int rendererCount = player.getRendererCount();
+        for (int rendererIndex = 0; rendererIndex < rendererCount; rendererIndex++) {
+            if (player.getRendererType(rendererIndex) == C.TRACK_TYPE_TEXT) {
+                return rendererIndex;
+            }
+        }
+        return C.INDEX_UNSET;
+    }
+
     @Override
     public void onMetadata(Metadata metadata) {
         eventEmitter.timedMetadata(metadata);

@@ -542,14 +664,15 @@ class ReactExoplayerView extends FrameLayout implements

     // ReactExoplayerViewManager public api

-    public void setSrc(final Uri uri, final String extension) {
+    public void setSrc(final Uri uri, final String extension, Map<String, String> headers) {
         if (uri != null) {
             boolean isOriginalSourceNull = srcUri == null;
             boolean isSourceEqual = uri.equals(srcUri);

             this.srcUri = uri;
             this.extension = extension;
-            this.mediaDataSourceFactory = DataSourceUtil.getDefaultDataSourceFactory(getContext(), BANDWIDTH_METER);
+            this.requestHeaders = headers;
+            this.mediaDataSourceFactory = DataSourceUtil.getDefaultDataSourceFactory(this.themedReactContext, BANDWIDTH_METER, this.requestHeaders);

             if (!isOriginalSourceNull && !isSourceEqual) {
                 reloadSource();

@@ -568,7 +691,7 @@ class ReactExoplayerView extends FrameLayout implements

             this.srcUri = uri;
             this.extension = extension;
-            this.mediaDataSourceFactory = DataSourceUtil.getRawDataSourceFactory(getContext());
+            this.mediaDataSourceFactory = DataSourceUtil.getRawDataSourceFactory(this.themedReactContext);

             if (!isOriginalSourceNull && !isSourceEqual) {
                 reloadSource();

@@ -576,6 +699,11 @@ class ReactExoplayerView extends FrameLayout implements
         }
     }

+    public void setTextTracks(ReadableArray textTracks) {
+        this.textTracks = textTracks;
+        reloadSource();
+    }
+
     private void reloadSource() {
         playerNeedsSource = true;
         initializePlayer();
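With repeat now implemented through Player.REPEAT_MODE_ONE, the period transition reported to onPositionDiscontinuity(int) is the only signal that a loop completed, so the view fires its end event there. A condensed sketch of that check, with the listener wiring omitted and the callback type invented for illustration:

```java
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.SimpleExoPlayer;

class RepeatEndSketch {
    interface EndCallback {
        void onEnd();
    }

    // Mirrors the new onPositionDiscontinuity(int reason) logic: when repeat-one is active,
    // a period transition marks the end of a loop, which would otherwise go unnoticed
    // because the playback state does not change.
    static void handlePositionDiscontinuity(int reason, SimpleExoPlayer player, EndCallback callback) {
        if (reason == ExoPlayer.DISCONTINUITY_REASON_PERIOD_TRANSITION
                && player.getRepeatMode() == Player.REPEAT_MODE_ONE) {
            callback.onEnd();
        }
    }
}
```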
@@ -586,9 +714,87 @@ class ReactExoplayerView extends FrameLayout implements
     }

     public void setRepeatModifier(boolean repeat) {
+        if (player != null) {
+            if (repeat) {
+                player.setRepeatMode(Player.REPEAT_MODE_ONE);
+            } else {
+                player.setRepeatMode(Player.REPEAT_MODE_OFF);
+            }
+        }
         this.repeat = repeat;
     }

+    public void setSelectedTextTrack(String type, Dynamic value) {
+        textTrackType = type;
+        textTrackValue = value;
+
+        int index = getTextTrackRendererIndex();
+        if (index == C.INDEX_UNSET) {
+            return;
+        }
+        MappingTrackSelector.MappedTrackInfo info = trackSelector.getCurrentMappedTrackInfo();
+        if (info == null) {
+            return;
+        }
+
+        TrackGroupArray groups = info.getTrackGroups(index);
+        int trackIndex = C.INDEX_UNSET;
+        trackSelector.setSelectionOverride(index, groups, null);
+
+        if (TextUtils.isEmpty(type)) {
+            // Do nothing
+        } else if (type.equals("disabled")) {
+            return;
+        } else if (type.equals("language")) {
+            for (int i = 0; i < groups.length; ++i) {
+                Format format = groups.get(i).getFormat(0);
+                if (format.language != null && format.language.equals(value.asString())) {
+                    trackIndex = i;
+                    break;
+                }
+            }
+        } else if (type.equals("title")) {
+            for (int i = 0; i < groups.length; ++i) {
+                Format format = groups.get(i).getFormat(0);
+                if (format.id != null && format.id.equals(value.asString())) {
+                    trackIndex = i;
+                    break;
+                }
+            }
+        } else if (type.equals("index")) {
+            trackIndex = value.asInt();
+        } else { // default. Use system settings if possible
+            int sdk = android.os.Build.VERSION.SDK_INT;
+            if (sdk>18 && groups.length>0) {
+                CaptioningManager captioningManager = (CaptioningManager) themedReactContext.getSystemService(Context.CAPTIONING_SERVICE);
+                if (captioningManager.isEnabled()) {
+                    // default is to take the first object
+                    trackIndex = 0;
+
+                    String locale = Locale.getDefault().getDisplayLanguage();
+                    for (int i = 0; i < groups.length; ++i) {
+                        Format format = groups.get(i).getFormat(0);
+                        if (format.language != null && format.language.equals(locale)) {
+                            trackIndex = i;
+                            break;
+                        }
+                    }
+                }
+            } else return;
+
+        }
+
+        if (trackIndex == C.INDEX_UNSET) {
+            trackSelector.clearSelectionOverrides(trackIndex);
+            return;
+        }
+
+        MappingTrackSelector.SelectionOverride override
+                = new MappingTrackSelector.SelectionOverride(
+                        new FixedTrackSelection.Factory(), trackIndex, 0);
+        trackSelector.setSelectionOverride(index, groups, override);
+    }
+
     public void setPausedModifier(boolean paused) {
         isPaused = paused;
         if (player != null) {

@@ -637,4 +843,41 @@ class ReactExoplayerView extends FrameLayout implements
     public void setDisableFocus(boolean disableFocus) {
         this.disableFocus = disableFocus;
     }
+
+    public void setFullscreen(boolean fullscreen) {
+        if (fullscreen == isFullscreen) {
+            return; // Avoid generating events when nothing is changing
+        }
+        isFullscreen = fullscreen;
+
+        Activity activity = themedReactContext.getCurrentActivity();
+        if (activity == null) {
+            return;
+        }
+        Window window = activity.getWindow();
+        View decorView = window.getDecorView();
+        int uiOptions;
+        if (isFullscreen) {
+            if (Util.SDK_INT >= 19) { // 4.4+
+                uiOptions = SYSTEM_UI_FLAG_HIDE_NAVIGATION
+                        | SYSTEM_UI_FLAG_IMMERSIVE_STICKY
+                        | SYSTEM_UI_FLAG_FULLSCREEN;
+            } else {
+                uiOptions = SYSTEM_UI_FLAG_HIDE_NAVIGATION
+                        | SYSTEM_UI_FLAG_FULLSCREEN;
+            }
+            eventEmitter.fullscreenWillPresent();
+            decorView.setSystemUiVisibility(uiOptions);
+            eventEmitter.fullscreenDidPresent();
+        } else {
+            uiOptions = View.SYSTEM_UI_FLAG_VISIBLE;
+            eventEmitter.fullscreenWillDismiss();
+            decorView.setSystemUiVisibility(uiOptions);
+            eventEmitter.fullscreenDidDismiss();
+        }
+    }
+
+    public void setUseTextureView(boolean useTextureView) {
+        exoPlayerView.setUseTextureView(useTextureView);
+    }
 }
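setSelectedTextTrack() above resolves the requested track by language, title, or index and then applies it as a fixed SelectionOverride on the text renderer. A pared-down sketch of just the override step, assuming the renderer and track indices are already resolved; the class name is illustrative.

```java
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.source.TrackGroupArray;
import com.google.android.exoplayer2.trackselection.DefaultTrackSelector;
import com.google.android.exoplayer2.trackselection.FixedTrackSelection;
import com.google.android.exoplayer2.trackselection.MappingTrackSelector;

class TextTrackOverrideSketch {
    // Apply (or clear) a text-track selection, following the tail of setSelectedTextTrack().
    static void overrideTextTrack(DefaultTrackSelector trackSelector, int rendererIndex, int trackIndex) {
        MappingTrackSelector.MappedTrackInfo info = trackSelector.getCurrentMappedTrackInfo();
        if (info == null || rendererIndex == C.INDEX_UNSET) {
            return;
        }
        TrackGroupArray groups = info.getTrackGroups(rendererIndex);
        if (trackIndex == C.INDEX_UNSET) {
            // No match: fall back to whatever the selector would pick on its own.
            trackSelector.clearSelectionOverrides(rendererIndex);
            return;
        }
        MappingTrackSelector.SelectionOverride override = new MappingTrackSelector.SelectionOverride(
                new FixedTrackSelection.Factory(), trackIndex, 0);
        trackSelector.setSelectionOverride(rendererIndex, groups, override);
    }
}
```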
@@ -4,6 +4,8 @@ import android.content.Context;
 import android.net.Uri;
 import android.text.TextUtils;

+import com.facebook.react.bridge.Dynamic;
+import com.facebook.react.bridge.ReadableArray;
 import com.facebook.react.bridge.ReadableMap;
 import com.facebook.react.common.MapBuilder;
 import com.facebook.react.uimanager.ThemedReactContext;

@@ -11,6 +13,7 @@ import com.facebook.react.uimanager.ViewGroupManager;
 import com.facebook.react.uimanager.annotations.ReactProp;
 import com.google.android.exoplayer2.upstream.RawResourceDataSource;

+import java.util.HashMap;
 import java.util.Map;

 import javax.annotation.Nullable;

@@ -22,8 +25,13 @@ public class ReactExoplayerViewManager extends ViewGroupManager<ReactExoplayerVi
     private static final String PROP_SRC = "src";
     private static final String PROP_SRC_URI = "uri";
     private static final String PROP_SRC_TYPE = "type";
+    private static final String PROP_SRC_HEADERS = "requestHeaders";
     private static final String PROP_RESIZE_MODE = "resizeMode";
     private static final String PROP_REPEAT = "repeat";
+    private static final String PROP_SELECTED_TEXT_TRACK = "selectedTextTrack";
+    private static final String PROP_SELECTED_TEXT_TRACK_TYPE = "type";
+    private static final String PROP_SELECTED_TEXT_TRACK_VALUE = "value";
+    private static final String PROP_TEXT_TRACKS = "textTracks";
     private static final String PROP_PAUSED = "paused";
     private static final String PROP_MUTED = "muted";
     private static final String PROP_VOLUME = "volume";

@@ -32,6 +40,8 @@ public class ReactExoplayerViewManager extends ViewGroupManager<ReactExoplayerVi
     private static final String PROP_RATE = "rate";
     private static final String PROP_PLAY_IN_BACKGROUND = "playInBackground";
     private static final String PROP_DISABLE_FOCUS = "disableFocus";
+    private static final String PROP_FULLSCREEN = "fullscreen";
+    private static final String PROP_USE_TEXTURE_VIEW = "useTextureView";

     @Override
     public String getName() {

@@ -72,6 +82,8 @@ public class ReactExoplayerViewManager extends ViewGroupManager<ReactExoplayerVi
             Context context = videoView.getContext().getApplicationContext();
             String uriString = src.hasKey(PROP_SRC_URI) ? src.getString(PROP_SRC_URI) : null;
             String extension = src.hasKey(PROP_SRC_TYPE) ? src.getString(PROP_SRC_TYPE) : null;
+            Map<String, String> headers = src.hasKey(PROP_SRC_HEADERS) ? toStringMap(src.getMap(PROP_SRC_HEADERS)) : null;
+

             if (TextUtils.isEmpty(uriString)) {
                 return;

@@ -81,7 +93,7 @@ public class ReactExoplayerViewManager extends ViewGroupManager<ReactExoplayerVi
                 Uri srcUri = Uri.parse(uriString);

                 if (srcUri != null) {
-                    videoView.setSrc(srcUri, extension);
+                    videoView.setSrc(srcUri, extension, headers);
                 }
             } else {
                 int identifier = context.getResources().getIdentifier(

@@ -115,6 +127,26 @@ public class ReactExoplayerViewManager extends ViewGroupManager<ReactExoplayerVi
         videoView.setRepeatModifier(repeat);
     }

+    @ReactProp(name = PROP_SELECTED_TEXT_TRACK)
+    public void setSelectedTextTrack(final ReactExoplayerView videoView,
+                                     @Nullable ReadableMap selectedTextTrack) {
+        String typeString = null;
+        Dynamic value = null;
+        if (selectedTextTrack != null) {
+            typeString = selectedTextTrack.hasKey(PROP_SELECTED_TEXT_TRACK_TYPE)
+                    ? selectedTextTrack.getString(PROP_SELECTED_TEXT_TRACK_TYPE) : null;
+            value = selectedTextTrack.hasKey(PROP_SELECTED_TEXT_TRACK_VALUE)
+                    ? selectedTextTrack.getDynamic(PROP_SELECTED_TEXT_TRACK_VALUE) : null;
+        }
+        videoView.setSelectedTextTrack(typeString, value);
+    }
+
+    @ReactProp(name = PROP_TEXT_TRACKS)
+    public void setPropTextTracks(final ReactExoplayerView videoView,
+                                  @Nullable ReadableArray textTracks) {
+        videoView.setTextTracks(textTracks);
+    }
+
     @ReactProp(name = PROP_PAUSED, defaultBoolean = false)
     public void setPaused(final ReactExoplayerView videoView, final boolean paused) {
         videoView.setPausedModifier(paused);

@@ -155,6 +187,16 @@ public class ReactExoplayerViewManager extends ViewGroupManager<ReactExoplayerVi
         videoView.setDisableFocus(disableFocus);
     }

+    @ReactProp(name = PROP_FULLSCREEN, defaultBoolean = false)
+    public void setFullscreen(final ReactExoplayerView videoView, final boolean fullscreen) {
+        videoView.setFullscreen(fullscreen);
+    }
+
+    @ReactProp(name = PROP_USE_TEXTURE_VIEW, defaultBoolean = false)
+    public void setUseTextureView(final ReactExoplayerView videoView, final boolean useTextureView) {
+        videoView.setUseTextureView(useTextureView);
+    }
+
     private boolean startsWithValidScheme(String uriString) {
         return uriString.startsWith("http://")
                 || uriString.startsWith("https://")

@@ -170,4 +212,28 @@ public class ReactExoplayerViewManager extends ViewGroupManager<ReactExoplayerVi
         }
         return ResizeMode.RESIZE_MODE_FIT;
     }
+
+    /**
+     * toStringMap converts a {@link ReadableMap} into a HashMap.
+     *
+     * @param readableMap The ReadableMap to be conveted.
+     * @return A HashMap containing the data that was in the ReadableMap.
+     * @see 'Adapted from https://github.com/artemyarulin/react-native-eval/blob/master/android/src/main/java/com/evaluator/react/ConversionUtil.java'
+     */
+    public static Map<String, String> toStringMap(@Nullable ReadableMap readableMap) {
+        if (readableMap == null)
+            return null;
+
+        com.facebook.react.bridge.ReadableMapKeySetIterator iterator = readableMap.keySetIterator();
+        if (!iterator.hasNextKey())
+            return null;
+
+        Map<String, String> result = new HashMap<>();
+        while (iterator.hasNextKey()) {
+            String key = iterator.nextKey();
+            result.put(key, readableMap.getString(key));
+        }
+
+        return result;
+    }
 }
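On the manager side, the new requestHeaders entry of the src map is flattened into a plain Map before it reaches the view. A hedged sketch of that prop shape and conversion, using JavaOnlyMap (React Native's plain-Java map, normally used in tests); the URI and header values are placeholders.

```java
import java.util.HashMap;
import java.util.Map;

import com.facebook.react.bridge.JavaOnlyMap;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.ReadableMapKeySetIterator;

public class RequestHeadersPropSketch {
    // Mirrors the new toStringMap() helper (simplified: the original returns null for an empty map).
    static Map<String, String> toStringMap(ReadableMap readableMap) {
        if (readableMap == null) {
            return null;
        }
        Map<String, String> result = new HashMap<>();
        ReadableMapKeySetIterator iterator = readableMap.keySetIterator();
        while (iterator.hasNextKey()) {
            String key = iterator.nextKey();
            result.put(key, readableMap.getString(key));
        }
        return result;
    }

    public static void main(String[] args) {
        // Shape of the "src" prop as setSrc() receives it; all values are placeholders.
        JavaOnlyMap headers = new JavaOnlyMap();
        headers.putString("Authorization", "Bearer <token>");
        JavaOnlyMap src = new JavaOnlyMap();
        src.putString("uri", "https://example.com/video.mp4");
        src.putString("type", "mp4");
        src.putMap("requestHeaders", headers);

        System.out.println(toStringMap(src.getMap("requestHeaders")));
    }
}
```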
@@ -31,13 +31,18 @@ class VideoEventEmitter {
     private static final String EVENT_PROGRESS = "onVideoProgress";
     private static final String EVENT_SEEK = "onVideoSeek";
     private static final String EVENT_END = "onVideoEnd";
+    private static final String EVENT_FULLSCREEN_WILL_PRESENT = "onVideoFullscreenPlayerWillPresent";
+    private static final String EVENT_FULLSCREEN_DID_PRESENT = "onVideoFullscreenPlayerDidPresent";
+    private static final String EVENT_FULLSCREEN_WILL_DISMISS = "onVideoFullscreenPlayerWillDismiss";
+    private static final String EVENT_FULLSCREEN_DID_DISMISS = "onVideoFullscreenPlayerDidDismiss";
+
     private static final String EVENT_STALLED = "onPlaybackStalled";
     private static final String EVENT_RESUME = "onPlaybackResume";
     private static final String EVENT_READY = "onReadyForDisplay";
     private static final String EVENT_BUFFER = "onVideoBuffer";
     private static final String EVENT_IDLE = "onVideoIdle";
     private static final String EVENT_TIMED_METADATA = "onTimedMetadata";
-    private static final String EVENT_AUDIO_BECOMING_NOISY = "onAudioBecomingNoisy";
+    private static final String EVENT_AUDIO_BECOMING_NOISY = "onVideoAudioBecomingNoisy";
     private static final String EVENT_AUDIO_FOCUS_CHANGE = "onAudioFocusChanged";
     private static final String EVENT_PLAYBACK_RATE_CHANGE = "onPlaybackRateChange";

@@ -48,6 +53,10 @@ class VideoEventEmitter {
             EVENT_PROGRESS,
             EVENT_SEEK,
             EVENT_END,
+            EVENT_FULLSCREEN_WILL_PRESENT,
+            EVENT_FULLSCREEN_DID_PRESENT,
+            EVENT_FULLSCREEN_WILL_DISMISS,
+            EVENT_FULLSCREEN_DID_DISMISS,
             EVENT_STALLED,
             EVENT_RESUME,
             EVENT_READY,

@@ -67,6 +76,10 @@ class VideoEventEmitter {
             EVENT_PROGRESS,
             EVENT_SEEK,
             EVENT_END,
+            EVENT_FULLSCREEN_WILL_PRESENT,
+            EVENT_FULLSCREEN_DID_PRESENT,
+            EVENT_FULLSCREEN_WILL_DISMISS,
+            EVENT_FULLSCREEN_DID_DISMISS,
             EVENT_STALLED,
             EVENT_RESUME,
             EVENT_READY,

@@ -89,12 +102,14 @@ class VideoEventEmitter {

     private static final String EVENT_PROP_DURATION = "duration";
     private static final String EVENT_PROP_PLAYABLE_DURATION = "playableDuration";
+    private static final String EVENT_PROP_SEEKABLE_DURATION = "seekableDuration";
     private static final String EVENT_PROP_CURRENT_TIME = "currentTime";
     private static final String EVENT_PROP_SEEK_TIME = "seekTime";
     private static final String EVENT_PROP_NATURAL_SIZE = "naturalSize";
     private static final String EVENT_PROP_WIDTH = "width";
     private static final String EVENT_PROP_HEIGHT = "height";
     private static final String EVENT_PROP_ORIENTATION = "orientation";
+    private static final String EVENT_PROP_TEXT_TRACKS = "textTracks";
     private static final String EVENT_PROP_HAS_AUDIO_FOCUS = "hasAudioFocus";
     private static final String EVENT_PROP_IS_BUFFERING = "isBuffering";
     private static final String EVENT_PROP_PLAYBACK_RATE = "playbackRate";

@@ -114,7 +129,8 @@ class VideoEventEmitter {
         receiveEvent(EVENT_LOAD_START, null);
     }

-    void load(double duration, double currentPosition, int videoWidth, int videoHeight) {
+    void load(double duration, double currentPosition, int videoWidth, int videoHeight,
+              WritableArray textTracks) {
         WritableMap event = Arguments.createMap();
         event.putDouble(EVENT_PROP_DURATION, duration / 1000D);
         event.putDouble(EVENT_PROP_CURRENT_TIME, currentPosition / 1000D);

@@ -129,6 +145,8 @@ class VideoEventEmitter {
         }
         event.putMap(EVENT_PROP_NATURAL_SIZE, naturalSize);

+        event.putArray(EVENT_PROP_TEXT_TRACKS, textTracks);
+
         // TODO: Actually check if you can.
         event.putBoolean(EVENT_PROP_FAST_FORWARD, true);
         event.putBoolean(EVENT_PROP_SLOW_FORWARD, true);

@@ -141,10 +159,11 @@ class VideoEventEmitter {
         receiveEvent(EVENT_LOAD, event);
     }

-    void progressChanged(double currentPosition, double bufferedDuration) {
+    void progressChanged(double currentPosition, double bufferedDuration, double seekableDuration) {
         WritableMap event = Arguments.createMap();
         event.putDouble(EVENT_PROP_CURRENT_TIME, currentPosition / 1000D);
         event.putDouble(EVENT_PROP_PLAYABLE_DURATION, bufferedDuration / 1000D);
+        event.putDouble(EVENT_PROP_SEEKABLE_DURATION, seekableDuration / 1000D);
         receiveEvent(EVENT_PROGRESS, event);
     }

@@ -173,6 +192,22 @@ class VideoEventEmitter {
         receiveEvent(EVENT_END, null);
     }

+    void fullscreenWillPresent() {
+        receiveEvent(EVENT_FULLSCREEN_WILL_PRESENT, null);
+    }
+
+    void fullscreenDidPresent() {
+        receiveEvent(EVENT_FULLSCREEN_DID_PRESENT, null);
+    }
+
+    void fullscreenWillDismiss() {
+        receiveEvent(EVENT_FULLSCREEN_WILL_DISMISS, null);
+    }
+
+    void fullscreenDidDismiss() {
+        receiveEvent(EVENT_FULLSCREEN_DID_DISMISS, null);
+    }
+
     void error(String errorString, Exception exception) {
         WritableMap error = Arguments.createMap();
         error.putString(EVENT_PROP_ERROR_STRING, errorString);
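The progress event now carries seekableDuration next to playableDuration, and fullscreen transitions get four dedicated events. A sketch of the resulting onVideoProgress payload, built with the same Arguments helper the emitter uses; the wrapper class is illustrative.

```java
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.WritableMap;

class ProgressPayloadSketch {
    // Build the map sent with onVideoProgress after this change: currentTime,
    // playableDuration, and the new seekableDuration, all converted to seconds.
    static WritableMap progressEvent(long currentPositionMs, long bufferedDurationMs, long seekableDurationMs) {
        WritableMap event = Arguments.createMap();
        event.putDouble("currentTime", currentPositionMs / 1000D);
        event.putDouble("playableDuration", bufferedDurationMs / 1000D);
        event.putDouble("seekableDuration", seekableDurationMs / 1000D);
        return event;
    }
}
```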
@@ -1,12 +1,20 @@
 apply plugin: 'com.android.library'

+def _ext = rootProject.ext
+
+def _reactNativeVersion = _ext.has('reactNative') ? _ext.reactNative : '+'
+def _compileSdkVersion = _ext.has('compileSdkVersion') ? _ext.compileSdkVersion : 27
+def _buildToolsVersion = _ext.has('buildToolsVersion') ? _ext.buildToolsVersion : '27.0.3'
+def _minSdkVersion = _ext.has('minSdkVersion') ? _ext.minSdkVersion : 16
+def _targetSdkVersion = _ext.has('targetSdkVersion') ? _ext.targetSdkVersion : 27
+
 android {
-    compileSdkVersion 25
-    buildToolsVersion "25.0.2"
+    compileSdkVersion _compileSdkVersion
+    buildToolsVersion _buildToolsVersion

     defaultConfig {
-        minSdkVersion 16
-        targetSdkVersion 25
+        minSdkVersion _minSdkVersion
+        targetSdkVersion _targetSdkVersion
         versionCode 1
         versionName "1.0"
         ndk {

@@ -17,6 +25,6 @@ android {

 dependencies {
     //noinspection GradleDynamicVersion
-    provided 'com.facebook.react:react-native:+'
+    provided "com.facebook.react:react-native:${_reactNativeVersion}"
     compile 'com.yqritc:android-scalablevideoview:1.0.4'
 }
@ -5,6 +5,7 @@ import android.content.res.AssetFileDescriptor;
|
|||||||
import android.graphics.Matrix;
|
import android.graphics.Matrix;
|
||||||
import android.media.MediaPlayer;
|
import android.media.MediaPlayer;
|
||||||
import android.net.Uri;
|
import android.net.Uri;
|
||||||
|
import android.os.Build;
|
||||||
import android.os.Handler;
|
import android.os.Handler;
|
||||||
import android.util.Log;
|
import android.util.Log;
|
||||||
import android.view.MotionEvent;
|
import android.view.MotionEvent;
|
||||||
@ -15,6 +16,7 @@ import com.android.vending.expansion.zipfile.APKExpansionSupport;
|
|||||||
import com.android.vending.expansion.zipfile.ZipResourceFile;
|
import com.android.vending.expansion.zipfile.ZipResourceFile;
|
||||||
import com.facebook.react.bridge.Arguments;
|
import com.facebook.react.bridge.Arguments;
|
||||||
import com.facebook.react.bridge.LifecycleEventListener;
|
import com.facebook.react.bridge.LifecycleEventListener;
|
||||||
|
import com.facebook.react.bridge.ReadableMap;
|
||||||
import com.facebook.react.bridge.WritableMap;
|
import com.facebook.react.bridge.WritableMap;
|
||||||
import com.facebook.react.uimanager.ThemedReactContext;
|
import com.facebook.react.uimanager.ThemedReactContext;
|
||||||
import com.facebook.react.uimanager.events.RCTEventEmitter;
|
import com.facebook.react.uimanager.events.RCTEventEmitter;
|
||||||
@ -27,6 +29,9 @@ import java.io.IOException;
|
|||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.lang.Math;
|
import java.lang.Math;
|
||||||
|
import java.math.BigDecimal;
|
||||||
|
|
||||||
|
import javax.annotation.Nullable;
|
||||||
|
|
||||||
@SuppressLint("ViewConstructor")
|
@SuppressLint("ViewConstructor")
|
||||||
public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnPreparedListener, MediaPlayer
|
public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnPreparedListener, MediaPlayer
|
||||||
@ -64,6 +69,7 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP
|
|||||||
|
|
||||||
public static final String EVENT_PROP_DURATION = "duration";
|
public static final String EVENT_PROP_DURATION = "duration";
|
||||||
public static final String EVENT_PROP_PLAYABLE_DURATION = "playableDuration";
|
public static final String EVENT_PROP_PLAYABLE_DURATION = "playableDuration";
|
||||||
|
public static final String EVENT_PROP_SEEKABLE_DURATION = "seekableDuration";
|
||||||
public static final String EVENT_PROP_CURRENT_TIME = "currentTime";
|
public static final String EVENT_PROP_CURRENT_TIME = "currentTime";
|
||||||
public static final String EVENT_PROP_SEEK_TIME = "seekTime";
|
public static final String EVENT_PROP_SEEK_TIME = "seekTime";
|
||||||
public static final String EVENT_PROP_NATURALSIZE = "naturalSize";
|
public static final String EVENT_PROP_NATURALSIZE = "naturalSize";
|
||||||
@ -86,6 +92,7 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP
|
|||||||
|
|
||||||
private String mSrcUriString = null;
|
private String mSrcUriString = null;
|
||||||
private String mSrcType = "mp4";
|
private String mSrcType = "mp4";
|
||||||
|
private ReadableMap mRequestHeaders = null;
|
||||||
private boolean mSrcIsNetwork = false;
|
private boolean mSrcIsNetwork = false;
|
||||||
private boolean mSrcIsAsset = false;
|
private boolean mSrcIsAsset = false;
|
||||||
private ScalableType mResizeMode = ScalableType.LEFT_TOP;
|
private ScalableType mResizeMode = ScalableType.LEFT_TOP;
|
||||||
@ -93,11 +100,12 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP
|
|||||||
private boolean mPaused = false;
|
private boolean mPaused = false;
|
||||||
private boolean mMuted = false;
|
private boolean mMuted = false;
|
||||||
private float mVolume = 1.0f;
|
private float mVolume = 1.0f;
|
||||||
|
private float mStereoPan = 0.0f;
|
||||||
private float mProgressUpdateInterval = 250.0f;
|
private float mProgressUpdateInterval = 250.0f;
|
||||||
private float mRate = 1.0f;
|
private float mRate = 1.0f;
|
||||||
|
private float mActiveRate = 1.0f;
|
||||||
private boolean mPlayInBackground = false;
|
private boolean mPlayInBackground = false;
|
||||||
private boolean mActiveStatePauseStatus = false;
|
private boolean mBackgroundPaused = false;
|
||||||
private boolean mActiveStatePauseStatusInitialized = false;
|
|
||||||
|
|
||||||
private int mMainVer = 0;
|
private int mMainVer = 0;
|
||||||
private int mPatchVer = 0;
|
private int mPatchVer = 0;
|
||||||
@ -123,10 +131,11 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP
|
|||||||
@Override
|
@Override
|
||||||
public void run() {
|
public void run() {
|
||||||
|
|
||||||
if (mMediaPlayerValid && !isCompleted &&!mPaused) {
|
if (mMediaPlayerValid && !isCompleted && !mPaused && !mBackgroundPaused) {
|
||||||
WritableMap event = Arguments.createMap();
|
WritableMap event = Arguments.createMap();
|
||||||
event.putDouble(EVENT_PROP_CURRENT_TIME, mMediaPlayer.getCurrentPosition() / 1000.0);
|
event.putDouble(EVENT_PROP_CURRENT_TIME, mMediaPlayer.getCurrentPosition() / 1000.0);
|
||||||
event.putDouble(EVENT_PROP_PLAYABLE_DURATION, mVideoBufferedDuration / 1000.0); //TODO:mBufferUpdateRunnable
|
event.putDouble(EVENT_PROP_PLAYABLE_DURATION, mVideoBufferedDuration / 1000.0); //TODO:mBufferUpdateRunnable
|
||||||
|
event.putDouble(EVENT_PROP_SEEKABLE_DURATION, mVideoDuration / 1000.0);
|
||||||
mEventEmitter.receiveEvent(getId(), Events.EVENT_PROGRESS.toString(), event);
|
mEventEmitter.receiveEvent(getId(), Events.EVENT_PROGRESS.toString(), event);
|
||||||
|
|
||||||
// Check for update after an interval
|
// Check for update after an interval
|
||||||
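For context, the progress event now carries three duration fields, all converted from MediaPlayer's milliseconds to seconds. The following is a minimal, self-contained sketch of that reporting loop (the Handler re-post pattern used by mProgressUpdateRunnable); the ProgressReporter class, its Listener interface, and the field names are illustrative, not part of the diff.

```java
import android.media.MediaPlayer;
import android.os.Handler;
import android.os.Looper;

// Minimal model of the progress loop: poll the player on the main thread and
// report currentTime / playableDuration / seekableDuration in seconds.
class ProgressReporter {
    interface Listener {
        void onProgress(double currentTime, double playableDuration, double seekableDuration);
    }

    private final Handler handler = new Handler(Looper.getMainLooper());
    private final MediaPlayer player;
    private final Listener listener;
    private long bufferedDurationMs = 0;   // would be updated from onBufferingUpdate elsewhere
    private long videoDurationMs = 0;      // set once the player is prepared
    private float intervalMs = 250.0f;     // mirrors mProgressUpdateInterval's default
    private boolean running = false;

    ProgressReporter(MediaPlayer player, Listener listener) {
        this.player = player;
        this.listener = listener;
    }

    private final Runnable tick = new Runnable() {
        @Override
        public void run() {
            if (!running) return;
            listener.onProgress(
                    player.getCurrentPosition() / 1000.0, // ms -> s
                    bufferedDurationMs / 1000.0,
                    videoDurationMs / 1000.0);
            handler.postDelayed(this, Math.round(intervalMs)); // re-arm, like mProgressUpdateRunnable
        }
    };

    void start() { running = true; handler.post(tick); }
    void stop()  { running = false; handler.removeCallbacks(tick); }
}
```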
@@ -201,16 +210,17 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP
         }
     }
 
-    public void setSrc(final String uriString, final String type, final boolean isNetwork, final boolean isAsset) {
-        setSrc(uriString,type,isNetwork,isAsset,0,0);
+    public void setSrc(final String uriString, final String type, final boolean isNetwork, final boolean isAsset, final ReadableMap requestHeaders) {
+        setSrc(uriString, type, isNetwork, isAsset, requestHeaders, 0, 0);
     }
 
-    public void setSrc(final String uriString, final String type, final boolean isNetwork, final boolean isAsset, final int expansionMainVersion, final int expansionPatchVersion) {
+    public void setSrc(final String uriString, final String type, final boolean isNetwork, final boolean isAsset, final ReadableMap requestHeaders, final int expansionMainVersion, final int expansionPatchVersion) {
 
         mSrcUriString = uriString;
         mSrcType = type;
         mSrcIsNetwork = isNetwork;
         mSrcIsAsset = isAsset;
+        mRequestHeaders = requestHeaders;
         mMainVer = expansionMainVersion;
         mPatchVer = expansionPatchVersion;
 
@@ -239,7 +249,15 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP
                     headers.put("Cookie", cookie);
                 }
 
-                setDataSource(uriString);
+                if (mRequestHeaders != null) {
+                    headers.putAll(toStringMap(mRequestHeaders));
+                }
+
+                /* According to https://github.com/react-native-community/react-native-video/pull/537
+                 * there is an issue with this where it can cause a IOException.
+                 * TODO: diagnose this exception and fix it
+                 */
+                setDataSource(mThemedReactContext, parsedUrl, headers);
             } else if (isAsset) {
                 if (uriString.startsWith("content://")) {
                     Uri parsedUrl = Uri.parse(uriString);
@@ -285,8 +303,13 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP
         }
 
         WritableMap src = Arguments.createMap();
+
+        WritableMap wRequestHeaders = Arguments.createMap();
+        wRequestHeaders.merge(mRequestHeaders);
+
         src.putString(ReactVideoViewManager.PROP_SRC_URI, uriString);
         src.putString(ReactVideoViewManager.PROP_SRC_TYPE, type);
+        src.putMap(ReactVideoViewManager.PROP_SRC_HEADERS, wRequestHeaders);
         src.putBoolean(ReactVideoViewManager.PROP_SRC_IS_NETWORK, isNetwork);
         if(mMainVer>0) {
             src.putInt(ReactVideoViewManager.PROP_SRC_MAINVER, mMainVer);
@@ -297,6 +320,7 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP
         WritableMap event = Arguments.createMap();
         event.putMap(ReactVideoViewManager.PROP_SRC, src);
         mEventEmitter.receiveEvent(getId(), Events.EVENT_LOAD_START.toString(), event);
+        isCompleted = false;
 
         try {
             prepareAsync(this);
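The network branch now merges any requestHeaders coming from JS into the header map handed to MediaPlayer. The sketch below models that conversion with the same bridge types this file already imports; the HeaderedDataSource class and its open() helper are illustrative names, not part of the module.

```java
import android.content.Context;
import android.media.MediaPlayer;
import android.net.Uri;

import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.ReadableMapKeySetIterator;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

final class HeaderedDataSource {
    // Same idea as toStringMap(): flatten a ReadableMap of string values into a plain Map.
    static Map<String, String> toStringMap(ReadableMap readableMap) {
        Map<String, String> result = new HashMap<>();
        if (readableMap == null) return result;
        ReadableMapKeySetIterator it = readableMap.keySetIterator();
        while (it.hasNextKey()) {
            String key = it.nextKey();
            result.put(key, readableMap.getString(key));
        }
        return result;
    }

    // Mirror of the new network branch: the cookie and the requestHeaders end up in one
    // map, and the header-aware setDataSource overload is used instead of the URI-only one.
    static void open(MediaPlayer player, Context context, String uriString,
                     String cookie, ReadableMap requestHeaders) throws IOException {
        Map<String, String> headers = new HashMap<>();
        if (cookie != null) {
            headers.put("Cookie", cookie);
        }
        if (requestHeaders != null) {
            headers.putAll(toStringMap(requestHeaders));
        }
        player.setDataSource(context, Uri.parse(uriString), headers);
    }
}
```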
@@ -327,11 +351,6 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP
 
         mPaused = paused;
 
-        if ( !mActiveStatePauseStatusInitialized ) {
-            mActiveStatePauseStatus = mPaused;
-            mActiveStatePauseStatusInitialized = true;
-        }
-
         if (!mMediaPlayerValid) {
             return;
         }
@@ -343,6 +362,10 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP
         } else {
             if (!mMediaPlayer.isPlaying()) {
                 start();
+                // Setting the rate unpauses, so we have to wait for an unpause
+                if (mRate != mActiveRate) {
+                    setRateModifier(mRate);
+                }
 
                 // Also Start the Progress Update Handler
                 mProgressUpdateHandler.post(mProgressUpdateRunnable);
@@ -350,6 +373,14 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP
         }
     }
 
+    // reduces the volume based on stereoPan
+    private float calulateRelativeVolume() {
+        float relativeVolume = (mVolume * (1 - Math.abs(mStereoPan)));
+        // only one decimal allowed
+        BigDecimal roundRelativeVolume = new BigDecimal(relativeVolume).setScale(1, BigDecimal.ROUND_HALF_UP);
+        return roundRelativeVolume.floatValue();
+    }
+
     public void setMutedModifier(final boolean muted) {
         mMuted = muted;
 
@@ -359,7 +390,14 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP
 
         if (mMuted) {
             setVolume(0, 0);
+        } else if (mStereoPan < 0) {
+            // louder on the left channel
+            setVolume(mVolume, calulateRelativeVolume());
+        } else if (mStereoPan > 0) {
+            // louder on the right channel
+            setVolume(calulateRelativeVolume(), mVolume);
         } else {
+            // same volume on both channels
             setVolume(mVolume, mVolume);
         }
     }
@@ -369,6 +407,11 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP
         setMutedModifier(mMuted);
     }
 
+    public void setStereoPan(final float stereoPan) {
+        mStereoPan = stereoPan;
+        setMutedModifier(mMuted);
+    }
+
     public void setProgressUpdateInterval(final float progressUpdateInterval) {
         mProgressUpdateInterval = progressUpdateInterval;
     }
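The pan-to-volume mapping is plain arithmetic: the dominant channel keeps the full volume and the other channel is scaled by (1 - |pan|), rounded to one decimal place. A runnable sketch of that formula follows; the class and method names are illustrative.

```java
import java.math.BigDecimal;
import java.math.RoundingMode;

public class StereoPanVolume {
    // Returns {leftVolume, rightVolume} for an overall volume and a pan value in [-1, 1].
    static float[] channelVolumes(float volume, float stereoPan) {
        float relative = new BigDecimal(volume * (1 - Math.abs(stereoPan)))
                .setScale(1, RoundingMode.HALF_UP)   // same one-decimal rounding as the view code
                .floatValue();
        if (stereoPan < 0) return new float[]{volume, relative};   // louder on the left
        if (stereoPan > 0) return new float[]{relative, volume};   // louder on the right
        return new float[]{volume, volume};                        // centered
    }

    public static void main(String[] args) {
        float[] v = channelVolumes(1.0f, -0.5f);
        // pan -0.5 at full volume -> left 1.0, right 0.5
        System.out.println("left=" + v[0] + " right=" + v[1]);
    }
}
```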
@@ -377,8 +420,22 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP
         mRate = rate;
 
         if (mMediaPlayerValid) {
-            // TODO: Implement this.
-            Log.e(ReactVideoViewManager.REACT_CLASS, "Setting playback rate is not yet supported on Android");
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+                if (!mPaused) { // Applying the rate while paused will cause the video to start
+                    /* Per https://stackoverflow.com/questions/39442522/setplaybackparams-causes-illegalstateexception
+                     * Some devices throw an IllegalStateException if you set the rate without first calling reset()
+                     * TODO: Call reset() then reinitialize the player
+                     */
+                    try {
+                        mMediaPlayer.setPlaybackParams(mMediaPlayer.getPlaybackParams().setSpeed(rate));
+                        mActiveRate = rate;
+                    } catch (Exception e) {
+                        Log.e(ReactVideoViewManager.REACT_CLASS, "Unable to set rate, unsupported on this device");
+                    }
+                }
+            } else {
+                Log.e(ReactVideoViewManager.REACT_CLASS, "Setting playback rate is not yet supported on Android versions below 6.0");
+            }
         }
     }
 
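MediaPlayer only gained a playback-rate API in Android 6.0 (API 23) through PlaybackParams, and applying it implicitly starts playback, which is why the view defers the call while paused. The guard below is a minimal, self-contained sketch of the same shape; it assumes only the Android SDK, and the RateSetter class name and TAG value are illustrative.

```java
import android.media.MediaPlayer;
import android.media.PlaybackParams;
import android.os.Build;
import android.util.Log;

final class RateSetter {
    private static final String TAG = "RCTVideo";  // illustrative log tag

    // Returns true if the rate was actually applied.
    static boolean trySetRate(MediaPlayer player, float rate, boolean paused) {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
            Log.e(TAG, "Playback rate requires Android 6.0+");
            return false;
        }
        if (paused) {
            // setPlaybackParams() starts playback, so defer until the video is unpaused.
            return false;
        }
        try {
            PlaybackParams params = player.getPlaybackParams().setSpeed(rate);
            player.setPlaybackParams(params);
            return true;
        } catch (IllegalStateException | IllegalArgumentException e) {
            Log.e(TAG, "Unable to set rate on this device", e);
            return false;
        }
    }
}
```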
@@ -388,7 +445,7 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP
         setPausedModifier(mPaused);
         setMutedModifier(mMuted);
         setProgressUpdateInterval(mProgressUpdateInterval);
-        // setRateModifier(mRate);
+        setRateModifier(mRate);
     }
 
     public void setPlayInBackground(final boolean playInBackground) {
@@ -542,39 +599,62 @@ public class ReactVideoView extends ScalableVideoView implements MediaPlayer.OnP
         super.onAttachedToWindow();
 
         if(mMainVer>0) {
-            setSrc(mSrcUriString, mSrcType, mSrcIsNetwork,mSrcIsAsset,mMainVer,mPatchVer);
+            setSrc(mSrcUriString, mSrcType, mSrcIsNetwork, mSrcIsAsset, mRequestHeaders, mMainVer, mPatchVer);
         }
         else {
-            setSrc(mSrcUriString, mSrcType, mSrcIsNetwork,mSrcIsAsset);
+            setSrc(mSrcUriString, mSrcType, mSrcIsNetwork, mSrcIsAsset, mRequestHeaders);
         }
 
     }
 
     @Override
     public void onHostPause() {
-        if (mMediaPlayer != null && !mPlayInBackground) {
-            mActiveStatePauseStatus = mPaused;
-            // Pause the video in background
-            setPausedModifier(true);
+        if (mMediaPlayerValid && !mPaused && !mPlayInBackground) {
+            /* Pause the video in background
+             * Don't update the paused prop, developers should be able to update it on background
+             * so that when you return to the app the video is paused
+             */
+            mBackgroundPaused = true;
+            mMediaPlayer.pause();
         }
     }
 
     @Override
     public void onHostResume() {
-        if (mMediaPlayer != null && !mPlayInBackground) {
+        mBackgroundPaused = false;
+        if (mMediaPlayerValid && !mPlayInBackground && !mPaused) {
             new Handler().post(new Runnable() {
                 @Override
                 public void run() {
                     // Restore original state
-                    setPausedModifier(mActiveStatePauseStatus);
+                    setPausedModifier(false);
                 }
             });
         }
     }
 
     @Override
     public void onHostDestroy() {
     }
 
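The background handling now tracks a separate mBackgroundPaused flag instead of mutating the paused state, so the JS paused prop stays authoritative across host pause/resume. Below is a condensed sketch of that pattern built on the LifecycleEventListener hooks already used here; the BackgroundPausePolicy class and its Player interface are illustrative.

```java
import com.facebook.react.bridge.LifecycleEventListener;

// Pause on host pause without touching the user-controlled "paused" prop;
// resume only if the prop still says "playing".
class BackgroundPausePolicy implements LifecycleEventListener {
    interface Player {
        void pause();
        void play();
    }

    private final Player player;
    private boolean pausedProp;          // mirrors the JS "paused" prop
    private boolean playInBackground;    // mirrors the "playInBackground" prop
    private boolean backgroundPaused;    // equivalent of mBackgroundPaused

    BackgroundPausePolicy(Player player) { this.player = player; }

    void setPausedProp(boolean paused) { this.pausedProp = paused; }
    void setPlayInBackground(boolean allow) { this.playInBackground = allow; }

    @Override
    public void onHostPause() {
        if (!pausedProp && !playInBackground) {
            backgroundPaused = true;  // remember that the library paused, not the developer
            player.pause();
        }
    }

    @Override
    public void onHostResume() {
        backgroundPaused = false;
        if (!playInBackground && !pausedProp) {
            player.play();            // the prop still says "playing", so resume
        }
    }

    @Override
    public void onHostDestroy() { }
}
```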
+    /**
+     * toStringMap converts a {@link ReadableMap} into a HashMap.
+     *
+     * @param readableMap The ReadableMap to be converted.
+     * @return A HashMap containing the data that was in the ReadableMap.
+     * @see 'Adapted from https://github.com/artemyarulin/react-native-eval/blob/master/android/src/main/java/com/evaluator/react/ConversionUtil.java'
+     */
+    public static Map<String, String> toStringMap(@Nullable ReadableMap readableMap) {
+        Map<String, String> result = new HashMap<>();
+        if (readableMap == null)
+            return result;
+
+        com.facebook.react.bridge.ReadableMapKeySetIterator iterator = readableMap.keySetIterator();
+        while (iterator.hasNextKey()) {
+            String key = iterator.nextKey();
+            result.put(key, readableMap.getString(key));
+        }
+
+        return result;
+    }
 }
ReactVideoViewManager.java

@@ -21,6 +21,7 @@ public class ReactVideoViewManager extends SimpleViewManager<ReactVideoView> {
     public static final String PROP_SRC = "src";
     public static final String PROP_SRC_URI = "uri";
     public static final String PROP_SRC_TYPE = "type";
+    public static final String PROP_SRC_HEADERS = "requestHeaders";
     public static final String PROP_SRC_IS_NETWORK = "isNetwork";
     public static final String PROP_SRC_MAINVER = "mainVer";
     public static final String PROP_SRC_PATCHVER = "patchVer";
@@ -30,6 +31,7 @@ public class ReactVideoViewManager extends SimpleViewManager<ReactVideoView> {
     public static final String PROP_PAUSED = "paused";
     public static final String PROP_MUTED = "muted";
     public static final String PROP_VOLUME = "volume";
+    public static final String PROP_STEREO_PAN = "stereoPan";
     public static final String PROP_PROGRESS_UPDATE_INTERVAL = "progressUpdateInterval";
     public static final String PROP_SEEK = "seek";
     public static final String PROP_RATE = "rate";
@@ -85,6 +87,7 @@ public class ReactVideoViewManager extends SimpleViewManager<ReactVideoView> {
                     src.getString(PROP_SRC_TYPE),
                     src.getBoolean(PROP_SRC_IS_NETWORK),
                     src.getBoolean(PROP_SRC_IS_ASSET),
+                    src.getMap(PROP_SRC_HEADERS),
                     mainVer,
                     patchVer
             );
@@ -94,8 +97,9 @@ public class ReactVideoViewManager extends SimpleViewManager<ReactVideoView> {
                     src.getString(PROP_SRC_URI),
                     src.getString(PROP_SRC_TYPE),
                     src.getBoolean(PROP_SRC_IS_NETWORK),
-                    src.getBoolean(PROP_SRC_IS_ASSET)
-            );
+                    src.getBoolean(PROP_SRC_IS_ASSET),
+                    src.getMap(PROP_SRC_HEADERS)
+            );
         }
     }
 
@@ -124,6 +128,11 @@ public class ReactVideoViewManager extends SimpleViewManager<ReactVideoView> {
         videoView.setVolumeModifier(volume);
     }
 
+    @ReactProp(name = PROP_STEREO_PAN)
+    public void setStereoPan(final ReactVideoView videoView, final float stereoPan) {
+        videoView.setStereoPan(stereoPan);
+    }
+
     @ReactProp(name = PROP_PROGRESS_UPDATE_INTERVAL, defaultFloat = 250.0f)
     public void setProgressUpdateInterval(final ReactVideoView videoView, final float progressUpdateInterval) {
         videoView.setProgressUpdateInterval(progressUpdateInterval);
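Each new native prop follows the same ViewManager pattern: a string constant for the prop name plus an @ReactProp setter that forwards to the view. The sketch below isolates that wiring; it is kept abstract so it compiles without restating createViewInstance()/getName(), and the ExampleVideoViewManager name is illustrative.

```java
import com.facebook.react.uimanager.SimpleViewManager;
import com.facebook.react.uimanager.annotations.ReactProp;

// Illustrative manager showing only the prop-constant + @ReactProp pattern used above.
public abstract class ExampleVideoViewManager extends SimpleViewManager<ReactVideoView> {
    public static final String PROP_STEREO_PAN = "stereoPan";

    // JS sets <Video stereoPan={-0.5} />; the bridge routes the value here by prop name
    // and the manager forwards it to the view's setter added in this diff.
    @ReactProp(name = PROP_STEREO_PAN, defaultFloat = 0.0f)
    public void setStereoPan(final ReactVideoView videoView, final float stereoPan) {
        videoView.setStereoPan(stereoPan);
    }
}
```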
BIN  docs/AppTransportSecuritySetting.png  Normal file (binary file not shown; 26 KiB)
ios/RCTVideo.h

@@ -19,6 +19,7 @@
 @property (nonatomic, copy) RCTBubblingEventBlock onVideoSeek;
 @property (nonatomic, copy) RCTBubblingEventBlock onVideoEnd;
 @property (nonatomic, copy) RCTBubblingEventBlock onTimedMetadata;
+@property (nonatomic, copy) RCTBubblingEventBlock onVideoAudioBecomingNoisy;
 @property (nonatomic, copy) RCTBubblingEventBlock onVideoFullscreenPlayerWillPresent;
 @property (nonatomic, copy) RCTBubblingEventBlock onVideoFullscreenPlayerDidPresent;
 @property (nonatomic, copy) RCTBubblingEventBlock onVideoFullscreenPlayerWillDismiss;
395  ios/RCTVideo.m

@@ -4,6 +4,8 @@
 #import <React/RCTBridgeModule.h>
 #import <React/RCTEventDispatcher.h>
 #import <React/UIView+React.h>
+#include <MediaAccessibility/MediaAccessibility.h>
+#include <AVFoundation/AVFoundation.h>
 
 static NSString *const statusKeyPath = @"status";
 static NSString *const playbackLikelyToKeepUpKeyPath = @"playbackLikelyToKeepUp";
@@ -19,12 +21,14 @@ static NSString *const timedMetadata = @"timedMetadata";
   BOOL _playerItemObserversSet;
   BOOL _playerBufferEmpty;
   AVPlayerLayer *_playerLayer;
+  BOOL _playerLayerObserverSet;
   AVPlayerViewController *_playerViewController;
   NSURL *_videoURL;
 
   /* Required to publish events */
   RCTEventDispatcher *_eventDispatcher;
   BOOL _playbackRateObserverRegistered;
+  BOOL _videoLoadStarted;
 
   bool _pendingSeek;
   float _pendingSeekTime;
@@ -41,6 +45,9 @@ static NSString *const timedMetadata = @"timedMetadata";
   BOOL _muted;
   BOOL _paused;
   BOOL _repeat;
+  BOOL _allowsExternalPlayback;
+  NSArray * _textTracks;
+  NSDictionary * _selectedTextTrack;
   BOOL _playbackStalled;
   BOOL _playInBackground;
   BOOL _playWhenInactive;
@@ -68,6 +75,7 @@ static NSString *const timedMetadata = @"timedMetadata";
     _controls = NO;
     _playerBufferEmpty = YES;
     _playInBackground = false;
+    _allowsExternalPlayback = YES;
     _playWhenInactive = false;
     _ignoreSilentSwitch = @"inherit"; // inherit, ignore, obey
     _videoCache = [RCTVideoCache sharedInstance];
@@ -86,19 +94,24 @@ static NSString *const timedMetadata = @"timedMetadata";
                                              selector:@selector(applicationWillEnterForeground:)
                                                  name:UIApplicationWillEnterForegroundNotification
                                                object:nil];
+
+    [[NSNotificationCenter defaultCenter] addObserver:self
+                                             selector:@selector(audioRouteChanged:)
+                                                 name:AVAudioSessionRouteChangeNotification
+                                               object:nil];
   }
 
   return self;
 }
 
 - (AVPlayerViewController*)createPlayerViewController:(AVPlayer*)player withPlayerItem:(AVPlayerItem*)playerItem {
   RCTVideoPlayerViewController* playerLayer= [[RCTVideoPlayerViewController alloc] init];
-  playerLayer.showsPlaybackControls = NO;
+  playerLayer.showsPlaybackControls = YES;
   playerLayer.rctDelegate = self;
   playerLayer.view.frame = self.bounds;
-  playerLayer.player = _player;
+  playerLayer.player = player;
   playerLayer.view.frame = self.bounds;
   return playerLayer;
 }
 
 /* ---------------------------------------------------------
@@ -127,6 +140,17 @@ static NSString *const timedMetadata = @"timedMetadata";
   return (kCMTimeRangeZero);
 }
 
+-(void)addPlayerTimeObserver
+{
+  const Float64 progressUpdateIntervalMS = _progressUpdateInterval / 1000;
+  // @see endScrubbing in AVPlayerDemoPlaybackViewController.m
+  // of https://developer.apple.com/library/ios/samplecode/AVPlayerDemo/Introduction/Intro.html
+  __weak RCTVideo *weakSelf = self;
+  _timeObserver = [_player addPeriodicTimeObserverForInterval:CMTimeMakeWithSeconds(progressUpdateIntervalMS, NSEC_PER_SEC)
+                                                        queue:NULL
+                                                   usingBlock:^(CMTime time) { [weakSelf sendProgressUpdate]; }
+                   ];
+}
+
 /* Cancels the previously registered time observer. */
 -(void)removePlayerTimeObserver
@@ -143,8 +167,8 @@ static NSString *const timedMetadata = @"timedMetadata";
 - (void)dealloc
 {
   [[NSNotificationCenter defaultCenter] removeObserver:self];
-  [self removePlayerItemObservers];
   [self removePlayerLayer];
+  [self removePlayerItemObservers];
   [_player removeObserver:self forKeyPath:playbackRate context:nil];
 }
 
@@ -174,6 +198,17 @@ static NSString *const timedMetadata = @"timedMetadata";
   }
 }
 
+#pragma mark - Audio events
+
+- (void)audioRouteChanged:(NSNotification *)notification
+{
+  NSNumber *reason = [[notification userInfo] objectForKey:AVAudioSessionRouteChangeReasonKey];
+  NSNumber *previousRoute = [[notification userInfo] objectForKey:AVAudioSessionRouteChangePreviousRouteKey];
+  if (reason.unsignedIntValue == AVAudioSessionRouteChangeReasonOldDeviceUnavailable) {
+    self.onVideoAudioBecomingNoisy(@{@"target": self.reactTag});
+  }
+}
+
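On iOS the event is derived from an audio-route change whose reason is "old device unavailable" (for example, headphones were unplugged). Purely as an illustration of the same event on the Android side, and not part of this diff, the platform broadcasts ACTION_AUDIO_BECOMING_NOISY, which a receiver can map to the same callback; the BecomingNoisyReceiver class and its Callback interface are illustrative.

```java
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.media.AudioManager;

// Android counterpart of the route-change handling above: the OS broadcasts
// ACTION_AUDIO_BECOMING_NOISY when audio is about to play through the speaker.
class BecomingNoisyReceiver extends BroadcastReceiver {
    interface Callback {
        void onAudioBecomingNoisy();
    }

    private final Callback callback;

    BecomingNoisyReceiver(Callback callback) {
        this.callback = callback;
    }

    void register(Context context) {
        context.registerReceiver(this, new IntentFilter(AudioManager.ACTION_AUDIO_BECOMING_NOISY));
    }

    void unregister(Context context) {
        context.unregisterReceiver(this);
    }

    @Override
    public void onReceive(Context context, Intent intent) {
        if (AudioManager.ACTION_AUDIO_BECOMING_NOISY.equals(intent.getAction())) {
            callback.onAudioBecomingNoisy();  // e.g. pause playback or emit the JS event
        }
    }
}
```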
 #pragma mark - Progress
 
 - (void)sendProgressUpdate
@@ -255,9 +290,6 @@ static NSString *const timedMetadata = @"timedMetadata";
  * observer set */
 - (void)removePlayerItemObservers
 {
-  if (_playerLayer) {
-    [_playerLayer removeObserver:self forKeyPath:readyForDisplayKeyPath];
-  }
   if (_playerItemObserversSet) {
     [_playerItem removeObserver:self forKeyPath:statusKeyPath];
     [_playerItem removeObserver:self forKeyPath:playbackBufferEmptyKeyPath];
@@ -276,6 +308,7 @@ static NSString *const timedMetadata = @"timedMetadata";
 
 - (void)setSrc:(NSDictionary *)source
 {
+  [self removePlayerLayer];
   [self removePlayerTimeObserver];
   [self removePlayerItemObservers];
   [self playerItemForSource:source withCallback:^(AVPlayerItem * playerItem) {
@@ -313,6 +346,28 @@ static NSString *const timedMetadata = @"timedMetadata";
   ];
 
   dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
+
+    // perform on next run loop, otherwise other passed react-props may not be set
+    _playerItem = [self playerItemForSource:source];
+    [self addPlayerItemObservers];
+
+    [_player pause];
+    [_playerViewController.view removeFromSuperview];
+    _playerViewController = nil;
+
+    if (_playbackRateObserverRegistered) {
+      [_player removeObserver:self forKeyPath:playbackRate context:nil];
+      _playbackRateObserverRegistered = NO;
+    }
+
+    _player = [AVPlayer playerWithPlayerItem:_playerItem];
+    _player.actionAtItemEnd = AVPlayerActionAtItemEndNone;
+
+    [_player addObserver:self forKeyPath:playbackRate options:0 context:nil];
+    _playbackRateObserverRegistered = YES;
+
+    [self addPlayerTimeObserver];
+
     //Perform on next run loop, otherwise onVideoLoadStart is nil
     if(self.onVideoLoadStart) {
       id uri = [source objectForKey:@"uri"];
@@ -324,7 +379,70 @@ static NSString *const timedMetadata = @"timedMetadata";
         @"target": self.reactTag
       });
     }
 
   });
+  _videoLoadStarted = YES;
+}
+
+- (NSURL*) urlFilePath:(NSString*) filepath {
+  NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
+
+  NSString* relativeFilePath = [filepath lastPathComponent];
+  // the file may be multiple levels below the documents directory
+  NSArray* fileComponents = [filepath componentsSeparatedByString:@"Documents/"];
+  if (fileComponents.count>1) {
+    relativeFilePath = [fileComponents objectAtIndex:1];
+  }
+
+  NSString *path = [paths.firstObject stringByAppendingPathComponent:relativeFilePath];
+  if ([[NSFileManager defaultManager] fileExistsAtPath:path]) {
+    return [NSURL fileURLWithPath:path];
+  }
+  return nil;
+}
+
+- (void)playerItemPrepareText:(AVAsset *)asset withCallback:(void(^)(AVPlayerItem *))handler
+{
+  if (!_textTracks) {
+    handler([AVPlayerItem playerItemWithAsset:asset]);
+    return;
+  }
+  // sideload text tracks
+  AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
+
+  AVAssetTrack *videoAsset = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
+  AVMutableCompositionTrack *videoCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
+  [videoCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration)
+                          ofTrack:videoAsset
+                           atTime:kCMTimeZero
+                            error:nil];
+
+  AVAssetTrack *audioAsset = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
+  AVMutableCompositionTrack *audioCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
+  [audioCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration)
+                          ofTrack:audioAsset
+                           atTime:kCMTimeZero
+                            error:nil];
+
+  for (int i = 0; i < _textTracks.count; ++i) {
+    AVURLAsset *textURLAsset;
+    NSString *textUri = [_textTracks objectAtIndex:i][@"uri"];
+    if ([[textUri lowercaseString] hasPrefix:@"http"]) {
+      textURLAsset = [AVURLAsset URLAssetWithURL:[NSURL URLWithString:textUri] options:assetOptions];
+    } else {
+      textURLAsset = [AVURLAsset URLAssetWithURL:[self urlFilePath:textUri] options:nil];
+    }
+    AVAssetTrack *textTrackAsset = [textURLAsset tracksWithMediaType:AVMediaTypeText].firstObject;
+    AVMutableCompositionTrack *textCompTrack = [mixComposition
+                                                addMutableTrackWithMediaType:AVMediaTypeText
+                                                preferredTrackID:kCMPersistentTrackID_Invalid];
+    [textCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration)
+                           ofTrack:textTrackAsset
+                            atTime:kCMTimeZero
+                             error:nil];
+  }
+
+  handler([AVPlayerItem playerItemWithAsset:mixComposition]);
 }
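urlFilePath re-bases a previously recorded absolute path onto the app's current Documents directory (the container path can change between installs), keeping any sub-folders that appear after "Documents/". The same string handling, shown as a small runnable Java sketch; the DocumentsPathResolver class and the directory values in main() are illustrative.

```java
import java.io.File;
import java.nio.file.Paths;

public class DocumentsPathResolver {
    // Keep everything after "Documents/" if present, otherwise fall back to the bare file name,
    // then re-anchor the relative path on the current documents directory.
    static File resolve(String documentsDir, String recordedPath) {
        String relative = new File(recordedPath).getName();
        int idx = recordedPath.indexOf("Documents/");
        if (idx >= 0) {
            relative = recordedPath.substring(idx + "Documents/".length());
        }
        return Paths.get(documentsDir, relative).toFile();
    }

    public static void main(String[] args) {
        File f = resolve("/data/app/documents",
                "/var/mobile/Containers/Data/Application/OLD-UUID/Documents/subs/en.vtt");
        // -> /data/app/documents/subs/en.vtt
        System.out.println(f.getPath());
    }
}
```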
 
 - (void)playerItemForSource:(NSDictionary *)source withCallback:(void(^)(AVPlayerItem *))handler
@@ -341,41 +459,36 @@ static NSString *const timedMetadata = @"timedMetadata";
   if (isNetwork) {
     [_videoCache getItemForUri:uri withCallback:^(AVAsset * _Nullable asset) {
       if (asset) {
-        handler([AVPlayerItem playerItemWithAsset:asset]);
+        [self playerItemPrepareText:asset withCallback:handler];
         return;
       }
       NSArray *cookies = [[NSHTTPCookieStorage sharedHTTPCookieStorage] cookies];
       DVURLAsset * dvAsset = [[DVURLAsset alloc] initWithURL:url options:@{AVURLAssetHTTPCookiesKey : cookies} networkTimeout: 10000];
       dvAsset.loaderDelegate = self;
-      handler([AVPlayerItem playerItemWithAsset:dvAsset]);
+      [self playerItemPrepareText:dvAsset withCallback:handler];
     }];
     return;
   }
   else if (isAsset) {
     AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
-    handler([AVPlayerItem playerItemWithAsset:asset]);
+    [self playerItemPrepareText:asset withCallback:handler];
     return;
   }
-  handler([AVPlayerItem playerItemWithURL:url]);
-  return;
+  AVURLAsset *asset = [AVURLAsset URLAssetWithURL:[[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]] options:nil];
+  [self playerItemPrepareText:asset withCallback:handler];
 }
 
 - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
 {
   if (object == _playerItem) {
 
     // When timeMetadata is read the event onTimedMetadata is triggered
-    if ([keyPath isEqualToString: timedMetadata])
-    {
+    if ([keyPath isEqualToString:timedMetadata]) {
 
       NSArray<AVMetadataItem *> *items = [change objectForKey:@"new"];
       if (items && ![items isEqual:[NSNull null]] && items.count > 0) {
 
         NSMutableArray *array = [NSMutableArray new];
         for (AVMetadataItem *item in items) {
-          NSString *value = item.value;
+          NSString *value = (NSString *)item.value;
           NSString *identifier = item.identifier;
 
           if (![value isEqual: [NSNull null]]) {
@@ -416,11 +529,12 @@ static NSString *const timedMetadata = @"timedMetadata";
           || (preferredTransform.tx == 0 && preferredTransform.ty == 0))
       {
         orientation = @"landscape";
-      } else
+      } else {
         orientation = @"portrait";
+      }
     }
 
-    if(self.onVideoLoad) {
+    if (self.onVideoLoad && _videoLoadStarted) {
       self.onVideoLoad(@{@"duration": [NSNumber numberWithFloat:duration],
                          @"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(_playerItem.currentTime)],
                          @"canPlayReverse": [NSNumber numberWithBool:_playerItem.canPlayReverse],
@@ -434,13 +548,14 @@ static NSString *const timedMetadata = @"timedMetadata";
                          @"height": height,
                          @"orientation": orientation
                          },
+                         @"textTracks": [self getTextTrackInfo],
                          @"target": self.reactTag});
     }
+    _videoLoadStarted = NO;
 
     [self attachListeners];
     [self applyModifiers];
-  } else if(_playerItem.status == AVPlayerItemStatusFailed && self.onVideoError) {
+  } else if (_playerItem.status == AVPlayerItemStatusFailed && self.onVideoError) {
     self.onVideoError(@{@"error": @{@"code": [NSNumber numberWithInteger: _playerItem.error.code],
                                     @"domain": _playerItem.error.domain},
                         @"target": self.reactTag});
@@ -484,10 +599,17 @@ static NSString *const timedMetadata = @"timedMetadata";
 - (void)attachListeners
 {
   // listen for end of file
+  [[NSNotificationCenter defaultCenter] removeObserver:self
+                                                  name:AVPlayerItemDidPlayToEndTimeNotification
+                                                object:[_player currentItem]];
   [[NSNotificationCenter defaultCenter] addObserver:self
                                            selector:@selector(playerItemDidReachEnd:)
                                                name:AVPlayerItemDidPlayToEndTimeNotification
                                              object:[_player currentItem]];
 
+  [[NSNotificationCenter defaultCenter] removeObserver:self
+                                                  name:AVPlayerItemPlaybackStalledNotification
+                                                object:nil];
   [[NSNotificationCenter defaultCenter] addObserver:self
                                            selector:@selector(playbackStalled:)
                                                name:AVPlayerItemPlaybackStalledNotification
@@ -512,6 +634,8 @@ static NSString *const timedMetadata = @"timedMetadata";
     AVPlayerItem *item = [notification object];
     [item seekToTime:kCMTimeZero];
     [self applyModifiers];
+  } else {
+    [self removePlayerTimeObserver];
   }
 }
 
@@ -535,6 +659,12 @@ static NSString *const timedMetadata = @"timedMetadata";
   _playInBackground = playInBackground;
 }
 
+- (void)setAllowsExternalPlayback:(BOOL)allowsExternalPlayback
+{
+  _allowsExternalPlayback = allowsExternalPlayback;
+  _player.allowsExternalPlayback = _allowsExternalPlayback;
+}
+
 - (void)setPlayWhenInactive:(BOOL)playWhenInactive
 {
   _playWhenInactive = playWhenInactive;
@@ -571,30 +701,42 @@ static NSString *const timedMetadata = @"timedMetadata";
 
 - (void)setCurrentTime:(float)currentTime
 {
-  [self setSeek: currentTime];
+  NSDictionary *info = @{
+    @"time": [NSNumber numberWithFloat:currentTime],
+    @"tolerance": [NSNumber numberWithInt:100]
+  };
+  [self setSeek:info];
 }
 
-- (void)setSeek:(float)seekTime
+- (void)setSeek:(NSDictionary *)info
 {
-  int timeScale = 10000;
+  NSNumber *seekTime = info[@"time"];
+  NSNumber *seekTolerance = info[@"tolerance"];
+
+  int timeScale = 1000;
+
   AVPlayerItem *item = _player.currentItem;
   if (item && item.status == AVPlayerItemStatusReadyToPlay) {
     // TODO check loadedTimeRanges
 
-    CMTime cmSeekTime = CMTimeMakeWithSeconds(seekTime, timeScale);
+    CMTime cmSeekTime = CMTimeMakeWithSeconds([seekTime floatValue], timeScale);
     CMTime current = item.currentTime;
     // TODO figure out a good tolerance level
-    CMTime tolerance = CMTimeMake(1000, timeScale);
+    CMTime tolerance = CMTimeMake([seekTolerance floatValue], timeScale);
     BOOL wasPaused = _paused;
+
     if (CMTimeCompare(current, cmSeekTime) != 0) {
       if (!wasPaused) [_player pause];
       [_player seekToTime:cmSeekTime toleranceBefore:tolerance toleranceAfter:tolerance completionHandler:^(BOOL finished) {
-        if (!wasPaused) [_player play];
+        if (!_timeObserver) {
+          [self addPlayerTimeObserver];
+        }
+        if (!wasPaused) {
+          [self setPaused:false];
+        }
         if(self.onVideoSeek) {
           self.onVideoSeek(@{@"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(item.currentTime)],
-                             @"seekTime": [NSNumber numberWithFloat:seekTime],
+                             @"seekTime": seekTime,
                              @"target": self.reactTag});
         }
       }];
@@ -605,7 +747,7 @@ static NSString *const timedMetadata = @"timedMetadata";
   } else {
     // TODO: See if this makes sense and if so, actually implement it
     _pendingSeek = true;
-    _pendingSeekTime = seekTime;
+    _pendingSeekTime = [seekTime floatValue];
   }
 }
 
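The seek prop is now a dictionary of {time, tolerance}, with the tolerance expressed in milliseconds against a CMTime timescale of 1000, so a tolerance of 0 requests an exact seek and setCurrentTime falls back to a 100 ms tolerance. A runnable sketch of that representation follows, using a plain value/timescale pair standing in for CMTime; the SeekRequest class is illustrative.

```java
public class SeekRequest {
    // CMTime-style rational time: value / timescale seconds.
    static final int TIMESCALE = 1000; // 1 unit = 1 ms, as in the new setSeek:

    final long timeValue;       // target position, in timescale units
    final long toleranceValue;  // allowed slack before/after the target, in timescale units

    SeekRequest(double timeSeconds, double toleranceMs) {
        this.timeValue = Math.round(timeSeconds * TIMESCALE);
        this.toleranceValue = Math.round(toleranceMs); // milliseconds already equal timescale units
    }

    double timeSeconds()      { return timeValue / (double) TIMESCALE; }
    double toleranceSeconds() { return toleranceValue / (double) TIMESCALE; }

    public static void main(String[] args) {
        // setCurrentTime uses a default tolerance of 100 ms; tolerance 0 would mean "seek exactly".
        SeekRequest r = new SeekRequest(42.5, 100);
        System.out.println(r.timeSeconds() + "s +/- " + r.toleranceSeconds() + "s");
    }
}
```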
@@ -637,16 +779,174 @@ static NSString *const timedMetadata = @"timedMetadata";
     [_player setMuted:NO];
   }
 
+  [self setSelectedTextTrack:_selectedTextTrack];
   [self setResizeMode:_resizeMode];
   [self setRepeat:_repeat];
   [self setPaused:_paused];
   [self setControls:_controls];
+  [self setAllowsExternalPlayback:_allowsExternalPlayback];
 }
 
 - (void)setRepeat:(BOOL)repeat {
   _repeat = repeat;
 }
 
+- (void)setSelectedTextTrack:(NSDictionary *)selectedTextTrack {
+  _selectedTextTrack = selectedTextTrack;
+  if (_textTracks) {
+    [self setSideloadedText];
+  } else {
+    [self setStreamingText];
+  }
+}
+
+- (void) setSideloadedText {
+  NSString *type = _selectedTextTrack[@"type"];
+  NSArray* textTracks = [self getTextTrackInfo];
+
+  // The first few tracks will be audio & video track
+  int firstTextIndex = 0;
+  for (firstTextIndex = 0; firstTextIndex < _player.currentItem.tracks.count; ++firstTextIndex) {
+    if ([_player.currentItem.tracks[firstTextIndex].assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible]) {
+      break;
+    }
+  }
+
+  int selectedTrackIndex = -1;
+
+  if ([type isEqualToString:@"disabled"]) {
+    // Do nothing. We want to ensure option is nil
+  } else if ([type isEqualToString:@"language"]) {
+    NSString *selectedValue = _selectedTextTrack[@"value"];
+    for (int i = 0; i < textTracks.count; ++i) {
+      NSDictionary *currentTextTrack = [textTracks objectAtIndex:i];
+      if ([selectedValue isEqualToString:currentTextTrack[@"language"]]) {
+        selectedTrackIndex = i;
+        break;
+      }
+    }
+  } else if ([type isEqualToString:@"title"]) {
+    NSString *selectedValue = _selectedTextTrack[@"value"];
+    for (int i = 0; i < textTracks.count; ++i) {
+      NSDictionary *currentTextTrack = [textTracks objectAtIndex:i];
+      if ([selectedValue isEqualToString:currentTextTrack[@"title"]]) {
+        selectedTrackIndex = i;
+        break;
+      }
+    }
+  } else if ([type isEqualToString:@"index"]) {
+    if ([_selectedTextTrack[@"value"] isKindOfClass:[NSNumber class]]) {
+      int index = [_selectedTextTrack[@"value"] intValue];
+      if (textTracks.count > index) {
+        selectedTrackIndex = index;
+      }
+    }
+  }
+
+  // user's selected language might not be available, or system defaults have captions enabled
+  if (selectedTrackIndex == -1 || [type isEqualToString:@"default"]) {
+    CFArrayRef captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(kMACaptionAppearanceDomainUser);
+    NSArray *captionSettings = (__bridge NSArray*)captioningMediaCharacteristics;
+    if ([captionSettings containsObject: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility]) {
+      // iterate through the textTracks to find a matching option, or default to the first object.
+      selectedTrackIndex = 0;
+
+      NSString * systemLanguage = [[NSLocale preferredLanguages] firstObject];
+      for (int i = 0; i < textTracks.count; ++i) {
+        NSDictionary *currentTextTrack = [textTracks objectAtIndex:i];
+        if ([systemLanguage isEqualToString:currentTextTrack[@"language"]]) {
+          selectedTrackIndex = i;
+          break;
+        }
+      }
+    }
+  }
+
+  for (int i = firstTextIndex; i < _player.currentItem.tracks.count; ++i) {
+    BOOL isEnabled = i == selectedTrackIndex + firstTextIndex;
+    [_player.currentItem.tracks[i] setEnabled:isEnabled];
+  }
+}
+
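Both the sideloaded and streaming paths resolve selectedTextTrack the same way: an explicit "disabled" wins, then a match by language, then by title, then by numeric index, and anything else (or no match) falls back to the system/default behavior. The following is a language-neutral model of that resolution order, written in Java to match the Android sources in this diff; the TextTrack record, the sentinel return values, and the string-typed index handling are illustrative simplifications.

```java
import java.util.List;

public class TextTrackSelector {
    // Minimal stand-in for the {index, title, language} dictionaries built by getTextTrackInfo.
    public static final class TextTrack {
        final int index;
        final String title;
        final String language;
        TextTrack(int index, String title, String language) {
            this.index = index;
            this.title = title;
            this.language = language;
        }
    }

    public static final int DISABLED = -1;     // no text track should be enabled
    public static final int USE_DEFAULT = -2;  // defer to system/default selection

    // Resolution order mirrors setSideloadedText / setStreamingText:
    // disabled > language > title > index > default/system.
    static int select(List<TextTrack> tracks, String type, String value) {
        if ("disabled".equals(type)) return DISABLED;
        if ("language".equals(type)) {
            for (TextTrack t : tracks) if (t.language.equals(value)) return t.index;
        } else if ("title".equals(type)) {
            for (TextTrack t : tracks) if (t.title.equals(value)) return t.index;
        } else if ("index".equals(type)) {
            int idx = Integer.parseInt(value);          // the real code checks for an NSNumber instead
            if (idx >= 0 && idx < tracks.size()) return idx;
        }
        return USE_DEFAULT; // unknown type, "default"/"system", or no match
    }

    public static void main(String[] args) {
        List<TextTrack> tracks = List.of(
                new TextTrack(0, "English CC", "en"),
                new TextTrack(1, "Español", "es"));
        System.out.println(select(tracks, "language", "es"));  // 1
        System.out.println(select(tracks, "title", "French")); // -2 -> default behavior
    }
}
```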
+-(void) setStreamingText {
+  NSString *type = _selectedTextTrack[@"type"];
+  AVMediaSelectionGroup *group = [_player.currentItem.asset
+                                  mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
+  AVMediaSelectionOption *mediaOption;
+
+  if ([type isEqualToString:@"disabled"]) {
+    // Do nothing. We want to ensure option is nil
+  } else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) {
+    NSString *value = _selectedTextTrack[@"value"];
+    for (int i = 0; i < group.options.count; ++i) {
+      AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i];
+      NSString *optionValue;
+      if ([type isEqualToString:@"language"]) {
+        optionValue = [currentOption extendedLanguageTag];
+      } else {
+        optionValue = [[[currentOption commonMetadata]
+                        valueForKey:@"value"]
+                       objectAtIndex:0];
+      }
+      if ([value isEqualToString:optionValue]) {
+        mediaOption = currentOption;
+        break;
+      }
+    }
+    //} else if ([type isEqualToString:@"default"]) {
+    //  option = group.defaultOption; */
+  } else if ([type isEqualToString:@"index"]) {
+    if ([_selectedTextTrack[@"value"] isKindOfClass:[NSNumber class]]) {
+      int index = [_selectedTextTrack[@"value"] intValue];
+      if (group.options.count > index) {
+        mediaOption = [group.options objectAtIndex:index];
+      }
+    }
+  } else { // default. invalid type or "system"
+    [_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group];
+    return;
+  }
+
+  // If a match isn't found, option will be nil and text tracks will be disabled
+  [_player.currentItem selectMediaOption:mediaOption inMediaSelectionGroup:group];
+}
+
+- (void)setTextTracks:(NSArray*) textTracks;
+{
+  _textTracks = textTracks;
+
+  // in case textTracks was set after selectedTextTrack
+  if (_selectedTextTrack) [self setSelectedTextTrack:_selectedTextTrack];
+}
+
+- (NSArray *)getTextTrackInfo
+{
+  // if sideloaded, textTracks will already be set
+  if (_textTracks) return _textTracks;
+
+  // if streaming video, we extract the text tracks
+  NSMutableArray *textTracks = [[NSMutableArray alloc] init];
+  AVMediaSelectionGroup *group = [_player.currentItem.asset
+                                  mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
+  for (int i = 0; i < group.options.count; ++i) {
+    AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i];
+    NSString *title = @"";
+    NSArray *values = [[currentOption commonMetadata] valueForKey:@"value"];
+    if (values.count > 0) {
+      title = [values objectAtIndex:0];
+    }
+    NSString *language = [currentOption extendedLanguageTag] ? [currentOption extendedLanguageTag] : @"";
+    NSDictionary *textTrack = @{
+      @"index": [NSNumber numberWithInt:i],
+      @"title": title,
+      @"language": language
+    };
+    [textTracks addObject:textTrack];
+  }
+  return textTracks;
+}
+
 - (BOOL)getFullscreen
 {
   return _fullscreenPlayerPresented;
@@ -723,6 +1023,7 @@ static NSString *const timedMetadata = @"timedMetadata";
   // resize mode must be set before layer is added
   [self setResizeMode:_resizeMode];
   [_playerLayer addObserver:self forKeyPath:readyForDisplayKeyPath options:NSKeyValueObservingOptionNew context:nil];
+  _playerLayerObserverSet = YES;
 
   [self.layer addSublayer:_playerLayer];
   self.layer.needsDisplayOnBoundsChange = YES;
@@ -751,27 +1052,23 @@ static NSString *const timedMetadata = @"timedMetadata";
 - (void)setProgressUpdateInterval:(float)progressUpdateInterval
 {
   _progressUpdateInterval = progressUpdateInterval;
+
+  if (_timeObserver) {
+    [self removePlayerTimeObserver];
+    [self addPlayerTimeObserver];
+  }
 }
 
 - (void)removePlayerLayer
 {
   [_playerLayer removeFromSuperlayer];
-  [_playerLayer removeObserver:self forKeyPath:readyForDisplayKeyPath];
+  if (_playerLayerObserverSet) {
+    [_playerLayer removeObserver:self forKeyPath:readyForDisplayKeyPath];
+    _playerLayerObserverSet = NO;
+  }
   _playerLayer = nil;
 }
 
-#pragma mark - DVAssetLoaderDelegate
-
-- (void)dvAssetLoaderDelegate:(DVAssetLoaderDelegate *)loaderDelegate
-                  didLoadData:(NSData *)data
-                       forURL:(NSURL *)url {
-  [_videoCache storeItem:data forUri:[url absoluteString] withCallback:^(BOOL success) {
-#ifdef DEBUG
-    NSLog(@"data stored succesfully 🎉");
-#endif
-  }];
-}
-
 #pragma mark - RCTVideoPlayerViewControllerDelegate
 
 - (void)videoPlayerViewControllerWillDismiss:(AVPlayerViewController *)playerViewController
ios/RCTVideoManager.m

@@ -23,6 +23,9 @@ RCT_EXPORT_VIEW_PROPERTY(src, NSDictionary);
 RCT_EXPORT_VIEW_PROPERTY(cache, BOOL);
 RCT_EXPORT_VIEW_PROPERTY(resizeMode, NSString);
 RCT_EXPORT_VIEW_PROPERTY(repeat, BOOL);
+RCT_EXPORT_VIEW_PROPERTY(allowsExternalPlayback, BOOL);
+RCT_EXPORT_VIEW_PROPERTY(textTracks, NSArray);
+RCT_EXPORT_VIEW_PROPERTY(selectedTextTrack, NSDictionary);
 RCT_EXPORT_VIEW_PROPERTY(paused, BOOL);
 RCT_EXPORT_VIEW_PROPERTY(muted, BOOL);
 RCT_EXPORT_VIEW_PROPERTY(controls, BOOL);
@@ -31,7 +34,7 @@ RCT_EXPORT_VIEW_PROPERTY(playInBackground, BOOL);
 RCT_EXPORT_VIEW_PROPERTY(playWhenInactive, BOOL);
 RCT_EXPORT_VIEW_PROPERTY(ignoreSilentSwitch, NSString);
 RCT_EXPORT_VIEW_PROPERTY(rate, float);
-RCT_EXPORT_VIEW_PROPERTY(seek, float);
+RCT_EXPORT_VIEW_PROPERTY(seek, NSDictionary);
 RCT_EXPORT_VIEW_PROPERTY(currentTime, float);
 RCT_EXPORT_VIEW_PROPERTY(fullscreen, BOOL);
 RCT_EXPORT_VIEW_PROPERTY(progressUpdateInterval, float);
@@ -44,6 +47,7 @@ RCT_EXPORT_VIEW_PROPERTY(onVideoProgress, RCTBubblingEventBlock);
 RCT_EXPORT_VIEW_PROPERTY(onVideoSeek, RCTBubblingEventBlock);
 RCT_EXPORT_VIEW_PROPERTY(onVideoEnd, RCTBubblingEventBlock);
 RCT_EXPORT_VIEW_PROPERTY(onTimedMetadata, RCTBubblingEventBlock);
+RCT_EXPORT_VIEW_PROPERTY(onVideoAudioBecomingNoisy, RCTBubblingEventBlock);
 RCT_EXPORT_VIEW_PROPERTY(onVideoFullscreenPlayerWillPresent, RCTBubblingEventBlock);
 RCT_EXPORT_VIEW_PROPERTY(onVideoFullscreenPlayerDidPresent, RCTBubblingEventBlock);
 RCT_EXPORT_VIEW_PROPERTY(onVideoFullscreenPlayerWillDismiss, RCTBubblingEventBlock);
@@ -63,4 +67,9 @@ RCT_EXPORT_VIEW_PROPERTY(onPlaybackRateChange, RCTBubblingEventBlock);
   };
 }
 
++ (BOOL)requiresMainQueueSetup
+{
+  return YES;
+}
+
 @end
|
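The new onVideoAudioBecomingNoisy block surfaces the audio-route change (for example, headphones being unplugged), while requiresMainQueueSetup returning YES only tells React Native to initialise the module on the main thread and needs no JS-side counterpart. A sketch of consuming the event; the onAudioBecomingNoisy prop name follows the library's documented API, and the URL and the pause-on-noisy reaction are placeholder choices.

```tsx
// Sketch: pause playback when the audio route becomes "noisy".
// onAudioBecomingNoisy is assumed to map onto the onVideoAudioBecomingNoisy
// event exported above; the source URL is a placeholder.
import React, { useState } from 'react';
import Video from 'react-native-video';

const NoisyAwarePlayer = () => {
  const [paused, setPaused] = useState(false);
  return (
    <Video
      source={{ uri: 'https://example.com/video.mp4' }}
      paused={paused}
      onAudioBecomingNoisy={() => setPaused(true)} // e.g. headphones unplugged
      style={{ width: 320, height: 180 }}
    />
  );
};

export default NoisyAwarePlayer;
```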
@@ -8,13 +8,9 @@
 
 - (void)viewDidDisappear:(BOOL)animated
 {
   [super viewDidDisappear:animated];
-  [_rctDelegate videoPlayerViewControllerDidDismiss:self];
-}
-
-- (void)viewWillDisappear:(BOOL)animated {
-  [_rctDelegate videoPlayerViewControllerWillDismiss:self];
-  [super viewWillDisappear:animated];
+  [_rctDelegate videoPlayerViewControllerWillDismiss:self];
+  [_rctDelegate videoPlayerViewControllerDidDismiss:self];
 }
 
 @end
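As this hunk reads, the fullscreen view controller now reports both dismissal callbacks from viewDidDisappear, so the "will dismiss" notification fires immediately before "did dismiss" rather than when the dismissal animation begins. A sketch of the JS-side handlers, assuming the wrapper exposes them under the documented onFullscreenPlayer* prop names; the handler bodies are placeholders.

```ts
// Sketch of the JS callbacks fed by the will/did dismiss delegate calls above.
export const fullscreenHandlers = {
  onFullscreenPlayerWillDismiss: () => console.log('fullscreen will dismiss'),
  onFullscreenPlayerDidDismiss: () => console.log('fullscreen did dismiss'),
};
```

They would be spread onto a `<Video controls={true} {...fullscreenHandlers} />` instance alongside the other event props.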
79
package.json

@@ -1,41 +1,48 @@
 {
   "name": "react-native-video",
-  "version": "2.0.0",
+  "version": "3.1.0",
   "description": "A <Video /> element for react-native",
   "main": "Video.js",
   "license": "MIT",
-  "author":
-    "Brent Vatne <brentvatne@gmail.com> (https://github.com/brentvatne)",
-  "contributors": [
-    {
-      "name": "Isaiah Grey",
-      "email": "isaiahgrey@gmail.com"
+  "author": "Brent Vatne <brentvatne@gmail.com> (https://github.com/brentvatne)",
+  "contributors": [
+    {
+      "name": "Isaiah Grey",
+      "email": "isaiahgrey@gmail.com"
+    },
+    {
+      "name": "Johannes Lumpe",
+      "email": "johannes@lum.pe"
+    },
+    {
+      "name": "Baris Sencan",
+      "email": "baris.sncn@gmail.com"
+    },
+    {
+      "name": "Hampton Maxwell",
+      "email": "me@hamptonmaxwell.com"
+    }
+  ],
+  "repository": {
+    "type": "git",
+    "url": "git@github.com:brentvatne/react-native-video.git"
   },
-    {
-      "name": "Johannes Lumpe",
-      "email": "johannes@lum.pe"
+  "devDependencies": {
+    "eslint": "1.10.3",
+    "babel-eslint": "5.0.0-beta8",
+    "eslint-plugin-react": "3.16.1",
+    "eslint-config-airbnb": "4.0.0"
   },
-    {
-      "name": "Baris Sencan",
-      "email": "baris.sncn@gmail.com"
+  "dependencies": {
+    "keymirror": "0.1.1",
+    "prop-types": "^15.5.10"
+  },
+  "scripts": {
+    "test": "node_modules/.bin/eslint *.js"
+  },
+  "rnpm": {
+    "android": {
+      "sourceDir": "./android-exoplayer"
+    }
   }
-  ],
-  "repository": {
-    "type": "git",
-    "url": "git@github.com:brentvatne/react-native-video.git"
-  },
-  "devDependencies": {
-    "babel-eslint": "^8.2.2",
-    "eslint": "^4.18.1",
-    "eslint-config-airbnb": "^16.1.0",
-    "eslint-plugin-react": "^7.7.0",
-    "jest-cli": "^22.4.2"
-  },
-  "dependencies": {
-    "keymirror": "0.1.1",
-    "prop-types": "^15.5.10"
-  },
-  "scripts": {
-    "test": "node_modules/.bin/eslint *.js"
-  }
 }
@@ -328,7 +328,7 @@ namespace ReactNativeVideo
         private readonly JObject _eventData;
 
         public ReactVideoEvent(string eventName, int viewTag, JObject eventData)
-            : base(viewTag, TimeSpan.FromTicks(Environment.TickCount))
+            : base(viewTag)
         {
             _eventName = eventName;
             _eventData = eventData;

@@ -334,7 +334,7 @@ namespace ReactNativeVideo
         private readonly JObject _eventData;
 
         public ReactVideoEvent(string eventName, int viewTag, JObject eventData)
-            : base(viewTag, TimeSpan.FromTicks(Environment.TickCount))
+            : base(viewTag)
         {
             _eventName = eventName;
             _eventData = eventData;